diff --git a/venv/Lib/site-packages/appdirs-1.4.4.dist-info/INSTALLER b/venv/Lib/site-packages/appdirs-1.4.4.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/venv/Lib/site-packages/appdirs-1.4.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/Lib/site-packages/appdirs-1.4.4.dist-info/LICENSE.txt b/venv/Lib/site-packages/appdirs-1.4.4.dist-info/LICENSE.txt new file mode 100644 index 00000000..107c6140 --- /dev/null +++ b/venv/Lib/site-packages/appdirs-1.4.4.dist-info/LICENSE.txt @@ -0,0 +1,23 @@ +# This is the MIT license + +Copyright (c) 2010 ActiveState Software Inc. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/venv/Lib/site-packages/appdirs-1.4.4.dist-info/METADATA b/venv/Lib/site-packages/appdirs-1.4.4.dist-info/METADATA new file mode 100644 index 00000000..f9507310 --- /dev/null +++ b/venv/Lib/site-packages/appdirs-1.4.4.dist-info/METADATA @@ -0,0 +1,264 @@ +Metadata-Version: 2.1 +Name: appdirs +Version: 1.4.4 +Summary: A small Python module for determining appropriate platform-specific dirs, e.g. a "user data dir". +Home-page: http://github.com/ActiveState/appdirs +Author: Trent Mick +Author-email: trentm@gmail.com +Maintainer: Jeff Rouse +Maintainer-email: jr@its.to +License: MIT +Keywords: application directory log cache user +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Topic :: Software Development :: Libraries :: Python Modules + + +.. image:: https://secure.travis-ci.org/ActiveState/appdirs.png + :target: http://travis-ci.org/ActiveState/appdirs + +the problem +=========== + +What directory should your app use for storing user data? 
If running on Mac OS X, you +should use:: + + ~/Library/Application Support/<AppName> + +If on Windows (at least English Win XP) that should be:: + + C:\Documents and Settings\<User>\Application Data\Local Settings\<AppAuthor>\<AppName> + +or possibly:: + + C:\Documents and Settings\<User>\Application Data\<AppAuthor>\<AppName> + +for `roaming profiles <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>`_ but that is another story. + +On Linux (and other Unices) the dir, according to the `XDG +spec <http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_, is:: + + ~/.local/share/<AppName> + + +``appdirs`` to the rescue +========================= + +This kind of thing is what the ``appdirs`` module is for. ``appdirs`` will +help you choose an appropriate: + +- user data dir (``user_data_dir``) +- user config dir (``user_config_dir``) +- user cache dir (``user_cache_dir``) +- site data dir (``site_data_dir``) +- site config dir (``site_config_dir``) +- user log dir (``user_log_dir``) + +and also: + +- is a single module so other Python packages can include their own private copy +- is slightly opinionated on the directory names used. Look for "OPINION" in + documentation and code for when an opinion is being applied. + + +some example output +=================== + +On Mac OS X:: + + >>> from appdirs import * + >>> appname = "SuperApp" + >>> appauthor = "Acme" + >>> user_data_dir(appname, appauthor) + '/Users/trentm/Library/Application Support/SuperApp' + >>> site_data_dir(appname, appauthor) + '/Library/Application Support/SuperApp' + >>> user_cache_dir(appname, appauthor) + '/Users/trentm/Library/Caches/SuperApp' + >>> user_log_dir(appname, appauthor) + '/Users/trentm/Library/Logs/SuperApp' + +On Windows 7:: + + >>> from appdirs import * + >>> appname = "SuperApp" + >>> appauthor = "Acme" + >>> user_data_dir(appname, appauthor) + 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp' + >>> user_data_dir(appname, appauthor, roaming=True) + 'C:\\Users\\trentm\\AppData\\Roaming\\Acme\\SuperApp' + >>> user_cache_dir(appname, appauthor) + 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Cache' + >>> user_log_dir(appname, appauthor) + 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Logs' + +On Linux:: + + >>> from appdirs import * + >>> appname = "SuperApp" + >>> appauthor = "Acme" + >>> user_data_dir(appname, appauthor) + '/home/trentm/.local/share/SuperApp' + >>> site_data_dir(appname, appauthor) + '/usr/local/share/SuperApp' + >>> site_data_dir(appname, appauthor, multipath=True) + '/usr/local/share/SuperApp:/usr/share/SuperApp' + >>> user_cache_dir(appname, appauthor) + '/home/trentm/.cache/SuperApp' + >>> user_log_dir(appname, appauthor) + '/home/trentm/.cache/SuperApp/log' + >>> user_config_dir(appname) + '/home/trentm/.config/SuperApp' + >>> site_config_dir(appname) + '/etc/xdg/SuperApp' + >>> os.environ['XDG_CONFIG_DIRS'] = '/etc:/usr/local/etc' + >>> site_config_dir(appname, multipath=True) + '/etc/SuperApp:/usr/local/etc/SuperApp' + + +``AppDirs`` for convenience +=========================== + +:: + + >>> from appdirs import AppDirs + >>> dirs = AppDirs("SuperApp", "Acme") + >>> dirs.user_data_dir + '/Users/trentm/Library/Application Support/SuperApp' + >>> dirs.site_data_dir + '/Library/Application Support/SuperApp' + >>> dirs.user_cache_dir + '/Users/trentm/Library/Caches/SuperApp' + >>> dirs.user_log_dir + '/Users/trentm/Library/Logs/SuperApp' + + + +Per-version isolation +===================== + +If you have multiple versions of your app in use that you want to be +able to run side-by-side, then you may want version-isolation for these +dirs:: + + >>> from appdirs import AppDirs + >>> dirs = AppDirs("SuperApp", "Acme", version="1.0") + >>> 
dirs.user_data_dir + '/Users/trentm/Library/Application Support/SuperApp/1.0' + >>> dirs.site_data_dir + '/Library/Application Support/SuperApp/1.0' + >>> dirs.user_cache_dir + '/Users/trentm/Library/Caches/SuperApp/1.0' + >>> dirs.user_log_dir + '/Users/trentm/Library/Logs/SuperApp/1.0' + + + +appdirs Changelog +================= + +appdirs 1.4.4 +------------- +- [PR #92] Don't import appdirs from setup.py + +Project officially classified as Stable which is important +for inclusion in other distros such as ActivePython. + +First of several incremental releases to catch up on maintenance. + +appdirs 1.4.3 +------------- +- [PR #76] Python 3.6 invalid escape sequence deprecation fixes +- Fix for Python 3.6 support + +appdirs 1.4.2 +------------- +- [PR #84] Allow installing without setuptools +- [PR #86] Fix string delimiters in setup.py description +- Add Python 3.6 support + +appdirs 1.4.1 +------------- +- [issue #38] Fix _winreg import on Windows Py3 +- [issue #55] Make appname optional + +appdirs 1.4.0 +------------- +- [PR #42] AppAuthor is now optional on Windows +- [issue 41] Support Jython on Windows, Mac, and Unix-like platforms. Windows + support requires `JNA <https://github.com/twall/jna>`_. +- [PR #44] Fix incorrect behaviour of the site_config_dir method + +appdirs 1.3.0 +------------- +- [Unix, issue 16] Conform to XDG standard, instead of breaking it for + everybody +- [Unix] Removes gratuitous case mangling of the case, since \*nix-es are + usually case sensitive, so mangling is not wise +- [Unix] Fixes the utterly wrong behaviour in ``site_data_dir``, return result + based on XDG_DATA_DIRS and make room for respecting the standard which + specifies XDG_DATA_DIRS is a multiple-value variable +- [Issue 6] Add ``*_config_dir`` which are distinct on nix-es, according to + XDG specs; on Windows and Mac return the corresponding ``*_data_dir`` + +appdirs 1.2.0 +------------- + +- [Unix] Put ``user_log_dir`` under the *cache* dir on Unix. Seems to be more + typical. +- [issue 9] Make ``unicode`` work on py3k. + +appdirs 1.1.0 +------------- + +- [issue 4] Add ``AppDirs.user_log_dir``. +- [Unix, issue 2, issue 7] appdirs now conforms to `XDG base directory spec + <http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_. +- [Mac, issue 5] Fix ``site_data_dir()`` on Mac. +- [Mac] Drop use of 'Carbon' module in favour of hardcoded paths; supports + Python3 now. +- [Windows] Append "Cache" to ``user_cache_dir`` on Windows by default. Use + ``opinion=False`` option to disable this. +- Add ``appdirs.AppDirs`` convenience class. Usage: + + >>> dirs = AppDirs("SuperApp", "Acme", version="1.0") + >>> dirs.user_data_dir + '/Users/trentm/Library/Application Support/SuperApp/1.0' + +- [Windows] Cherry-pick Komodo's change to downgrade paths to the Windows short + paths if there are high bit chars. +- [Linux] Change default ``user_cache_dir()`` on Linux to be singular, e.g. + "~/.superapp/cache". +- [Windows] Add ``roaming`` option to ``user_data_dir()`` (for use on Windows only) + and change the default ``user_data_dir`` behaviour to use a *non*-roaming + profile dir (``CSIDL_LOCAL_APPDATA`` instead of ``CSIDL_APPDATA``). Why? Because + a large roaming profile can cause login speed issues. The "only syncs on + logout" behaviour can cause surprises in appdata info. + + +appdirs 1.0.1 (never released) +------------------------------ + +Started this changelog 27 July 2010. Before that this module originated in the +`Komodo <http://www.activestate.com/komodo>`_ product as ``applib.py`` and then +as `applib/location.py +<http://github.com/ActiveState/applib/blob/master/applib/location.py>`_ (used by +`PyPM <http://code.activestate.com/pypm/>`_ in `ActivePython +<http://www.activestate.com/activepython>`_). 
This is basically a fork of +applib.py 1.0.1 and applib/location.py 1.0.1. + + + diff --git a/venv/Lib/site-packages/appdirs-1.4.4.dist-info/RECORD b/venv/Lib/site-packages/appdirs-1.4.4.dist-info/RECORD new file mode 100644 index 00000000..5f3c584d --- /dev/null +++ b/venv/Lib/site-packages/appdirs-1.4.4.dist-info/RECORD @@ -0,0 +1,8 @@ +__pycache__/appdirs.cpython-36.pyc,, +appdirs-1.4.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +appdirs-1.4.4.dist-info/LICENSE.txt,sha256=Nt200KdFqTqyAyA9cZCBSxuJcn0lTK_0jHp6-71HAAs,1097 +appdirs-1.4.4.dist-info/METADATA,sha256=k5TVfXMNKGHTfp2wm6EJKTuGwGNuoQR5TqQgH8iwG8M,8981 +appdirs-1.4.4.dist-info/RECORD,, +appdirs-1.4.4.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110 +appdirs-1.4.4.dist-info/top_level.txt,sha256=nKncE8CUqZERJ6VuQWL4_bkunSPDNfn7KZqb4Tr5YEM,8 +appdirs.py,sha256=g99s2sXhnvTEm79oj4bWI0Toapc-_SmKKNXvOXHkVic,24720 diff --git a/venv/Lib/site-packages/appdirs-1.4.4.dist-info/WHEEL b/venv/Lib/site-packages/appdirs-1.4.4.dist-info/WHEEL new file mode 100644 index 00000000..ef99c6cf --- /dev/null +++ b/venv/Lib/site-packages/appdirs-1.4.4.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.34.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/Lib/site-packages/appdirs-1.4.4.dist-info/top_level.txt b/venv/Lib/site-packages/appdirs-1.4.4.dist-info/top_level.txt new file mode 100644 index 00000000..d64bc321 --- /dev/null +++ b/venv/Lib/site-packages/appdirs-1.4.4.dist-info/top_level.txt @@ -0,0 +1 @@ +appdirs diff --git a/venv/Lib/site-packages/appdirs.py b/venv/Lib/site-packages/appdirs.py new file mode 100644 index 00000000..2acd1deb --- /dev/null +++ b/venv/Lib/site-packages/appdirs.py @@ -0,0 +1,608 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2005-2010 ActiveState Software Inc. +# Copyright (c) 2013 Eddy Petrișor + +"""Utilities for determining application-specific dirs. + +See <http://github.com/ActiveState/appdirs> for details and usage. +""" +# Dev Notes: +# - MSDN on where to store app data files: +# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120 +# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html +# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html + +__version__ = "1.4.4" +__version_info__ = tuple(int(segment) for segment in __version__.split(".")) + + +import sys +import os + +PY3 = sys.version_info[0] == 3 + +if PY3: + unicode = str + +if sys.platform.startswith('java'): + import platform + os_name = platform.java_ver()[3][0] + if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc. + system = 'win32' + elif os_name.startswith('Mac'): # "Mac OS X", etc. + system = 'darwin' + else: # "Linux", "SunOS", "FreeBSD", etc. + # Setting this to "linux2" is not ideal, but only Windows or Mac + # are actually checked for and the rest of the module expects + # *sys.platform* style strings. + system = 'linux2' +else: + system = sys.platform + + + +def user_data_dir(appname=None, appauthor=None, version=None, roaming=False): + r"""Return full path to the user-specific data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. 
+ "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + + for a discussion of issues. + + Typical user data directories are: + Mac OS X: ~/Library/Application Support/ + Unix: ~/.local/share/ # or in $XDG_DATA_HOME, if defined + Win XP (not roaming): C:\Documents and Settings\\Application Data\\ + Win XP (roaming): C:\Documents and Settings\\Local Settings\Application Data\\ + Win 7 (not roaming): C:\Users\\AppData\Local\\ + Win 7 (roaming): C:\Users\\AppData\Roaming\\ + + For Unix, we follow the XDG spec and support $XDG_DATA_HOME. + That means, by default "~/.local/share/". + """ + if system == "win32": + if appauthor is None: + appauthor = appname + const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" + path = os.path.normpath(_get_win_folder(const)) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + elif system == 'darwin': + path = os.path.expanduser('~/Library/Application Support/') + if appname: + path = os.path.join(path, appname) + else: + path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): + r"""Return full path to the user-shared data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "multipath" is an optional parameter only applicable to *nix + which indicates that the entire list of data dirs should be + returned. By default, the first item from XDG_DATA_DIRS is + returned, or '/usr/local/share/', + if XDG_DATA_DIRS is not set + + Typical site data directories are: + Mac OS X: /Library/Application Support/ + Unix: /usr/local/share/ or /usr/share/ + Win XP: C:\Documents and Settings\All Users\Application Data\\ + Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) + Win 7: C:\ProgramData\\ # Hidden, but writeable on Win 7. + + For Unix, this is using the $XDG_DATA_DIRS[0] default. + + WARNING: Do not use this on Windows. See the Vista-Fail note above for why. 
+ """ + if system == "win32": + if appauthor is None: + appauthor = appname + path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + elif system == 'darwin': + path = os.path.expanduser('/Library/Application Support') + if appname: + path = os.path.join(path, appname) + else: + # XDG default for $XDG_DATA_DIRS + # only first, if multipath is False + path = os.getenv('XDG_DATA_DIRS', + os.pathsep.join(['/usr/local/share', '/usr/share'])) + pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] + if appname: + if version: + appname = os.path.join(appname, version) + pathlist = [os.sep.join([x, appname]) for x in pathlist] + + if multipath: + path = os.pathsep.join(pathlist) + else: + path = pathlist[0] + return path + + if appname and version: + path = os.path.join(path, version) + return path + + +def user_config_dir(appname=None, appauthor=None, version=None, roaming=False): + r"""Return full path to the user-specific config dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + + for a discussion of issues. + + Typical user config directories are: + Mac OS X: same as user_data_dir + Unix: ~/.config/ # or in $XDG_CONFIG_HOME, if defined + Win *: same as user_data_dir + + For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. + That means, by default "~/.config/". + """ + if system in ["win32", "darwin"]: + path = user_data_dir(appname, appauthor, None, roaming) + else: + path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def site_config_dir(appname=None, appauthor=None, version=None, multipath=False): + r"""Return full path to the user-shared data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "multipath" is an optional parameter only applicable to *nix + which indicates that the entire list of config dirs should be + returned. 
By default, the first item from XDG_CONFIG_DIRS is + returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set + + Typical site config directories are: + Mac OS X: same as site_data_dir + Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in + $XDG_CONFIG_DIRS + Win *: same as site_data_dir + Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) + + For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False + + WARNING: Do not use this on Windows. See the Vista-Fail note above for why. + """ + if system in ["win32", "darwin"]: + path = site_data_dir(appname, appauthor) + if appname and version: + path = os.path.join(path, version) + else: + # XDG default for $XDG_CONFIG_DIRS + # only first, if multipath is False + path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') + pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] + if appname: + if version: + appname = os.path.join(appname, version) + pathlist = [os.sep.join([x, appname]) for x in pathlist] + + if multipath: + path = os.pathsep.join(pathlist) + else: + path = pathlist[0] + return path + + +def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True): + r"""Return full path to the user-specific cache dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "opinion" (boolean) can be False to disable the appending of + "Cache" to the base app data dir for Windows. See + discussion below. + + Typical user cache directories are: + Mac OS X: ~/Library/Caches/<AppName> + Unix: ~/.cache/<AppName> (XDG default) + Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache + Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache + + On Windows the only suggestion in the MSDN docs is that local settings go in + the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming + app data dir (the default returned by `user_data_dir` above). Apps typically + put cache data somewhere *under* the given dir here. Some examples: + ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache + ...\Acme\SuperApp\Cache\1.0 + OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. + This can be disabled with the `opinion=False` option. + """ + if system == "win32": + if appauthor is None: + appauthor = appname + path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + if opinion: + path = os.path.join(path, "Cache") + elif system == 'darwin': + path = os.path.expanduser('~/Library/Caches') + if appname: + path = os.path.join(path, appname) + else: + path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache')) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def user_state_dir(appname=None, appauthor=None, version=None, roaming=False): + r"""Return full path to the user-specific state dir for this application. 
+ + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + + for a discussion of issues. + + Typical user state directories are: + Mac OS X: same as user_data_dir + Unix: ~/.local/state/ # or in $XDG_STATE_HOME, if defined + Win *: same as user_data_dir + + For Unix, we follow this Debian proposal + to extend the XDG spec and support $XDG_STATE_HOME. + + That means, by default "~/.local/state/". + """ + if system in ["win32", "darwin"]: + path = user_data_dir(appname, appauthor, None, roaming) + else: + path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def user_log_dir(appname=None, appauthor=None, version=None, opinion=True): + r"""Return full path to the user-specific log dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "opinion" (boolean) can be False to disable the appending of + "Logs" to the base app data dir for Windows, and "log" to the + base cache dir for Unix. See discussion below. + + Typical user log directories are: + Mac OS X: ~/Library/Logs/ + Unix: ~/.cache//log # or under $XDG_CACHE_HOME if defined + Win XP: C:\Documents and Settings\\Local Settings\Application Data\\\Logs + Vista: C:\Users\\AppData\Local\\\Logs + + On Windows the only suggestion in the MSDN docs is that local settings + go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in + examples of what some windows apps use for a logs dir.) + + OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA` + value for Windows and appends "log" to the user cache dir for Unix. + This can be disabled with the `opinion=False` option. 
+ """ + if system == "darwin": + path = os.path.join( + os.path.expanduser('~/Library/Logs'), + appname) + elif system == "win32": + path = user_data_dir(appname, appauthor, version) + version = False + if opinion: + path = os.path.join(path, "Logs") + else: + path = user_cache_dir(appname, appauthor, version) + version = False + if opinion: + path = os.path.join(path, "log") + if appname and version: + path = os.path.join(path, version) + return path + + +class AppDirs(object): + """Convenience wrapper for getting application dirs.""" + def __init__(self, appname=None, appauthor=None, version=None, + roaming=False, multipath=False): + self.appname = appname + self.appauthor = appauthor + self.version = version + self.roaming = roaming + self.multipath = multipath + + @property + def user_data_dir(self): + return user_data_dir(self.appname, self.appauthor, + version=self.version, roaming=self.roaming) + + @property + def site_data_dir(self): + return site_data_dir(self.appname, self.appauthor, + version=self.version, multipath=self.multipath) + + @property + def user_config_dir(self): + return user_config_dir(self.appname, self.appauthor, + version=self.version, roaming=self.roaming) + + @property + def site_config_dir(self): + return site_config_dir(self.appname, self.appauthor, + version=self.version, multipath=self.multipath) + + @property + def user_cache_dir(self): + return user_cache_dir(self.appname, self.appauthor, + version=self.version) + + @property + def user_state_dir(self): + return user_state_dir(self.appname, self.appauthor, + version=self.version) + + @property + def user_log_dir(self): + return user_log_dir(self.appname, self.appauthor, + version=self.version) + + +#---- internal support stuff + +def _get_win_folder_from_registry(csidl_name): + """This is a fallback technique at best. I'm not sure if using the + registry for this guarantees us the correct answer for all CSIDL_* + names. + """ + if PY3: + import winreg as _winreg + else: + import _winreg + + shell_folder_name = { + "CSIDL_APPDATA": "AppData", + "CSIDL_COMMON_APPDATA": "Common AppData", + "CSIDL_LOCAL_APPDATA": "Local AppData", + }[csidl_name] + + key = _winreg.OpenKey( + _winreg.HKEY_CURRENT_USER, + r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" + ) + dir, type = _winreg.QueryValueEx(key, shell_folder_name) + return dir + + +def _get_win_folder_with_pywin32(csidl_name): + from win32com.shell import shellcon, shell + dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0) + # Try to make this a unicode path because SHGetFolderPath does + # not return unicode strings when there is unicode data in the + # path. + try: + dir = unicode(dir) + + # Downgrade to short path name if have highbit chars. See + # . + has_high_char = False + for c in dir: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + try: + import win32api + dir = win32api.GetShortPathName(dir) + except ImportError: + pass + except UnicodeError: + pass + return dir + + +def _get_win_folder_with_ctypes(csidl_name): + import ctypes + + csidl_const = { + "CSIDL_APPDATA": 26, + "CSIDL_COMMON_APPDATA": 35, + "CSIDL_LOCAL_APPDATA": 28, + }[csidl_name] + + buf = ctypes.create_unicode_buffer(1024) + ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) + + # Downgrade to short path name if have highbit chars. See + # . 
+ has_high_char = False + for c in buf: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + buf2 = ctypes.create_unicode_buffer(1024) + if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): + buf = buf2 + + return buf.value + +def _get_win_folder_with_jna(csidl_name): + import array + from com.sun import jna + from com.sun.jna.platform import win32 + + buf_size = win32.WinDef.MAX_PATH * 2 + buf = array.zeros('c', buf_size) + shell = win32.Shell32.INSTANCE + shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf) + dir = jna.Native.toString(buf.tostring()).rstrip("\0") + + # Downgrade to short path name if have highbit chars. See + # <http://bugs.activestate.com/show_bug.cgi?id=85725>. + has_high_char = False + for c in dir: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + buf = array.zeros('c', buf_size) + kernel = win32.Kernel32.INSTANCE + if kernel.GetShortPathName(dir, buf, buf_size): + dir = jna.Native.toString(buf.tostring()).rstrip("\0") + + return dir + +if system == "win32": + try: + import win32com.shell + _get_win_folder = _get_win_folder_with_pywin32 + except ImportError: + try: + from ctypes import windll + _get_win_folder = _get_win_folder_with_ctypes + except ImportError: + try: + import com.sun.jna + _get_win_folder = _get_win_folder_with_jna + except ImportError: + _get_win_folder = _get_win_folder_from_registry + + +#---- self test code + +if __name__ == "__main__": + appname = "MyApp" + appauthor = "MyCompany" + + props = ("user_data_dir", + "user_config_dir", + "user_cache_dir", + "user_state_dir", + "user_log_dir", + "site_data_dir", + "site_config_dir") + + print("-- app dirs %s --" % __version__) + + print("-- app dirs (with optional 'version')") + dirs = AppDirs(appname, appauthor, version="1.0") + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) + + print("\n-- app dirs (without optional 'version')") + dirs = AppDirs(appname, appauthor) + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) + + print("\n-- app dirs (without optional 'appauthor')") + dirs = AppDirs(appname) + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) + + print("\n-- app dirs (with disabled 'appauthor')") + dirs = AppDirs(appname, appauthor=False) + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) diff --git a/venv/Lib/site-packages/clonevirtualenv.py b/venv/Lib/site-packages/clonevirtualenv.py new file mode 100644 index 00000000..a9d84db8 --- /dev/null +++ b/venv/Lib/site-packages/clonevirtualenv.py @@ -0,0 +1,318 @@ +#!/usr/bin/env python +from __future__ import with_statement + +import logging +import optparse +import os +import os.path +import re +import shutil +import subprocess +import sys +import itertools + +__version__ = '0.5.4' + + +logger = logging.getLogger() + + +env_bin_dir = 'bin' +if sys.platform == 'win32': + env_bin_dir = 'Scripts' + + +class UserError(Exception): + pass + + +def _dirmatch(path, matchwith): + """Check if path is within matchwith's tree. 
+ + >>> _dirmatch('/home/foo/bar', '/home/foo/bar') + True + >>> _dirmatch('/home/foo/bar/', '/home/foo/bar') + True + >>> _dirmatch('/home/foo/bar/etc', '/home/foo/bar') + True + >>> _dirmatch('/home/foo/bar2', '/home/foo/bar') + False + >>> _dirmatch('/home/foo/bar2/etc', '/home/foo/bar') + False + """ + matchlen = len(matchwith) + if (path.startswith(matchwith) + and path[matchlen:matchlen + 1] in [os.sep, '']): + return True + return False + + +def _virtualenv_sys(venv_path): + "obtain version and path info from a virtualenv." + executable = os.path.join(venv_path, env_bin_dir, 'python') + # Must use "executable" as the first argument rather than as the + # keyword argument "executable" to get correct value from sys.path + p = subprocess.Popen([executable, + '-c', 'import sys;' + 'print (sys.version[:3]);' + 'print ("\\n".join(sys.path));'], + env={}, + stdout=subprocess.PIPE) + stdout, err = p.communicate() + assert not p.returncode and stdout + lines = stdout.decode('utf-8').splitlines() + return lines[0], list(filter(bool, lines[1:])) + + +def clone_virtualenv(src_dir, dst_dir): + if not os.path.exists(src_dir): + raise UserError('src dir %r does not exist' % src_dir) + if os.path.exists(dst_dir): + raise UserError('dest dir %r exists' % dst_dir) + #sys_path = _virtualenv_syspath(src_dir) + logger.info('cloning virtualenv \'%s\' => \'%s\'...' % + (src_dir, dst_dir)) + shutil.copytree(src_dir, dst_dir, symlinks=True, + ignore=shutil.ignore_patterns('*.pyc')) + version, sys_path = _virtualenv_sys(dst_dir) + logger.info('fixing scripts in bin...') + fixup_scripts(src_dir, dst_dir, version) + + has_old = lambda s: any(i for i in s if _dirmatch(i, src_dir)) + + if has_old(sys_path): + # only need to fix stuff in sys.path if we have old + # paths in the sys.path of new python env. right? + logger.info('fixing paths in sys.path...') + fixup_syspath_items(sys_path, src_dir, dst_dir) + v_sys = _virtualenv_sys(dst_dir) + remaining = has_old(v_sys[1]) + assert not remaining, v_sys + fix_symlink_if_necessary(src_dir, dst_dir) + +def fix_symlink_if_necessary(src_dir, dst_dir): + #sometimes the source virtual environment has symlinks that point to itself + #one example is $OLD_VIRTUAL_ENV/local/lib points to $OLD_VIRTUAL_ENV/lib + #this function makes sure + #$NEW_VIRTUAL_ENV/local/lib will point to $NEW_VIRTUAL_ENV/lib + #usually this goes unnoticed unless one tries to upgrade a package though pip, so this bug is hard to find. 
+ logger.info("scanning for internal symlinks that point to the original virtual env") + for dirpath, dirnames, filenames in os.walk(dst_dir): + for a_file in itertools.chain(filenames, dirnames): + full_file_path = os.path.join(dirpath, a_file) + if os.path.islink(full_file_path): + target = os.path.realpath(full_file_path) + if target.startswith(src_dir): + new_target = target.replace(src_dir, dst_dir) + logger.debug('fixing symlink in %s' % (full_file_path,)) + os.remove(full_file_path) + os.symlink(new_target, full_file_path) + + +def fixup_scripts(old_dir, new_dir, version, rewrite_env_python=False): + bin_dir = os.path.join(new_dir, env_bin_dir) + root, dirs, files = next(os.walk(bin_dir)) + pybinre = re.compile(r'pythonw?([0-9]+(\.[0-9]+(\.[0-9]+)?)?)?$') + for file_ in files: + filename = os.path.join(root, file_) + if file_ in ['python', 'python%s' % version, 'activate_this.py']: + continue + elif file_.startswith('python') and pybinre.match(file_): + # ignore other possible python binaries + continue + elif file_.endswith('.pyc'): + # ignore compiled files + continue + elif file_ == 'activate' or file_.startswith('activate.'): + fixup_activate(os.path.join(root, file_), old_dir, new_dir) + elif os.path.islink(filename): + fixup_link(filename, old_dir, new_dir) + elif os.path.isfile(filename): + fixup_script_(root, file_, old_dir, new_dir, version, + rewrite_env_python=rewrite_env_python) + + +def fixup_script_(root, file_, old_dir, new_dir, version, + rewrite_env_python=False): + old_shebang = '#!%s/bin/python' % os.path.normcase(os.path.abspath(old_dir)) + new_shebang = '#!%s/bin/python' % os.path.normcase(os.path.abspath(new_dir)) + env_shebang = '#!/usr/bin/env python' + + filename = os.path.join(root, file_) + with open(filename, 'rb') as f: + if f.read(2) != b'#!': + # no shebang + return + f.seek(0) + lines = f.readlines() + + if not lines: + # warn: empty script + return + + def rewrite_shebang(version=None): + logger.debug('fixing %s' % filename) + shebang = new_shebang + if version: + shebang = shebang + version + shebang = (shebang + '\n').encode('utf-8') + with open(filename, 'wb') as f: + f.write(shebang) + f.writelines(lines[1:]) + + try: + bang = lines[0].decode('utf-8').strip() + except UnicodeDecodeError: + # binary file + return + + # This takes care of the scheme in which shebang is of type + # '#!/venv/bin/python3' while the version of system python + # is of type 3.x e.g. 3.5. 
+ short_version = bang[len(old_shebang):] + + if not bang.startswith('#!'): + return + elif bang == old_shebang: + rewrite_shebang() + elif (bang.startswith(old_shebang) + and bang[len(old_shebang):] == version): + rewrite_shebang(version) + elif (bang.startswith(old_shebang) + and short_version + and bang[len(old_shebang):] == short_version): + rewrite_shebang(short_version) + elif rewrite_env_python and bang.startswith(env_shebang): + if bang == env_shebang: + rewrite_shebang() + elif bang[len(env_shebang):] == version: + rewrite_shebang(version) + else: + # can't do anything + return + + +def fixup_activate(filename, old_dir, new_dir): + logger.debug('fixing %s' % filename) + with open(filename, 'rb') as f: + data = f.read().decode('utf-8') + + data = data.replace(old_dir, new_dir) + with open(filename, 'wb') as f: + f.write(data.encode('utf-8')) + + +def fixup_link(filename, old_dir, new_dir, target=None): + logger.debug('fixing %s' % filename) + if target is None: + target = os.readlink(filename) + + origdir = os.path.dirname(os.path.abspath(filename)).replace( + new_dir, old_dir) + if not os.path.isabs(target): + target = os.path.abspath(os.path.join(origdir, target)) + rellink = True + else: + rellink = False + + if _dirmatch(target, old_dir): + if rellink: + # keep relative links, but don't keep original in case it + # traversed up out of, then back into the venv. + # so, recreate a relative link from absolute. + target = target[len(origdir):].lstrip(os.sep) + else: + target = target.replace(old_dir, new_dir, 1) + + # else: links outside the venv, replaced with absolute path to target. + _replace_symlink(filename, target) + + +def _replace_symlink(filename, newtarget): + tmpfn = "%s.new" % filename + os.symlink(newtarget, tmpfn) + os.rename(tmpfn, filename) + + +def fixup_syspath_items(syspath, old_dir, new_dir): + for path in syspath: + if not os.path.isdir(path): + continue + path = os.path.normcase(os.path.abspath(path)) + if _dirmatch(path, old_dir): + path = path.replace(old_dir, new_dir, 1) + if not os.path.exists(path): + continue + elif not _dirmatch(path, new_dir): + continue + root, dirs, files = next(os.walk(path)) + for file_ in files: + filename = os.path.join(root, file_) + if filename.endswith('.pth'): + fixup_pth_file(filename, old_dir, new_dir) + elif filename.endswith('.egg-link'): + fixup_egglink_file(filename, old_dir, new_dir) + + +def fixup_pth_file(filename, old_dir, new_dir): + logger.debug('fixup_pth_file %s' % filename) + + with open(filename, 'r') as f: + lines = f.readlines() + + has_change = False + + for num, line in enumerate(lines): + line = (line.decode('utf-8') if hasattr(line, 'decode') else line).strip() + + if not line or line.startswith('#') or line.startswith('import '): + continue + elif _dirmatch(line, old_dir): + lines[num] = line.replace(old_dir, new_dir, 1) + has_change = True + + if has_change: + with open(filename, 'w') as f: + payload = os.linesep.join([l.strip() for l in lines]) + os.linesep + f.write(payload) + + +def fixup_egglink_file(filename, old_dir, new_dir): + logger.debug('fixing %s' % filename) + with open(filename, 'rb') as f: + link = f.read().decode('utf-8').strip() + if _dirmatch(link, old_dir): + link = link.replace(old_dir, new_dir, 1) + with open(filename, 'wb') as f: + link = (link + '\n').encode('utf-8') + f.write(link) + + +def main(): + parser = optparse.OptionParser("usage: %prog [options]" + " /path/to/existing/venv /path/to/cloned/venv") + parser.add_option('-v', + action="count", + dest='verbose', + 
default=False, + help='verbosity') + options, args = parser.parse_args() + try: + old_dir, new_dir = args + except ValueError: + print("virtualenv-clone %s" % (__version__,)) + parser.error("not enough arguments given.") + old_dir = os.path.realpath(old_dir) + new_dir = os.path.realpath(new_dir) + loglevel = (logging.WARNING, logging.INFO, logging.DEBUG)[min(2, + options.verbose)] + logging.basicConfig(level=loglevel, format='%(message)s') + try: + clone_virtualenv(old_dir, new_dir) + except UserError: + e = sys.exc_info()[1] + parser.error(str(e)) + + +if __name__ == '__main__': + main() diff --git a/venv/Lib/site-packages/distlib-0.3.1.dist-info/INSTALLER b/venv/Lib/site-packages/distlib-0.3.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/venv/Lib/site-packages/distlib-0.3.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/Lib/site-packages/distlib-0.3.1.dist-info/METADATA b/venv/Lib/site-packages/distlib-0.3.1.dist-info/METADATA new file mode 100644 index 00000000..54f5f649 --- /dev/null +++ b/venv/Lib/site-packages/distlib-0.3.1.dist-info/METADATA @@ -0,0 +1,24 @@ +Metadata-Version: 1.1 +Name: distlib +Version: 0.3.1 +Summary: Distribution utilities +Description: Low-level components of distutils2/packaging, augmented with higher-level APIs for making packaging easier. +Home-page: https://bitbucket.org/pypa/distlib +Author: Vinay Sajip +Author-email: vinay_sajip@red-dove.com +License: Python license +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Python Software Foundation License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Download-URL: https://bitbucket.org/pypa/distlib/downloads/distlib-0.3.1.zip diff --git a/venv/Lib/site-packages/distlib-0.3.1.dist-info/RECORD b/venv/Lib/site-packages/distlib-0.3.1.dist-info/RECORD new file mode 100644 index 00000000..1b5d02a6 --- /dev/null +++ b/venv/Lib/site-packages/distlib-0.3.1.dist-info/RECORD @@ -0,0 +1,45 @@ +distlib-0.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +distlib-0.3.1.dist-info/METADATA,sha256=i6wrPilVkro9BXvaHkwVsaemMZCx5xbWc8jS9oR_ZJw,1128 +distlib-0.3.1.dist-info/RECORD,, +distlib-0.3.1.dist-info/WHEEL,sha256=R4LNelR33E9ZPEGiPwrdPrrHnwkFEjiMPbVCAWVjsxI,106 +distlib/__init__.py,sha256=3veAk2rPznOB2gsK6tjbbh0TQMmGE5P82eE9wXq6NIk,581 +distlib/__pycache__/__init__.cpython-36.pyc,, +distlib/__pycache__/compat.cpython-36.pyc,, +distlib/__pycache__/database.cpython-36.pyc,, +distlib/__pycache__/index.cpython-36.pyc,, +distlib/__pycache__/locators.cpython-36.pyc,, +distlib/__pycache__/manifest.cpython-36.pyc,, +distlib/__pycache__/markers.cpython-36.pyc,, +distlib/__pycache__/metadata.cpython-36.pyc,, +distlib/__pycache__/resources.cpython-36.pyc,, +distlib/__pycache__/scripts.cpython-36.pyc,, +distlib/__pycache__/util.cpython-36.pyc,, +distlib/__pycache__/version.cpython-36.pyc,, +distlib/__pycache__/wheel.cpython-36.pyc,, 
+distlib/_backport/__init__.py,sha256=bqS_dTOH6uW9iGgd0uzfpPjo6vZ4xpPZ7kyfZJ2vNaw,274 +distlib/_backport/__pycache__/__init__.cpython-36.pyc,, +distlib/_backport/__pycache__/misc.cpython-36.pyc,, +distlib/_backport/__pycache__/shutil.cpython-36.pyc,, +distlib/_backport/__pycache__/sysconfig.cpython-36.pyc,, +distlib/_backport/__pycache__/tarfile.cpython-36.pyc,, +distlib/_backport/misc.py,sha256=KWecINdbFNOxSOP1fGF680CJnaC6S4fBRgEtaYTw0ig,971 +distlib/_backport/shutil.py,sha256=IX_G2NPqwecJibkIDje04bqu0xpHkfSQ2GaGdEVqM5Y,25707 +distlib/_backport/sysconfig.cfg,sha256=swZKxq9RY5e9r3PXCrlvQPMsvOdiWZBTHLEbqS8LJLU,2617 +distlib/_backport/sysconfig.py,sha256=BQHFlb6pubCl_dvT1NjtzIthylofjKisox239stDg0U,26854 +distlib/_backport/tarfile.py,sha256=Ihp7rXRcjbIKw8COm9wSePV9ARGXbSF9gGXAMn2Q-KU,92628 +distlib/compat.py,sha256=ADA56xiAxar3mU6qemlBhNbsrFPosXRhO44RzsbJPqk,41408 +distlib/database.py,sha256=Kl0YvPQKc4OcpVi7k5cFziydM1xOK8iqdxLGXgbZHV4,51059 +distlib/index.py,sha256=SXKzpQCERctxYDMp_OLee2f0J0e19ZhGdCIoMlUfUQM,21066 +distlib/locators.py,sha256=c9E4cDEacJ_uKbuE5BqAVocoWp6rsuBGTkiNDQq3zV4,52100 +distlib/manifest.py,sha256=nQEhYmgoreaBZzyFzwYsXxJARu3fo4EkunU163U16iE,14811 +distlib/markers.py,sha256=6Ac3cCfFBERexiESWIOXmg-apIP8l2esafNSX3KMy-8,4387 +distlib/metadata.py,sha256=z2KPy3h3tcDnb9Xs7nAqQ5Oz0bqjWAUFmKWcFKRoodg,38962 +distlib/resources.py,sha256=2FGv0ZHF14KXjLIlL0R991lyQQGcewOS4mJ-5n-JVnc,10766 +distlib/scripts.py,sha256=_MAj3sMuv56kuM8FsiIWXqbT0gmumPGaOR_atOzn4a4,17180 +distlib/t32.exe,sha256=NS3xBCVAld35JVFNmb-1QRyVtThukMrwZVeXn4LhaEQ,96768 +distlib/t64.exe,sha256=oAqHes78rUWVM0OtVqIhUvequl_PKhAhXYQWnUf7zR0,105984 +distlib/util.py,sha256=f2jZCPrcLCt6LcnC0gUy-Fur60tXD8reA7k4rDpHMDw,59845 +distlib/version.py,sha256=_n7F6juvQGAcn769E_SHa7fOcf5ERlEVymJ_EjPRwGw,23391 +distlib/w32.exe,sha256=lJtnZdeUxTZWya_EW5DZos_K5rswRECGspIl8ZJCIXs,90112 +distlib/w64.exe,sha256=0aRzoN2BO9NWW4ENy4_4vHkHR4qZTFZNVSAJJYlODTI,99840 +distlib/wheel.py,sha256=v6DnwTqhNHwrEVFr8_YeiTW6G4ftP_evsywNgrmdb2o,41144 diff --git a/venv/Lib/site-packages/distlib-0.3.1.dist-info/WHEEL b/venv/Lib/site-packages/distlib-0.3.1.dist-info/WHEEL new file mode 100644 index 00000000..78f54a19 --- /dev/null +++ b/venv/Lib/site-packages/distlib-0.3.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: distlib 0.3.1.dev0 +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any \ No newline at end of file diff --git a/venv/Lib/site-packages/distlib/__init__.py b/venv/Lib/site-packages/distlib/__init__.py new file mode 100644 index 00000000..63d916e3 --- /dev/null +++ b/venv/Lib/site-packages/distlib/__init__.py @@ -0,0 +1,23 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2019 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +import logging + +__version__ = '0.3.1' + +class DistlibException(Exception): + pass + +try: + from logging import NullHandler +except ImportError: # pragma: no cover + class NullHandler(logging.Handler): + def handle(self, record): pass + def emit(self, record): pass + def createLock(self): self.lock = None + +logger = logging.getLogger(__name__) +logger.addHandler(NullHandler()) diff --git a/venv/Lib/site-packages/distlib/_backport/__init__.py b/venv/Lib/site-packages/distlib/_backport/__init__.py new file mode 100644 index 00000000..f7dbf4c9 --- /dev/null +++ b/venv/Lib/site-packages/distlib/_backport/__init__.py @@ -0,0 +1,6 @@ +"""Modules copied from Python 3 standard libraries, for internal use only. 
+ +Individual classes and functions are found in d2._backport.misc. Intended +usage is to always import things missing from 3.1 from that module: the +built-in/stdlib objects will be used if found. +""" diff --git a/venv/Lib/site-packages/distlib/_backport/misc.py b/venv/Lib/site-packages/distlib/_backport/misc.py new file mode 100644 index 00000000..cfb318d3 --- /dev/null +++ b/venv/Lib/site-packages/distlib/_backport/misc.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""Backports for individual classes and functions.""" + +import os +import sys + +__all__ = ['cache_from_source', 'callable', 'fsencode'] + + +try: + from imp import cache_from_source +except ImportError: + def cache_from_source(py_file, debug=__debug__): + ext = debug and 'c' or 'o' + return py_file + ext + + +try: + callable = callable +except NameError: + from collections import Callable + + def callable(obj): + return isinstance(obj, Callable) + + +try: + fsencode = os.fsencode +except AttributeError: + def fsencode(filename): + if isinstance(filename, bytes): + return filename + elif isinstance(filename, str): + return filename.encode(sys.getfilesystemencoding()) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) diff --git a/venv/Lib/site-packages/distlib/_backport/shutil.py b/venv/Lib/site-packages/distlib/_backport/shutil.py new file mode 100644 index 00000000..10ed3625 --- /dev/null +++ b/venv/Lib/site-packages/distlib/_backport/shutil.py @@ -0,0 +1,764 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""Utility functions for copying and archiving files and directory trees. + +XXX The functions here don't copy the resource fork or other metadata on Mac. + +""" + +import os +import sys +import stat +from os.path import abspath +import fnmatch +try: + from collections.abc import Callable +except ImportError: + from collections import Callable +import errno +from . import tarfile + +try: + import bz2 + _BZ2_SUPPORTED = True +except ImportError: + _BZ2_SUPPORTED = False + +try: + from pwd import getpwnam +except ImportError: + getpwnam = None + +try: + from grp import getgrnam +except ImportError: + getgrnam = None + +__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2", + "copytree", "move", "rmtree", "Error", "SpecialFileError", + "ExecError", "make_archive", "get_archive_formats", + "register_archive_format", "unregister_archive_format", + "get_unpack_formats", "register_unpack_format", + "unregister_unpack_format", "unpack_archive", "ignore_patterns"] + +class Error(EnvironmentError): + pass + +class SpecialFileError(EnvironmentError): + """Raised when trying to do a kind of operation (e.g. copying) which is + not supported on a special file (e.g. a named pipe)""" + +class ExecError(EnvironmentError): + """Raised when a command could not be executed""" + +class ReadError(EnvironmentError): + """Raised when an archive cannot be read""" + +class RegistryError(Exception): + """Raised when a registry operation with the archiving + and unpacking registries fails""" + + +try: + WindowsError +except NameError: + WindowsError = None + +def copyfileobj(fsrc, fdst, length=16*1024): + """copy data from file-like object fsrc to file-like object fdst""" + while 1: + buf = fsrc.read(length) + if not buf: + break + fdst.write(buf) + +def _samefile(src, dst): + # Macintosh, Unix. 
+ if hasattr(os.path, 'samefile'): + try: + return os.path.samefile(src, dst) + except OSError: + return False + + # All other platforms: check for same pathname. + return (os.path.normcase(os.path.abspath(src)) == + os.path.normcase(os.path.abspath(dst))) + +def copyfile(src, dst): + """Copy data from src to dst""" + if _samefile(src, dst): + raise Error("`%s` and `%s` are the same file" % (src, dst)) + + for fn in [src, dst]: + try: + st = os.stat(fn) + except OSError: + # File most likely does not exist + pass + else: + # XXX What about other special files? (sockets, devices...) + if stat.S_ISFIFO(st.st_mode): + raise SpecialFileError("`%s` is a named pipe" % fn) + + with open(src, 'rb') as fsrc: + with open(dst, 'wb') as fdst: + copyfileobj(fsrc, fdst) + +def copymode(src, dst): + """Copy mode bits from src to dst""" + if hasattr(os, 'chmod'): + st = os.stat(src) + mode = stat.S_IMODE(st.st_mode) + os.chmod(dst, mode) + +def copystat(src, dst): + """Copy all stat info (mode bits, atime, mtime, flags) from src to dst""" + st = os.stat(src) + mode = stat.S_IMODE(st.st_mode) + if hasattr(os, 'utime'): + os.utime(dst, (st.st_atime, st.st_mtime)) + if hasattr(os, 'chmod'): + os.chmod(dst, mode) + if hasattr(os, 'chflags') and hasattr(st, 'st_flags'): + try: + os.chflags(dst, st.st_flags) + except OSError as why: + if (not hasattr(errno, 'EOPNOTSUPP') or + why.errno != errno.EOPNOTSUPP): + raise + +def copy(src, dst): + """Copy data and mode bits ("cp src dst"). + + The destination may be a directory. + + """ + if os.path.isdir(dst): + dst = os.path.join(dst, os.path.basename(src)) + copyfile(src, dst) + copymode(src, dst) + +def copy2(src, dst): + """Copy data and all stat info ("cp -p src dst"). + + The destination may be a directory. + + """ + if os.path.isdir(dst): + dst = os.path.join(dst, os.path.basename(src)) + copyfile(src, dst) + copystat(src, dst) + +def ignore_patterns(*patterns): + """Function that can be used as copytree() ignore parameter. + + Patterns is a sequence of glob-style patterns + that are used to exclude files""" + def _ignore_patterns(path, names): + ignored_names = [] + for pattern in patterns: + ignored_names.extend(fnmatch.filter(names, pattern)) + return set(ignored_names) + return _ignore_patterns + +def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2, + ignore_dangling_symlinks=False): + """Recursively copy a directory tree. + + The destination directory must not already exist. + If exception(s) occur, an Error is raised with a list of reasons. + + If the optional symlinks flag is true, symbolic links in the + source tree result in symbolic links in the destination tree; if + it is false, the contents of the files pointed to by symbolic + links are copied. If the file pointed by the symlink doesn't + exist, an exception will be added in the list of errors raised in + an Error exception at the end of the copy process. + + You can set the optional ignore_dangling_symlinks flag to true if you + want to silence this exception. Notice that this has no effect on + platforms that don't support os.symlink. + + The optional ignore argument is a callable. If given, it + is called with the `src` parameter, which is the directory + being visited by copytree(), and `names` which is the list of + `src` contents, as returned by os.listdir(): + + callable(src, names) -> ignored_names + + Since copytree() is called recursively, the callable will be + called once for each directory that is copied. 
It returns a + list of names relative to the `src` directory that should + not be copied. + + The optional copy_function argument is a callable that will be used + to copy each file. It will be called with the source path and the + destination path as arguments. By default, copy2() is used, but any + function that supports the same signature (like copy()) can be used. + + """ + names = os.listdir(src) + if ignore is not None: + ignored_names = ignore(src, names) + else: + ignored_names = set() + + os.makedirs(dst) + errors = [] + for name in names: + if name in ignored_names: + continue + srcname = os.path.join(src, name) + dstname = os.path.join(dst, name) + try: + if os.path.islink(srcname): + linkto = os.readlink(srcname) + if symlinks: + os.symlink(linkto, dstname) + else: + # ignore dangling symlink if the flag is on + if not os.path.exists(linkto) and ignore_dangling_symlinks: + continue + # otherwise let the copy occurs. copy2 will raise an error + copy_function(srcname, dstname) + elif os.path.isdir(srcname): + copytree(srcname, dstname, symlinks, ignore, copy_function) + else: + # Will raise a SpecialFileError for unsupported file types + copy_function(srcname, dstname) + # catch the Error from the recursive copytree so that we can + # continue with other files + except Error as err: + errors.extend(err.args[0]) + except EnvironmentError as why: + errors.append((srcname, dstname, str(why))) + try: + copystat(src, dst) + except OSError as why: + if WindowsError is not None and isinstance(why, WindowsError): + # Copying file access times may fail on Windows + pass + else: + errors.extend((src, dst, str(why))) + if errors: + raise Error(errors) + +def rmtree(path, ignore_errors=False, onerror=None): + """Recursively delete a directory tree. + + If ignore_errors is set, errors are ignored; otherwise, if onerror + is set, it is called to handle the error with arguments (func, + path, exc_info) where func is os.listdir, os.remove, or os.rmdir; + path is the argument to that function that caused it to fail; and + exc_info is a tuple returned by sys.exc_info(). If ignore_errors + is false and onerror is None, an exception is raised. + + """ + if ignore_errors: + def onerror(*args): + pass + elif onerror is None: + def onerror(*args): + raise + try: + if os.path.islink(path): + # symlinks to directories are forbidden, see bug #1669 + raise OSError("Cannot call rmtree on a symbolic link") + except OSError: + onerror(os.path.islink, path, sys.exc_info()) + # can't continue even if onerror hook returns + return + names = [] + try: + names = os.listdir(path) + except os.error: + onerror(os.listdir, path, sys.exc_info()) + for name in names: + fullname = os.path.join(path, name) + try: + mode = os.lstat(fullname).st_mode + except os.error: + mode = 0 + if stat.S_ISDIR(mode): + rmtree(fullname, ignore_errors, onerror) + else: + try: + os.remove(fullname) + except os.error: + onerror(os.remove, fullname, sys.exc_info()) + try: + os.rmdir(path) + except os.error: + onerror(os.rmdir, path, sys.exc_info()) + + +def _basename(path): + # A basename() variant which first strips the trailing slash, if present. + # Thus we always get the last component of the path, even for directories. + return os.path.basename(path.rstrip(os.path.sep)) + +def move(src, dst): + """Recursively move a file or directory to another location. This is + similar to the Unix "mv" command. + + If the destination is a directory or a symlink to a directory, the source + is moved inside the directory. 
The destination path must not already + exist. + + If the destination already exists but is not a directory, it may be + overwritten depending on os.rename() semantics. + + If the destination is on our current filesystem, then rename() is used. + Otherwise, src is copied to the destination and then removed. + A lot more could be done here... A look at a mv.c shows a lot of + the issues this implementation glosses over. + + """ + real_dst = dst + if os.path.isdir(dst): + if _samefile(src, dst): + # We might be on a case insensitive filesystem, + # perform the rename anyway. + os.rename(src, dst) + return + + real_dst = os.path.join(dst, _basename(src)) + if os.path.exists(real_dst): + raise Error("Destination path '%s' already exists" % real_dst) + try: + os.rename(src, real_dst) + except OSError: + if os.path.isdir(src): + if _destinsrc(src, dst): + raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst)) + copytree(src, real_dst, symlinks=True) + rmtree(src) + else: + copy2(src, real_dst) + os.unlink(src) + +def _destinsrc(src, dst): + src = abspath(src) + dst = abspath(dst) + if not src.endswith(os.path.sep): + src += os.path.sep + if not dst.endswith(os.path.sep): + dst += os.path.sep + return dst.startswith(src) + +def _get_gid(name): + """Returns a gid, given a group name.""" + if getgrnam is None or name is None: + return None + try: + result = getgrnam(name) + except KeyError: + result = None + if result is not None: + return result[2] + return None + +def _get_uid(name): + """Returns an uid, given a user name.""" + if getpwnam is None or name is None: + return None + try: + result = getpwnam(name) + except KeyError: + result = None + if result is not None: + return result[2] + return None + +def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, + owner=None, group=None, logger=None): + """Create a (possibly compressed) tar file from all the files under + 'base_dir'. + + 'compress' must be "gzip" (the default), "bzip2", or None. + + 'owner' and 'group' can be used to define an owner and a group for the + archive that is being built. If not provided, the current owner and group + will be used. + + The output tar file will be named 'base_name' + ".tar", possibly plus + the appropriate compression extension (".gz", or ".bz2"). + + Returns the output filename. 
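+
+    A minimal illustrative call (the paths are hypothetical, and 'project'
+    must be a directory visible from the current working directory):
+
+        _make_tarball('/tmp/backup', 'project')   # -> '/tmp/backup.tar.gz'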
+ """ + tar_compression = {'gzip': 'gz', None: ''} + compress_ext = {'gzip': '.gz'} + + if _BZ2_SUPPORTED: + tar_compression['bzip2'] = 'bz2' + compress_ext['bzip2'] = '.bz2' + + # flags for compression program, each element of list will be an argument + if compress is not None and compress not in compress_ext: + raise ValueError("bad value for 'compress', or compression format not " + "supported : {0}".format(compress)) + + archive_name = base_name + '.tar' + compress_ext.get(compress, '') + archive_dir = os.path.dirname(archive_name) + + if not os.path.exists(archive_dir): + if logger is not None: + logger.info("creating %s", archive_dir) + if not dry_run: + os.makedirs(archive_dir) + + # creating the tarball + if logger is not None: + logger.info('Creating tar archive') + + uid = _get_uid(owner) + gid = _get_gid(group) + + def _set_uid_gid(tarinfo): + if gid is not None: + tarinfo.gid = gid + tarinfo.gname = group + if uid is not None: + tarinfo.uid = uid + tarinfo.uname = owner + return tarinfo + + if not dry_run: + tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress]) + try: + tar.add(base_dir, filter=_set_uid_gid) + finally: + tar.close() + + return archive_name + +def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False): + # XXX see if we want to keep an external call here + if verbose: + zipoptions = "-r" + else: + zipoptions = "-rq" + from distutils.errors import DistutilsExecError + from distutils.spawn import spawn + try: + spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run) + except DistutilsExecError: + # XXX really should distinguish between "couldn't find + # external 'zip' command" and "zip failed". + raise ExecError("unable to create zip file '%s': " + "could neither import the 'zipfile' module nor " + "find a standalone zip utility") % zip_filename + +def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None): + """Create a zip file from all the files under 'base_dir'. + + The output zip file will be named 'base_name' + ".zip". Uses either the + "zipfile" Python module (if available) or the InfoZIP "zip" utility + (if installed and found on the default search path). If neither tool is + available, raises ExecError. Returns the name of the output zip + file. + """ + zip_filename = base_name + ".zip" + archive_dir = os.path.dirname(base_name) + + if not os.path.exists(archive_dir): + if logger is not None: + logger.info("creating %s", archive_dir) + if not dry_run: + os.makedirs(archive_dir) + + # If zipfile module is not available, try spawning an external 'zip' + # command. 
+ try: + import zipfile + except ImportError: + zipfile = None + + if zipfile is None: + _call_external_zip(base_dir, zip_filename, verbose, dry_run) + else: + if logger is not None: + logger.info("creating '%s' and adding '%s' to it", + zip_filename, base_dir) + + if not dry_run: + zip = zipfile.ZipFile(zip_filename, "w", + compression=zipfile.ZIP_DEFLATED) + + for dirpath, dirnames, filenames in os.walk(base_dir): + for name in filenames: + path = os.path.normpath(os.path.join(dirpath, name)) + if os.path.isfile(path): + zip.write(path, path) + if logger is not None: + logger.info("adding '%s'", path) + zip.close() + + return zip_filename + +_ARCHIVE_FORMATS = { + 'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"), + 'bztar': (_make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"), + 'tar': (_make_tarball, [('compress', None)], "uncompressed tar file"), + 'zip': (_make_zipfile, [], "ZIP file"), + } + +if _BZ2_SUPPORTED: + _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')], + "bzip2'ed tar-file") + +def get_archive_formats(): + """Returns a list of supported formats for archiving and unarchiving. + + Each element of the returned sequence is a tuple (name, description) + """ + formats = [(name, registry[2]) for name, registry in + _ARCHIVE_FORMATS.items()] + formats.sort() + return formats + +def register_archive_format(name, function, extra_args=None, description=''): + """Registers an archive format. + + name is the name of the format. function is the callable that will be + used to create archives. If provided, extra_args is a sequence of + (name, value) tuples that will be passed as arguments to the callable. + description can be provided to describe the format, and will be returned + by the get_archive_formats() function. + """ + if extra_args is None: + extra_args = [] + if not isinstance(function, Callable): + raise TypeError('The %s object is not callable' % function) + if not isinstance(extra_args, (tuple, list)): + raise TypeError('extra_args needs to be a sequence') + for element in extra_args: + if not isinstance(element, (tuple, list)) or len(element) !=2: + raise TypeError('extra_args elements are : (arg_name, value)') + + _ARCHIVE_FORMATS[name] = (function, extra_args, description) + +def unregister_archive_format(name): + del _ARCHIVE_FORMATS[name] + +def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0, + dry_run=0, owner=None, group=None, logger=None): + """Create an archive file (eg. zip or tar). + + 'base_name' is the name of the file to create, minus any format-specific + extension; 'format' is the archive format: one of "zip", "tar", "bztar" + or "gztar". + + 'root_dir' is a directory that will be the root directory of the + archive; ie. we typically chdir into 'root_dir' before creating the + archive. 'base_dir' is the directory where we start archiving from; + ie. 'base_dir' will be the common prefix of all files and + directories in the archive. 'root_dir' and 'base_dir' both default + to the current directory. Returns the name of the archive file. + + 'owner' and 'group' are used when creating a tar archive. By default, + uses the current owner and group. 
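+
+    A minimal illustrative call (the paths are hypothetical):
+
+        make_archive('/tmp/release', 'gztar', root_dir='/srv/app')
+        # -> '/tmp/release.tar.gz', archiving the tree under /srv/app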
+ """ + save_cwd = os.getcwd() + if root_dir is not None: + if logger is not None: + logger.debug("changing into '%s'", root_dir) + base_name = os.path.abspath(base_name) + if not dry_run: + os.chdir(root_dir) + + if base_dir is None: + base_dir = os.curdir + + kwargs = {'dry_run': dry_run, 'logger': logger} + + try: + format_info = _ARCHIVE_FORMATS[format] + except KeyError: + raise ValueError("unknown archive format '%s'" % format) + + func = format_info[0] + for arg, val in format_info[1]: + kwargs[arg] = val + + if format != 'zip': + kwargs['owner'] = owner + kwargs['group'] = group + + try: + filename = func(base_name, base_dir, **kwargs) + finally: + if root_dir is not None: + if logger is not None: + logger.debug("changing back to '%s'", save_cwd) + os.chdir(save_cwd) + + return filename + + +def get_unpack_formats(): + """Returns a list of supported formats for unpacking. + + Each element of the returned sequence is a tuple + (name, extensions, description) + """ + formats = [(name, info[0], info[3]) for name, info in + _UNPACK_FORMATS.items()] + formats.sort() + return formats + +def _check_unpack_options(extensions, function, extra_args): + """Checks what gets registered as an unpacker.""" + # first make sure no other unpacker is registered for this extension + existing_extensions = {} + for name, info in _UNPACK_FORMATS.items(): + for ext in info[0]: + existing_extensions[ext] = name + + for extension in extensions: + if extension in existing_extensions: + msg = '%s is already registered for "%s"' + raise RegistryError(msg % (extension, + existing_extensions[extension])) + + if not isinstance(function, Callable): + raise TypeError('The registered function must be a callable') + + +def register_unpack_format(name, extensions, function, extra_args=None, + description=''): + """Registers an unpack format. + + `name` is the name of the format. `extensions` is a list of extensions + corresponding to the format. + + `function` is the callable that will be + used to unpack archives. The callable will receive archives to unpack. + If it's unable to handle an archive, it needs to raise a ReadError + exception. + + If provided, `extra_args` is a sequence of + (name, value) tuples that will be passed as arguments to the callable. + description can be provided to describe the format, and will be returned + by the get_unpack_formats() function. + """ + if extra_args is None: + extra_args = [] + _check_unpack_options(extensions, function, extra_args) + _UNPACK_FORMATS[name] = extensions, function, extra_args, description + +def unregister_unpack_format(name): + """Removes the pack format from the registry.""" + del _UNPACK_FORMATS[name] + +def _ensure_directory(path): + """Ensure that the parent directory of `path` exists""" + dirname = os.path.dirname(path) + if not os.path.isdir(dirname): + os.makedirs(dirname) + +def _unpack_zipfile(filename, extract_dir): + """Unpack zip `filename` to `extract_dir` + """ + try: + import zipfile + except ImportError: + raise ReadError('zlib not supported, cannot unpack this archive.') + + if not zipfile.is_zipfile(filename): + raise ReadError("%s is not a zip file" % filename) + + zip = zipfile.ZipFile(filename) + try: + for info in zip.infolist(): + name = info.filename + + # don't extract absolute paths or ones with .. in them + if name.startswith('/') or '..' 
in name: + continue + + target = os.path.join(extract_dir, *name.split('/')) + if not target: + continue + + _ensure_directory(target) + if not name.endswith('/'): + # file + data = zip.read(info.filename) + f = open(target, 'wb') + try: + f.write(data) + finally: + f.close() + del data + finally: + zip.close() + +def _unpack_tarfile(filename, extract_dir): + """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` + """ + try: + tarobj = tarfile.open(filename) + except tarfile.TarError: + raise ReadError( + "%s is not a compressed or uncompressed tar file" % filename) + try: + tarobj.extractall(extract_dir) + finally: + tarobj.close() + +_UNPACK_FORMATS = { + 'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"), + 'tar': (['.tar'], _unpack_tarfile, [], "uncompressed tar file"), + 'zip': (['.zip'], _unpack_zipfile, [], "ZIP file") + } + +if _BZ2_SUPPORTED: + _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [], + "bzip2'ed tar-file") + +def _find_unpack_format(filename): + for name, info in _UNPACK_FORMATS.items(): + for extension in info[0]: + if filename.endswith(extension): + return name + return None + +def unpack_archive(filename, extract_dir=None, format=None): + """Unpack an archive. + + `filename` is the name of the archive. + + `extract_dir` is the name of the target directory, where the archive + is unpacked. If not provided, the current working directory is used. + + `format` is the archive format: one of "zip", "tar", or "gztar". Or any + other registered format. If not provided, unpack_archive will use the + filename extension and see if an unpacker was registered for that + extension. + + In case none is found, a ValueError is raised. + """ + if extract_dir is None: + extract_dir = os.getcwd() + + if format is not None: + try: + format_info = _UNPACK_FORMATS[format] + except KeyError: + raise ValueError("Unknown unpack format '{0}'".format(format)) + + func = format_info[1] + func(filename, extract_dir, **dict(format_info[2])) + else: + # we need to look at the registered unpackers supported extensions + format = _find_unpack_format(filename) + if format is None: + raise ReadError("Unknown archive format '{0}'".format(filename)) + + func = _UNPACK_FORMATS[format][1] + kwargs = dict(_UNPACK_FORMATS[format][2]) + func(filename, extract_dir, **kwargs) diff --git a/venv/Lib/site-packages/distlib/_backport/sysconfig.cfg b/venv/Lib/site-packages/distlib/_backport/sysconfig.cfg new file mode 100644 index 00000000..1746bd01 --- /dev/null +++ b/venv/Lib/site-packages/distlib/_backport/sysconfig.cfg @@ -0,0 +1,84 @@ +[posix_prefix] +# Configuration directories. Some of these come straight out of the +# configure script. They are for implementing the other variables, not to +# be used directly in [resource_locations]. 
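+# For example, a value written as {confdir}/python.conf in this section
+# would expand to /etc/python.conf once the variables below are substituted.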
+confdir = /etc +datadir = /usr/share +libdir = /usr/lib +statedir = /var +# User resource directory +local = ~/.local/{distribution.name} + +stdlib = {base}/lib/python{py_version_short} +platstdlib = {platbase}/lib/python{py_version_short} +purelib = {base}/lib/python{py_version_short}/site-packages +platlib = {platbase}/lib/python{py_version_short}/site-packages +include = {base}/include/python{py_version_short}{abiflags} +platinclude = {platbase}/include/python{py_version_short}{abiflags} +data = {base} + +[posix_home] +stdlib = {base}/lib/python +platstdlib = {base}/lib/python +purelib = {base}/lib/python +platlib = {base}/lib/python +include = {base}/include/python +platinclude = {base}/include/python +scripts = {base}/bin +data = {base} + +[nt] +stdlib = {base}/Lib +platstdlib = {base}/Lib +purelib = {base}/Lib/site-packages +platlib = {base}/Lib/site-packages +include = {base}/Include +platinclude = {base}/Include +scripts = {base}/Scripts +data = {base} + +[os2] +stdlib = {base}/Lib +platstdlib = {base}/Lib +purelib = {base}/Lib/site-packages +platlib = {base}/Lib/site-packages +include = {base}/Include +platinclude = {base}/Include +scripts = {base}/Scripts +data = {base} + +[os2_home] +stdlib = {userbase}/lib/python{py_version_short} +platstdlib = {userbase}/lib/python{py_version_short} +purelib = {userbase}/lib/python{py_version_short}/site-packages +platlib = {userbase}/lib/python{py_version_short}/site-packages +include = {userbase}/include/python{py_version_short} +scripts = {userbase}/bin +data = {userbase} + +[nt_user] +stdlib = {userbase}/Python{py_version_nodot} +platstdlib = {userbase}/Python{py_version_nodot} +purelib = {userbase}/Python{py_version_nodot}/site-packages +platlib = {userbase}/Python{py_version_nodot}/site-packages +include = {userbase}/Python{py_version_nodot}/Include +scripts = {userbase}/Scripts +data = {userbase} + +[posix_user] +stdlib = {userbase}/lib/python{py_version_short} +platstdlib = {userbase}/lib/python{py_version_short} +purelib = {userbase}/lib/python{py_version_short}/site-packages +platlib = {userbase}/lib/python{py_version_short}/site-packages +include = {userbase}/include/python{py_version_short} +scripts = {userbase}/bin +data = {userbase} + +[osx_framework_user] +stdlib = {userbase}/lib/python +platstdlib = {userbase}/lib/python +purelib = {userbase}/lib/python/site-packages +platlib = {userbase}/lib/python/site-packages +include = {userbase}/include +scripts = {userbase}/bin +data = {userbase} diff --git a/venv/Lib/site-packages/distlib/_backport/sysconfig.py b/venv/Lib/site-packages/distlib/_backport/sysconfig.py new file mode 100644 index 00000000..b470a373 --- /dev/null +++ b/venv/Lib/site-packages/distlib/_backport/sysconfig.py @@ -0,0 +1,786 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. 
+# +"""Access to Python's configuration information.""" + +import codecs +import os +import re +import sys +from os.path import pardir, realpath +try: + import configparser +except ImportError: + import ConfigParser as configparser + + +__all__ = [ + 'get_config_h_filename', + 'get_config_var', + 'get_config_vars', + 'get_makefile_filename', + 'get_path', + 'get_path_names', + 'get_paths', + 'get_platform', + 'get_python_version', + 'get_scheme_names', + 'parse_config_h', +] + + +def _safe_realpath(path): + try: + return realpath(path) + except OSError: + return path + + +if sys.executable: + _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable)) +else: + # sys.executable can be empty if argv[0] has been changed and Python is + # unable to retrieve the real program name + _PROJECT_BASE = _safe_realpath(os.getcwd()) + +if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower(): + _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir)) +# PC/VS7.1 +if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower(): + _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir)) +# PC/AMD64 +if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower(): + _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir)) + + +def is_python_build(): + for fn in ("Setup.dist", "Setup.local"): + if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)): + return True + return False + +_PYTHON_BUILD = is_python_build() + +_cfg_read = False + +def _ensure_cfg_read(): + global _cfg_read + if not _cfg_read: + from ..resources import finder + backport_package = __name__.rsplit('.', 1)[0] + _finder = finder(backport_package) + _cfgfile = _finder.find('sysconfig.cfg') + assert _cfgfile, 'sysconfig.cfg exists' + with _cfgfile.as_stream() as s: + _SCHEMES.readfp(s) + if _PYTHON_BUILD: + for scheme in ('posix_prefix', 'posix_home'): + _SCHEMES.set(scheme, 'include', '{srcdir}/Include') + _SCHEMES.set(scheme, 'platinclude', '{projectbase}/.') + + _cfg_read = True + + +_SCHEMES = configparser.RawConfigParser() +_VAR_REPL = re.compile(r'\{([^{]*?)\}') + +def _expand_globals(config): + _ensure_cfg_read() + if config.has_section('globals'): + globals = config.items('globals') + else: + globals = tuple() + + sections = config.sections() + for section in sections: + if section == 'globals': + continue + for option, value in globals: + if config.has_option(section, option): + continue + config.set(section, option, value) + config.remove_section('globals') + + # now expanding local variables defined in the cfg file + # + for section in config.sections(): + variables = dict(config.items(section)) + + def _replacer(matchobj): + name = matchobj.group(1) + if name in variables: + return variables[name] + return matchobj.group(0) + + for option, value in config.items(section): + config.set(section, option, _VAR_REPL.sub(_replacer, value)) + +#_expand_globals(_SCHEMES) + +_PY_VERSION = '%s.%s.%s' % sys.version_info[:3] +_PY_VERSION_SHORT = '%s.%s' % sys.version_info[:2] +_PY_VERSION_SHORT_NO_DOT = '%s%s' % sys.version_info[:2] +_PREFIX = os.path.normpath(sys.prefix) +_EXEC_PREFIX = os.path.normpath(sys.exec_prefix) +_CONFIG_VARS = None +_USER_BASE = None + + +def _subst_vars(path, local_vars): + """In the string `path`, replace tokens like {some.thing} with the + corresponding value from the map `local_vars`. + + If there is no corresponding value, leave the token unchanged. 
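+
+    For example (illustrative): _subst_vars('{base}/lib/{unknown}',
+    {'base': '/usr'}) returns '/usr/lib/{unknown}', provided 'unknown' is
+    defined neither in local_vars nor in os.environ.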
+ """ + def _replacer(matchobj): + name = matchobj.group(1) + if name in local_vars: + return local_vars[name] + elif name in os.environ: + return os.environ[name] + return matchobj.group(0) + return _VAR_REPL.sub(_replacer, path) + + +def _extend_dict(target_dict, other_dict): + target_keys = target_dict.keys() + for key, value in other_dict.items(): + if key in target_keys: + continue + target_dict[key] = value + + +def _expand_vars(scheme, vars): + res = {} + if vars is None: + vars = {} + _extend_dict(vars, get_config_vars()) + + for key, value in _SCHEMES.items(scheme): + if os.name in ('posix', 'nt'): + value = os.path.expanduser(value) + res[key] = os.path.normpath(_subst_vars(value, vars)) + return res + + +def format_value(value, vars): + def _replacer(matchobj): + name = matchobj.group(1) + if name in vars: + return vars[name] + return matchobj.group(0) + return _VAR_REPL.sub(_replacer, value) + + +def _get_default_scheme(): + if os.name == 'posix': + # the default scheme for posix is posix_prefix + return 'posix_prefix' + return os.name + + +def _getuserbase(): + env_base = os.environ.get("PYTHONUSERBASE", None) + + def joinuser(*args): + return os.path.expanduser(os.path.join(*args)) + + # what about 'os2emx', 'riscos' ? + if os.name == "nt": + base = os.environ.get("APPDATA") or "~" + if env_base: + return env_base + else: + return joinuser(base, "Python") + + if sys.platform == "darwin": + framework = get_config_var("PYTHONFRAMEWORK") + if framework: + if env_base: + return env_base + else: + return joinuser("~", "Library", framework, "%d.%d" % + sys.version_info[:2]) + + if env_base: + return env_base + else: + return joinuser("~", ".local") + + +def _parse_makefile(filename, vars=None): + """Parse a Makefile-style file. + + A dictionary containing name/value pairs is returned. If an + optional dictionary is passed in as the second argument, it is + used instead of a new dictionary. + """ + # Regexes needed for parsing Makefile (and similar syntaxes, + # like old-style Setup files). + _variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)") + _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)") + _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}") + + if vars is None: + vars = {} + done = {} + notdone = {} + + with codecs.open(filename, encoding='utf-8', errors="surrogateescape") as f: + lines = f.readlines() + + for line in lines: + if line.startswith('#') or line.strip() == '': + continue + m = _variable_rx.match(line) + if m: + n, v = m.group(1, 2) + v = v.strip() + # `$$' is a literal `$' in make + tmpv = v.replace('$$', '') + + if "$" in tmpv: + notdone[n] = v + else: + try: + v = int(v) + except ValueError: + # insert literal `$' + done[n] = v.replace('$$', '$') + else: + done[n] = v + + # do variable interpolation here + variables = list(notdone.keys()) + + # Variables with a 'PY_' prefix in the makefile. These need to + # be made available without that prefix through sysconfig. + # Special care is needed to ensure that variable expansion works, even + # if the expansion uses the name without a prefix. 
+ renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS') + + while len(variables) > 0: + for name in tuple(variables): + value = notdone[name] + m = _findvar1_rx.search(value) or _findvar2_rx.search(value) + if m is not None: + n = m.group(1) + found = True + if n in done: + item = str(done[n]) + elif n in notdone: + # get it on a subsequent round + found = False + elif n in os.environ: + # do it like make: fall back to environment + item = os.environ[n] + + elif n in renamed_variables: + if (name.startswith('PY_') and + name[3:] in renamed_variables): + item = "" + + elif 'PY_' + n in notdone: + found = False + + else: + item = str(done['PY_' + n]) + + else: + done[n] = item = "" + + if found: + after = value[m.end():] + value = value[:m.start()] + item + after + if "$" in after: + notdone[name] = value + else: + try: + value = int(value) + except ValueError: + done[name] = value.strip() + else: + done[name] = value + variables.remove(name) + + if (name.startswith('PY_') and + name[3:] in renamed_variables): + + name = name[3:] + if name not in done: + done[name] = value + + else: + # bogus variable reference (e.g. "prefix=$/opt/python"); + # just drop it since we can't deal + done[name] = value + variables.remove(name) + + # strip spurious spaces + for k, v in done.items(): + if isinstance(v, str): + done[k] = v.strip() + + # save the results in the global dictionary + vars.update(done) + return vars + + +def get_makefile_filename(): + """Return the path of the Makefile.""" + if _PYTHON_BUILD: + return os.path.join(_PROJECT_BASE, "Makefile") + if hasattr(sys, 'abiflags'): + config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags) + else: + config_dir_name = 'config' + return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile') + + +def _init_posix(vars): + """Initialize the module as appropriate for POSIX systems.""" + # load the installed Makefile: + makefile = get_makefile_filename() + try: + _parse_makefile(makefile, vars) + except IOError as e: + msg = "invalid Python installation: unable to open %s" % makefile + if hasattr(e, "strerror"): + msg = msg + " (%s)" % e.strerror + raise IOError(msg) + # load the installed pyconfig.h: + config_h = get_config_h_filename() + try: + with open(config_h) as f: + parse_config_h(f, vars) + except IOError as e: + msg = "invalid Python installation: unable to open %s" % config_h + if hasattr(e, "strerror"): + msg = msg + " (%s)" % e.strerror + raise IOError(msg) + # On AIX, there are wrong paths to the linker scripts in the Makefile + # -- these paths are relative to the Python source, but when installed + # the scripts are in another directory. + if _PYTHON_BUILD: + vars['LDSHARED'] = vars['BLDSHARED'] + + +def _init_non_posix(vars): + """Initialize the module as appropriate for NT""" + # set basic install directories + vars['LIBDEST'] = get_path('stdlib') + vars['BINLIBDEST'] = get_path('platstdlib') + vars['INCLUDEPY'] = get_path('include') + vars['SO'] = '.pyd' + vars['EXE'] = '.exe' + vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT + vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable)) + +# +# public APIs +# + + +def parse_config_h(fp, vars=None): + """Parse a config.h-style file. + + A dictionary containing name/value pairs is returned. If an + optional dictionary is passed in as the second argument, it is + used instead of a new dictionary. 
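+
+    For example (illustrative), the line '#define HAVE_UNISTD_H 1' yields
+    vars['HAVE_UNISTD_H'] == 1, while '/* #undef HAVE_FOO */' yields
+    vars['HAVE_FOO'] == 0.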
+ """ + if vars is None: + vars = {} + define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n") + undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n") + + while True: + line = fp.readline() + if not line: + break + m = define_rx.match(line) + if m: + n, v = m.group(1, 2) + try: + v = int(v) + except ValueError: + pass + vars[n] = v + else: + m = undef_rx.match(line) + if m: + vars[m.group(1)] = 0 + return vars + + +def get_config_h_filename(): + """Return the path of pyconfig.h.""" + if _PYTHON_BUILD: + if os.name == "nt": + inc_dir = os.path.join(_PROJECT_BASE, "PC") + else: + inc_dir = _PROJECT_BASE + else: + inc_dir = get_path('platinclude') + return os.path.join(inc_dir, 'pyconfig.h') + + +def get_scheme_names(): + """Return a tuple containing the schemes names.""" + return tuple(sorted(_SCHEMES.sections())) + + +def get_path_names(): + """Return a tuple containing the paths names.""" + # xxx see if we want a static list + return _SCHEMES.options('posix_prefix') + + +def get_paths(scheme=_get_default_scheme(), vars=None, expand=True): + """Return a mapping containing an install scheme. + + ``scheme`` is the install scheme name. If not provided, it will + return the default scheme for the current platform. + """ + _ensure_cfg_read() + if expand: + return _expand_vars(scheme, vars) + else: + return dict(_SCHEMES.items(scheme)) + + +def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True): + """Return a path corresponding to the scheme. + + ``scheme`` is the install scheme name. + """ + return get_paths(scheme, vars, expand)[name] + + +def get_config_vars(*args): + """With no arguments, return a dictionary of all configuration + variables relevant for the current platform. + + On Unix, this means every variable defined in Python's installed Makefile; + On Windows and Mac OS it's a much smaller set. + + With arguments, return a list of values that result from looking up + each argument in the configuration variable dictionary. + """ + global _CONFIG_VARS + if _CONFIG_VARS is None: + _CONFIG_VARS = {} + # Normalized versions of prefix and exec_prefix are handy to have; + # in fact, these are the standard versions used most places in the + # distutils2 module. + _CONFIG_VARS['prefix'] = _PREFIX + _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX + _CONFIG_VARS['py_version'] = _PY_VERSION + _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT + _CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2] + _CONFIG_VARS['base'] = _PREFIX + _CONFIG_VARS['platbase'] = _EXEC_PREFIX + _CONFIG_VARS['projectbase'] = _PROJECT_BASE + try: + _CONFIG_VARS['abiflags'] = sys.abiflags + except AttributeError: + # sys.abiflags may not be defined on all platforms. + _CONFIG_VARS['abiflags'] = '' + + if os.name in ('nt', 'os2'): + _init_non_posix(_CONFIG_VARS) + if os.name == 'posix': + _init_posix(_CONFIG_VARS) + # Setting 'userbase' is done below the call to the + # init function to enable using 'get_config_var' in + # the init-function. + if sys.version >= '2.6': + _CONFIG_VARS['userbase'] = _getuserbase() + + if 'srcdir' not in _CONFIG_VARS: + _CONFIG_VARS['srcdir'] = _PROJECT_BASE + else: + _CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir']) + + # Convert srcdir into an absolute path if it appears necessary. + # Normally it is relative to the build directory. However, during + # testing, for example, we might be running a non-installed python + # from a different directory. 
+ if _PYTHON_BUILD and os.name == "posix": + base = _PROJECT_BASE + try: + cwd = os.getcwd() + except OSError: + cwd = None + if (not os.path.isabs(_CONFIG_VARS['srcdir']) and + base != cwd): + # srcdir is relative and we are not in the same directory + # as the executable. Assume executable is in the build + # directory and make srcdir absolute. + srcdir = os.path.join(base, _CONFIG_VARS['srcdir']) + _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir) + + if sys.platform == 'darwin': + kernel_version = os.uname()[2] # Kernel version (8.4.3) + major_version = int(kernel_version.split('.')[0]) + + if major_version < 8: + # On Mac OS X before 10.4, check if -arch and -isysroot + # are in CFLAGS or LDFLAGS and remove them if they are. + # This is needed when building extensions on a 10.3 system + # using a universal build of python. + for key in ('LDFLAGS', 'BASECFLAGS', + # a number of derived variables. These need to be + # patched up as well. + 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): + flags = _CONFIG_VARS[key] + flags = re.sub(r'-arch\s+\w+\s', ' ', flags) + flags = re.sub('-isysroot [^ \t]*', ' ', flags) + _CONFIG_VARS[key] = flags + else: + # Allow the user to override the architecture flags using + # an environment variable. + # NOTE: This name was introduced by Apple in OSX 10.5 and + # is used by several scripting languages distributed with + # that OS release. + if 'ARCHFLAGS' in os.environ: + arch = os.environ['ARCHFLAGS'] + for key in ('LDFLAGS', 'BASECFLAGS', + # a number of derived variables. These need to be + # patched up as well. + 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): + + flags = _CONFIG_VARS[key] + flags = re.sub(r'-arch\s+\w+\s', ' ', flags) + flags = flags + ' ' + arch + _CONFIG_VARS[key] = flags + + # If we're on OSX 10.5 or later and the user tries to + # compiles an extension using an SDK that is not present + # on the current machine it is better to not use an SDK + # than to fail. + # + # The major usecase for this is users using a Python.org + # binary installer on OSX 10.6: that installer uses + # the 10.4u SDK, but that SDK is not installed by default + # when you install Xcode. + # + CFLAGS = _CONFIG_VARS.get('CFLAGS', '') + m = re.search(r'-isysroot\s+(\S+)', CFLAGS) + if m is not None: + sdk = m.group(1) + if not os.path.exists(sdk): + for key in ('LDFLAGS', 'BASECFLAGS', + # a number of derived variables. These need to be + # patched up as well. + 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): + + flags = _CONFIG_VARS[key] + flags = re.sub(r'-isysroot\s+\S+(\s|$)', ' ', flags) + _CONFIG_VARS[key] = flags + + if args: + vals = [] + for name in args: + vals.append(_CONFIG_VARS.get(name)) + return vals + else: + return _CONFIG_VARS + + +def get_config_var(name): + """Return the value of a single variable using the dictionary returned by + 'get_config_vars()'. + + Equivalent to get_config_vars().get(name) + """ + return get_config_vars().get(name) + + +def get_platform(): + """Return a string that identifies the current platform. + + This is used mainly to distinguish platform-specific build directories and + platform-specific built distributions. Typically includes the OS name + and version and the architecture (as supplied by 'os.uname()'), + although the exact information included depends on the OS; eg. for IRIX + the architecture isn't particularly important (IRIX only runs on SGI + hardware), but for Linux the kernel version isn't particularly + important. + + Examples of returned values: + linux-i586 + linux-alpha (?) 
+       solaris-2.6-sun4u
+       irix-5.3
+       irix64-6.2
+
+    Windows will return one of:
+       win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
+       win-ia64 (64bit Windows on Itanium)
+       win32 (all others - specifically, sys.platform is returned)
+
+    For other non-POSIX platforms, currently just returns 'sys.platform'.
+    """
+    if os.name == 'nt':
+        # sniff sys.version for architecture.
+        prefix = " bit ("
+        i = sys.version.find(prefix)
+        if i == -1:
+            return sys.platform
+        j = sys.version.find(")", i)
+        look = sys.version[i+len(prefix):j].lower()
+        if look == 'amd64':
+            return 'win-amd64'
+        if look == 'itanium':
+            return 'win-ia64'
+        return sys.platform
+
+    if os.name != "posix" or not hasattr(os, 'uname'):
+        # XXX what about the architecture? NT is Intel or Alpha,
+        # Mac OS is M68k or PPC, etc.
+        return sys.platform
+
+    # Try to distinguish various flavours of Unix
+    osname, host, release, version, machine = os.uname()
+
+    # Convert the OS name to lowercase, remove '/' characters
+    # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh")
+    osname = osname.lower().replace('/', '')
+    machine = machine.replace(' ', '_')
+    machine = machine.replace('/', '-')
+
+    if osname[:5] == "linux":
+        # At least on Linux/Intel, 'machine' is the processor --
+        # i386, etc.
+        # XXX what about Alpha, SPARC, etc?
+        return "%s-%s" % (osname, machine)
+    elif osname[:5] == "sunos":
+        if release[0] >= "5":           # SunOS 5 == Solaris 2
+            osname = "solaris"
+            release = "%d.%s" % (int(release[0]) - 3, release[2:])
+        # fall through to standard osname-release-machine representation
+    elif osname[:4] == "irix":          # could be "irix64"!
+        return "%s-%s" % (osname, release)
+    elif osname[:3] == "aix":
+        return "%s-%s.%s" % (osname, version, release)
+    elif osname[:6] == "cygwin":
+        osname = "cygwin"
+        rel_re = re.compile(r'[\d.]+')
+        m = rel_re.match(release)
+        if m:
+            release = m.group()
+    elif osname[:6] == "darwin":
+        #
+        # For our purposes, we'll assume that the system version from
+        # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
+        # to. This makes the compatibility story a bit more sane because the
+        # machine is going to compile and link as if it were
+        # MACOSX_DEPLOYMENT_TARGET.
+        cfgvars = get_config_vars()
+        macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')
+
+        if True:
+            # Always calculate the release of the running machine,
+            # needed to determine if we can build fat binaries or not.
+
+            macrelease = macver
+            # Get the system version. Reading this plist is a documented
+            # way to get the system version (see the documentation for
+            # the Gestalt Manager)
+            try:
+                f = open('/System/Library/CoreServices/SystemVersion.plist')
+            except IOError:
+                # We're on a plain darwin box, fall back to the default
+                # behaviour.
+                pass
+            else:
+                try:
+                    m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
+                                  r'<string>(.*?)</string>', f.read())
+                finally:
+                    f.close()
+                if m is not None:
+                    macrelease = '.'.join(m.group(1).split('.')[:2])
+                # else: fall back to the default behaviour
+
+        if not macver:
+            macver = macrelease
+
+        if macver:
+            release = macver
+            osname = "macosx"
+
+            if ((macrelease + '.') >= '10.4.' and
+                    '-arch' in get_config_vars().get('CFLAGS', '').strip()):
+                # The universal build will build fat binaries, but not on
+                # systems before 10.4
+                #
+                # Try to detect 4-way universal builds, those have machine-type
+                # 'universal' instead of 'fat'.
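+                # For example, CFLAGS containing '-arch i386 -arch x86_64'
+                # gives archs == ('i386', 'x86_64') and machine == 'intel'
+                # below.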
+ + machine = 'fat' + cflags = get_config_vars().get('CFLAGS') + + archs = re.findall(r'-arch\s+(\S+)', cflags) + archs = tuple(sorted(set(archs))) + + if len(archs) == 1: + machine = archs[0] + elif archs == ('i386', 'ppc'): + machine = 'fat' + elif archs == ('i386', 'x86_64'): + machine = 'intel' + elif archs == ('i386', 'ppc', 'x86_64'): + machine = 'fat3' + elif archs == ('ppc64', 'x86_64'): + machine = 'fat64' + elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'): + machine = 'universal' + else: + raise ValueError( + "Don't know machine value for archs=%r" % (archs,)) + + elif machine == 'i386': + # On OSX the machine type returned by uname is always the + # 32-bit variant, even if the executable architecture is + # the 64-bit variant + if sys.maxsize >= 2**32: + machine = 'x86_64' + + elif machine in ('PowerPC', 'Power_Macintosh'): + # Pick a sane name for the PPC architecture. + # See 'i386' case + if sys.maxsize >= 2**32: + machine = 'ppc64' + else: + machine = 'ppc' + + return "%s-%s-%s" % (osname, release, machine) + + +def get_python_version(): + return _PY_VERSION_SHORT + + +def _print_dict(title, data): + for index, (key, value) in enumerate(sorted(data.items())): + if index == 0: + print('%s: ' % (title)) + print('\t%s = "%s"' % (key, value)) + + +def _main(): + """Display all information sysconfig detains.""" + print('Platform: "%s"' % get_platform()) + print('Python version: "%s"' % get_python_version()) + print('Current installation scheme: "%s"' % _get_default_scheme()) + print() + _print_dict('Paths', get_paths()) + print() + _print_dict('Variables', get_config_vars()) + + +if __name__ == '__main__': + _main() diff --git a/venv/Lib/site-packages/distlib/_backport/tarfile.py b/venv/Lib/site-packages/distlib/_backport/tarfile.py new file mode 100644 index 00000000..d66d8566 --- /dev/null +++ b/venv/Lib/site-packages/distlib/_backport/tarfile.py @@ -0,0 +1,2607 @@ +#------------------------------------------------------------------- +# tarfile.py +#------------------------------------------------------------------- +# Copyright (C) 2002 Lars Gustaebel +# All rights reserved. +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. +# +from __future__ import print_function + +"""Read from and write to tar format archives. 
+""" + +__version__ = "$Revision$" + +version = "0.9.0" +__author__ = "Lars Gust\u00e4bel (lars@gustaebel.de)" +__date__ = "$Date: 2011-02-25 17:42:01 +0200 (Fri, 25 Feb 2011) $" +__cvsid__ = "$Id: tarfile.py 88586 2011-02-25 15:42:01Z marc-andre.lemburg $" +__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend." + +#--------- +# Imports +#--------- +import sys +import os +import stat +import errno +import time +import struct +import copy +import re + +try: + import grp, pwd +except ImportError: + grp = pwd = None + +# os.symlink on Windows prior to 6.0 raises NotImplementedError +symlink_exception = (AttributeError, NotImplementedError) +try: + # WindowsError (1314) will be raised if the caller does not hold the + # SeCreateSymbolicLinkPrivilege privilege + symlink_exception += (WindowsError,) +except NameError: + pass + +# from tarfile import * +__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"] + +if sys.version_info[0] < 3: + import __builtin__ as builtins +else: + import builtins + +_open = builtins.open # Since 'open' is TarFile.open + +#--------------------------------------------------------- +# tar constants +#--------------------------------------------------------- +NUL = b"\0" # the null character +BLOCKSIZE = 512 # length of processing blocks +RECORDSIZE = BLOCKSIZE * 20 # length of records +GNU_MAGIC = b"ustar \0" # magic gnu tar string +POSIX_MAGIC = b"ustar\x0000" # magic posix tar string + +LENGTH_NAME = 100 # maximum length of a filename +LENGTH_LINK = 100 # maximum length of a linkname +LENGTH_PREFIX = 155 # maximum length of the prefix field + +REGTYPE = b"0" # regular file +AREGTYPE = b"\0" # regular file +LNKTYPE = b"1" # link (inside tarfile) +SYMTYPE = b"2" # symbolic link +CHRTYPE = b"3" # character special device +BLKTYPE = b"4" # block special device +DIRTYPE = b"5" # directory +FIFOTYPE = b"6" # fifo special device +CONTTYPE = b"7" # contiguous file + +GNUTYPE_LONGNAME = b"L" # GNU tar longname +GNUTYPE_LONGLINK = b"K" # GNU tar longlink +GNUTYPE_SPARSE = b"S" # GNU tar sparse file + +XHDTYPE = b"x" # POSIX.1-2001 extended header +XGLTYPE = b"g" # POSIX.1-2001 global header +SOLARIS_XHDTYPE = b"X" # Solaris extended header + +USTAR_FORMAT = 0 # POSIX.1-1988 (ustar) format +GNU_FORMAT = 1 # GNU tar format +PAX_FORMAT = 2 # POSIX.1-2001 (pax) format +DEFAULT_FORMAT = GNU_FORMAT + +#--------------------------------------------------------- +# tarfile constants +#--------------------------------------------------------- +# File types that tarfile supports: +SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE, + SYMTYPE, DIRTYPE, FIFOTYPE, + CONTTYPE, CHRTYPE, BLKTYPE, + GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, + GNUTYPE_SPARSE) + +# File types that will be treated as a regular file. +REGULAR_TYPES = (REGTYPE, AREGTYPE, + CONTTYPE, GNUTYPE_SPARSE) + +# File types that are part of the GNU tar format. +GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, + GNUTYPE_SPARSE) + +# Fields from a pax header that override a TarInfo attribute. +PAX_FIELDS = ("path", "linkpath", "size", "mtime", + "uid", "gid", "uname", "gname") + +# Fields from a pax header that are affected by hdrcharset. +PAX_NAME_FIELDS = set(("path", "linkpath", "uname", "gname")) + +# Fields in a pax header that are numbers, all other fields +# are treated as strings. 
+PAX_NUMBER_FIELDS = { + "atime": float, + "ctime": float, + "mtime": float, + "uid": int, + "gid": int, + "size": int +} + +#--------------------------------------------------------- +# Bits used in the mode field, values in octal. +#--------------------------------------------------------- +S_IFLNK = 0o120000 # symbolic link +S_IFREG = 0o100000 # regular file +S_IFBLK = 0o060000 # block device +S_IFDIR = 0o040000 # directory +S_IFCHR = 0o020000 # character device +S_IFIFO = 0o010000 # fifo + +TSUID = 0o4000 # set UID on execution +TSGID = 0o2000 # set GID on execution +TSVTX = 0o1000 # reserved + +TUREAD = 0o400 # read by owner +TUWRITE = 0o200 # write by owner +TUEXEC = 0o100 # execute/search by owner +TGREAD = 0o040 # read by group +TGWRITE = 0o020 # write by group +TGEXEC = 0o010 # execute/search by group +TOREAD = 0o004 # read by other +TOWRITE = 0o002 # write by other +TOEXEC = 0o001 # execute/search by other + +#--------------------------------------------------------- +# initialization +#--------------------------------------------------------- +if os.name in ("nt", "ce"): + ENCODING = "utf-8" +else: + ENCODING = sys.getfilesystemencoding() + +#--------------------------------------------------------- +# Some useful functions +#--------------------------------------------------------- + +def stn(s, length, encoding, errors): + """Convert a string to a null-terminated bytes object. + """ + s = s.encode(encoding, errors) + return s[:length] + (length - len(s)) * NUL + +def nts(s, encoding, errors): + """Convert a null-terminated bytes object to a string. + """ + p = s.find(b"\0") + if p != -1: + s = s[:p] + return s.decode(encoding, errors) + +def nti(s): + """Convert a number field to a python number. + """ + # There are two possible encodings for a number field, see + # itn() below. + if s[0] != chr(0o200): + try: + n = int(nts(s, "ascii", "strict") or "0", 8) + except ValueError: + raise InvalidHeaderError("invalid header") + else: + n = 0 + for i in range(len(s) - 1): + n <<= 8 + n += ord(s[i + 1]) + return n + +def itn(n, digits=8, format=DEFAULT_FORMAT): + """Convert a python number to a number field. + """ + # POSIX 1003.1-1988 requires numbers to be encoded as a string of + # octal digits followed by a null-byte, this allows values up to + # (8**(digits-1))-1. GNU tar allows storing numbers greater than + # that if necessary. A leading 0o200 byte indicates this particular + # encoding, the following digits-1 bytes are a big-endian + # representation. This allows values up to (256**(digits-1))-1. + if 0 <= n < 8 ** (digits - 1): + s = ("%0*o" % (digits - 1, n)).encode("ascii") + NUL + else: + if format != GNU_FORMAT or n >= 256 ** (digits - 1): + raise ValueError("overflow in number field") + + if n < 0: + # XXX We mimic GNU tar's behaviour with negative numbers, + # this could raise OverflowError. + n = struct.unpack("L", struct.pack("l", n))[0] + + s = bytearray() + for i in range(digits - 1): + s.insert(0, n & 0o377) + n >>= 8 + s.insert(0, 0o200) + return s + +def calc_chksums(buf): + """Calculate the checksum for a member's header by summing up all + characters except for the chksum field which is treated as if + it was filled with spaces. According to the GNU tar sources, + some tars (Sun and NeXT) calculate chksum with signed char, + which will be different if there are chars in the buffer with + the high bit set. So we calculate two checksums, unsigned and + signed. 
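+
+       This lets the caller accept a header whose stored chksum field
+       matches either of the two returned values.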
+ """ + unsigned_chksum = 256 + sum(struct.unpack("148B", buf[:148]) + struct.unpack("356B", buf[156:512])) + signed_chksum = 256 + sum(struct.unpack("148b", buf[:148]) + struct.unpack("356b", buf[156:512])) + return unsigned_chksum, signed_chksum + +def copyfileobj(src, dst, length=None): + """Copy length bytes from fileobj src to fileobj dst. + If length is None, copy the entire content. + """ + if length == 0: + return + if length is None: + while True: + buf = src.read(16*1024) + if not buf: + break + dst.write(buf) + return + + BUFSIZE = 16 * 1024 + blocks, remainder = divmod(length, BUFSIZE) + for b in range(blocks): + buf = src.read(BUFSIZE) + if len(buf) < BUFSIZE: + raise IOError("end of file reached") + dst.write(buf) + + if remainder != 0: + buf = src.read(remainder) + if len(buf) < remainder: + raise IOError("end of file reached") + dst.write(buf) + return + +filemode_table = ( + ((S_IFLNK, "l"), + (S_IFREG, "-"), + (S_IFBLK, "b"), + (S_IFDIR, "d"), + (S_IFCHR, "c"), + (S_IFIFO, "p")), + + ((TUREAD, "r"),), + ((TUWRITE, "w"),), + ((TUEXEC|TSUID, "s"), + (TSUID, "S"), + (TUEXEC, "x")), + + ((TGREAD, "r"),), + ((TGWRITE, "w"),), + ((TGEXEC|TSGID, "s"), + (TSGID, "S"), + (TGEXEC, "x")), + + ((TOREAD, "r"),), + ((TOWRITE, "w"),), + ((TOEXEC|TSVTX, "t"), + (TSVTX, "T"), + (TOEXEC, "x")) +) + +def filemode(mode): + """Convert a file's mode to a string of the form + -rwxrwxrwx. + Used by TarFile.list() + """ + perm = [] + for table in filemode_table: + for bit, char in table: + if mode & bit == bit: + perm.append(char) + break + else: + perm.append("-") + return "".join(perm) + +class TarError(Exception): + """Base exception.""" + pass +class ExtractError(TarError): + """General exception for extract errors.""" + pass +class ReadError(TarError): + """Exception for unreadable tar archives.""" + pass +class CompressionError(TarError): + """Exception for unavailable compression methods.""" + pass +class StreamError(TarError): + """Exception for unsupported operations on stream-like TarFiles.""" + pass +class HeaderError(TarError): + """Base exception for header errors.""" + pass +class EmptyHeaderError(HeaderError): + """Exception for empty headers.""" + pass +class TruncatedHeaderError(HeaderError): + """Exception for truncated headers.""" + pass +class EOFHeaderError(HeaderError): + """Exception for end of file headers.""" + pass +class InvalidHeaderError(HeaderError): + """Exception for invalid headers.""" + pass +class SubsequentHeaderError(HeaderError): + """Exception for missing and invalid extended headers.""" + pass + +#--------------------------- +# internal stream interface +#--------------------------- +class _LowLevelFile(object): + """Low-level file object. Supports reading and writing. + It is used instead of a regular file object for streaming + access. + """ + + def __init__(self, name, mode): + mode = { + "r": os.O_RDONLY, + "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC, + }[mode] + if hasattr(os, "O_BINARY"): + mode |= os.O_BINARY + self.fd = os.open(name, mode, 0o666) + + def close(self): + os.close(self.fd) + + def read(self, size): + return os.read(self.fd, size) + + def write(self, s): + os.write(self.fd, s) + +class _Stream(object): + """Class that serves as an adapter between TarFile and + a stream-like object. The stream-like object only + needs to have a read() or write() method and is accessed + blockwise. Use of gzip or bzip2 compression is possible. + A stream-like object could be for example: sys.stdin, + sys.stdout, a socket, a tape device etc. 
+
+       _Stream is intended to be used only internally.
+    """
+
+    def __init__(self, name, mode, comptype, fileobj, bufsize):
+        """Construct a _Stream object.
+        """
+        self._extfileobj = True
+        if fileobj is None:
+            fileobj = _LowLevelFile(name, mode)
+            self._extfileobj = False
+
+        if comptype == '*':
+            # Enable transparent compression detection for the
+            # stream interface
+            fileobj = _StreamProxy(fileobj)
+            comptype = fileobj.getcomptype()
+
+        self.name = name or ""
+        self.mode = mode
+        self.comptype = comptype
+        self.fileobj = fileobj
+        self.bufsize = bufsize
+        self.buf = b""
+        self.pos = 0
+        self.closed = False
+
+        try:
+            if comptype == "gz":
+                try:
+                    import zlib
+                except ImportError:
+                    raise CompressionError("zlib module is not available")
+                self.zlib = zlib
+                self.crc = zlib.crc32(b"")
+                if mode == "r":
+                    self._init_read_gz()
+                else:
+                    self._init_write_gz()
+
+            if comptype == "bz2":
+                try:
+                    import bz2
+                except ImportError:
+                    raise CompressionError("bz2 module is not available")
+                if mode == "r":
+                    self.dbuf = b""
+                    self.cmp = bz2.BZ2Decompressor()
+                else:
+                    self.cmp = bz2.BZ2Compressor()
+        except:
+            if not self._extfileobj:
+                self.fileobj.close()
+            self.closed = True
+            raise
+
+    def __del__(self):
+        if hasattr(self, "closed") and not self.closed:
+            self.close()
+
+    def _init_write_gz(self):
+        """Initialize for writing with gzip compression.
+        """
+        self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
+                                         -self.zlib.MAX_WBITS,
+                                         self.zlib.DEF_MEM_LEVEL,
+                                         0)
+        timestamp = struct.pack("<L", int(time.time()))
+        self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
+        if self.name.endswith(".gz"):
+            self.name = self.name[:-3]
+        # RFC1952 says we must use ISO-8859-1 for the FNAME field.
+        self.__write(self.name.encode("iso-8859-1", "replace") + NUL)
+
+    def write(self, s):
+        """Write string s to the stream.
+        """
+        if self.comptype == "gz":
+            self.crc = self.zlib.crc32(s, self.crc)
+        self.pos += len(s)
+        if self.comptype != "tar":
+            s = self.cmp.compress(s)
+        self.__write(s)
+
+    def __write(self, s):
+        """Write string s to the stream if a whole new block
+           is ready to be written.
+        """
+        self.buf += s
+        while len(self.buf) > self.bufsize:
+            self.fileobj.write(self.buf[:self.bufsize])
+            self.buf = self.buf[self.bufsize:]
+
+    def close(self):
+        """Close the _Stream object. No operation should be
+           done on it afterwards.
+        """
+        if self.closed:
+            return
+
+        if self.mode == "w" and self.comptype != "tar":
+            self.buf += self.cmp.flush()
+
+        if self.mode == "w" and self.buf:
+            self.fileobj.write(self.buf)
+            self.buf = b""
+            if self.comptype == "gz":
+                # The native zlib crc is an unsigned 32-bit integer, but
+                # the Python wrapper implicitly casts that to a signed C
+                # long.  So, on a 32-bit box self.crc may "look negative",
+                # while the same crc on a 64-bit box may "look positive".
+                # To avoid irksome warnings from the `struct` module, force
+                # it to look positive on all boxes.
+                self.fileobj.write(struct.pack("<L", self.crc & 0xffffffff))
+                self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFF))
+
+        if not self._extfileobj:
+            self.fileobj.close()
+
+        self.closed = True
+
+    def _init_read_gz(self):
+        """Initialize for reading a gzip compressed fileobj.
+        """
+        self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
+        self.dbuf = b""
+
+        # taken from gzip.GzipFile with some alterations
+        if self.__read(2) != b"\037\213":
+            raise ReadError("not a gzip file")
+        if self.__read(1) != b"\010":
+            raise CompressionError("unsupported compression method")
+
+        flag = ord(self.__read(1))
+        self.__read(6)
+
+        if flag & 4:
+            xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
+            self.read(xlen)
+        if flag & 8:
+            while True:
+                s = self.__read(1)
+                if not s or s == NUL:
+                    break
+        if flag & 16:
+            while True:
+                s = self.__read(1)
+                if not s or s == NUL:
+                    break
+        if flag & 2:
+            self.__read(2)
+
+    def tell(self):
+        """Return the stream's file pointer position.
+        """
+        return self.pos
+
+    def seek(self, pos=0):
+        """Set the stream's file pointer to pos. Negative seeking
+           is forbidden.
+        """
+        if pos - self.pos >= 0:
+            blocks, remainder = divmod(pos - self.pos, self.bufsize)
+            for i in range(blocks):
+                self.read(self.bufsize)
+            self.read(remainder)
+        else:
+            raise StreamError("seeking backwards is not allowed")
+        return self.pos
+
+    def read(self, size=None):
+        """Return the next size number of bytes from the stream.
+           If size is not defined, return all bytes of the stream
+           up to EOF.
+        """
+        if size is None:
+            t = []
+            while True:
+                buf = self._read(self.bufsize)
+                if not buf:
+                    break
+                t.append(buf)
+            buf = "".join(t)
+        else:
+            buf = self._read(size)
+        self.pos += len(buf)
+        return buf
+
+    def _read(self, size):
+        """Return size bytes from the stream.
+        """
+        if self.comptype == "tar":
+            return self.__read(size)
+
+        c = len(self.dbuf)
+        while c < size:
+            buf = self.__read(self.bufsize)
+            if not buf:
+                break
+            try:
+                buf = self.cmp.decompress(buf)
+            except IOError:
+                raise ReadError("invalid compressed data")
+            self.dbuf += buf
+            c += len(buf)
+        buf = self.dbuf[:size]
+        self.dbuf = self.dbuf[size:]
+        return buf
+
+    def __read(self, size):
+        """Return size bytes from stream. If internal buffer is empty,
+           read another block from the stream.
+ """ + c = len(self.buf) + while c < size: + buf = self.fileobj.read(self.bufsize) + if not buf: + break + self.buf += buf + c += len(buf) + buf = self.buf[:size] + self.buf = self.buf[size:] + return buf +# class _Stream + +class _StreamProxy(object): + """Small proxy class that enables transparent compression + detection for the Stream interface (mode 'r|*'). + """ + + def __init__(self, fileobj): + self.fileobj = fileobj + self.buf = self.fileobj.read(BLOCKSIZE) + + def read(self, size): + self.read = self.fileobj.read + return self.buf + + def getcomptype(self): + if self.buf.startswith(b"\037\213\010"): + return "gz" + if self.buf.startswith(b"BZh91"): + return "bz2" + return "tar" + + def close(self): + self.fileobj.close() +# class StreamProxy + +class _BZ2Proxy(object): + """Small proxy class that enables external file object + support for "r:bz2" and "w:bz2" modes. This is actually + a workaround for a limitation in bz2 module's BZ2File + class which (unlike gzip.GzipFile) has no support for + a file object argument. + """ + + blocksize = 16 * 1024 + + def __init__(self, fileobj, mode): + self.fileobj = fileobj + self.mode = mode + self.name = getattr(self.fileobj, "name", None) + self.init() + + def init(self): + import bz2 + self.pos = 0 + if self.mode == "r": + self.bz2obj = bz2.BZ2Decompressor() + self.fileobj.seek(0) + self.buf = b"" + else: + self.bz2obj = bz2.BZ2Compressor() + + def read(self, size): + x = len(self.buf) + while x < size: + raw = self.fileobj.read(self.blocksize) + if not raw: + break + data = self.bz2obj.decompress(raw) + self.buf += data + x += len(data) + + buf = self.buf[:size] + self.buf = self.buf[size:] + self.pos += len(buf) + return buf + + def seek(self, pos): + if pos < self.pos: + self.init() + self.read(pos - self.pos) + + def tell(self): + return self.pos + + def write(self, data): + self.pos += len(data) + raw = self.bz2obj.compress(data) + self.fileobj.write(raw) + + def close(self): + if self.mode == "w": + raw = self.bz2obj.flush() + self.fileobj.write(raw) +# class _BZ2Proxy + +#------------------------ +# Extraction file object +#------------------------ +class _FileInFile(object): + """A thin wrapper around an existing file object that + provides a part of its data as an individual file + object. + """ + + def __init__(self, fileobj, offset, size, blockinfo=None): + self.fileobj = fileobj + self.offset = offset + self.size = size + self.position = 0 + + if blockinfo is None: + blockinfo = [(0, size)] + + # Construct a map with data and zero blocks. + self.map_index = 0 + self.map = [] + lastpos = 0 + realpos = self.offset + for offset, size in blockinfo: + if offset > lastpos: + self.map.append((False, lastpos, offset, None)) + self.map.append((True, offset, offset + size, realpos)) + realpos += size + lastpos = offset + size + if lastpos < self.size: + self.map.append((False, lastpos, self.size, None)) + + def seekable(self): + if not hasattr(self.fileobj, "seekable"): + # XXX gzip.GzipFile and bz2.BZ2File + return True + return self.fileobj.seekable() + + def tell(self): + """Return the current file position. + """ + return self.position + + def seek(self, position): + """Seek to a position in the file. + """ + self.position = position + + def read(self, size=None): + """Read data from the file. 
+ """ + if size is None: + size = self.size - self.position + else: + size = min(size, self.size - self.position) + + buf = b"" + while size > 0: + while True: + data, start, stop, offset = self.map[self.map_index] + if start <= self.position < stop: + break + else: + self.map_index += 1 + if self.map_index == len(self.map): + self.map_index = 0 + length = min(size, stop - self.position) + if data: + self.fileobj.seek(offset + (self.position - start)) + buf += self.fileobj.read(length) + else: + buf += NUL * length + size -= length + self.position += length + return buf +#class _FileInFile + + +class ExFileObject(object): + """File-like object for reading an archive member. + Is returned by TarFile.extractfile(). + """ + blocksize = 1024 + + def __init__(self, tarfile, tarinfo): + self.fileobj = _FileInFile(tarfile.fileobj, + tarinfo.offset_data, + tarinfo.size, + tarinfo.sparse) + self.name = tarinfo.name + self.mode = "r" + self.closed = False + self.size = tarinfo.size + + self.position = 0 + self.buffer = b"" + + def readable(self): + return True + + def writable(self): + return False + + def seekable(self): + return self.fileobj.seekable() + + def read(self, size=None): + """Read at most size bytes from the file. If size is not + present or None, read all data until EOF is reached. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + buf = b"" + if self.buffer: + if size is None: + buf = self.buffer + self.buffer = b"" + else: + buf = self.buffer[:size] + self.buffer = self.buffer[size:] + + if size is None: + buf += self.fileobj.read() + else: + buf += self.fileobj.read(size - len(buf)) + + self.position += len(buf) + return buf + + # XXX TextIOWrapper uses the read1() method. + read1 = read + + def readline(self, size=-1): + """Read one entire line from the file. If size is present + and non-negative, return a string with at most that + size, which may be an incomplete line. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + pos = self.buffer.find(b"\n") + 1 + if pos == 0: + # no newline found. + while True: + buf = self.fileobj.read(self.blocksize) + self.buffer += buf + if not buf or b"\n" in buf: + pos = self.buffer.find(b"\n") + 1 + if pos == 0: + # no newline found. + pos = len(self.buffer) + break + + if size != -1: + pos = min(size, pos) + + buf = self.buffer[:pos] + self.buffer = self.buffer[pos:] + self.position += len(buf) + return buf + + def readlines(self): + """Return a list with all remaining lines. + """ + result = [] + while True: + line = self.readline() + if not line: break + result.append(line) + return result + + def tell(self): + """Return the current file position. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + return self.position + + def seek(self, pos, whence=os.SEEK_SET): + """Seek to a position in the file. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + if whence == os.SEEK_SET: + self.position = min(max(pos, 0), self.size) + elif whence == os.SEEK_CUR: + if pos < 0: + self.position = max(self.position + pos, 0) + else: + self.position = min(self.position + pos, self.size) + elif whence == os.SEEK_END: + self.position = max(min(self.size + pos, self.size), 0) + else: + raise ValueError("Invalid argument") + + self.buffer = b"" + self.fileobj.seek(self.position) + + def close(self): + """Close the file object. + """ + self.closed = True + + def __iter__(self): + """Get an iterator over the file's lines. 
+ """ + while True: + line = self.readline() + if not line: + break + yield line +#class ExFileObject + +#------------------ +# Exported Classes +#------------------ +class TarInfo(object): + """Informational class which holds the details about an + archive member given by a tar header block. + TarInfo objects are returned by TarFile.getmember(), + TarFile.getmembers() and TarFile.gettarinfo() and are + usually created internally. + """ + + __slots__ = ("name", "mode", "uid", "gid", "size", "mtime", + "chksum", "type", "linkname", "uname", "gname", + "devmajor", "devminor", + "offset", "offset_data", "pax_headers", "sparse", + "tarfile", "_sparse_structs", "_link_target") + + def __init__(self, name=""): + """Construct a TarInfo object. name is the optional name + of the member. + """ + self.name = name # member name + self.mode = 0o644 # file permissions + self.uid = 0 # user id + self.gid = 0 # group id + self.size = 0 # file size + self.mtime = 0 # modification time + self.chksum = 0 # header checksum + self.type = REGTYPE # member type + self.linkname = "" # link name + self.uname = "" # user name + self.gname = "" # group name + self.devmajor = 0 # device major number + self.devminor = 0 # device minor number + + self.offset = 0 # the tar header starts here + self.offset_data = 0 # the file's data starts here + + self.sparse = None # sparse member information + self.pax_headers = {} # pax header information + + # In pax headers the "name" and "linkname" field are called + # "path" and "linkpath". + def _getpath(self): + return self.name + def _setpath(self, name): + self.name = name + path = property(_getpath, _setpath) + + def _getlinkpath(self): + return self.linkname + def _setlinkpath(self, linkname): + self.linkname = linkname + linkpath = property(_getlinkpath, _setlinkpath) + + def __repr__(self): + return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self)) + + def get_info(self): + """Return the TarInfo's attributes as a dictionary. + """ + info = { + "name": self.name, + "mode": self.mode & 0o7777, + "uid": self.uid, + "gid": self.gid, + "size": self.size, + "mtime": self.mtime, + "chksum": self.chksum, + "type": self.type, + "linkname": self.linkname, + "uname": self.uname, + "gname": self.gname, + "devmajor": self.devmajor, + "devminor": self.devminor + } + + if info["type"] == DIRTYPE and not info["name"].endswith("/"): + info["name"] += "/" + + return info + + def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"): + """Return a tar header as a string of 512 byte blocks. + """ + info = self.get_info() + + if format == USTAR_FORMAT: + return self.create_ustar_header(info, encoding, errors) + elif format == GNU_FORMAT: + return self.create_gnu_header(info, encoding, errors) + elif format == PAX_FORMAT: + return self.create_pax_header(info, encoding) + else: + raise ValueError("invalid format") + + def create_ustar_header(self, info, encoding, errors): + """Return the object as a ustar header block. + """ + info["magic"] = POSIX_MAGIC + + if len(info["linkname"]) > LENGTH_LINK: + raise ValueError("linkname is too long") + + if len(info["name"]) > LENGTH_NAME: + info["prefix"], info["name"] = self._posix_split_name(info["name"]) + + return self._create_header(info, USTAR_FORMAT, encoding, errors) + + def create_gnu_header(self, info, encoding, errors): + """Return the object as a GNU header block sequence. 
+ """ + info["magic"] = GNU_MAGIC + + buf = b"" + if len(info["linkname"]) > LENGTH_LINK: + buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors) + + if len(info["name"]) > LENGTH_NAME: + buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors) + + return buf + self._create_header(info, GNU_FORMAT, encoding, errors) + + def create_pax_header(self, info, encoding): + """Return the object as a ustar header block. If it cannot be + represented this way, prepend a pax extended header sequence + with supplement information. + """ + info["magic"] = POSIX_MAGIC + pax_headers = self.pax_headers.copy() + + # Test string fields for values that exceed the field length or cannot + # be represented in ASCII encoding. + for name, hname, length in ( + ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK), + ("uname", "uname", 32), ("gname", "gname", 32)): + + if hname in pax_headers: + # The pax header has priority. + continue + + # Try to encode the string as ASCII. + try: + info[name].encode("ascii", "strict") + except UnicodeEncodeError: + pax_headers[hname] = info[name] + continue + + if len(info[name]) > length: + pax_headers[hname] = info[name] + + # Test number fields for values that exceed the field limit or values + # that like to be stored as float. + for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)): + if name in pax_headers: + # The pax header has priority. Avoid overflow. + info[name] = 0 + continue + + val = info[name] + if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float): + pax_headers[name] = str(val) + info[name] = 0 + + # Create a pax extended header if necessary. + if pax_headers: + buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding) + else: + buf = b"" + + return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace") + + @classmethod + def create_pax_global_header(cls, pax_headers): + """Return the object as a pax global header block sequence. + """ + return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf8") + + def _posix_split_name(self, name): + """Split a name longer than 100 chars into a prefix + and a name part. + """ + prefix = name[:LENGTH_PREFIX + 1] + while prefix and prefix[-1] != "/": + prefix = prefix[:-1] + + name = name[len(prefix):] + prefix = prefix[:-1] + + if not prefix or len(name) > LENGTH_NAME: + raise ValueError("name is too long") + return prefix, name + + @staticmethod + def _create_header(info, format, encoding, errors): + """Return a header block. info is a dictionary with file + information, format must be one of the *_FORMAT constants. 
+ """ + parts = [ + stn(info.get("name", ""), 100, encoding, errors), + itn(info.get("mode", 0) & 0o7777, 8, format), + itn(info.get("uid", 0), 8, format), + itn(info.get("gid", 0), 8, format), + itn(info.get("size", 0), 12, format), + itn(info.get("mtime", 0), 12, format), + b" ", # checksum field + info.get("type", REGTYPE), + stn(info.get("linkname", ""), 100, encoding, errors), + info.get("magic", POSIX_MAGIC), + stn(info.get("uname", ""), 32, encoding, errors), + stn(info.get("gname", ""), 32, encoding, errors), + itn(info.get("devmajor", 0), 8, format), + itn(info.get("devminor", 0), 8, format), + stn(info.get("prefix", ""), 155, encoding, errors) + ] + + buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts)) + chksum = calc_chksums(buf[-BLOCKSIZE:])[0] + buf = buf[:-364] + ("%06o\0" % chksum).encode("ascii") + buf[-357:] + return buf + + @staticmethod + def _create_payload(payload): + """Return the string payload filled with zero bytes + up to the next 512 byte border. + """ + blocks, remainder = divmod(len(payload), BLOCKSIZE) + if remainder > 0: + payload += (BLOCKSIZE - remainder) * NUL + return payload + + @classmethod + def _create_gnu_long_header(cls, name, type, encoding, errors): + """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence + for name. + """ + name = name.encode(encoding, errors) + NUL + + info = {} + info["name"] = "././@LongLink" + info["type"] = type + info["size"] = len(name) + info["magic"] = GNU_MAGIC + + # create extended header + name blocks. + return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \ + cls._create_payload(name) + + @classmethod + def _create_pax_generic_header(cls, pax_headers, type, encoding): + """Return a POSIX.1-2008 extended or global header sequence + that contains a list of keyword, value pairs. The values + must be strings. + """ + # Check if one of the fields contains surrogate characters and thereby + # forces hdrcharset=BINARY, see _proc_pax() for more information. + binary = False + for keyword, value in pax_headers.items(): + try: + value.encode("utf8", "strict") + except UnicodeEncodeError: + binary = True + break + + records = b"" + if binary: + # Put the hdrcharset field at the beginning of the header. + records += b"21 hdrcharset=BINARY\n" + + for keyword, value in pax_headers.items(): + keyword = keyword.encode("utf8") + if binary: + # Try to restore the original byte representation of `value'. + # Needless to say, that the encoding must match the string. + value = value.encode(encoding, "surrogateescape") + else: + value = value.encode("utf8") + + l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n' + n = p = 0 + while True: + n = l + len(str(p)) + if n == p: + break + p = n + records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n" + + # We use a hardcoded "././@PaxHeader" name like star does + # instead of the one that POSIX recommends. + info = {} + info["name"] = "././@PaxHeader" + info["type"] = type + info["size"] = len(records) + info["magic"] = POSIX_MAGIC + + # Create pax header + record blocks. + return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \ + cls._create_payload(records) + + @classmethod + def frombuf(cls, buf, encoding, errors): + """Construct a TarInfo object from a 512 byte bytes object. 
+
+        """
+        if len(buf) == 0:
+            raise EmptyHeaderError("empty header")
+        if len(buf) != BLOCKSIZE:
+            raise TruncatedHeaderError("truncated header")
+        if buf.count(NUL) == BLOCKSIZE:
+            raise EOFHeaderError("end of file header")
+
+        chksum = nti(buf[148:156])
+        if chksum not in calc_chksums(buf):
+            raise InvalidHeaderError("bad checksum")
+
+        obj = cls()
+        obj.name = nts(buf[0:100], encoding, errors)
+        obj.mode = nti(buf[100:108])
+        obj.uid = nti(buf[108:116])
+        obj.gid = nti(buf[116:124])
+        obj.size = nti(buf[124:136])
+        obj.mtime = nti(buf[136:148])
+        obj.chksum = chksum
+        obj.type = buf[156:157]
+        obj.linkname = nts(buf[157:257], encoding, errors)
+        obj.uname = nts(buf[265:297], encoding, errors)
+        obj.gname = nts(buf[297:329], encoding, errors)
+        obj.devmajor = nti(buf[329:337])
+        obj.devminor = nti(buf[337:345])
+        prefix = nts(buf[345:500], encoding, errors)
+
+        # Old V7 tar format represents a directory as a regular
+        # file with a trailing slash.
+        if obj.type == AREGTYPE and obj.name.endswith("/"):
+            obj.type = DIRTYPE
+
+        # The old GNU sparse format occupies some of the unused
+        # space in the buffer for up to 4 sparse structures.
+        # Save them for later processing in _proc_sparse().
+        if obj.type == GNUTYPE_SPARSE:
+            pos = 386
+            structs = []
+            for i in range(4):
+                try:
+                    offset = nti(buf[pos:pos + 12])
+                    numbytes = nti(buf[pos + 12:pos + 24])
+                except ValueError:
+                    break
+                structs.append((offset, numbytes))
+                pos += 24
+            isextended = bool(buf[482])
+            origsize = nti(buf[483:495])
+            obj._sparse_structs = (structs, isextended, origsize)
+
+        # Remove redundant slashes from directories.
+        if obj.isdir():
+            obj.name = obj.name.rstrip("/")
+
+        # Reconstruct a ustar longname.
+        if prefix and obj.type not in GNU_TYPES:
+            obj.name = prefix + "/" + obj.name
+        return obj
+
+    @classmethod
+    def fromtarfile(cls, tarfile):
+        """Return the next TarInfo object from TarFile object
+           tarfile.
+        """
+        buf = tarfile.fileobj.read(BLOCKSIZE)
+        obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
+        obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
+        return obj._proc_member(tarfile)
+
+    #--------------------------------------------------------------------------
+    # The following are methods that are called depending on the type of a
+    # member. The entry point is _proc_member() which can be overridden in a
+    # subclass to add custom _proc_*() methods. A _proc_*() method MUST
+    # implement the following operations:
+    # 1. Set self.offset_data to the position where the data blocks begin,
+    #    if there is data that follows.
+    # 2. Set tarfile.offset to the position where the next member's header will
+    #    begin.
+    # 3. Return self or another valid TarInfo object.
+    def _proc_member(self, tarfile):
+        """Choose the right processing method depending on
+           the type and call it.
+        """
+        if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
+            return self._proc_gnulong(tarfile)
+        elif self.type == GNUTYPE_SPARSE:
+            return self._proc_sparse(tarfile)
+        elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
+            return self._proc_pax(tarfile)
+        else:
+            return self._proc_builtin(tarfile)
+
+    def _proc_builtin(self, tarfile):
+        """Process a builtin type or an unknown type which
+           will be treated as a regular file.
+        """
+        self.offset_data = tarfile.fileobj.tell()
+        offset = self.offset_data
+        if self.isreg() or self.type not in SUPPORTED_TYPES:
+            # Skip the following data blocks.
+            offset += self._block(self.size)
+        tarfile.offset = offset
+
+        # Patch the TarInfo object with saved global
+        # header information.
+ self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors) + + return self + + def _proc_gnulong(self, tarfile): + """Process the blocks that hold a GNU longname + or longlink member. + """ + buf = tarfile.fileobj.read(self._block(self.size)) + + # Fetch the next header and process it. + try: + next = self.fromtarfile(tarfile) + except HeaderError: + raise SubsequentHeaderError("missing or bad subsequent header") + + # Patch the TarInfo object from the next header with + # the longname information. + next.offset = self.offset + if self.type == GNUTYPE_LONGNAME: + next.name = nts(buf, tarfile.encoding, tarfile.errors) + elif self.type == GNUTYPE_LONGLINK: + next.linkname = nts(buf, tarfile.encoding, tarfile.errors) + + return next + + def _proc_sparse(self, tarfile): + """Process a GNU sparse header plus extra headers. + """ + # We already collected some sparse structures in frombuf(). + structs, isextended, origsize = self._sparse_structs + del self._sparse_structs + + # Collect sparse structures from extended header blocks. + while isextended: + buf = tarfile.fileobj.read(BLOCKSIZE) + pos = 0 + for i in range(21): + try: + offset = nti(buf[pos:pos + 12]) + numbytes = nti(buf[pos + 12:pos + 24]) + except ValueError: + break + if offset and numbytes: + structs.append((offset, numbytes)) + pos += 24 + isextended = bool(buf[504]) + self.sparse = structs + + self.offset_data = tarfile.fileobj.tell() + tarfile.offset = self.offset_data + self._block(self.size) + self.size = origsize + return self + + def _proc_pax(self, tarfile): + """Process an extended or global header as described in + POSIX.1-2008. + """ + # Read the header information. + buf = tarfile.fileobj.read(self._block(self.size)) + + # A pax header stores supplemental information for either + # the following file (extended) or all following files + # (global). + if self.type == XGLTYPE: + pax_headers = tarfile.pax_headers + else: + pax_headers = tarfile.pax_headers.copy() + + # Check if the pax header contains a hdrcharset field. This tells us + # the encoding of the path, linkpath, uname and gname fields. Normally, + # these fields are UTF-8 encoded but since POSIX.1-2008 tar + # implementations are allowed to store them as raw binary strings if + # the translation to UTF-8 fails. + match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf) + if match is not None: + pax_headers["hdrcharset"] = match.group(1).decode("utf8") + + # For the time being, we don't care about anything other than "BINARY". + # The only other value that is currently allowed by the standard is + # "ISO-IR 10646 2000 UTF-8" in other words UTF-8. + hdrcharset = pax_headers.get("hdrcharset") + if hdrcharset == "BINARY": + encoding = tarfile.encoding + else: + encoding = "utf8" + + # Parse pax header information. A record looks like that: + # "%d %s=%s\n" % (length, keyword, value). length is the size + # of the complete record including the length field itself and + # the newline. keyword and value are both UTF-8 encoded strings. + regex = re.compile(br"(\d+) ([^=]+)=") + pos = 0 + while True: + match = regex.match(buf, pos) + if not match: + break + + length, keyword = match.groups() + length = int(length) + value = buf[match.end(2) + 1:match.start(1) + length - 1] + + # Normally, we could just use "utf8" as the encoding and "strict" + # as the error handler, but we better not take the risk. 
For + # example, GNU tar <= 1.23 is known to store filenames it cannot + # translate to UTF-8 as raw strings (unfortunately without a + # hdrcharset=BINARY header). + # We first try the strict standard encoding, and if that fails we + # fall back on the user's encoding and error handler. + keyword = self._decode_pax_field(keyword, "utf8", "utf8", + tarfile.errors) + if keyword in PAX_NAME_FIELDS: + value = self._decode_pax_field(value, encoding, tarfile.encoding, + tarfile.errors) + else: + value = self._decode_pax_field(value, "utf8", "utf8", + tarfile.errors) + + pax_headers[keyword] = value + pos += length + + # Fetch the next header. + try: + next = self.fromtarfile(tarfile) + except HeaderError: + raise SubsequentHeaderError("missing or bad subsequent header") + + # Process GNU sparse information. + if "GNU.sparse.map" in pax_headers: + # GNU extended sparse format version 0.1. + self._proc_gnusparse_01(next, pax_headers) + + elif "GNU.sparse.size" in pax_headers: + # GNU extended sparse format version 0.0. + self._proc_gnusparse_00(next, pax_headers, buf) + + elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0": + # GNU extended sparse format version 1.0. + self._proc_gnusparse_10(next, pax_headers, tarfile) + + if self.type in (XHDTYPE, SOLARIS_XHDTYPE): + # Patch the TarInfo object with the extended header info. + next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors) + next.offset = self.offset + + if "size" in pax_headers: + # If the extended header replaces the size field, + # we need to recalculate the offset where the next + # header starts. + offset = next.offset_data + if next.isreg() or next.type not in SUPPORTED_TYPES: + offset += next._block(next.size) + tarfile.offset = offset + + return next + + def _proc_gnusparse_00(self, next, pax_headers, buf): + """Process a GNU tar extended sparse header, version 0.0. + """ + offsets = [] + for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf): + offsets.append(int(match.group(1))) + numbytes = [] + for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf): + numbytes.append(int(match.group(1))) + next.sparse = list(zip(offsets, numbytes)) + + def _proc_gnusparse_01(self, next, pax_headers): + """Process a GNU tar extended sparse header, version 0.1. + """ + sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")] + next.sparse = list(zip(sparse[::2], sparse[1::2])) + + def _proc_gnusparse_10(self, next, pax_headers, tarfile): + """Process a GNU tar extended sparse header, version 1.0. + """ + fields = None + sparse = [] + buf = tarfile.fileobj.read(BLOCKSIZE) + fields, buf = buf.split(b"\n", 1) + fields = int(fields) + while len(sparse) < fields * 2: + if b"\n" not in buf: + buf += tarfile.fileobj.read(BLOCKSIZE) + number, buf = buf.split(b"\n", 1) + sparse.append(int(number)) + next.offset_data = tarfile.fileobj.tell() + next.sparse = list(zip(sparse[::2], sparse[1::2])) + + def _apply_pax_info(self, pax_headers, encoding, errors): + """Replace fields with supplemental information from a previous + pax extended or global header. 
+ """ + for keyword, value in pax_headers.items(): + if keyword == "GNU.sparse.name": + setattr(self, "path", value) + elif keyword == "GNU.sparse.size": + setattr(self, "size", int(value)) + elif keyword == "GNU.sparse.realsize": + setattr(self, "size", int(value)) + elif keyword in PAX_FIELDS: + if keyword in PAX_NUMBER_FIELDS: + try: + value = PAX_NUMBER_FIELDS[keyword](value) + except ValueError: + value = 0 + if keyword == "path": + value = value.rstrip("/") + setattr(self, keyword, value) + + self.pax_headers = pax_headers.copy() + + def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors): + """Decode a single field from a pax record. + """ + try: + return value.decode(encoding, "strict") + except UnicodeDecodeError: + return value.decode(fallback_encoding, fallback_errors) + + def _block(self, count): + """Round up a byte count by BLOCKSIZE and return it, + e.g. _block(834) => 1024. + """ + blocks, remainder = divmod(count, BLOCKSIZE) + if remainder: + blocks += 1 + return blocks * BLOCKSIZE + + def isreg(self): + return self.type in REGULAR_TYPES + def isfile(self): + return self.isreg() + def isdir(self): + return self.type == DIRTYPE + def issym(self): + return self.type == SYMTYPE + def islnk(self): + return self.type == LNKTYPE + def ischr(self): + return self.type == CHRTYPE + def isblk(self): + return self.type == BLKTYPE + def isfifo(self): + return self.type == FIFOTYPE + def issparse(self): + return self.sparse is not None + def isdev(self): + return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE) +# class TarInfo + +class TarFile(object): + """The TarFile Class provides an interface to tar archives. + """ + + debug = 0 # May be set from 0 (no msgs) to 3 (all msgs) + + dereference = False # If true, add content of linked file to the + # tar file, else the link. + + ignore_zeros = False # If true, skips empty or invalid blocks and + # continues processing. + + errorlevel = 1 # If 0, fatal errors only appear in debug + # messages (if debug >= 0). If > 0, errors + # are passed to the caller as exceptions. + + format = DEFAULT_FORMAT # The format to use when creating an archive. + + encoding = ENCODING # Encoding for 8-bit character strings. + + errors = None # Error handler for unicode conversion. + + tarinfo = TarInfo # The default TarInfo class to use. + + fileobject = ExFileObject # The default ExFileObject class to use. + + def __init__(self, name=None, mode="r", fileobj=None, format=None, + tarinfo=None, dereference=None, ignore_zeros=None, encoding=None, + errors="surrogateescape", pax_headers=None, debug=None, errorlevel=None): + """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to + read from an existing archive, 'a' to append data to an existing + file or 'w' to create a new file overwriting an existing one. `mode' + defaults to 'r'. + If `fileobj' is given, it is used for reading or writing data. If it + can be determined, `mode' is overridden by `fileobj's mode. + `fileobj' is not closed, when TarFile is closed. + """ + if len(mode) > 1 or mode not in "raw": + raise ValueError("mode must be 'r', 'a' or 'w'") + self.mode = mode + self._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode] + + if not fileobj: + if self.mode == "a" and not os.path.exists(name): + # Create nonexistent files in append mode. 
+ self.mode = "w" + self._mode = "wb" + fileobj = bltn_open(name, self._mode) + self._extfileobj = False + else: + if name is None and hasattr(fileobj, "name"): + name = fileobj.name + if hasattr(fileobj, "mode"): + self._mode = fileobj.mode + self._extfileobj = True + self.name = os.path.abspath(name) if name else None + self.fileobj = fileobj + + # Init attributes. + if format is not None: + self.format = format + if tarinfo is not None: + self.tarinfo = tarinfo + if dereference is not None: + self.dereference = dereference + if ignore_zeros is not None: + self.ignore_zeros = ignore_zeros + if encoding is not None: + self.encoding = encoding + self.errors = errors + + if pax_headers is not None and self.format == PAX_FORMAT: + self.pax_headers = pax_headers + else: + self.pax_headers = {} + + if debug is not None: + self.debug = debug + if errorlevel is not None: + self.errorlevel = errorlevel + + # Init datastructures. + self.closed = False + self.members = [] # list of members as TarInfo objects + self._loaded = False # flag if all members have been read + self.offset = self.fileobj.tell() + # current position in the archive file + self.inodes = {} # dictionary caching the inodes of + # archive members already added + + try: + if self.mode == "r": + self.firstmember = None + self.firstmember = self.next() + + if self.mode == "a": + # Move to the end of the archive, + # before the first empty block. + while True: + self.fileobj.seek(self.offset) + try: + tarinfo = self.tarinfo.fromtarfile(self) + self.members.append(tarinfo) + except EOFHeaderError: + self.fileobj.seek(self.offset) + break + except HeaderError as e: + raise ReadError(str(e)) + + if self.mode in "aw": + self._loaded = True + + if self.pax_headers: + buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy()) + self.fileobj.write(buf) + self.offset += len(buf) + except: + if not self._extfileobj: + self.fileobj.close() + self.closed = True + raise + + #-------------------------------------------------------------------------- + # Below are the classmethods which act as alternate constructors to the + # TarFile class. The open() method is the only one that is needed for + # public use; it is the "super"-constructor and is able to select an + # adequate "sub"-constructor for a particular compression using the mapping + # from OPEN_METH. + # + # This concept allows one to subclass TarFile without losing the comfort of + # the super-constructor. A sub-constructor is registered and made available + # by adding it to the mapping in OPEN_METH. + + @classmethod + def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs): + """Open a tar archive for reading, writing or appending. Return + an appropriate TarFile class. 
+ + mode: + 'r' or 'r:*' open for reading with transparent compression + 'r:' open for reading exclusively uncompressed + 'r:gz' open for reading with gzip compression + 'r:bz2' open for reading with bzip2 compression + 'a' or 'a:' open for appending, creating the file if necessary + 'w' or 'w:' open for writing without compression + 'w:gz' open for writing with gzip compression + 'w:bz2' open for writing with bzip2 compression + + 'r|*' open a stream of tar blocks with transparent compression + 'r|' open an uncompressed stream of tar blocks for reading + 'r|gz' open a gzip compressed stream of tar blocks + 'r|bz2' open a bzip2 compressed stream of tar blocks + 'w|' open an uncompressed stream for writing + 'w|gz' open a gzip compressed stream for writing + 'w|bz2' open a bzip2 compressed stream for writing + """ + + if not name and not fileobj: + raise ValueError("nothing to open") + + if mode in ("r", "r:*"): + # Find out which *open() is appropriate for opening the file. + for comptype in cls.OPEN_METH: + func = getattr(cls, cls.OPEN_METH[comptype]) + if fileobj is not None: + saved_pos = fileobj.tell() + try: + return func(name, "r", fileobj, **kwargs) + except (ReadError, CompressionError) as e: + if fileobj is not None: + fileobj.seek(saved_pos) + continue + raise ReadError("file could not be opened successfully") + + elif ":" in mode: + filemode, comptype = mode.split(":", 1) + filemode = filemode or "r" + comptype = comptype or "tar" + + # Select the *open() function according to + # given compression. + if comptype in cls.OPEN_METH: + func = getattr(cls, cls.OPEN_METH[comptype]) + else: + raise CompressionError("unknown compression type %r" % comptype) + return func(name, filemode, fileobj, **kwargs) + + elif "|" in mode: + filemode, comptype = mode.split("|", 1) + filemode = filemode or "r" + comptype = comptype or "tar" + + if filemode not in "rw": + raise ValueError("mode must be 'r' or 'w'") + + stream = _Stream(name, filemode, comptype, fileobj, bufsize) + try: + t = cls(name, filemode, stream, **kwargs) + except: + stream.close() + raise + t._extfileobj = False + return t + + elif mode in "aw": + return cls.taropen(name, mode, fileobj, **kwargs) + + raise ValueError("undiscernible mode") + + @classmethod + def taropen(cls, name, mode="r", fileobj=None, **kwargs): + """Open uncompressed tar archive name for reading or writing. + """ + if len(mode) > 1 or mode not in "raw": + raise ValueError("mode must be 'r', 'a' or 'w'") + return cls(name, mode, fileobj, **kwargs) + + @classmethod + def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): + """Open gzip compressed tar archive name for reading or writing. + Appending is not allowed. + """ + if len(mode) > 1 or mode not in "rw": + raise ValueError("mode must be 'r' or 'w'") + + try: + import gzip + gzip.GzipFile + except (ImportError, AttributeError): + raise CompressionError("gzip module is not available") + + extfileobj = fileobj is not None + try: + fileobj = gzip.GzipFile(name, mode + "b", compresslevel, fileobj) + t = cls.taropen(name, mode, fileobj, **kwargs) + except IOError: + if not extfileobj and fileobj is not None: + fileobj.close() + if fileobj is None: + raise + raise ReadError("not a gzip file") + except: + if not extfileobj and fileobj is not None: + fileobj.close() + raise + t._extfileobj = extfileobj + return t + + @classmethod + def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): + """Open bzip2 compressed tar archive name for reading or writing. 
+ Appending is not allowed. + """ + if len(mode) > 1 or mode not in "rw": + raise ValueError("mode must be 'r' or 'w'.") + + try: + import bz2 + except ImportError: + raise CompressionError("bz2 module is not available") + + if fileobj is not None: + fileobj = _BZ2Proxy(fileobj, mode) + else: + fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel) + + try: + t = cls.taropen(name, mode, fileobj, **kwargs) + except (IOError, EOFError): + fileobj.close() + raise ReadError("not a bzip2 file") + t._extfileobj = False + return t + + # All *open() methods are registered here. + OPEN_METH = { + "tar": "taropen", # uncompressed tar + "gz": "gzopen", # gzip compressed tar + "bz2": "bz2open" # bzip2 compressed tar + } + + #-------------------------------------------------------------------------- + # The public methods which TarFile provides: + + def close(self): + """Close the TarFile. In write-mode, two finishing zero blocks are + appended to the archive. + """ + if self.closed: + return + + if self.mode in "aw": + self.fileobj.write(NUL * (BLOCKSIZE * 2)) + self.offset += (BLOCKSIZE * 2) + # fill up the end with zero-blocks + # (like option -b20 for tar does) + blocks, remainder = divmod(self.offset, RECORDSIZE) + if remainder > 0: + self.fileobj.write(NUL * (RECORDSIZE - remainder)) + + if not self._extfileobj: + self.fileobj.close() + self.closed = True + + def getmember(self, name): + """Return a TarInfo object for member `name'. If `name' can not be + found in the archive, KeyError is raised. If a member occurs more + than once in the archive, its last occurrence is assumed to be the + most up-to-date version. + """ + tarinfo = self._getmember(name) + if tarinfo is None: + raise KeyError("filename %r not found" % name) + return tarinfo + + def getmembers(self): + """Return the members of the archive as a list of TarInfo objects. The + list has the same order as the members in the archive. + """ + self._check() + if not self._loaded: # if we want to obtain a list of + self._load() # all members, we first have to + # scan the whole archive. + return self.members + + def getnames(self): + """Return the members of the archive as a list of their names. It has + the same order as the list returned by getmembers(). + """ + return [tarinfo.name for tarinfo in self.getmembers()] + + def gettarinfo(self, name=None, arcname=None, fileobj=None): + """Create a TarInfo object for either the file `name' or the file + object `fileobj' (using os.fstat on its file descriptor). You can + modify some of the TarInfo's attributes before you add it using + addfile(). If given, `arcname' specifies an alternative name for the + file in the archive. + """ + self._check("aw") + + # When fileobj is given, replace name by + # fileobj's real name. + if fileobj is not None: + name = fileobj.name + + # Building the name of the member in the archive. + # Backward slashes are converted to forward slashes, + # Absolute paths are turned to relative paths. + if arcname is None: + arcname = name + drv, arcname = os.path.splitdrive(arcname) + arcname = arcname.replace(os.sep, "/") + arcname = arcname.lstrip("/") + + # Now, fill the TarInfo object with + # information specific for the file. + tarinfo = self.tarinfo() + tarinfo.tarfile = self + + # Use os.stat or os.lstat, depending on platform + # and if symlinks shall be resolved. 
+ if fileobj is None: + if hasattr(os, "lstat") and not self.dereference: + statres = os.lstat(name) + else: + statres = os.stat(name) + else: + statres = os.fstat(fileobj.fileno()) + linkname = "" + + stmd = statres.st_mode + if stat.S_ISREG(stmd): + inode = (statres.st_ino, statres.st_dev) + if not self.dereference and statres.st_nlink > 1 and \ + inode in self.inodes and arcname != self.inodes[inode]: + # Is it a hardlink to an already + # archived file? + type = LNKTYPE + linkname = self.inodes[inode] + else: + # The inode is added only if its valid. + # For win32 it is always 0. + type = REGTYPE + if inode[0]: + self.inodes[inode] = arcname + elif stat.S_ISDIR(stmd): + type = DIRTYPE + elif stat.S_ISFIFO(stmd): + type = FIFOTYPE + elif stat.S_ISLNK(stmd): + type = SYMTYPE + linkname = os.readlink(name) + elif stat.S_ISCHR(stmd): + type = CHRTYPE + elif stat.S_ISBLK(stmd): + type = BLKTYPE + else: + return None + + # Fill the TarInfo object with all + # information we can get. + tarinfo.name = arcname + tarinfo.mode = stmd + tarinfo.uid = statres.st_uid + tarinfo.gid = statres.st_gid + if type == REGTYPE: + tarinfo.size = statres.st_size + else: + tarinfo.size = 0 + tarinfo.mtime = statres.st_mtime + tarinfo.type = type + tarinfo.linkname = linkname + if pwd: + try: + tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0] + except KeyError: + pass + if grp: + try: + tarinfo.gname = grp.getgrgid(tarinfo.gid)[0] + except KeyError: + pass + + if type in (CHRTYPE, BLKTYPE): + if hasattr(os, "major") and hasattr(os, "minor"): + tarinfo.devmajor = os.major(statres.st_rdev) + tarinfo.devminor = os.minor(statres.st_rdev) + return tarinfo + + def list(self, verbose=True): + """Print a table of contents to sys.stdout. If `verbose' is False, only + the names of the members are printed. If it is True, an `ls -l'-like + output is produced. + """ + self._check() + + for tarinfo in self: + if verbose: + print(filemode(tarinfo.mode), end=' ') + print("%s/%s" % (tarinfo.uname or tarinfo.uid, + tarinfo.gname or tarinfo.gid), end=' ') + if tarinfo.ischr() or tarinfo.isblk(): + print("%10s" % ("%d,%d" \ + % (tarinfo.devmajor, tarinfo.devminor)), end=' ') + else: + print("%10d" % tarinfo.size, end=' ') + print("%d-%02d-%02d %02d:%02d:%02d" \ + % time.localtime(tarinfo.mtime)[:6], end=' ') + + print(tarinfo.name + ("/" if tarinfo.isdir() else ""), end=' ') + + if verbose: + if tarinfo.issym(): + print("->", tarinfo.linkname, end=' ') + if tarinfo.islnk(): + print("link to", tarinfo.linkname, end=' ') + print() + + def add(self, name, arcname=None, recursive=True, exclude=None, filter=None): + """Add the file `name' to the archive. `name' may be any type of file + (directory, fifo, symbolic link, etc.). If given, `arcname' + specifies an alternative name for the file in the archive. + Directories are added recursively by default. This can be avoided by + setting `recursive' to False. `exclude' is a function that should + return True for each filename to be excluded. `filter' is a function + that expects a TarInfo object argument and returns the changed + TarInfo object, if it returns None the TarInfo object will be + excluded from the archive. + """ + self._check("aw") + + if arcname is None: + arcname = name + + # Exclude pathnames. + if exclude is not None: + import warnings + warnings.warn("use the filter argument instead", + DeprecationWarning, 2) + if exclude(name): + self._dbg(2, "tarfile: Excluded %r" % name) + return + + # Skip if somebody tries to archive the archive... 
+ if self.name is not None and os.path.abspath(name) == self.name: + self._dbg(2, "tarfile: Skipped %r" % name) + return + + self._dbg(1, name) + + # Create a TarInfo object from the file. + tarinfo = self.gettarinfo(name, arcname) + + if tarinfo is None: + self._dbg(1, "tarfile: Unsupported type %r" % name) + return + + # Change or exclude the TarInfo object. + if filter is not None: + tarinfo = filter(tarinfo) + if tarinfo is None: + self._dbg(2, "tarfile: Excluded %r" % name) + return + + # Append the tar header and data to the archive. + if tarinfo.isreg(): + f = bltn_open(name, "rb") + self.addfile(tarinfo, f) + f.close() + + elif tarinfo.isdir(): + self.addfile(tarinfo) + if recursive: + for f in os.listdir(name): + self.add(os.path.join(name, f), os.path.join(arcname, f), + recursive, exclude, filter=filter) + + else: + self.addfile(tarinfo) + + def addfile(self, tarinfo, fileobj=None): + """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is + given, tarinfo.size bytes are read from it and added to the archive. + You can create TarInfo objects using gettarinfo(). + On Windows platforms, `fileobj' should always be opened with mode + 'rb' to avoid irritation about the file size. + """ + self._check("aw") + + tarinfo = copy.copy(tarinfo) + + buf = tarinfo.tobuf(self.format, self.encoding, self.errors) + self.fileobj.write(buf) + self.offset += len(buf) + + # If there's data to follow, append it. + if fileobj is not None: + copyfileobj(fileobj, self.fileobj, tarinfo.size) + blocks, remainder = divmod(tarinfo.size, BLOCKSIZE) + if remainder > 0: + self.fileobj.write(NUL * (BLOCKSIZE - remainder)) + blocks += 1 + self.offset += blocks * BLOCKSIZE + + self.members.append(tarinfo) + + def extractall(self, path=".", members=None): + """Extract all members from the archive to the current working + directory and set owner, modification time and permissions on + directories afterwards. `path' specifies a different directory + to extract to. `members' is optional and must be a subset of the + list returned by getmembers(). + """ + directories = [] + + if members is None: + members = self + + for tarinfo in members: + if tarinfo.isdir(): + # Extract directories with a safe mode. + directories.append(tarinfo) + tarinfo = copy.copy(tarinfo) + tarinfo.mode = 0o700 + # Do not set_attrs directories, as we will do that further down + self.extract(tarinfo, path, set_attrs=not tarinfo.isdir()) + + # Reverse sort directories. + directories.sort(key=lambda a: a.name) + directories.reverse() + + # Set correct owner, mtime and filemode on directories. + for tarinfo in directories: + dirpath = os.path.join(path, tarinfo.name) + try: + self.chown(tarinfo, dirpath) + self.utime(tarinfo, dirpath) + self.chmod(tarinfo, dirpath) + except ExtractError as e: + if self.errorlevel > 1: + raise + else: + self._dbg(1, "tarfile: %s" % e) + + def extract(self, member, path="", set_attrs=True): + """Extract a member from the archive to the current working directory, + using its full name. Its file information is extracted as accurately + as possible. `member' may be a filename or a TarInfo object. You can + specify a different directory using `path'. File attributes (owner, + mtime, mode) are set unless `set_attrs' is False. + """ + self._check("r") + + if isinstance(member, str): + tarinfo = self.getmember(member) + else: + tarinfo = member + + # Prepare the link target for makelink(). 
+ if tarinfo.islnk(): + tarinfo._link_target = os.path.join(path, tarinfo.linkname) + + try: + self._extract_member(tarinfo, os.path.join(path, tarinfo.name), + set_attrs=set_attrs) + except EnvironmentError as e: + if self.errorlevel > 0: + raise + else: + if e.filename is None: + self._dbg(1, "tarfile: %s" % e.strerror) + else: + self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename)) + except ExtractError as e: + if self.errorlevel > 1: + raise + else: + self._dbg(1, "tarfile: %s" % e) + + def extractfile(self, member): + """Extract a member from the archive as a file object. `member' may be + a filename or a TarInfo object. If `member' is a regular file, a + file-like object is returned. If `member' is a link, a file-like + object is constructed from the link's target. If `member' is none of + the above, None is returned. + The file-like object is read-only and provides the following + methods: read(), readline(), readlines(), seek() and tell() + """ + self._check("r") + + if isinstance(member, str): + tarinfo = self.getmember(member) + else: + tarinfo = member + + if tarinfo.isreg(): + return self.fileobject(self, tarinfo) + + elif tarinfo.type not in SUPPORTED_TYPES: + # If a member's type is unknown, it is treated as a + # regular file. + return self.fileobject(self, tarinfo) + + elif tarinfo.islnk() or tarinfo.issym(): + if isinstance(self.fileobj, _Stream): + # A small but ugly workaround for the case that someone tries + # to extract a (sym)link as a file-object from a non-seekable + # stream of tar blocks. + raise StreamError("cannot extract (sym)link as file object") + else: + # A (sym)link's file object is its target's file object. + return self.extractfile(self._find_link_target(tarinfo)) + else: + # If there's no data associated with the member (directory, chrdev, + # blkdev, etc.), return None instead of a file object. + return None + + def _extract_member(self, tarinfo, targetpath, set_attrs=True): + """Extract the TarInfo object tarinfo to a physical + file called targetpath. + """ + # Fetch the TarInfo object for the given name + # and build the destination pathname, replacing + # forward slashes to platform specific separators. + targetpath = targetpath.rstrip("/") + targetpath = targetpath.replace("/", os.sep) + + # Create all upper directories. + upperdirs = os.path.dirname(targetpath) + if upperdirs and not os.path.exists(upperdirs): + # Create directories that are not part of the archive with + # default permissions. + os.makedirs(upperdirs) + + if tarinfo.islnk() or tarinfo.issym(): + self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname)) + else: + self._dbg(1, tarinfo.name) + + if tarinfo.isreg(): + self.makefile(tarinfo, targetpath) + elif tarinfo.isdir(): + self.makedir(tarinfo, targetpath) + elif tarinfo.isfifo(): + self.makefifo(tarinfo, targetpath) + elif tarinfo.ischr() or tarinfo.isblk(): + self.makedev(tarinfo, targetpath) + elif tarinfo.islnk() or tarinfo.issym(): + self.makelink(tarinfo, targetpath) + elif tarinfo.type not in SUPPORTED_TYPES: + self.makeunknown(tarinfo, targetpath) + else: + self.makefile(tarinfo, targetpath) + + if set_attrs: + self.chown(tarinfo, targetpath) + if not tarinfo.issym(): + self.chmod(tarinfo, targetpath) + self.utime(tarinfo, targetpath) + + #-------------------------------------------------------------------------- + # Below are the different file methods. They are called via + # _extract_member() when extract() is called. They can be replaced in a + # subclass to implement other functionality. 
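+    #
+    # A minimal sketch (illustrative only; no such class ships with this
+    # module) of overriding one of them in a subclass, e.g. to report each
+    # regular file as it is extracted while delegating the real work to the
+    # base class:
+    #
+    #     class VerboseTarFile(TarFile):
+    #         def makefile(self, tarinfo, targetpath):
+    #             print("extracting %s -> %s" % (tarinfo.name, targetpath))
+    #             TarFile.makefile(self, tarinfo, targetpath)
+    #
+    #     VerboseTarFile.open("example.tar").extractall()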
+ + def makedir(self, tarinfo, targetpath): + """Make a directory called targetpath. + """ + try: + # Use a safe mode for the directory, the real mode is set + # later in _extract_member(). + os.mkdir(targetpath, 0o700) + except EnvironmentError as e: + if e.errno != errno.EEXIST: + raise + + def makefile(self, tarinfo, targetpath): + """Make a file called targetpath. + """ + source = self.fileobj + source.seek(tarinfo.offset_data) + target = bltn_open(targetpath, "wb") + if tarinfo.sparse is not None: + for offset, size in tarinfo.sparse: + target.seek(offset) + copyfileobj(source, target, size) + else: + copyfileobj(source, target, tarinfo.size) + target.seek(tarinfo.size) + target.truncate() + target.close() + + def makeunknown(self, tarinfo, targetpath): + """Make a file from a TarInfo object with an unknown type + at targetpath. + """ + self.makefile(tarinfo, targetpath) + self._dbg(1, "tarfile: Unknown file type %r, " \ + "extracted as regular file." % tarinfo.type) + + def makefifo(self, tarinfo, targetpath): + """Make a fifo called targetpath. + """ + if hasattr(os, "mkfifo"): + os.mkfifo(targetpath) + else: + raise ExtractError("fifo not supported by system") + + def makedev(self, tarinfo, targetpath): + """Make a character or block device called targetpath. + """ + if not hasattr(os, "mknod") or not hasattr(os, "makedev"): + raise ExtractError("special devices not supported by system") + + mode = tarinfo.mode + if tarinfo.isblk(): + mode |= stat.S_IFBLK + else: + mode |= stat.S_IFCHR + + os.mknod(targetpath, mode, + os.makedev(tarinfo.devmajor, tarinfo.devminor)) + + def makelink(self, tarinfo, targetpath): + """Make a (symbolic) link called targetpath. If it cannot be created + (platform limitation), we try to make a copy of the referenced file + instead of a link. + """ + try: + # For systems that support symbolic and hard links. + if tarinfo.issym(): + os.symlink(tarinfo.linkname, targetpath) + else: + # See extract(). + if os.path.exists(tarinfo._link_target): + os.link(tarinfo._link_target, targetpath) + else: + self._extract_member(self._find_link_target(tarinfo), + targetpath) + except symlink_exception: + if tarinfo.issym(): + linkpath = os.path.join(os.path.dirname(tarinfo.name), + tarinfo.linkname) + else: + linkpath = tarinfo.linkname + else: + try: + self._extract_member(self._find_link_target(tarinfo), + targetpath) + except KeyError: + raise ExtractError("unable to resolve link inside archive") + + def chown(self, tarinfo, targetpath): + """Set owner of targetpath according to tarinfo. + """ + if pwd and hasattr(os, "geteuid") and os.geteuid() == 0: + # We have to be root to do so. + try: + g = grp.getgrnam(tarinfo.gname)[2] + except KeyError: + g = tarinfo.gid + try: + u = pwd.getpwnam(tarinfo.uname)[2] + except KeyError: + u = tarinfo.uid + try: + if tarinfo.issym() and hasattr(os, "lchown"): + os.lchown(targetpath, u, g) + else: + if sys.platform != "os2emx": + os.chown(targetpath, u, g) + except EnvironmentError as e: + raise ExtractError("could not change owner") + + def chmod(self, tarinfo, targetpath): + """Set file permissions of targetpath according to tarinfo. + """ + if hasattr(os, 'chmod'): + try: + os.chmod(targetpath, tarinfo.mode) + except EnvironmentError as e: + raise ExtractError("could not change mode") + + def utime(self, tarinfo, targetpath): + """Set modification time of targetpath according to tarinfo. 
+ """ + if not hasattr(os, 'utime'): + return + try: + os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime)) + except EnvironmentError as e: + raise ExtractError("could not change modification time") + + #-------------------------------------------------------------------------- + def next(self): + """Return the next member of the archive as a TarInfo object, when + TarFile is opened for reading. Return None if there is no more + available. + """ + self._check("ra") + if self.firstmember is not None: + m = self.firstmember + self.firstmember = None + return m + + # Read the next block. + self.fileobj.seek(self.offset) + tarinfo = None + while True: + try: + tarinfo = self.tarinfo.fromtarfile(self) + except EOFHeaderError as e: + if self.ignore_zeros: + self._dbg(2, "0x%X: %s" % (self.offset, e)) + self.offset += BLOCKSIZE + continue + except InvalidHeaderError as e: + if self.ignore_zeros: + self._dbg(2, "0x%X: %s" % (self.offset, e)) + self.offset += BLOCKSIZE + continue + elif self.offset == 0: + raise ReadError(str(e)) + except EmptyHeaderError: + if self.offset == 0: + raise ReadError("empty file") + except TruncatedHeaderError as e: + if self.offset == 0: + raise ReadError(str(e)) + except SubsequentHeaderError as e: + raise ReadError(str(e)) + break + + if tarinfo is not None: + self.members.append(tarinfo) + else: + self._loaded = True + + return tarinfo + + #-------------------------------------------------------------------------- + # Little helper methods: + + def _getmember(self, name, tarinfo=None, normalize=False): + """Find an archive member by name from bottom to top. + If tarinfo is given, it is used as the starting point. + """ + # Ensure that all members have been loaded. + members = self.getmembers() + + # Limit the member search list up to tarinfo. + if tarinfo is not None: + members = members[:members.index(tarinfo)] + + if normalize: + name = os.path.normpath(name) + + for member in reversed(members): + if normalize: + member_name = os.path.normpath(member.name) + else: + member_name = member.name + + if name == member_name: + return member + + def _load(self): + """Read through the entire archive file and look for readable + members. + """ + while True: + tarinfo = self.next() + if tarinfo is None: + break + self._loaded = True + + def _check(self, mode=None): + """Check if TarFile is still open, and if the operation's mode + corresponds to TarFile's mode. + """ + if self.closed: + raise IOError("%s is closed" % self.__class__.__name__) + if mode is not None and self.mode not in mode: + raise IOError("bad operation for mode %r" % self.mode) + + def _find_link_target(self, tarinfo): + """Find the target member of a symlink or hardlink member in the + archive. + """ + if tarinfo.issym(): + # Always search the entire archive. + linkname = os.path.dirname(tarinfo.name) + "/" + tarinfo.linkname + limit = None + else: + # Search the archive before the link, because a hard link is + # just a reference to an already archived file. + linkname = tarinfo.linkname + limit = tarinfo + + member = self._getmember(linkname, tarinfo=limit, normalize=True) + if member is None: + raise KeyError("linkname %r not found" % linkname) + return member + + def __iter__(self): + """Provide an iterator object. + """ + if self._loaded: + return iter(self.members) + else: + return TarIter(self) + + def _dbg(self, level, msg): + """Write debugging output to sys.stderr. 
+ """ + if level <= self.debug: + print(msg, file=sys.stderr) + + def __enter__(self): + self._check() + return self + + def __exit__(self, type, value, traceback): + if type is None: + self.close() + else: + # An exception occurred. We must not call close() because + # it would try to write end-of-archive blocks and padding. + if not self._extfileobj: + self.fileobj.close() + self.closed = True +# class TarFile + +class TarIter(object): + """Iterator Class. + + for tarinfo in TarFile(...): + suite... + """ + + def __init__(self, tarfile): + """Construct a TarIter object. + """ + self.tarfile = tarfile + self.index = 0 + def __iter__(self): + """Return iterator object. + """ + return self + + def __next__(self): + """Return the next item using TarFile's next() method. + When all members have been read, set TarFile as _loaded. + """ + # Fix for SF #1100429: Under rare circumstances it can + # happen that getmembers() is called during iteration, + # which will cause TarIter to stop prematurely. + if not self.tarfile._loaded: + tarinfo = self.tarfile.next() + if not tarinfo: + self.tarfile._loaded = True + raise StopIteration + else: + try: + tarinfo = self.tarfile.members[self.index] + except IndexError: + raise StopIteration + self.index += 1 + return tarinfo + + next = __next__ # for Python 2.x + +#-------------------- +# exported functions +#-------------------- +def is_tarfile(name): + """Return True if name points to a tar archive that we + are able to handle, else return False. + """ + try: + t = open(name) + t.close() + return True + except TarError: + return False + +bltn_open = open +open = TarFile.open diff --git a/venv/Lib/site-packages/distlib/compat.py b/venv/Lib/site-packages/distlib/compat.py new file mode 100644 index 00000000..c316fd97 --- /dev/null +++ b/venv/Lib/site-packages/distlib/compat.py @@ -0,0 +1,1120 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2017 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. 
+# +from __future__ import absolute_import + +import os +import re +import sys + +try: + import ssl +except ImportError: # pragma: no cover + ssl = None + +if sys.version_info[0] < 3: # pragma: no cover + from StringIO import StringIO + string_types = basestring, + text_type = unicode + from types import FileType as file_type + import __builtin__ as builtins + import ConfigParser as configparser + from ._backport import shutil + from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit + from urllib import (urlretrieve, quote as _quote, unquote, url2pathname, + pathname2url, ContentTooShortError, splittype) + + def quote(s): + if isinstance(s, unicode): + s = s.encode('utf-8') + return _quote(s) + + import urllib2 + from urllib2 import (Request, urlopen, URLError, HTTPError, + HTTPBasicAuthHandler, HTTPPasswordMgr, + HTTPHandler, HTTPRedirectHandler, + build_opener) + if ssl: + from urllib2 import HTTPSHandler + import httplib + import xmlrpclib + import Queue as queue + from HTMLParser import HTMLParser + import htmlentitydefs + raw_input = raw_input + from itertools import ifilter as filter + from itertools import ifilterfalse as filterfalse + + _userprog = None + def splituser(host): + """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" + global _userprog + if _userprog is None: + import re + _userprog = re.compile('^(.*)@(.*)$') + + match = _userprog.match(host) + if match: return match.group(1, 2) + return None, host + +else: # pragma: no cover + from io import StringIO + string_types = str, + text_type = str + from io import TextIOWrapper as file_type + import builtins + import configparser + import shutil + from urllib.parse import (urlparse, urlunparse, urljoin, splituser, quote, + unquote, urlsplit, urlunsplit, splittype) + from urllib.request import (urlopen, urlretrieve, Request, url2pathname, + pathname2url, + HTTPBasicAuthHandler, HTTPPasswordMgr, + HTTPHandler, HTTPRedirectHandler, + build_opener) + if ssl: + from urllib.request import HTTPSHandler + from urllib.error import HTTPError, URLError, ContentTooShortError + import http.client as httplib + import urllib.request as urllib2 + import xmlrpc.client as xmlrpclib + import queue + from html.parser import HTMLParser + import html.entities as htmlentitydefs + raw_input = input + from itertools import filterfalse + filter = filter + +try: + from ssl import match_hostname, CertificateError +except ImportError: # pragma: no cover + class CertificateError(ValueError): + pass + + + def _dnsname_match(dn, hostname, max_wildcards=1): + """Matching according to RFC 6125, section 6.4.3 + + http://tools.ietf.org/html/rfc6125#section-6.4.3 + """ + pats = [] + if not dn: + return False + + parts = dn.split('.') + leftmost, remainder = parts[0], parts[1:] + + wildcards = leftmost.count('*') + if wildcards > max_wildcards: + # Issue #17980: avoid denials of service by refusing more + # than one wildcard per fragment. A survey of established + # policy among SSL implementations showed it to be a + # reasonable choice. + raise CertificateError( + "too many wildcards in certificate DNS name: " + repr(dn)) + + # speed up common case w/o wildcards + if not wildcards: + return dn.lower() == hostname.lower() + + # RFC 6125, section 6.4.3, subitem 1. + # The client SHOULD NOT attempt to match a presented identifier in which + # the wildcard character comprises a label other than the left-most label. + if leftmost == '*': + # When '*' is a fragment by itself, it matches a non-empty dotless + # fragment. 
+ pats.append('[^.]+') + elif leftmost.startswith('xn--') or hostname.startswith('xn--'): + # RFC 6125, section 6.4.3, subitem 3. + # The client SHOULD NOT attempt to match a presented identifier + # where the wildcard character is embedded within an A-label or + # U-label of an internationalized domain name. + pats.append(re.escape(leftmost)) + else: + # Otherwise, '*' matches any dotless string, e.g. www* + pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) + + # add the remaining fragments, ignore any wildcards + for frag in remainder: + pats.append(re.escape(frag)) + + pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) + return pat.match(hostname) + + + def match_hostname(cert, hostname): + """Verify that *cert* (in decoded format as returned by + SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 + rules are followed, but IP addresses are not accepted for *hostname*. + + CertificateError is raised on failure. On success, the function + returns nothing. + """ + if not cert: + raise ValueError("empty or no certificate, match_hostname needs a " + "SSL socket or SSL context with either " + "CERT_OPTIONAL or CERT_REQUIRED") + dnsnames = [] + san = cert.get('subjectAltName', ()) + for key, value in san: + if key == 'DNS': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if not dnsnames: + # The subject is only checked when there is no dNSName entry + # in subjectAltName + for sub in cert.get('subject', ()): + for key, value in sub: + # XXX according to RFC 2818, the most specific Common Name + # must be used. + if key == 'commonName': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if len(dnsnames) > 1: + raise CertificateError("hostname %r " + "doesn't match either of %s" + % (hostname, ', '.join(map(repr, dnsnames)))) + elif len(dnsnames) == 1: + raise CertificateError("hostname %r " + "doesn't match %r" + % (hostname, dnsnames[0])) + else: + raise CertificateError("no appropriate commonName or " + "subjectAltName fields were found") + + +try: + from types import SimpleNamespace as Container +except ImportError: # pragma: no cover + class Container(object): + """ + A generic container for when multiple values need to be returned + """ + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + + +try: + from shutil import which +except ImportError: # pragma: no cover + # Implementation from Python 3.3 + def which(cmd, mode=os.F_OK | os.X_OK, path=None): + """Given a command, mode, and a PATH string, return the path which + conforms to the given mode on the PATH, or None if there is no such + file. + + `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result + of os.environ.get("PATH"), or can be overridden with a custom search + path. + + """ + # Check that a given file can be accessed with the correct mode. + # Additionally check that `file` is not a directory, as on Windows + # directories pass the os.access check. + def _access_check(fn, mode): + return (os.path.exists(fn) and os.access(fn, mode) + and not os.path.isdir(fn)) + + # If we're given a path with a directory part, look it up directly rather + # than referring to PATH directories. This includes checking relative to the + # current directory, e.g. 
./script + if os.path.dirname(cmd): + if _access_check(cmd, mode): + return cmd + return None + + if path is None: + path = os.environ.get("PATH", os.defpath) + if not path: + return None + path = path.split(os.pathsep) + + if sys.platform == "win32": + # The current directory takes precedence on Windows. + if not os.curdir in path: + path.insert(0, os.curdir) + + # PATHEXT is necessary to check on Windows. + pathext = os.environ.get("PATHEXT", "").split(os.pathsep) + # See if the given file matches any of the expected path extensions. + # This will allow us to short circuit when given "python.exe". + # If it does match, only test that one, otherwise we have to try + # others. + if any(cmd.lower().endswith(ext.lower()) for ext in pathext): + files = [cmd] + else: + files = [cmd + ext for ext in pathext] + else: + # On other platforms you don't have things like PATHEXT to tell you + # what file suffixes are executable, so just pass on cmd as-is. + files = [cmd] + + seen = set() + for dir in path: + normdir = os.path.normcase(dir) + if not normdir in seen: + seen.add(normdir) + for thefile in files: + name = os.path.join(dir, thefile) + if _access_check(name, mode): + return name + return None + + +# ZipFile is a context manager in 2.7, but not in 2.6 + +from zipfile import ZipFile as BaseZipFile + +if hasattr(BaseZipFile, '__enter__'): # pragma: no cover + ZipFile = BaseZipFile +else: # pragma: no cover + from zipfile import ZipExtFile as BaseZipExtFile + + class ZipExtFile(BaseZipExtFile): + def __init__(self, base): + self.__dict__.update(base.__dict__) + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + # return None, so if an exception occurred, it will propagate + + class ZipFile(BaseZipFile): + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + # return None, so if an exception occurred, it will propagate + + def open(self, *args, **kwargs): + base = BaseZipFile.open(self, *args, **kwargs) + return ZipExtFile(base) + +try: + from platform import python_implementation +except ImportError: # pragma: no cover + def python_implementation(): + """Return a string identifying the Python implementation.""" + if 'PyPy' in sys.version: + return 'PyPy' + if os.name == 'java': + return 'Jython' + if sys.version.startswith('IronPython'): + return 'IronPython' + return 'CPython' + +try: + import sysconfig +except ImportError: # pragma: no cover + from ._backport import sysconfig + +try: + callable = callable +except NameError: # pragma: no cover + from collections.abc import Callable + + def callable(obj): + return isinstance(obj, Callable) + + +try: + fsencode = os.fsencode + fsdecode = os.fsdecode +except AttributeError: # pragma: no cover + # Issue #99: on some systems (e.g. containerised), + # sys.getfilesystemencoding() returns None, and we need a real value, + # so fall back to utf-8. From the CPython 2.7 docs relating to Unix and + # sys.getfilesystemencoding(): the return value is "the user’s preference + # according to the result of nl_langinfo(CODESET), or None if the + # nl_langinfo(CODESET) failed." 
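+    # A sketch of the behaviour the fallbacks below aim for (assuming
+    # Python 3 semantics and a UTF-8 locale):
+    #
+    #     fsencode('caf\xe9')      -> b'caf\xc3\xa9'
+    #     fsdecode(b'caf\xc3\xa9') -> 'caf\xe9'
+    #
+    # With 'surrogateescape', the two functions round-trip even file names
+    # containing bytes that are invalid in the locale encoding.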
+ _fsencoding = sys.getfilesystemencoding() or 'utf-8' + if _fsencoding == 'mbcs': + _fserrors = 'strict' + else: + _fserrors = 'surrogateescape' + + def fsencode(filename): + if isinstance(filename, bytes): + return filename + elif isinstance(filename, text_type): + return filename.encode(_fsencoding, _fserrors) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) + + def fsdecode(filename): + if isinstance(filename, text_type): + return filename + elif isinstance(filename, bytes): + return filename.decode(_fsencoding, _fserrors) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) + +try: + from tokenize import detect_encoding +except ImportError: # pragma: no cover + from codecs import BOM_UTF8, lookup + import re + + cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)") + + def _get_normal_name(orig_enc): + """Imitates get_normal_name in tokenizer.c.""" + # Only care about the first 12 characters. + enc = orig_enc[:12].lower().replace("_", "-") + if enc == "utf-8" or enc.startswith("utf-8-"): + return "utf-8" + if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \ + enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")): + return "iso-8859-1" + return orig_enc + + def detect_encoding(readline): + """ + The detect_encoding() function is used to detect the encoding that should + be used to decode a Python source file. It requires one argument, readline, + in the same way as the tokenize() generator. + + It will call readline a maximum of twice, and return the encoding used + (as a string) and a list of any lines (left as bytes) it has read in. + + It detects the encoding from the presence of a utf-8 bom or an encoding + cookie as specified in pep-0263. If both a bom and a cookie are present, + but disagree, a SyntaxError will be raised. If the encoding cookie is an + invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found, + 'utf-8-sig' is returned. + + If no encoding is specified, then the default of 'utf-8' will be returned. + """ + try: + filename = readline.__self__.name + except AttributeError: + filename = None + bom_found = False + encoding = None + default = 'utf-8' + def read_or_stop(): + try: + return readline() + except StopIteration: + return b'' + + def find_cookie(line): + try: + # Decode as UTF-8. Either the line is an encoding declaration, + # in which case it should be pure ASCII, or it must be UTF-8 + # per default encoding. 
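+            # (Sketch of the declaration forms cookie_re accepts, per
+            # PEP 263:
+            #     # -*- coding: latin-1 -*-
+            #     # coding: utf-8
+            # lines that do not match fall through to the utf-8 default.)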
+ line_string = line.decode('utf-8') + except UnicodeDecodeError: + msg = "invalid or missing encoding declaration" + if filename is not None: + msg = '{} for {!r}'.format(msg, filename) + raise SyntaxError(msg) + + matches = cookie_re.findall(line_string) + if not matches: + return None + encoding = _get_normal_name(matches[0]) + try: + codec = lookup(encoding) + except LookupError: + # This behaviour mimics the Python interpreter + if filename is None: + msg = "unknown encoding: " + encoding + else: + msg = "unknown encoding for {!r}: {}".format(filename, + encoding) + raise SyntaxError(msg) + + if bom_found: + if codec.name != 'utf-8': + # This behaviour mimics the Python interpreter + if filename is None: + msg = 'encoding problem: utf-8' + else: + msg = 'encoding problem for {!r}: utf-8'.format(filename) + raise SyntaxError(msg) + encoding += '-sig' + return encoding + + first = read_or_stop() + if first.startswith(BOM_UTF8): + bom_found = True + first = first[3:] + default = 'utf-8-sig' + if not first: + return default, [] + + encoding = find_cookie(first) + if encoding: + return encoding, [first] + + second = read_or_stop() + if not second: + return default, [first] + + encoding = find_cookie(second) + if encoding: + return encoding, [first, second] + + return default, [first, second] + +# For converting & <-> & etc. +try: + from html import escape +except ImportError: + from cgi import escape +if sys.version_info[:2] < (3, 4): + unescape = HTMLParser().unescape +else: + from html import unescape + +try: + from collections import ChainMap +except ImportError: # pragma: no cover + from collections import MutableMapping + + try: + from reprlib import recursive_repr as _recursive_repr + except ImportError: + def _recursive_repr(fillvalue='...'): + ''' + Decorator to make a repr function return fillvalue for a recursive + call + ''' + + def decorating_function(user_function): + repr_running = set() + + def wrapper(self): + key = id(self), get_ident() + if key in repr_running: + return fillvalue + repr_running.add(key) + try: + result = user_function(self) + finally: + repr_running.discard(key) + return result + + # Can't use functools.wraps() here because of bootstrap issues + wrapper.__module__ = getattr(user_function, '__module__') + wrapper.__doc__ = getattr(user_function, '__doc__') + wrapper.__name__ = getattr(user_function, '__name__') + wrapper.__annotations__ = getattr(user_function, '__annotations__', {}) + return wrapper + + return decorating_function + + class ChainMap(MutableMapping): + ''' A ChainMap groups multiple dicts (or other mappings) together + to create a single, updateable view. + + The underlying mappings are stored in a list. That list is public and can + accessed or updated using the *maps* attribute. There is no other state. + + Lookups search the underlying mappings successively until a key is found. + In contrast, writes, updates, and deletions only operate on the first + mapping. + + ''' + + def __init__(self, *maps): + '''Initialize a ChainMap by setting *maps* to the given mappings. + If no mappings are provided, a single empty dictionary is used. 
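+
+            A minimal sketch of the behaviour this backport aims to match
+            (that of the stdlib ``collections.ChainMap``)::
+
+                >>> cm = ChainMap({'a': 1}, {'a': 0, 'b': 2})
+                >>> cm['a'], cm['b']   # lookups search the maps in order
+                (1, 2)
+                >>> cm['c'] = 3        # writes only touch the first mapping
+                >>> cm.maps[0] == {'a': 1, 'c': 3}
+                True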
+ + ''' + self.maps = list(maps) or [{}] # always at least one map + + def __missing__(self, key): + raise KeyError(key) + + def __getitem__(self, key): + for mapping in self.maps: + try: + return mapping[key] # can't use 'key in mapping' with defaultdict + except KeyError: + pass + return self.__missing__(key) # support subclasses that define __missing__ + + def get(self, key, default=None): + return self[key] if key in self else default + + def __len__(self): + return len(set().union(*self.maps)) # reuses stored hash values if possible + + def __iter__(self): + return iter(set().union(*self.maps)) + + def __contains__(self, key): + return any(key in m for m in self.maps) + + def __bool__(self): + return any(self.maps) + + @_recursive_repr() + def __repr__(self): + return '{0.__class__.__name__}({1})'.format( + self, ', '.join(map(repr, self.maps))) + + @classmethod + def fromkeys(cls, iterable, *args): + 'Create a ChainMap with a single dict created from the iterable.' + return cls(dict.fromkeys(iterable, *args)) + + def copy(self): + 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' + return self.__class__(self.maps[0].copy(), *self.maps[1:]) + + __copy__ = copy + + def new_child(self): # like Django's Context.push() + 'New ChainMap with a new dict followed by all previous maps.' + return self.__class__({}, *self.maps) + + @property + def parents(self): # like Django's Context.pop() + 'New ChainMap from maps[1:].' + return self.__class__(*self.maps[1:]) + + def __setitem__(self, key, value): + self.maps[0][key] = value + + def __delitem__(self, key): + try: + del self.maps[0][key] + except KeyError: + raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + + def popitem(self): + 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' + try: + return self.maps[0].popitem() + except KeyError: + raise KeyError('No keys found in the first mapping.') + + def pop(self, key, *args): + 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' + try: + return self.maps[0].pop(key, *args) + except KeyError: + raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + + def clear(self): + 'Clear maps[0], leaving maps[1:] intact.' + self.maps[0].clear() + +try: + from importlib.util import cache_from_source # Python >= 3.4 +except ImportError: # pragma: no cover + try: + from imp import cache_from_source + except ImportError: # pragma: no cover + def cache_from_source(path, debug_override=None): + assert path.endswith('.py') + if debug_override is None: + debug_override = __debug__ + if debug_override: + suffix = 'c' + else: + suffix = 'o' + return path + suffix + +try: + from collections import OrderedDict +except ImportError: # pragma: no cover +## {{{ http://code.activestate.com/recipes/576693/ (r9) +# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. +# Passes Python2.7's test suite and incorporates all the latest updates. + try: + from thread import get_ident as _get_ident + except ImportError: + from dummy_thread import get_ident as _get_ident + + try: + from _abcoll import KeysView, ValuesView, ItemsView + except ImportError: + pass + + + class OrderedDict(dict): + 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as for regular dictionaries. 
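+
+        # For example (sketch):
+        #     >>> od = OrderedDict([('a', 1), ('b', 2)])
+        #     >>> list(od)       # insertion order is preserved
+        #     ['a', 'b']
+        #     >>> od['a'] = 99   # updating a key keeps its position
+        #     >>> list(od)
+        #     ['a', 'b']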
+ + # The internal self.__map dictionary maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # Each link is stored as a list of length three: [PREV, NEXT, KEY]. + + def __init__(self, *args, **kwds): + '''Initialize an ordered dictionary. Signature is the same as for + regular dictionaries, but keyword arguments are not recommended + because their insertion order is arbitrary. + + ''' + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__root + except AttributeError: + self.__root = root = [] # sentinel node + root[:] = [root, root, None] + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, dict_setitem=dict.__setitem__): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link which goes at the end of the linked + # list, and the inherited dictionary is updated with the new key/value pair. + if key not in self: + root = self.__root + last = root[0] + last[1] = root[0] = self.__map[key] = [last, root, key] + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which is + # then removed by updating the links in the predecessor and successor nodes. + dict_delitem(self, key) + link_prev, link_next, key = self.__map.pop(key) + link_prev[1] = link_next + link_next[0] = link_prev + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + root = self.__root + curr = root[1] + while curr is not root: + yield curr[2] + curr = curr[1] + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + root = self.__root + curr = root[0] + while curr is not root: + yield curr[2] + curr = curr[0] + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + try: + for node in self.__map.itervalues(): + del node[:] + root = self.__root + root[:] = [root, root, None] + self.__map.clear() + except AttributeError: + pass + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root[0] + link_prev = link[0] + link_prev[1] = root + root[0] = link_prev + else: + link = root[1] + link_next = link[1] + root[1] = link_next + link_next[0] = root + key = link[2] + del self.__map[key] + value = dict.pop(self, key) + return key, value + + # -- the following methods do not depend on the internal structure -- + + def keys(self): + 'od.keys() -> list of keys in od' + return list(self) + + def values(self): + 'od.values() -> list of values in od' + return [self[key] for key in self] + + def items(self): + 'od.items() -> list of (key, value) pairs in od' + return [(key, self[key]) for key in self] + + def iterkeys(self): + 'od.iterkeys() -> an iterator over the keys in od' + return iter(self) + + def itervalues(self): + 'od.itervalues -> an iterator over the values in od' + for k in self: + yield self[k] + + def iteritems(self): + 'od.iteritems -> an iterator over the (key, value) items in od' + for k in self: + yield (k, self[k]) + + def update(*args, **kwds): + '''od.update(E, **F) -> None. Update od from dict/iterable E and F. 
+ + If E is a dict instance, does: for k in E: od[k] = E[k] + If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] + Or if E is an iterable of items, does: for k, v in E: od[k] = v + In either case, this is followed by: for k, v in F.items(): od[k] = v + + ''' + if len(args) > 2: + raise TypeError('update() takes at most 2 positional ' + 'arguments (%d given)' % (len(args),)) + elif not args: + raise TypeError('update() takes at least 1 argument (0 given)') + self = args[0] + # Make progressively weaker assumptions about "other" + other = () + if len(args) == 2: + other = args[1] + if isinstance(other, dict): + for key in other: + self[key] = other[key] + elif hasattr(other, 'keys'): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + __update = update # let subclasses override update without breaking __init__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. + + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + def __repr__(self, _repr_running=None): + 'od.__repr__() <==> repr(od)' + if not _repr_running: _repr_running = {} + call_key = id(self), _get_ident() + if call_key in _repr_running: + return '...' + _repr_running[call_key] = 1 + try: + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, self.items()) + finally: + del _repr_running[call_key] + + def __reduce__(self): + 'Return state information for pickling' + items = [[k, self[k]] for k in self] + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + if inst_dict: + return (self.__class__, (items,), inst_dict) + return self.__class__, (items,) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S + and values equal to v (which defaults to None). + + ''' + d = cls() + for key in iterable: + d[key] = value + return d + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. 
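+
+            For example (sketch)::
+
+                >>> od1 = OrderedDict([('a', 1), ('b', 2)])
+                >>> od2 = OrderedDict([('b', 2), ('a', 1)])
+                >>> od1 == od2           # order-sensitive
+                False
+                >>> od1 == dict(od2)     # order-insensitive
+                True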
+ + ''' + if isinstance(other, OrderedDict): + return len(self)==len(other) and self.items() == other.items() + return dict.__eq__(self, other) + + def __ne__(self, other): + return not self == other + + # -- the following methods are only used in Python 2.7 -- + + def viewkeys(self): + "od.viewkeys() -> a set-like object providing a view on od's keys" + return KeysView(self) + + def viewvalues(self): + "od.viewvalues() -> an object providing a view on od's values" + return ValuesView(self) + + def viewitems(self): + "od.viewitems() -> a set-like object providing a view on od's items" + return ItemsView(self) + +try: + from logging.config import BaseConfigurator, valid_ident +except ImportError: # pragma: no cover + IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I) + + + def valid_ident(s): + m = IDENTIFIER.match(s) + if not m: + raise ValueError('Not a valid Python identifier: %r' % s) + return True + + + # The ConvertingXXX classes are wrappers around standard Python containers, + # and they serve to convert any suitable values in the container. The + # conversion converts base dicts, lists and tuples to their wrapped + # equivalents, whereas strings which match a conversion format are converted + # appropriately. + # + # Each wrapper should have a configurator attribute holding the actual + # configurator to use for conversion. + + class ConvertingDict(dict): + """A converting dictionary wrapper.""" + + def __getitem__(self, key): + value = dict.__getitem__(self, key) + result = self.configurator.convert(value) + #If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def get(self, key, default=None): + value = dict.get(self, key, default) + result = self.configurator.convert(value) + #If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def pop(self, key, default=None): + value = dict.pop(self, key, default) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + class ConvertingList(list): + """A converting list wrapper.""" + def __getitem__(self, key): + value = list.__getitem__(self, key) + result = self.configurator.convert(value) + #If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def pop(self, idx=-1): + value = list.pop(self, idx) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + return result + + class ConvertingTuple(tuple): + """A converting tuple wrapper.""" + def __getitem__(self, key): + value = tuple.__getitem__(self, key) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + class BaseConfigurator(object): + """ + The configurator base class which defines some useful defaults. 
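+
+        A minimal sketch of intended use (assuming only the built-in
+        ``ext://`` converter)::
+
+            >>> c = BaseConfigurator({'cls': 'ext://logging.StreamHandler'})
+            >>> c.config['cls']    # resolved on access via convert()
+            <class 'logging.StreamHandler'>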
+ """ + + CONVERT_PATTERN = re.compile(r'^(?P[a-z]+)://(?P.*)$') + + WORD_PATTERN = re.compile(r'^\s*(\w+)\s*') + DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*') + INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*') + DIGIT_PATTERN = re.compile(r'^\d+$') + + value_converters = { + 'ext' : 'ext_convert', + 'cfg' : 'cfg_convert', + } + + # We might want to use a different one, e.g. importlib + importer = staticmethod(__import__) + + def __init__(self, config): + self.config = ConvertingDict(config) + self.config.configurator = self + + def resolve(self, s): + """ + Resolve strings to objects using standard import and attribute + syntax. + """ + name = s.split('.') + used = name.pop(0) + try: + found = self.importer(used) + for frag in name: + used += '.' + frag + try: + found = getattr(found, frag) + except AttributeError: + self.importer(used) + found = getattr(found, frag) + return found + except ImportError: + e, tb = sys.exc_info()[1:] + v = ValueError('Cannot resolve %r: %s' % (s, e)) + v.__cause__, v.__traceback__ = e, tb + raise v + + def ext_convert(self, value): + """Default converter for the ext:// protocol.""" + return self.resolve(value) + + def cfg_convert(self, value): + """Default converter for the cfg:// protocol.""" + rest = value + m = self.WORD_PATTERN.match(rest) + if m is None: + raise ValueError("Unable to convert %r" % value) + else: + rest = rest[m.end():] + d = self.config[m.groups()[0]] + #print d, rest + while rest: + m = self.DOT_PATTERN.match(rest) + if m: + d = d[m.groups()[0]] + else: + m = self.INDEX_PATTERN.match(rest) + if m: + idx = m.groups()[0] + if not self.DIGIT_PATTERN.match(idx): + d = d[idx] + else: + try: + n = int(idx) # try as number first (most likely) + d = d[n] + except TypeError: + d = d[idx] + if m: + rest = rest[m.end():] + else: + raise ValueError('Unable to convert ' + '%r at %r' % (value, rest)) + #rest should be empty + return d + + def convert(self, value): + """ + Convert values to an appropriate type. dicts, lists and tuples are + replaced by their converting alternatives. Strings are checked to + see if they have a conversion format and are converted if they do. 
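+
+        For example (sketch)::
+
+            >>> c = BaseConfigurator({'root': {'level': 'DEBUG'}})
+            >>> c.convert('cfg://root.level')
+            'DEBUG'
+            >>> c.convert(42)    # non-matching values pass through
+            42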
+ """ + if not isinstance(value, ConvertingDict) and isinstance(value, dict): + value = ConvertingDict(value) + value.configurator = self + elif not isinstance(value, ConvertingList) and isinstance(value, list): + value = ConvertingList(value) + value.configurator = self + elif not isinstance(value, ConvertingTuple) and\ + isinstance(value, tuple): + value = ConvertingTuple(value) + value.configurator = self + elif isinstance(value, string_types): + m = self.CONVERT_PATTERN.match(value) + if m: + d = m.groupdict() + prefix = d['prefix'] + converter = self.value_converters.get(prefix, None) + if converter: + suffix = d['suffix'] + converter = getattr(self, converter) + value = converter(suffix) + return value + + def configure_custom(self, config): + """Configure an object with a user-supplied factory.""" + c = config.pop('()') + if not callable(c): + c = self.resolve(c) + props = config.pop('.', None) + # Check for valid identifiers + kwargs = dict([(k, config[k]) for k in config if valid_ident(k)]) + result = c(**kwargs) + if props: + for name, value in props.items(): + setattr(result, name, value) + return result + + def as_tuple(self, value): + """Utility function which converts lists to tuples.""" + if isinstance(value, list): + value = tuple(value) + return value diff --git a/venv/Lib/site-packages/distlib/database.py b/venv/Lib/site-packages/distlib/database.py new file mode 100644 index 00000000..0a90c300 --- /dev/null +++ b/venv/Lib/site-packages/distlib/database.py @@ -0,0 +1,1339 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2017 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""PEP 376 implementation.""" + +from __future__ import unicode_literals + +import base64 +import codecs +import contextlib +import hashlib +import logging +import os +import posixpath +import sys +import zipimport + +from . import DistlibException, resources +from .compat import StringIO +from .version import get_scheme, UnsupportedVersionError +from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME, + LEGACY_METADATA_FILENAME) +from .util import (parse_requirement, cached_property, parse_name_and_version, + read_exports, write_exports, CSVReader, CSVWriter) + + +__all__ = ['Distribution', 'BaseInstalledDistribution', + 'InstalledDistribution', 'EggInfoDistribution', + 'DistributionPath'] + + +logger = logging.getLogger(__name__) + +EXPORTS_FILENAME = 'pydist-exports.json' +COMMANDS_FILENAME = 'pydist-commands.json' + +DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED', + 'RESOURCES', EXPORTS_FILENAME, 'SHARED') + +DISTINFO_EXT = '.dist-info' + + +class _Cache(object): + """ + A simple cache mapping names and .dist-info paths to distributions + """ + def __init__(self): + """ + Initialise an instance. There is normally one for each DistributionPath. + """ + self.name = {} + self.path = {} + self.generated = False + + def clear(self): + """ + Clear the cache, setting it to its initial state. + """ + self.name.clear() + self.path.clear() + self.generated = False + + def add(self, dist): + """ + Add a distribution to the cache. + :param dist: The distribution to add. + """ + if dist.path not in self.path: + self.path[dist.path] = dist + self.name.setdefault(dist.key, []).append(dist) + + +class DistributionPath(object): + """ + Represents a set of distributions installed on a path (typically sys.path). 
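+
+    A typical use (sketch)::
+
+        >>> dp = DistributionPath()              # scans sys.path
+        >>> dist = dp.get_distribution('pip')    # None if not installed
+        >>> sorted(d.name for d in dp.get_distributions())  # doctest: +SKIP
+        ['pip', 'setuptools']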
+ """ + def __init__(self, path=None, include_egg=False): + """ + Create an instance from a path, optionally including legacy (distutils/ + setuptools/distribute) distributions. + :param path: The path to use, as a list of directories. If not specified, + sys.path is used. + :param include_egg: If True, this instance will look for and return legacy + distributions as well as those based on PEP 376. + """ + if path is None: + path = sys.path + self.path = path + self._include_dist = True + self._include_egg = include_egg + + self._cache = _Cache() + self._cache_egg = _Cache() + self._cache_enabled = True + self._scheme = get_scheme('default') + + def _get_cache_enabled(self): + return self._cache_enabled + + def _set_cache_enabled(self, value): + self._cache_enabled = value + + cache_enabled = property(_get_cache_enabled, _set_cache_enabled) + + def clear_cache(self): + """ + Clears the internal cache. + """ + self._cache.clear() + self._cache_egg.clear() + + + def _yield_distributions(self): + """ + Yield .dist-info and/or .egg(-info) distributions. + """ + # We need to check if we've seen some resources already, because on + # some Linux systems (e.g. some Debian/Ubuntu variants) there are + # symlinks which alias other files in the environment. + seen = set() + for path in self.path: + finder = resources.finder_for_path(path) + if finder is None: + continue + r = finder.find('') + if not r or not r.is_container: + continue + rset = sorted(r.resources) + for entry in rset: + r = finder.find(entry) + if not r or r.path in seen: + continue + if self._include_dist and entry.endswith(DISTINFO_EXT): + possible_filenames = [METADATA_FILENAME, + WHEEL_METADATA_FILENAME, + LEGACY_METADATA_FILENAME] + for metadata_filename in possible_filenames: + metadata_path = posixpath.join(entry, metadata_filename) + pydist = finder.find(metadata_path) + if pydist: + break + else: + continue + + with contextlib.closing(pydist.as_stream()) as stream: + metadata = Metadata(fileobj=stream, scheme='legacy') + logger.debug('Found %s', r.path) + seen.add(r.path) + yield new_dist_class(r.path, metadata=metadata, + env=self) + elif self._include_egg and entry.endswith(('.egg-info', + '.egg')): + logger.debug('Found %s', r.path) + seen.add(r.path) + yield old_dist_class(r.path, self) + + def _generate_cache(self): + """ + Scan the path for distributions and populate the cache with + those that are found. + """ + gen_dist = not self._cache.generated + gen_egg = self._include_egg and not self._cache_egg.generated + if gen_dist or gen_egg: + for dist in self._yield_distributions(): + if isinstance(dist, InstalledDistribution): + self._cache.add(dist) + else: + self._cache_egg.add(dist) + + if gen_dist: + self._cache.generated = True + if gen_egg: + self._cache_egg.generated = True + + @classmethod + def distinfo_dirname(cls, name, version): + """ + The *name* and *version* parameters are converted into their + filename-escaped form, i.e. any ``'-'`` characters are replaced + with ``'_'`` other than the one in ``'dist-info'`` and the one + separating the name from the version number. + + :parameter name: is converted to a standard distribution name by replacing + any runs of non- alphanumeric characters with a single + ``'-'``. + :type name: string + :parameter version: is converted to a standard version string. Spaces + become dots, and all other non-alphanumeric characters + (except dots) become dashes, with runs of multiple + dashes condensed to a single dash. 
+ :type version: string + :returns: directory name + :rtype: string""" + name = name.replace('-', '_') + return '-'.join([name, version]) + DISTINFO_EXT + + def get_distributions(self): + """ + Provides an iterator that looks for distributions and returns + :class:`InstalledDistribution` or + :class:`EggInfoDistribution` instances for each one of them. + + :rtype: iterator of :class:`InstalledDistribution` and + :class:`EggInfoDistribution` instances + """ + if not self._cache_enabled: + for dist in self._yield_distributions(): + yield dist + else: + self._generate_cache() + + for dist in self._cache.path.values(): + yield dist + + if self._include_egg: + for dist in self._cache_egg.path.values(): + yield dist + + def get_distribution(self, name): + """ + Looks for a named distribution on the path. + + This function only returns the first result found, as no more than one + value is expected. If nothing is found, ``None`` is returned. + + :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution` + or ``None`` + """ + result = None + name = name.lower() + if not self._cache_enabled: + for dist in self._yield_distributions(): + if dist.key == name: + result = dist + break + else: + self._generate_cache() + + if name in self._cache.name: + result = self._cache.name[name][0] + elif self._include_egg and name in self._cache_egg.name: + result = self._cache_egg.name[name][0] + return result + + def provides_distribution(self, name, version=None): + """ + Iterates over all distributions to find which distributions provide *name*. + If a *version* is provided, it will be used to filter the results. + + This function only returns the first result found, since no more than + one values are expected. If the directory is not found, returns ``None``. + + :parameter version: a version specifier that indicates the version + required, conforming to the format in ``PEP-345`` + + :type name: string + :type version: string + """ + matcher = None + if version is not None: + try: + matcher = self._scheme.matcher('%s (%s)' % (name, version)) + except ValueError: + raise DistlibException('invalid name or version: %r, %r' % + (name, version)) + + for dist in self.get_distributions(): + # We hit a problem on Travis where enum34 was installed and doesn't + # have a provides attribute ... + if not hasattr(dist, 'provides'): + logger.debug('No "provides": %s', dist) + else: + provided = dist.provides + + for p in provided: + p_name, p_ver = parse_name_and_version(p) + if matcher is None: + if p_name == name: + yield dist + break + else: + if p_name == name and matcher.match(p_ver): + yield dist + break + + def get_file_path(self, name, relative_path): + """ + Return the path to a resource file. + """ + dist = self.get_distribution(name) + if dist is None: + raise LookupError('no distribution named %r found' % name) + return dist.get_resource_path(relative_path) + + def get_exported_entries(self, category, name=None): + """ + Return all of the exported entries in a particular category. + + :param category: The category to search for entries. + :param name: If specified, only entries with that name are returned. + """ + for dist in self.get_distributions(): + r = dist.exports + if category in r: + d = r[category] + if name is not None: + if name in d: + yield d[name] + else: + for v in d.values(): + yield v + + +class Distribution(object): + """ + A base class for distributions, whether installed or from indexes. + Either way, it must have some metadata, so that's all that's needed + for construction. 
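+
+    A sketch of construction from an already-parsed :class:`Metadata`
+    instance ``md`` (a hypothetical variable)::
+
+        >>> dist = Distribution(md)
+        >>> dist.name_and_version
+        'foo (1.0)'
+        >>> dist.key    # lower-cased, for case-insensitive comparisons
+        'foo'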
+ """ + + build_time_dependency = False + """ + Set to True if it's known to be only a build-time dependency (i.e. + not needed after installation). + """ + + requested = False + """A boolean that indicates whether the ``REQUESTED`` metadata file is + present (in other words, whether the package was installed by user + request or it was installed as a dependency).""" + + def __init__(self, metadata): + """ + Initialise an instance. + :param metadata: The instance of :class:`Metadata` describing this + distribution. + """ + self.metadata = metadata + self.name = metadata.name + self.key = self.name.lower() # for case-insensitive comparisons + self.version = metadata.version + self.locator = None + self.digest = None + self.extras = None # additional features requested + self.context = None # environment marker overrides + self.download_urls = set() + self.digests = {} + + @property + def source_url(self): + """ + The source archive download URL for this distribution. + """ + return self.metadata.source_url + + download_url = source_url # Backward compatibility + + @property + def name_and_version(self): + """ + A utility property which displays the name and version in parentheses. + """ + return '%s (%s)' % (self.name, self.version) + + @property + def provides(self): + """ + A set of distribution names and versions provided by this distribution. + :return: A set of "name (version)" strings. + """ + plist = self.metadata.provides + s = '%s (%s)' % (self.name, self.version) + if s not in plist: + plist.append(s) + return plist + + def _get_requirements(self, req_attr): + md = self.metadata + logger.debug('Getting requirements from metadata %r', md.todict()) + reqts = getattr(md, req_attr) + return set(md.get_requirements(reqts, extras=self.extras, + env=self.context)) + + @property + def run_requires(self): + return self._get_requirements('run_requires') + + @property + def meta_requires(self): + return self._get_requirements('meta_requires') + + @property + def build_requires(self): + return self._get_requirements('build_requires') + + @property + def test_requires(self): + return self._get_requirements('test_requires') + + @property + def dev_requires(self): + return self._get_requirements('dev_requires') + + def matches_requirement(self, req): + """ + Say if this instance matches (fulfills) a requirement. + :param req: The requirement to match. + :rtype req: str + :return: True if it matches, else False. + """ + # Requirement may contain extras - parse to lose those + # from what's passed to the matcher + r = parse_requirement(req) + scheme = get_scheme(self.metadata.scheme) + try: + matcher = scheme.matcher(r.requirement) + except UnsupportedVersionError: + # XXX compat-mode if cannot read the version + logger.warning('could not read version %r - using name only', + req) + name = req.split()[0] + matcher = scheme.matcher(name) + + name = matcher.key # case-insensitive + + result = False + for p in self.provides: + p_name, p_ver = parse_name_and_version(p) + if p_name != name: + continue + try: + result = matcher.match(p_ver) + break + except UnsupportedVersionError: + pass + return result + + def __repr__(self): + """ + Return a textual representation of this instance, + """ + if self.source_url: + suffix = ' [%s]' % self.source_url + else: + suffix = '' + return '' % (self.name, self.version, suffix) + + def __eq__(self, other): + """ + See if this distribution is the same as another. + :param other: The distribution to compare with. To be equal to one + another. 
distributions must have the same type, name, + version and source_url. + :return: True if it is the same, else False. + """ + if type(other) is not type(self): + result = False + else: + result = (self.name == other.name and + self.version == other.version and + self.source_url == other.source_url) + return result + + def __hash__(self): + """ + Compute hash in a way which matches the equality test. + """ + return hash(self.name) + hash(self.version) + hash(self.source_url) + + +class BaseInstalledDistribution(Distribution): + """ + This is the base class for installed distributions (whether PEP 376 or + legacy). + """ + + hasher = None + + def __init__(self, metadata, path, env=None): + """ + Initialise an instance. + :param metadata: An instance of :class:`Metadata` which describes the + distribution. This will normally have been initialised + from a metadata file in the ``path``. + :param path: The path of the ``.dist-info`` or ``.egg-info`` + directory for the distribution. + :param env: This is normally the :class:`DistributionPath` + instance where this distribution was found. + """ + super(BaseInstalledDistribution, self).__init__(metadata) + self.path = path + self.dist_path = env + + def get_hash(self, data, hasher=None): + """ + Get the hash of some data, using a particular hash algorithm, if + specified. + + :param data: The data to be hashed. + :type data: bytes + :param hasher: The name of a hash implementation, supported by hashlib, + or ``None``. Examples of valid values are ``'sha1'``, + ``'sha224'``, ``'sha384'``, '``sha256'``, ``'md5'`` and + ``'sha512'``. If no hasher is specified, the ``hasher`` + attribute of the :class:`InstalledDistribution` instance + is used. If the hasher is determined to be ``None``, MD5 + is used as the hashing algorithm. + :returns: The hash of the data. If a hasher was explicitly specified, + the returned hash will be prefixed with the specified hasher + followed by '='. + :rtype: str + """ + if hasher is None: + hasher = self.hasher + if hasher is None: + hasher = hashlib.md5 + prefix = '' + else: + hasher = getattr(hashlib, hasher) + prefix = '%s=' % self.hasher + digest = hasher(data).digest() + digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii') + return '%s%s' % (prefix, digest) + + +class InstalledDistribution(BaseInstalledDistribution): + """ + Created with the *path* of the ``.dist-info`` directory provided to the + constructor. It reads the metadata contained in ``pydist.json`` when it is + instantiated., or uses a passed in Metadata instance (useful for when + dry-run mode is being used). 
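+
+    For example (a sketch; the path shown is hypothetical, not a runnable
+    doctest)::
+
+        >>> d = InstalledDistribution('/site-packages/foo-1.0.dist-info')
+        >>> d.name, d.version
+        ('foo', '1.0')
+        >>> d.requested    # True if a REQUESTED marker file is present
+        True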
+ """ + + hasher = 'sha256' + + def __init__(self, path, metadata=None, env=None): + self.modules = [] + self.finder = finder = resources.finder_for_path(path) + if finder is None: + raise ValueError('finder unavailable for %s' % path) + if env and env._cache_enabled and path in env._cache.path: + metadata = env._cache.path[path].metadata + elif metadata is None: + r = finder.find(METADATA_FILENAME) + # Temporary - for Wheel 0.23 support + if r is None: + r = finder.find(WHEEL_METADATA_FILENAME) + # Temporary - for legacy support + if r is None: + r = finder.find(LEGACY_METADATA_FILENAME) + if r is None: + raise ValueError('no %s found in %s' % (METADATA_FILENAME, + path)) + with contextlib.closing(r.as_stream()) as stream: + metadata = Metadata(fileobj=stream, scheme='legacy') + + super(InstalledDistribution, self).__init__(metadata, path, env) + + if env and env._cache_enabled: + env._cache.add(self) + + r = finder.find('REQUESTED') + self.requested = r is not None + p = os.path.join(path, 'top_level.txt') + if os.path.exists(p): + with open(p, 'rb') as f: + data = f.read().decode('utf-8') + self.modules = data.splitlines() + + def __repr__(self): + return '' % ( + self.name, self.version, self.path) + + def __str__(self): + return "%s %s" % (self.name, self.version) + + def _get_records(self): + """ + Get the list of installed files for the distribution + :return: A list of tuples of path, hash and size. Note that hash and + size might be ``None`` for some entries. The path is exactly + as stored in the file (which is as in PEP 376). + """ + results = [] + r = self.get_distinfo_resource('RECORD') + with contextlib.closing(r.as_stream()) as stream: + with CSVReader(stream=stream) as record_reader: + # Base location is parent dir of .dist-info dir + #base_location = os.path.dirname(self.path) + #base_location = os.path.abspath(base_location) + for row in record_reader: + missing = [None for i in range(len(row), 3)] + path, checksum, size = row + missing + #if not os.path.isabs(path): + # path = path.replace('/', os.sep) + # path = os.path.join(base_location, path) + results.append((path, checksum, size)) + return results + + @cached_property + def exports(self): + """ + Return the information exported by this distribution. + :return: A dictionary of exports, mapping an export category to a dict + of :class:`ExportEntry` instances describing the individual + export entries, and keyed by name. + """ + result = {} + r = self.get_distinfo_resource(EXPORTS_FILENAME) + if r: + result = self.read_exports() + return result + + def read_exports(self): + """ + Read exports data from a file in .ini format. + + :return: A dictionary of exports, mapping an export category to a list + of :class:`ExportEntry` instances describing the individual + export entries. + """ + result = {} + r = self.get_distinfo_resource(EXPORTS_FILENAME) + if r: + with contextlib.closing(r.as_stream()) as stream: + result = read_exports(stream) + return result + + def write_exports(self, exports): + """ + Write a dictionary of exports to a file in .ini format. + :param exports: A dictionary of exports, mapping an export category to + a list of :class:`ExportEntry` instances describing the + individual export entries. + """ + rf = self.get_distinfo_file(EXPORTS_FILENAME) + with open(rf, 'w') as f: + write_exports(exports, f) + + def get_resource_path(self, relative_path): + """ + NOTE: This API may change in the future. + + Return the absolute path to a resource file with the given relative + path. 
+ + :param relative_path: The path, relative to .dist-info, of the resource + of interest. + :return: The absolute path where the resource is to be found. + """ + r = self.get_distinfo_resource('RESOURCES') + with contextlib.closing(r.as_stream()) as stream: + with CSVReader(stream=stream) as resources_reader: + for relative, destination in resources_reader: + if relative == relative_path: + return destination + raise KeyError('no resource file with relative path %r ' + 'is installed' % relative_path) + + def list_installed_files(self): + """ + Iterates over the ``RECORD`` entries and returns a tuple + ``(path, hash, size)`` for each line. + + :returns: iterator of (path, hash, size) + """ + for result in self._get_records(): + yield result + + def write_installed_files(self, paths, prefix, dry_run=False): + """ + Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any + existing ``RECORD`` file is silently overwritten. + + prefix is used to determine when to write absolute paths. + """ + prefix = os.path.join(prefix, '') + base = os.path.dirname(self.path) + base_under_prefix = base.startswith(prefix) + base = os.path.join(base, '') + record_path = self.get_distinfo_file('RECORD') + logger.info('creating %s', record_path) + if dry_run: + return None + with CSVWriter(record_path) as writer: + for path in paths: + if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')): + # do not put size and hash, as in PEP-376 + hash_value = size = '' + else: + size = '%d' % os.path.getsize(path) + with open(path, 'rb') as fp: + hash_value = self.get_hash(fp.read()) + if path.startswith(base) or (base_under_prefix and + path.startswith(prefix)): + path = os.path.relpath(path, base) + writer.writerow((path, hash_value, size)) + + # add the RECORD file itself + if record_path.startswith(base): + record_path = os.path.relpath(record_path, base) + writer.writerow((record_path, '', '')) + return record_path + + def check_installed_files(self): + """ + Checks that the hashes and sizes of the files in ``RECORD`` are + matched by the files themselves. Returns a (possibly empty) list of + mismatches. Each entry in the mismatch list will be a tuple consisting + of the path, 'exists', 'size' or 'hash' according to what didn't match + (existence is checked first, then size, then hash), the expected + value and the actual value. + """ + mismatches = [] + base = os.path.dirname(self.path) + record_path = self.get_distinfo_file('RECORD') + for path, hash_value, size in self.list_installed_files(): + if not os.path.isabs(path): + path = os.path.join(base, path) + if path == record_path: + continue + if not os.path.exists(path): + mismatches.append((path, 'exists', True, False)) + elif os.path.isfile(path): + actual_size = str(os.path.getsize(path)) + if size and actual_size != size: + mismatches.append((path, 'size', size, actual_size)) + elif hash_value: + if '=' in hash_value: + hasher = hash_value.split('=', 1)[0] + else: + hasher = None + + with open(path, 'rb') as f: + actual_hash = self.get_hash(f.read(), hasher) + if actual_hash != hash_value: + mismatches.append((path, 'hash', hash_value, actual_hash)) + return mismatches + + @cached_property + def shared_locations(self): + """ + A dictionary of shared locations whose keys are in the set 'prefix', + 'purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'. + The corresponding value is the absolute path of that category for + this distribution, and takes into account any paths selected by the + user at installation time (e.g. 
via command-line arguments). In the + case of the 'namespace' key, this would be a list of absolute paths + for the roots of namespace packages in this distribution. + + The first time this property is accessed, the relevant information is + read from the SHARED file in the .dist-info directory. + """ + result = {} + shared_path = os.path.join(self.path, 'SHARED') + if os.path.isfile(shared_path): + with codecs.open(shared_path, 'r', encoding='utf-8') as f: + lines = f.read().splitlines() + for line in lines: + key, value = line.split('=', 1) + if key == 'namespace': + result.setdefault(key, []).append(value) + else: + result[key] = value + return result + + def write_shared_locations(self, paths, dry_run=False): + """ + Write shared location information to the SHARED file in .dist-info. + :param paths: A dictionary as described in the documentation for + :meth:`shared_locations`. + :param dry_run: If True, the action is logged but no file is actually + written. + :return: The path of the file written to. + """ + shared_path = os.path.join(self.path, 'SHARED') + logger.info('creating %s', shared_path) + if dry_run: + return None + lines = [] + for key in ('prefix', 'lib', 'headers', 'scripts', 'data'): + path = paths[key] + if os.path.isdir(paths[key]): + lines.append('%s=%s' % (key, path)) + for ns in paths.get('namespace', ()): + lines.append('namespace=%s' % ns) + + with codecs.open(shared_path, 'w', encoding='utf-8') as f: + f.write('\n'.join(lines)) + return shared_path + + def get_distinfo_resource(self, path): + if path not in DIST_FILES: + raise DistlibException('invalid path for a dist-info file: ' + '%r at %r' % (path, self.path)) + finder = resources.finder_for_path(self.path) + if finder is None: + raise DistlibException('Unable to get a finder for %s' % self.path) + return finder.find(path) + + def get_distinfo_file(self, path): + """ + Returns a path located under the ``.dist-info`` directory. Returns a + string representing the path. + + :parameter path: a ``'/'``-separated path relative to the + ``.dist-info`` directory or an absolute path; + If *path* is an absolute path and doesn't start + with the ``.dist-info`` directory path, + a :class:`DistlibException` is raised + :type path: str + :rtype: str + """ + # Check if it is an absolute path # XXX use relpath, add tests + if path.find(os.sep) >= 0: + # it's an absolute path? + distinfo_dirname, path = path.split(os.sep)[-2:] + if distinfo_dirname != self.path.split(os.sep)[-1]: + raise DistlibException( + 'dist-info file %r does not belong to the %r %s ' + 'distribution' % (path, self.name, self.version)) + + # The file must be relative + if path not in DIST_FILES: + raise DistlibException('invalid path for a dist-info file: ' + '%r at %r' % (path, self.path)) + + return os.path.join(self.path, path) + + def list_distinfo_files(self): + """ + Iterates over the ``RECORD`` entries and returns paths for each line if + the path is pointing to a file located in the ``.dist-info`` directory + or one of its subdirectories. 
+ + :returns: iterator of paths + """ + base = os.path.dirname(self.path) + for path, checksum, size in self._get_records(): + # XXX add separator or use real relpath algo + if not os.path.isabs(path): + path = os.path.join(base, path) + if path.startswith(self.path): + yield path + + def __eq__(self, other): + return (isinstance(other, InstalledDistribution) and + self.path == other.path) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + __hash__ = object.__hash__ + + +class EggInfoDistribution(BaseInstalledDistribution): + """Created with the *path* of the ``.egg-info`` directory or file provided + to the constructor. It reads the metadata contained in the file itself, or + if the given path happens to be a directory, the metadata is read from the + file ``PKG-INFO`` under that directory.""" + + requested = True # as we have no way of knowing, assume it was + shared_locations = {} + + def __init__(self, path, env=None): + def set_name_and_version(s, n, v): + s.name = n + s.key = n.lower() # for case-insensitive comparisons + s.version = v + + self.path = path + self.dist_path = env + if env and env._cache_enabled and path in env._cache_egg.path: + metadata = env._cache_egg.path[path].metadata + set_name_and_version(self, metadata.name, metadata.version) + else: + metadata = self._get_metadata(path) + + # Need to be set before caching + set_name_and_version(self, metadata.name, metadata.version) + + if env and env._cache_enabled: + env._cache_egg.add(self) + super(EggInfoDistribution, self).__init__(metadata, path, env) + + def _get_metadata(self, path): + requires = None + + def parse_requires_data(data): + """Create a list of dependencies from a requires.txt file. + + *data*: the contents of a setuptools-produced requires.txt file. + """ + reqs = [] + lines = data.splitlines() + for line in lines: + line = line.strip() + if line.startswith('['): + logger.warning('Unexpected line: quitting requirement scan: %r', + line) + break + r = parse_requirement(line) + if not r: + logger.warning('Not recognised as a requirement: %r', line) + continue + if r.extras: + logger.warning('extra requirements in requires.txt are ' + 'not supported') + if not r.constraints: + reqs.append(r.name) + else: + cons = ', '.join('%s%s' % c for c in r.constraints) + reqs.append('%s (%s)' % (r.name, cons)) + return reqs + + def parse_requires_path(req_path): + """Create a list of dependencies from a requires.txt file. + + *req_path*: the path to a setuptools-produced requires.txt file. 
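+
+            Roughly, for a file containing (sketch)::
+
+                simplejson
+                requests>=2.0
+                [security]
+
+            this returns ``['simplejson', 'requests (>=2.0)']``; scanning
+            stops at the first ``[section]`` header.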
+ """ + + reqs = [] + try: + with codecs.open(req_path, 'r', 'utf-8') as fp: + reqs = parse_requires_data(fp.read()) + except IOError: + pass + return reqs + + tl_path = tl_data = None + if path.endswith('.egg'): + if os.path.isdir(path): + p = os.path.join(path, 'EGG-INFO') + meta_path = os.path.join(p, 'PKG-INFO') + metadata = Metadata(path=meta_path, scheme='legacy') + req_path = os.path.join(p, 'requires.txt') + tl_path = os.path.join(p, 'top_level.txt') + requires = parse_requires_path(req_path) + else: + # FIXME handle the case where zipfile is not available + zipf = zipimport.zipimporter(path) + fileobj = StringIO( + zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8')) + metadata = Metadata(fileobj=fileobj, scheme='legacy') + try: + data = zipf.get_data('EGG-INFO/requires.txt') + tl_data = zipf.get_data('EGG-INFO/top_level.txt').decode('utf-8') + requires = parse_requires_data(data.decode('utf-8')) + except IOError: + requires = None + elif path.endswith('.egg-info'): + if os.path.isdir(path): + req_path = os.path.join(path, 'requires.txt') + requires = parse_requires_path(req_path) + path = os.path.join(path, 'PKG-INFO') + tl_path = os.path.join(path, 'top_level.txt') + metadata = Metadata(path=path, scheme='legacy') + else: + raise DistlibException('path must end with .egg-info or .egg, ' + 'got %r' % path) + + if requires: + metadata.add_requirements(requires) + # look for top-level modules in top_level.txt, if present + if tl_data is None: + if tl_path is not None and os.path.exists(tl_path): + with open(tl_path, 'rb') as f: + tl_data = f.read().decode('utf-8') + if not tl_data: + tl_data = [] + else: + tl_data = tl_data.splitlines() + self.modules = tl_data + return metadata + + def __repr__(self): + return '' % ( + self.name, self.version, self.path) + + def __str__(self): + return "%s %s" % (self.name, self.version) + + def check_installed_files(self): + """ + Checks that the hashes and sizes of the files in ``RECORD`` are + matched by the files themselves. Returns a (possibly empty) list of + mismatches. Each entry in the mismatch list will be a tuple consisting + of the path, 'exists', 'size' or 'hash' according to what didn't match + (existence is checked first, then size, then hash), the expected + value and the actual value. + """ + mismatches = [] + record_path = os.path.join(self.path, 'installed-files.txt') + if os.path.exists(record_path): + for path, _, _ in self.list_installed_files(): + if path == record_path: + continue + if not os.path.exists(path): + mismatches.append((path, 'exists', True, False)) + return mismatches + + def list_installed_files(self): + """ + Iterates over the ``installed-files.txt`` entries and returns a tuple + ``(path, hash, size)`` for each line. 
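+
+        A typical ``installed-files.txt`` looks roughly like::
+
+            ../foo/__init__.py
+            ./
+            PKG-INFO
+
+        with paths resolved relative to the ``.egg-info`` directory and the
+        ``./`` line separating installed files from the metadata files.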
+ + :returns: a list of (path, hash, size) + """ + + def _md5(path): + f = open(path, 'rb') + try: + content = f.read() + finally: + f.close() + return hashlib.md5(content).hexdigest() + + def _size(path): + return os.stat(path).st_size + + record_path = os.path.join(self.path, 'installed-files.txt') + result = [] + if os.path.exists(record_path): + with codecs.open(record_path, 'r', encoding='utf-8') as f: + for line in f: + line = line.strip() + p = os.path.normpath(os.path.join(self.path, line)) + # "./" is present as a marker between installed files + # and installation metadata files + if not os.path.exists(p): + logger.warning('Non-existent file: %s', p) + if p.endswith(('.pyc', '.pyo')): + continue + #otherwise fall through and fail + if not os.path.isdir(p): + result.append((p, _md5(p), _size(p))) + result.append((record_path, None, None)) + return result + + def list_distinfo_files(self, absolute=False): + """ + Iterates over the ``installed-files.txt`` entries and returns paths for + each line if the path is pointing to a file located in the + ``.egg-info`` directory or one of its subdirectories. + + :parameter absolute: If *absolute* is ``True``, each returned path is + transformed into a local absolute path. Otherwise the + raw value from ``installed-files.txt`` is returned. + :type absolute: boolean + :returns: iterator of paths + """ + record_path = os.path.join(self.path, 'installed-files.txt') + if os.path.exists(record_path): + skip = True + with codecs.open(record_path, 'r', encoding='utf-8') as f: + for line in f: + line = line.strip() + if line == './': + skip = False + continue + if not skip: + p = os.path.normpath(os.path.join(self.path, line)) + if p.startswith(self.path): + if absolute: + yield p + else: + yield line + + def __eq__(self, other): + return (isinstance(other, EggInfoDistribution) and + self.path == other.path) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + __hash__ = object.__hash__ + +new_dist_class = InstalledDistribution +old_dist_class = EggInfoDistribution + + +class DependencyGraph(object): + """ + Represents a dependency graph between distributions. + + The dependency relationships are stored in an ``adjacency_list`` that maps + distributions to a list of ``(other, label)`` tuples where ``other`` + is a distribution and the edge is labeled with ``label`` (i.e. the version + specifier, if such was provided). Also, for more efficient traversal, for + every distribution ``x``, a list of predecessors is kept in + ``reverse_list[x]``. An edge from distribution ``a`` to + distribution ``b`` means that ``a`` depends on ``b``. If any missing + dependencies are found, they are stored in ``missing``, which is a + dictionary that maps distributions to a list of requirements that were not + provided by any other distributions. + """ + + def __init__(self): + self.adjacency_list = {} + self.reverse_list = {} + self.missing = {} + + def add_distribution(self, distribution): + """Add the *distribution* to the graph. + + :type distribution: :class:`distutils2.database.InstalledDistribution` + or :class:`distutils2.database.EggInfoDistribution` + """ + self.adjacency_list[distribution] = [] + self.reverse_list[distribution] = [] + #self.missing[distribution] = [] + + def add_edge(self, x, y, label=None): + """Add an edge from distribution *x* to distribution *y* with the given + *label*. 
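The ``DependencyGraph`` API above can be exercised by hand; a minimal sketch using the module's own ``make_dist`` helper (the names, versions and requirement strings are placeholders)::

    from distlib.database import DependencyGraph, make_dist

    a = make_dist('a', '1.0')
    b = make_dist('b', '2.0')
    graph = DependencyGraph()
    graph.add_distribution(a)
    graph.add_distribution(b)
    graph.add_edge(a, b, label='b (>= 2.0)')   # a depends on b
    graph.add_missing(a, 'c (>= 1.0)')         # nothing provides c
    # graph.adjacency_list[a] == [(b, 'b (>= 2.0)')]
    # graph.reverse_list[b] == [a]
    # graph.missing[a] == ['c (>= 1.0)']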
+ + :type x: :class:`distutils2.database.InstalledDistribution` or + :class:`distutils2.database.EggInfoDistribution` + :type y: :class:`distutils2.database.InstalledDistribution` or + :class:`distutils2.database.EggInfoDistribution` + :type label: ``str`` or ``None`` + """ + self.adjacency_list[x].append((y, label)) + # multiple edges are allowed, so be careful + if x not in self.reverse_list[y]: + self.reverse_list[y].append(x) + + def add_missing(self, distribution, requirement): + """ + Add a missing *requirement* for the given *distribution*. + + :type distribution: :class:`distutils2.database.InstalledDistribution` + or :class:`distutils2.database.EggInfoDistribution` + :type requirement: ``str`` + """ + logger.debug('%s missing %r', distribution, requirement) + self.missing.setdefault(distribution, []).append(requirement) + + def _repr_dist(self, dist): + return '%s %s' % (dist.name, dist.version) + + def repr_node(self, dist, level=1): + """Prints only a subgraph""" + output = [self._repr_dist(dist)] + for other, label in self.adjacency_list[dist]: + dist = self._repr_dist(other) + if label is not None: + dist = '%s [%s]' % (dist, label) + output.append(' ' * level + str(dist)) + suboutput = self.repr_node(other, level + 1) + subs = suboutput.split('\n') + output.extend(subs[1:]) + return '\n'.join(output) + + def to_dot(self, f, skip_disconnected=True): + """Writes a DOT output for the graph to the provided file *f*. + + If *skip_disconnected* is set to ``True``, then all distributions + that are not dependent on any other distribution are skipped. + + :type f: has to support ``file``-like operations + :type skip_disconnected: ``bool`` + """ + disconnected = [] + + f.write("digraph dependencies {\n") + for dist, adjs in self.adjacency_list.items(): + if len(adjs) == 0 and not skip_disconnected: + disconnected.append(dist) + for other, label in adjs: + if not label is None: + f.write('"%s" -> "%s" [label="%s"]\n' % + (dist.name, other.name, label)) + else: + f.write('"%s" -> "%s"\n' % (dist.name, other.name)) + if not skip_disconnected and len(disconnected) > 0: + f.write('subgraph disconnected {\n') + f.write('label = "Disconnected"\n') + f.write('bgcolor = red\n') + + for dist in disconnected: + f.write('"%s"' % dist.name) + f.write('\n') + f.write('}\n') + f.write('}\n') + + def topological_sort(self): + """ + Perform a topological sort of the graph. + :return: A tuple, the first element of which is a topologically sorted + list of distributions, and the second element of which is a + list of distributions that cannot be sorted because they have + circular dependencies and so form a cycle. + """ + result = [] + # Make a shallow copy of the adjacency list + alist = {} + for k, v in self.adjacency_list.items(): + alist[k] = v[:] + while True: + # See what we can remove in this run + to_remove = [] + for k, v in list(alist.items())[:]: + if not v: + to_remove.append(k) + del alist[k] + if not to_remove: + # What's left in alist (if anything) is a cycle. 
+ break + # Remove from the adjacency list of others + for k, v in alist.items(): + alist[k] = [(d, r) for d, r in v if d not in to_remove] + logger.debug('Moving to result: %s', + ['%s (%s)' % (d.name, d.version) for d in to_remove]) + result.extend(to_remove) + return result, list(alist.keys()) + + def __repr__(self): + """Representation of the graph""" + output = [] + for dist, adjs in self.adjacency_list.items(): + output.append(self.repr_node(dist)) + return '\n'.join(output) + + +def make_graph(dists, scheme='default'): + """Makes a dependency graph from the given distributions. + + :parameter dists: a list of distributions + :type dists: list of :class:`distutils2.database.InstalledDistribution` and + :class:`distutils2.database.EggInfoDistribution` instances + :rtype: a :class:`DependencyGraph` instance + """ + scheme = get_scheme(scheme) + graph = DependencyGraph() + provided = {} # maps names to lists of (version, dist) tuples + + # first, build the graph and find out what's provided + for dist in dists: + graph.add_distribution(dist) + + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Add to provided: %s, %s, %s', name, version, dist) + provided.setdefault(name, []).append((version, dist)) + + # now make the edges + for dist in dists: + requires = (dist.run_requires | dist.meta_requires | + dist.build_requires | dist.dev_requires) + for req in requires: + try: + matcher = scheme.matcher(req) + except UnsupportedVersionError: + # XXX compat-mode if cannot read the version + logger.warning('could not read version %r - using name only', + req) + name = req.split()[0] + matcher = scheme.matcher(name) + + name = matcher.key # case-insensitive + + matched = False + if name in provided: + for version, provider in provided[name]: + try: + match = matcher.match(version) + except UnsupportedVersionError: + match = False + + if match: + graph.add_edge(dist, provider, req) + matched = True + break + if not matched: + graph.add_missing(dist, req) + return graph + + +def get_dependent_dists(dists, dist): + """Recursively generate a list of distributions from *dists* that are + dependent on *dist*. + + :param dists: a list of distributions + :param dist: a distribution, member of *dists* for which we are interested + """ + if dist not in dists: + raise DistlibException('given distribution %r is not a member ' + 'of the list' % dist.name) + graph = make_graph(dists) + + dep = [dist] # dependent distributions + todo = graph.reverse_list[dist] # list of nodes we should inspect + + while todo: + d = todo.pop() + dep.append(d) + for succ in graph.reverse_list[d]: + if succ not in dep: + todo.append(succ) + + dep.pop(0) # remove dist from dep, was there to prevent infinite loops + return dep + + +def get_required_dists(dists, dist): + """Recursively generate a list of distributions from *dists* that are + required by *dist*. + + :param dists: a list of distributions + :param dist: a distribution, member of *dists* for which we are interested + """ + if dist not in dists: + raise DistlibException('given distribution %r is not a member ' + 'of the list' % dist.name) + graph = make_graph(dists) + + req = [] # required distributions + todo = graph.adjacency_list[dist] # list of nodes we should inspect + + while todo: + d = todo.pop()[0] + req.append(d) + for pred in graph.adjacency_list[d]: + if pred not in req: + todo.append(pred) + + return req + + +def make_dist(name, version, **kwargs): + """ + A convenience method for making a dist given just a name and version. 
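Putting those pieces together, a sketch of building and ordering a graph (placeholder dists; in practice the input usually comes from a ``DistributionPath``)::

    from distlib.database import make_dist, make_graph

    dists = [make_dist('a', '1.0'), make_dist('b', '1.0')]
    graph = make_graph(dists)   # edges are derived from each dist's requires
    ordered, cyclic = graph.topological_sort()
    # 'ordered' lists dependencies before their dependents; 'cyclic' holds
    # whatever could not be ordered because it participates in a cycle.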
+ """ + summary = kwargs.pop('summary', 'Placeholder for summary') + md = Metadata(**kwargs) + md.name = name + md.version = version + md.summary = summary or 'Placeholder for summary' + return Distribution(md) diff --git a/venv/Lib/site-packages/distlib/index.py b/venv/Lib/site-packages/distlib/index.py new file mode 100644 index 00000000..7a87cdcf --- /dev/null +++ b/venv/Lib/site-packages/distlib/index.py @@ -0,0 +1,516 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +import hashlib +import logging +import os +import shutil +import subprocess +import tempfile +try: + from threading import Thread +except ImportError: + from dummy_threading import Thread + +from . import DistlibException +from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr, + urlparse, build_opener, string_types) +from .util import cached_property, zip_dir, ServerProxy + +logger = logging.getLogger(__name__) + +DEFAULT_INDEX = 'https://pypi.org/pypi' +DEFAULT_REALM = 'pypi' + +class PackageIndex(object): + """ + This class represents a package index compatible with PyPI, the Python + Package Index. + """ + + boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$' + + def __init__(self, url=None): + """ + Initialise an instance. + + :param url: The URL of the index. If not specified, the URL for PyPI is + used. + """ + self.url = url or DEFAULT_INDEX + self.read_configuration() + scheme, netloc, path, params, query, frag = urlparse(self.url) + if params or query or frag or scheme not in ('http', 'https'): + raise DistlibException('invalid repository: %s' % self.url) + self.password_handler = None + self.ssl_verifier = None + self.gpg = None + self.gpg_home = None + with open(os.devnull, 'w') as sink: + # Use gpg by default rather than gpg2, as gpg2 insists on + # prompting for passwords + for s in ('gpg', 'gpg2'): + try: + rc = subprocess.check_call([s, '--version'], stdout=sink, + stderr=sink) + if rc == 0: + self.gpg = s + break + except OSError: + pass + + def _get_pypirc_command(self): + """ + Get the distutils command for interacting with PyPI configurations. + :return: the command. + """ + from distutils.core import Distribution + from distutils.config import PyPIRCCommand + d = Distribution() + return PyPIRCCommand(d) + + def read_configuration(self): + """ + Read the PyPI access configuration as supported by distutils, getting + PyPI to do the actual work. This populates ``username``, ``password``, + ``realm`` and ``url`` attributes from the configuration. + """ + # get distutils to do the work + c = self._get_pypirc_command() + c.repository = self.url + cfg = c._read_pypirc() + self.username = cfg.get('username') + self.password = cfg.get('password') + self.realm = cfg.get('realm', 'pypi') + self.url = cfg.get('repository', self.url) + + def save_configuration(self): + """ + Save the PyPI access configuration. You must have set ``username`` and + ``password`` attributes before calling this method. + + Again, distutils is used to do the actual work. + """ + self.check_credentials() + # get distutils to do the work + c = self._get_pypirc_command() + c._store_pypirc(self.username, self.password) + + def check_credentials(self): + """ + Check that ``username`` and ``password`` have been set, and raise an + exception if not. 
+ """ + if self.username is None or self.password is None: + raise DistlibException('username and password must be set') + pm = HTTPPasswordMgr() + _, netloc, _, _, _, _ = urlparse(self.url) + pm.add_password(self.realm, netloc, self.username, self.password) + self.password_handler = HTTPBasicAuthHandler(pm) + + def register(self, metadata): + """ + Register a distribution on PyPI, using the provided metadata. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the distribution to be + registered. + :return: The HTTP response received from PyPI upon submission of the + request. + """ + self.check_credentials() + metadata.validate() + d = metadata.todict() + d[':action'] = 'verify' + request = self.encode_request(d.items(), []) + response = self.send_request(request) + d[':action'] = 'submit' + request = self.encode_request(d.items(), []) + return self.send_request(request) + + def _reader(self, name, stream, outbuf): + """ + Thread runner for reading lines of from a subprocess into a buffer. + + :param name: The logical name of the stream (used for logging only). + :param stream: The stream to read from. This will typically a pipe + connected to the output stream of a subprocess. + :param outbuf: The list to append the read lines to. + """ + while True: + s = stream.readline() + if not s: + break + s = s.decode('utf-8').rstrip() + outbuf.append(s) + logger.debug('%s: %s' % (name, s)) + stream.close() + + def get_sign_command(self, filename, signer, sign_password, + keystore=None): + """ + Return a suitable command for signing a file. + + :param filename: The pathname to the file to be signed. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: The signing command as a list suitable to be + passed to :class:`subprocess.Popen`. + """ + cmd = [self.gpg, '--status-fd', '2', '--no-tty'] + if keystore is None: + keystore = self.gpg_home + if keystore: + cmd.extend(['--homedir', keystore]) + if sign_password is not None: + cmd.extend(['--batch', '--passphrase-fd', '0']) + td = tempfile.mkdtemp() + sf = os.path.join(td, os.path.basename(filename) + '.asc') + cmd.extend(['--detach-sign', '--armor', '--local-user', + signer, '--output', sf, filename]) + logger.debug('invoking: %s', ' '.join(cmd)) + return cmd, sf + + def run_command(self, cmd, input_data=None): + """ + Run a command in a child process , passing it any input data specified. + + :param cmd: The command to run. + :param input_data: If specified, this must be a byte string containing + data to be sent to the child process. + :return: A tuple consisting of the subprocess' exit code, a list of + lines read from the subprocess' ``stdout``, and a list of + lines read from the subprocess' ``stderr``. 
+ """ + kwargs = { + 'stdout': subprocess.PIPE, + 'stderr': subprocess.PIPE, + } + if input_data is not None: + kwargs['stdin'] = subprocess.PIPE + stdout = [] + stderr = [] + p = subprocess.Popen(cmd, **kwargs) + # We don't use communicate() here because we may need to + # get clever with interacting with the command + t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout)) + t1.start() + t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr)) + t2.start() + if input_data is not None: + p.stdin.write(input_data) + p.stdin.close() + + p.wait() + t1.join() + t2.join() + return p.returncode, stdout, stderr + + def sign_file(self, filename, signer, sign_password, keystore=None): + """ + Sign a file. + + :param filename: The pathname to the file to be signed. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param keystore: The path to a directory which contains the keys + used in signing. If not specified, the instance's + ``gpg_home`` attribute is used instead. + :return: The absolute pathname of the file where the signature is + stored. + """ + cmd, sig_file = self.get_sign_command(filename, signer, sign_password, + keystore) + rc, stdout, stderr = self.run_command(cmd, + sign_password.encode('utf-8')) + if rc != 0: + raise DistlibException('sign command failed with error ' + 'code %s' % rc) + return sig_file + + def upload_file(self, metadata, filename, signer=None, sign_password=None, + filetype='sdist', pyversion='source', keystore=None): + """ + Upload a release file to the index. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the file to be uploaded. + :param filename: The pathname of the file to be uploaded. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param filetype: The type of the file being uploaded. This is the + distutils command which produced that file, e.g. + ``sdist`` or ``bdist_wheel``. + :param pyversion: The version of Python which the release relates + to. For code compatible with any Python, this would + be ``source``, otherwise it would be e.g. ``3.2``. + :param keystore: The path to a directory which contains the keys + used in signing. If not specified, the instance's + ``gpg_home`` attribute is used instead. + :return: The HTTP response received from PyPI upon submission of the + request. 
+ """ + self.check_credentials() + if not os.path.exists(filename): + raise DistlibException('not found: %s' % filename) + metadata.validate() + d = metadata.todict() + sig_file = None + if signer: + if not self.gpg: + logger.warning('no signing program available - not signed') + else: + sig_file = self.sign_file(filename, signer, sign_password, + keystore) + with open(filename, 'rb') as f: + file_data = f.read() + md5_digest = hashlib.md5(file_data).hexdigest() + sha256_digest = hashlib.sha256(file_data).hexdigest() + d.update({ + ':action': 'file_upload', + 'protocol_version': '1', + 'filetype': filetype, + 'pyversion': pyversion, + 'md5_digest': md5_digest, + 'sha256_digest': sha256_digest, + }) + files = [('content', os.path.basename(filename), file_data)] + if sig_file: + with open(sig_file, 'rb') as f: + sig_data = f.read() + files.append(('gpg_signature', os.path.basename(sig_file), + sig_data)) + shutil.rmtree(os.path.dirname(sig_file)) + request = self.encode_request(d.items(), files) + return self.send_request(request) + + def upload_documentation(self, metadata, doc_dir): + """ + Upload documentation to the index. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the documentation to be + uploaded. + :param doc_dir: The pathname of the directory which contains the + documentation. This should be the directory that + contains the ``index.html`` for the documentation. + :return: The HTTP response received from PyPI upon submission of the + request. + """ + self.check_credentials() + if not os.path.isdir(doc_dir): + raise DistlibException('not a directory: %r' % doc_dir) + fn = os.path.join(doc_dir, 'index.html') + if not os.path.exists(fn): + raise DistlibException('not found: %r' % fn) + metadata.validate() + name, version = metadata.name, metadata.version + zip_data = zip_dir(doc_dir).getvalue() + fields = [(':action', 'doc_upload'), + ('name', name), ('version', version)] + files = [('content', name, zip_data)] + request = self.encode_request(fields, files) + return self.send_request(request) + + def get_verify_command(self, signature_filename, data_filename, + keystore=None): + """ + Return a suitable command for verifying a file. + + :param signature_filename: The pathname to the file containing the + signature. + :param data_filename: The pathname to the file containing the + signed data. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: The verifying command as a list suitable to be + passed to :class:`subprocess.Popen`. + """ + cmd = [self.gpg, '--status-fd', '2', '--no-tty'] + if keystore is None: + keystore = self.gpg_home + if keystore: + cmd.extend(['--homedir', keystore]) + cmd.extend(['--verify', signature_filename, data_filename]) + logger.debug('invoking: %s', ' '.join(cmd)) + return cmd + + def verify_signature(self, signature_filename, data_filename, + keystore=None): + """ + Verify a signature for a file. + + :param signature_filename: The pathname to the file containing the + signature. + :param data_filename: The pathname to the file containing the + signed data. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: True if the signature was verified, else False. 
+ """ + if not self.gpg: + raise DistlibException('verification unavailable because gpg ' + 'unavailable') + cmd = self.get_verify_command(signature_filename, data_filename, + keystore) + rc, stdout, stderr = self.run_command(cmd) + if rc not in (0, 1): + raise DistlibException('verify command failed with error ' + 'code %s' % rc) + return rc == 0 + + def download_file(self, url, destfile, digest=None, reporthook=None): + """ + This is a convenience method for downloading a file from an URL. + Normally, this will be a file from the index, though currently + no check is made for this (i.e. a file can be downloaded from + anywhere). + + The method is just like the :func:`urlretrieve` function in the + standard library, except that it allows digest computation to be + done during download and checking that the downloaded data + matched any expected value. + + :param url: The URL of the file to be downloaded (assumed to be + available via an HTTP GET request). + :param destfile: The pathname where the downloaded file is to be + saved. + :param digest: If specified, this must be a (hasher, value) + tuple, where hasher is the algorithm used (e.g. + ``'md5'``) and ``value`` is the expected value. + :param reporthook: The same as for :func:`urlretrieve` in the + standard library. + """ + if digest is None: + digester = None + logger.debug('No digest specified') + else: + if isinstance(digest, (list, tuple)): + hasher, digest = digest + else: + hasher = 'md5' + digester = getattr(hashlib, hasher)() + logger.debug('Digest specified: %s' % digest) + # The following code is equivalent to urlretrieve. + # We need to do it this way so that we can compute the + # digest of the file as we go. + with open(destfile, 'wb') as dfp: + # addinfourl is not a context manager on 2.x + # so we have to use try/finally + sfp = self.send_request(Request(url)) + try: + headers = sfp.info() + blocksize = 8192 + size = -1 + read = 0 + blocknum = 0 + if "content-length" in headers: + size = int(headers["Content-Length"]) + if reporthook: + reporthook(blocknum, blocksize, size) + while True: + block = sfp.read(blocksize) + if not block: + break + read += len(block) + dfp.write(block) + if digester: + digester.update(block) + blocknum += 1 + if reporthook: + reporthook(blocknum, blocksize, size) + finally: + sfp.close() + + # check that we got the whole file, if we can + if size >= 0 and read < size: + raise DistlibException( + 'retrieval incomplete: got only %d out of %d bytes' + % (read, size)) + # if we have a digest, it must match. + if digester: + actual = digester.hexdigest() + if digest != actual: + raise DistlibException('%s digest mismatch for %s: expected ' + '%s, got %s' % (hasher, destfile, + digest, actual)) + logger.debug('Digest verified: %s', digest) + + def send_request(self, req): + """ + Send a standard library :class:`Request` to PyPI and return its + response. + + :param req: The request to send. + :return: The HTTP response from PyPI (a standard library HTTPResponse). + """ + handlers = [] + if self.password_handler: + handlers.append(self.password_handler) + if self.ssl_verifier: + handlers.append(self.ssl_verifier) + opener = build_opener(*handlers) + return opener.open(req) + + def encode_request(self, fields, files): + """ + Encode fields and files for posting to an HTTP server. + + :param fields: The fields to send as a list of (fieldname, value) + tuples. + :param files: The files to send as a list of (fieldname, filename, + file_bytes) tuple. 
+ """ + # Adapted from packaging, which in turn was adapted from + # http://code.activestate.com/recipes/146306 + + parts = [] + boundary = self.boundary + for k, values in fields: + if not isinstance(values, (list, tuple)): + values = [values] + + for v in values: + parts.extend(( + b'--' + boundary, + ('Content-Disposition: form-data; name="%s"' % + k).encode('utf-8'), + b'', + v.encode('utf-8'))) + for key, filename, value in files: + parts.extend(( + b'--' + boundary, + ('Content-Disposition: form-data; name="%s"; filename="%s"' % + (key, filename)).encode('utf-8'), + b'', + value)) + + parts.extend((b'--' + boundary + b'--', b'')) + + body = b'\r\n'.join(parts) + ct = b'multipart/form-data; boundary=' + boundary + headers = { + 'Content-type': ct, + 'Content-length': str(len(body)) + } + return Request(self.url, body, headers) + + def search(self, terms, operator=None): + if isinstance(terms, string_types): + terms = {'name': terms} + rpc_proxy = ServerProxy(self.url, timeout=3.0) + try: + return rpc_proxy.search(terms, operator or 'and') + finally: + rpc_proxy('close')() diff --git a/venv/Lib/site-packages/distlib/locators.py b/venv/Lib/site-packages/distlib/locators.py new file mode 100644 index 00000000..12a1d063 --- /dev/null +++ b/venv/Lib/site-packages/distlib/locators.py @@ -0,0 +1,1302 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2015 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# + +import gzip +from io import BytesIO +import json +import logging +import os +import posixpath +import re +try: + import threading +except ImportError: # pragma: no cover + import dummy_threading as threading +import zlib + +from . import DistlibException +from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url, + queue, quote, unescape, string_types, build_opener, + HTTPRedirectHandler as BaseRedirectHandler, text_type, + Request, HTTPError, URLError) +from .database import Distribution, DistributionPath, make_dist +from .metadata import Metadata, MetadataInvalidError +from .util import (cached_property, parse_credentials, ensure_slash, + split_filename, get_project_data, parse_requirement, + parse_name_and_version, ServerProxy, normalize_name) +from .version import get_scheme, UnsupportedVersionError +from .wheel import Wheel, is_compatible + +logger = logging.getLogger(__name__) + +HASHER_HASH = re.compile(r'^(\w+)=([a-f0-9]+)') +CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I) +HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml') +DEFAULT_INDEX = 'https://pypi.org/pypi' + +def get_all_distribution_names(url=None): + """ + Return all distribution names known by an index. + :param url: The URL of the index. + :return: A list of all known distribution names. + """ + if url is None: + url = DEFAULT_INDEX + client = ServerProxy(url, timeout=3.0) + try: + return client.list_packages() + finally: + client('close')() + +class RedirectHandler(BaseRedirectHandler): + """ + A class to work around a bug in some Python 3.2.x releases. + """ + # There's a bug in the base version for some 3.2.x + # (e.g. 3.2.2 on Ubuntu Oneiric). If a Location header + # returns e.g. /abc, it bails because it says the scheme '' + # is bogus, when actually it should use the request's + # URL for the scheme. See Python issue #13696. + def http_error_302(self, req, fp, code, msg, headers): + # Some servers (incorrectly) return multiple Location headers + # (so probably same goes for URI). 
Use first header. + newurl = None + for key in ('location', 'uri'): + if key in headers: + newurl = headers[key] + break + if newurl is None: # pragma: no cover + return + urlparts = urlparse(newurl) + if urlparts.scheme == '': + newurl = urljoin(req.get_full_url(), newurl) + if hasattr(headers, 'replace_header'): + headers.replace_header(key, newurl) + else: + headers[key] = newurl + return BaseRedirectHandler.http_error_302(self, req, fp, code, msg, + headers) + + http_error_301 = http_error_303 = http_error_307 = http_error_302 + +class Locator(object): + """ + A base class for locators - things that locate distributions. + """ + source_extensions = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz') + binary_extensions = ('.egg', '.exe', '.whl') + excluded_extensions = ('.pdf',) + + # A list of tags indicating which wheels you want to match. The default + # value of None matches against the tags compatible with the running + # Python. If you want to match other values, set wheel_tags on a locator + # instance to a list of tuples (pyver, abi, arch) which you want to match. + wheel_tags = None + + downloadable_extensions = source_extensions + ('.whl',) + + def __init__(self, scheme='default'): + """ + Initialise an instance. + :param scheme: Because locators look for most recent versions, they + need to know the version scheme to use. This specifies + the current PEP-recommended scheme - use ``'legacy'`` + if you need to support existing distributions on PyPI. + """ + self._cache = {} + self.scheme = scheme + # Because of bugs in some of the handlers on some of the platforms, + # we use our own opener rather than just using urlopen. + self.opener = build_opener(RedirectHandler()) + # If get_project() is called from locate(), the matcher instance + # is set from the requirement passed to locate(). See issue #18 for + # why this can be useful to know. + self.matcher = None + self.errors = queue.Queue() + + def get_errors(self): + """ + Return any errors which have occurred. + """ + result = [] + while not self.errors.empty(): # pragma: no cover + try: + e = self.errors.get(False) + result.append(e) + except self.errors.Empty: + continue + self.errors.task_done() + return result + + def clear_errors(self): + """ + Clear any errors which may have been logged. + """ + # Just get the errors and throw them away + self.get_errors() + + def clear_cache(self): + self._cache.clear() + + def _get_scheme(self): + return self._scheme + + def _set_scheme(self, value): + self._scheme = value + + scheme = property(_get_scheme, _set_scheme) + + def _get_project(self, name): + """ + For a given project, get a dictionary mapping available versions to Distribution + instances. + + This should be implemented in subclasses. + + If called from a locate() request, self.matcher will be set to a + matcher for the requirement to satisfy, otherwise it will be None. + """ + raise NotImplementedError('Please implement in the subclass') + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + raise NotImplementedError('Please implement in the subclass') + + def get_project(self, name): + """ + For a given project, get a dictionary mapping available versions to Distribution + instances. + + This calls _get_project to do all the work, and just implements a caching layer on top. 
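Since ``_get_project`` and ``get_distribution_names`` are the extension points named above, a toy subclass illustrates the contract (entirely hypothetical)::

    from distlib.locators import Locator

    class EmptyLocator(Locator):
        """Sketch: a locator that never finds anything."""
        def _get_project(self, name):
            # real implementations add Distribution entries keyed by
            # version, alongside the 'urls' and 'digests' bookkeeping
            return {'urls': {}, 'digests': {}}

        def get_distribution_names(self):
            return set()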
+ """ + if self._cache is None: # pragma: no cover + result = self._get_project(name) + elif name in self._cache: + result = self._cache[name] + else: + self.clear_errors() + result = self._get_project(name) + self._cache[name] = result + return result + + def score_url(self, url): + """ + Give an url a score which can be used to choose preferred URLs + for a given project release. + """ + t = urlparse(url) + basename = posixpath.basename(t.path) + compatible = True + is_wheel = basename.endswith('.whl') + is_downloadable = basename.endswith(self.downloadable_extensions) + if is_wheel: + compatible = is_compatible(Wheel(basename), self.wheel_tags) + return (t.scheme == 'https', 'pypi.org' in t.netloc, + is_downloadable, is_wheel, compatible, basename) + + def prefer_url(self, url1, url2): + """ + Choose one of two URLs where both are candidates for distribution + archives for the same version of a distribution (for example, + .tar.gz vs. zip). + + The current implementation favours https:// URLs over http://, archives + from PyPI over those from other locations, wheel compatibility (if a + wheel) and then the archive name. + """ + result = url2 + if url1: + s1 = self.score_url(url1) + s2 = self.score_url(url2) + if s1 > s2: + result = url1 + if result != url2: + logger.debug('Not replacing %r with %r', url1, url2) + else: + logger.debug('Replacing %r with %r', url1, url2) + return result + + def split_filename(self, filename, project_name): + """ + Attempt to split a filename in project name, version and Python version. + """ + return split_filename(filename, project_name) + + def convert_url_to_download_info(self, url, project_name): + """ + See if a URL is a candidate for a download URL for a project (the URL + has typically been scraped from an HTML page). + + If it is, a dictionary is returned with keys "name", "version", + "filename" and "url"; otherwise, None is returned. 
+ """ + def same_project(name1, name2): + return normalize_name(name1) == normalize_name(name2) + + result = None + scheme, netloc, path, params, query, frag = urlparse(url) + if frag.lower().startswith('egg='): # pragma: no cover + logger.debug('%s: version hint in fragment: %r', + project_name, frag) + m = HASHER_HASH.match(frag) + if m: + algo, digest = m.groups() + else: + algo, digest = None, None + origpath = path + if path and path[-1] == '/': # pragma: no cover + path = path[:-1] + if path.endswith('.whl'): + try: + wheel = Wheel(path) + if not is_compatible(wheel, self.wheel_tags): + logger.debug('Wheel not compatible: %s', path) + else: + if project_name is None: + include = True + else: + include = same_project(wheel.name, project_name) + if include: + result = { + 'name': wheel.name, + 'version': wheel.version, + 'filename': wheel.filename, + 'url': urlunparse((scheme, netloc, origpath, + params, query, '')), + 'python-version': ', '.join( + ['.'.join(list(v[2:])) for v in wheel.pyver]), + } + except Exception as e: # pragma: no cover + logger.warning('invalid path for wheel: %s', path) + elif not path.endswith(self.downloadable_extensions): # pragma: no cover + logger.debug('Not downloadable: %s', path) + else: # downloadable extension + path = filename = posixpath.basename(path) + for ext in self.downloadable_extensions: + if path.endswith(ext): + path = path[:-len(ext)] + t = self.split_filename(path, project_name) + if not t: # pragma: no cover + logger.debug('No match for project/version: %s', path) + else: + name, version, pyver = t + if not project_name or same_project(project_name, name): + result = { + 'name': name, + 'version': version, + 'filename': filename, + 'url': urlunparse((scheme, netloc, origpath, + params, query, '')), + #'packagetype': 'sdist', + } + if pyver: # pragma: no cover + result['python-version'] = pyver + break + if result and algo: + result['%s_digest' % algo] = digest + return result + + def _get_digest(self, info): + """ + Get a digest from a dictionary by looking at a "digests" dictionary + or keys of the form 'algo_digest'. + + Returns a 2-tuple (algo, digest) if found, else None. Currently + looks only for SHA256, then MD5. + """ + result = None + if 'digests' in info: + digests = info['digests'] + for algo in ('sha256', 'md5'): + if algo in digests: + result = (algo, digests[algo]) + break + if not result: + for algo in ('sha256', 'md5'): + key = '%s_digest' % algo + if key in info: + result = (algo, info[key]) + break + return result + + def _update_version_data(self, result, info): + """ + Update a result dictionary (the final result from _get_project) with a + dictionary for a specific version, which typically holds information + gleaned from a filename or URL for an archive for the distribution. + """ + name = info.pop('name') + version = info.pop('version') + if version in result: + dist = result[version] + md = dist.metadata + else: + dist = make_dist(name, version, scheme=self.scheme) + md = dist.metadata + dist.digest = digest = self._get_digest(info) + url = info['url'] + result['digests'][url] = digest + if md.source_url != info['url']: + md.source_url = self.prefer_url(md.source_url, url) + result['urls'].setdefault(version, set()).add(url) + dist.locator = self + result[version] = dist + + def locate(self, requirement, prereleases=False): + """ + Find the most recent distribution which matches the given + requirement. 
+ + :param requirement: A requirement of the form 'foo (1.0)' or perhaps + 'foo (>= 1.0, < 2.0, != 1.3)' + :param prereleases: If ``True``, allow pre-release versions + to be located. Otherwise, pre-release versions + are not returned. + :return: A :class:`Distribution` instance, or ``None`` if no such + distribution could be located. + """ + result = None + r = parse_requirement(requirement) + if r is None: # pragma: no cover + raise DistlibException('Not a valid requirement: %r' % requirement) + scheme = get_scheme(self.scheme) + self.matcher = matcher = scheme.matcher(r.requirement) + logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__) + versions = self.get_project(r.name) + if len(versions) > 2: # urls and digests keys are present + # sometimes, versions are invalid + slist = [] + vcls = matcher.version_class + for k in versions: + if k in ('urls', 'digests'): + continue + try: + if not matcher.match(k): + logger.debug('%s did not match %r', matcher, k) + else: + if prereleases or not vcls(k).is_prerelease: + slist.append(k) + else: + logger.debug('skipping pre-release ' + 'version %s of %s', k, matcher.name) + except Exception: # pragma: no cover + logger.warning('error matching %s with %r', matcher, k) + pass # slist.append(k) + if len(slist) > 1: + slist = sorted(slist, key=scheme.key) + if slist: + logger.debug('sorted list: %s', slist) + version = slist[-1] + result = versions[version] + if result: + if r.extras: + result.extras = r.extras + result.download_urls = versions.get('urls', {}).get(version, set()) + d = {} + sd = versions.get('digests', {}) + for url in result.download_urls: + if url in sd: # pragma: no cover + d[url] = sd[url] + result.digests = d + self.matcher = None + return result + + +class PyPIRPCLocator(Locator): + """ + This locator uses XML-RPC to locate distributions. It therefore + cannot be used with simple mirrors (that only mirror file content). + """ + def __init__(self, url, **kwargs): + """ + Initialise an instance. + + :param url: The URL to use for XML-RPC. + :param kwargs: Passed to the superclass constructor. + """ + super(PyPIRPCLocator, self).__init__(**kwargs) + self.base_url = url + self.client = ServerProxy(url, timeout=3.0) + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + return set(self.client.list_packages()) + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + versions = self.client.package_releases(name, True) + for v in versions: + urls = self.client.release_urls(name, v) + data = self.client.release_data(name, v) + metadata = Metadata(scheme=self.scheme) + metadata.name = data['name'] + metadata.version = data['version'] + metadata.license = data.get('license') + metadata.keywords = data.get('keywords', []) + metadata.summary = data.get('summary') + dist = Distribution(metadata) + if urls: + info = urls[0] + metadata.source_url = info['url'] + dist.digest = self._get_digest(info) + dist.locator = self + result[v] = dist + for info in urls: + url = info['url'] + digest = self._get_digest(info) + result['urls'].setdefault(v, set()).add(url) + result['digests'][url] = digest + return result + +class PyPIJSONLocator(Locator): + """ + This locator uses PyPI's JSON interface. It's very limited in functionality + and probably not worth using. 
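The usual entry point is the module-level ``locate`` alias for ``default_locator.locate`` defined later in this module; a network-bound sketch::

    from distlib.locators import locate

    dist = locate('requests (>= 2.0)')   # newest matching version, or None
    if dist is not None:
        print(dist.name, dist.version)
        print(sorted(dist.download_urls))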
+ """ + def __init__(self, url, **kwargs): + super(PyPIJSONLocator, self).__init__(**kwargs) + self.base_url = ensure_slash(url) + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + raise NotImplementedError('Not available from this locator') + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + url = urljoin(self.base_url, '%s/json' % quote(name)) + try: + resp = self.opener.open(url) + data = resp.read().decode() # for now + d = json.loads(data) + md = Metadata(scheme=self.scheme) + data = d['info'] + md.name = data['name'] + md.version = data['version'] + md.license = data.get('license') + md.keywords = data.get('keywords', []) + md.summary = data.get('summary') + dist = Distribution(md) + dist.locator = self + urls = d['urls'] + result[md.version] = dist + for info in d['urls']: + url = info['url'] + dist.download_urls.add(url) + dist.digests[url] = self._get_digest(info) + result['urls'].setdefault(md.version, set()).add(url) + result['digests'][url] = self._get_digest(info) + # Now get other releases + for version, infos in d['releases'].items(): + if version == md.version: + continue # already done + omd = Metadata(scheme=self.scheme) + omd.name = md.name + omd.version = version + odist = Distribution(omd) + odist.locator = self + result[version] = odist + for info in infos: + url = info['url'] + odist.download_urls.add(url) + odist.digests[url] = self._get_digest(info) + result['urls'].setdefault(version, set()).add(url) + result['digests'][url] = self._get_digest(info) +# for info in urls: +# md.source_url = info['url'] +# dist.digest = self._get_digest(info) +# dist.locator = self +# for info in urls: +# url = info['url'] +# result['urls'].setdefault(md.version, set()).add(url) +# result['digests'][url] = self._get_digest(info) + except Exception as e: + self.errors.put(text_type(e)) + logger.exception('JSON fetch failed: %s', e) + return result + + +class Page(object): + """ + This class represents a scraped HTML page. + """ + # The following slightly hairy-looking regex just looks for the contents of + # an anchor link, which has an attribute "href" either immediately preceded + # or immediately followed by a "rel" attribute. The attribute values can be + # declared with double quotes, single quotes or no quotes - which leads to + # the length of the expression. + _href = re.compile(""" +(rel\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*))\\s+)? +href\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*)) +(\\s+rel\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*)))? +""", re.I | re.S | re.X) + _base = re.compile(r"""]+)""", re.I | re.S) + + def __init__(self, data, url): + """ + Initialise an instance with the Unicode page contents and the URL they + came from. + """ + self.data = data + self.base_url = self.url = url + m = self._base.search(self.data) + if m: + self.base_url = m.group(1) + + _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I) + + @cached_property + def links(self): + """ + Return the URLs of all the links on a page together with information + about their "rel" attribute, for determining which ones to treat as + downloads and which ones to queue for further scraping. + """ + def clean(url): + "Tidy up an URL." 
+ scheme, netloc, path, params, query, frag = urlparse(url) + return urlunparse((scheme, netloc, quote(path), + params, query, frag)) + + result = set() + for match in self._href.finditer(self.data): + d = match.groupdict('') + rel = (d['rel1'] or d['rel2'] or d['rel3'] or + d['rel4'] or d['rel5'] or d['rel6']) + url = d['url1'] or d['url2'] or d['url3'] + url = urljoin(self.base_url, url) + url = unescape(url) + url = self._clean_re.sub(lambda m: '%%%2x' % ord(m.group(0)), url) + result.add((url, rel)) + # We sort the result, hoping to bring the most recent versions + # to the front + result = sorted(result, key=lambda t: t[0], reverse=True) + return result + + +class SimpleScrapingLocator(Locator): + """ + A locator which scrapes HTML pages to locate downloads for a distribution. + This runs multiple threads to do the I/O; performance is at least as good + as pip's PackageFinder, which works in an analogous fashion. + """ + + # These are used to deal with various Content-Encoding schemes. + decoders = { + 'deflate': zlib.decompress, + 'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(b)).read(), + 'none': lambda b: b, + } + + def __init__(self, url, timeout=None, num_workers=10, **kwargs): + """ + Initialise an instance. + :param url: The root URL to use for scraping. + :param timeout: The timeout, in seconds, to be applied to requests. + This defaults to ``None`` (no timeout specified). + :param num_workers: The number of worker threads you want to do I/O. + This defaults to 10. + :param kwargs: Passed to the superclass. + """ + super(SimpleScrapingLocator, self).__init__(**kwargs) + self.base_url = ensure_slash(url) + self.timeout = timeout + self._page_cache = {} + self._seen = set() + self._to_fetch = queue.Queue() + self._bad_hosts = set() + self.skip_externals = False + self.num_workers = num_workers + self._lock = threading.RLock() + # See issue #45: we need to be resilient when the locator is used + # in a thread, e.g. with concurrent.futures. We can't use self._lock + # as it is for coordinating our internal threads - the ones created + # in _prepare_threads. + self._gplock = threading.RLock() + self.platform_check = False # See issue #112 + + def _prepare_threads(self): + """ + Threads are created only when get_project is called, and terminate + before it returns. They are there primarily to parallelise I/O (i.e. + fetching web pages). + """ + self._threads = [] + for i in range(self.num_workers): + t = threading.Thread(target=self._fetch) + t.setDaemon(True) + t.start() + self._threads.append(t) + + def _wait_threads(self): + """ + Tell all the threads to terminate (by sending a sentinel value) and + wait for them to do so. + """ + # Note that you need two loops, since you can't say which + # thread will get each sentinel + for t in self._threads: + self._to_fetch.put(None) # sentinel + for t in self._threads: + t.join() + self._threads = [] + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + with self._gplock: + self.result = result + self.project_name = name + url = urljoin(self.base_url, '%s/' % quote(name)) + self._seen.clear() + self._page_cache.clear() + self._prepare_threads() + try: + logger.debug('Queueing %s', url) + self._to_fetch.put(url) + self._to_fetch.join() + finally: + self._wait_threads() + del self.result + return result + + platform_dependent = re.compile(r'\b(linux_(i\d86|x86_64|arm\w+)|' + r'win(32|_amd64)|macosx_?\d+)\b', re.I) + + def _is_platform_dependent(self, url): + """ + Does an URL refer to a platform-specific download?
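A few probes of the ``platform_dependent`` pattern above, checked against the regex as written (note that ``manylinux`` wheel names do not match this older pattern)::

    from distlib.locators import SimpleScrapingLocator

    pd = SimpleScrapingLocator.platform_dependent
    assert pd.search('foo-1.0-cp38-cp38-win_amd64.whl')
    assert pd.search('foo-1.0-cp38-cp38-linux_x86_64.whl')
    assert not pd.search('foo-1.0.tar.gz')
    assert not pd.search('foo-1.0-manylinux1_x86_64.whl')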
+ """ + return self.platform_dependent.search(url) + + def _process_download(self, url): + """ + See if an URL is a suitable download for a project. + + If it is, register information in the result dictionary (for + _get_project) about the specific version it's for. + + Note that the return value isn't actually used other than as a boolean + value. + """ + if self.platform_check and self._is_platform_dependent(url): + info = None + else: + info = self.convert_url_to_download_info(url, self.project_name) + logger.debug('process_download: %s -> %s', url, info) + if info: + with self._lock: # needed because self.result is shared + self._update_version_data(self.result, info) + return info + + def _should_queue(self, link, referrer, rel): + """ + Determine whether a link URL from a referring page and with a + particular "rel" attribute should be queued for scraping. + """ + scheme, netloc, path, _, _, _ = urlparse(link) + if path.endswith(self.source_extensions + self.binary_extensions + + self.excluded_extensions): + result = False + elif self.skip_externals and not link.startswith(self.base_url): + result = False + elif not referrer.startswith(self.base_url): + result = False + elif rel not in ('homepage', 'download'): + result = False + elif scheme not in ('http', 'https', 'ftp'): + result = False + elif self._is_platform_dependent(link): + result = False + else: + host = netloc.split(':', 1)[0] + if host.lower() == 'localhost': + result = False + else: + result = True + logger.debug('should_queue: %s (%s) from %s -> %s', link, rel, + referrer, result) + return result + + def _fetch(self): + """ + Get a URL to fetch from the work queue, get the HTML page, examine its + links for download candidates and candidates for further scraping. + + This is a handy method to run in a thread. + """ + while True: + url = self._to_fetch.get() + try: + if url: + page = self.get_page(url) + if page is None: # e.g. after an error + continue + for link, rel in page.links: + if link not in self._seen: + try: + self._seen.add(link) + if (not self._process_download(link) and + self._should_queue(link, url, rel)): + logger.debug('Queueing %s from %s', link, url) + self._to_fetch.put(link) + except MetadataInvalidError: # e.g. invalid versions + pass + except Exception as e: # pragma: no cover + self.errors.put(text_type(e)) + finally: + # always do this, to avoid hangs :-) + self._to_fetch.task_done() + if not url: + #logger.debug('Sentinel seen, quitting.') + break + + def get_page(self, url): + """ + Get the HTML for an URL, possibly from an in-memory cache. + + XXX TODO Note: this cache is never actually cleared. It's assumed that + the data won't get stale over the lifetime of a locator instance (not + necessarily true for the default_locator). 
+ """ + # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api + scheme, netloc, path, _, _, _ = urlparse(url) + if scheme == 'file' and os.path.isdir(url2pathname(path)): + url = urljoin(ensure_slash(url), 'index.html') + + if url in self._page_cache: + result = self._page_cache[url] + logger.debug('Returning %s from cache: %s', url, result) + else: + host = netloc.split(':', 1)[0] + result = None + if host in self._bad_hosts: + logger.debug('Skipping %s due to bad host %s', url, host) + else: + req = Request(url, headers={'Accept-encoding': 'identity'}) + try: + logger.debug('Fetching %s', url) + resp = self.opener.open(req, timeout=self.timeout) + logger.debug('Fetched %s', url) + headers = resp.info() + content_type = headers.get('Content-Type', '') + if HTML_CONTENT_TYPE.match(content_type): + final_url = resp.geturl() + data = resp.read() + encoding = headers.get('Content-Encoding') + if encoding: + decoder = self.decoders[encoding] # fail if not found + data = decoder(data) + encoding = 'utf-8' + m = CHARSET.search(content_type) + if m: + encoding = m.group(1) + try: + data = data.decode(encoding) + except UnicodeError: # pragma: no cover + data = data.decode('latin-1') # fallback + result = Page(data, final_url) + self._page_cache[final_url] = result + except HTTPError as e: + if e.code != 404: + logger.exception('Fetch failed: %s: %s', url, e) + except URLError as e: # pragma: no cover + logger.exception('Fetch failed: %s: %s', url, e) + with self._lock: + self._bad_hosts.add(host) + except Exception as e: # pragma: no cover + logger.exception('Fetch failed: %s: %s', url, e) + finally: + self._page_cache[url] = result # even if None (failure) + return result + + _distname_re = re.compile(']*>([^<]+)<') + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + result = set() + page = self.get_page(self.base_url) + if not page: + raise DistlibException('Unable to get %s' % self.base_url) + for match in self._distname_re.finditer(page.data): + result.add(match.group(1)) + return result + +class DirectoryLocator(Locator): + """ + This class locates distributions in a directory tree. + """ + + def __init__(self, path, **kwargs): + """ + Initialise an instance. + :param path: The root of the directory tree to search. + :param kwargs: Passed to the superclass constructor, + except for: + * recursive - if True (the default), subdirectories are + recursed into. If False, only the top-level directory + is searched, + """ + self.recursive = kwargs.pop('recursive', True) + super(DirectoryLocator, self).__init__(**kwargs) + path = os.path.abspath(path) + if not os.path.isdir(path): # pragma: no cover + raise DistlibException('Not a directory: %r' % path) + self.base_dir = path + + def should_include(self, filename, parent): + """ + Should a filename be considered as a candidate for a distribution + archive? As well as the filename, the directory which contains it + is provided, though not used by the current implementation. 
+ """ + return filename.endswith(self.downloadable_extensions) + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + for root, dirs, files in os.walk(self.base_dir): + for fn in files: + if self.should_include(fn, root): + fn = os.path.join(root, fn) + url = urlunparse(('file', '', + pathname2url(os.path.abspath(fn)), + '', '', '')) + info = self.convert_url_to_download_info(url, name) + if info: + self._update_version_data(result, info) + if not self.recursive: + break + return result + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + result = set() + for root, dirs, files in os.walk(self.base_dir): + for fn in files: + if self.should_include(fn, root): + fn = os.path.join(root, fn) + url = urlunparse(('file', '', + pathname2url(os.path.abspath(fn)), + '', '', '')) + info = self.convert_url_to_download_info(url, None) + if info: + result.add(info['name']) + if not self.recursive: + break + return result + +class JSONLocator(Locator): + """ + This locator uses special extended metadata (not available on PyPI) and is + the basis of performant dependency resolution in distlib. Other locators + require archive downloads before dependencies can be determined! As you + might imagine, that can be slow. + """ + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + raise NotImplementedError('Not available from this locator') + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + data = get_project_data(name) + if data: + for info in data.get('files', []): + if info['ptype'] != 'sdist' or info['pyversion'] != 'source': + continue + # We don't store summary in project metadata as it makes + # the data bigger for no benefit during dependency + # resolution + dist = make_dist(data['name'], info['version'], + summary=data.get('summary', + 'Placeholder for summary'), + scheme=self.scheme) + md = dist.metadata + md.source_url = info['url'] + # TODO SHA256 digest + if 'digest' in info and info['digest']: + dist.digest = ('md5', info['digest']) + md.dependencies = info.get('requirements', {}) + dist.exports = info.get('exports', {}) + result[dist.version] = dist + result['urls'].setdefault(dist.version, set()).add(info['url']) + return result + +class DistPathLocator(Locator): + """ + This locator finds installed distributions in a path. It can be useful for + adding to an :class:`AggregatingLocator`. + """ + def __init__(self, distpath, **kwargs): + """ + Initialise an instance. + + :param distpath: A :class:`DistributionPath` instance to search. + """ + super(DistPathLocator, self).__init__(**kwargs) + assert isinstance(distpath, DistributionPath) + self.distpath = distpath + + def _get_project(self, name): + dist = self.distpath.get_distribution(name) + if dist is None: + result = {'urls': {}, 'digests': {}} + else: + result = { + dist.version: dist, + 'urls': {dist.version: set([dist.source_url])}, + 'digests': {dist.version: set([None])} + } + return result + + +class AggregatingLocator(Locator): + """ + This class allows you to chain and/or merge a list of locators. + """ + def __init__(self, *locators, **kwargs): + """ + Initialise an instance. + + :param locators: The list of locators to search. + :param kwargs: Passed to the superclass constructor, + except for: + * merge - if False (the default), the first successful + search from any of the locators is returned. If True, + the results from all locators are merged (this can be + slow). 
+ """
+ self.merge = kwargs.pop('merge', False)
+ self.locators = locators
+ super(AggregatingLocator, self).__init__(**kwargs)
+
+ def clear_cache(self):
+ super(AggregatingLocator, self).clear_cache()
+ for locator in self.locators:
+ locator.clear_cache()
+
+ def _set_scheme(self, value):
+ self._scheme = value
+ for locator in self.locators:
+ locator.scheme = value
+
+ scheme = property(Locator.scheme.fget, _set_scheme)
+
+ def _get_project(self, name):
+ result = {}
+ for locator in self.locators:
+ d = locator.get_project(name)
+ if d:
+ if self.merge:
+ files = result.get('urls', {})
+ digests = result.get('digests', {})
+ # next line could overwrite result['urls'], result['digests']
+ result.update(d)
+ df = result.get('urls')
+ if files and df:
+ for k, v in files.items():
+ if k in df:
+ df[k] |= v
+ else:
+ df[k] = v
+ dd = result.get('digests')
+ if digests and dd:
+ dd.update(digests)
+ else:
+ # See issue #18. If any dists are found and we're looking
+ # for specific constraints, we only return something if
+ # a match is found. For example, if a DirectoryLocator
+ # returns just foo (1.0) while we're looking for
+ # foo (>= 2.0), we'll pretend there was nothing there so
+ # that subsequent locators can be queried. Otherwise we
+ # would just return foo (1.0) which would then lead to a
+ # failure to find foo (>= 2.0), because other locators
+ # weren't searched. Note that this only matters when
+ # merge=False.
+ if self.matcher is None:
+ found = True
+ else:
+ found = False
+ for k in d:
+ if self.matcher.match(k):
+ found = True
+ break
+ if found:
+ result = d
+ break
+ return result
+
+ def get_distribution_names(self):
+ """
+ Return all the distribution names known to this locator.
+ """
+ result = set()
+ for locator in self.locators:
+ try:
+ result |= locator.get_distribution_names()
+ except NotImplementedError:
+ pass
+ return result
+
+
+# We use a legacy scheme simply because most of the dists on PyPI use legacy
+# versions which don't conform to PEP 426 / PEP 440.
+default_locator = AggregatingLocator(
+ JSONLocator(),
+ SimpleScrapingLocator('https://pypi.org/simple/',
+ timeout=3.0),
+ scheme='legacy')
+
+locate = default_locator.locate
+
+NAME_VERSION_RE = re.compile(r'(?P<name>[\w-]+)\s*'
+ r'\(\s*(==\s*)?(?P<ver>[^)]+)\)$')
+
+class DependencyFinder(object):
+ """
+ Locate dependencies for distributions.
+ """
+
+ def __init__(self, locator=None):
+ """
+ Initialise an instance, using the specified locator
+ to locate distributions.
+ """
+ self.locator = locator or default_locator
+ self.scheme = get_scheme(self.locator.scheme)
+
+ def add_distribution(self, dist):
+ """
+ Add a distribution to the finder. This will update internal information
+ about who provides what.
+ :param dist: The distribution to add.
+ """
+ logger.debug('adding distribution %s', dist)
+ name = dist.key
+ self.dists_by_name[name] = dist
+ self.dists[(name, dist.version)] = dist
+ for p in dist.provides:
+ name, version = parse_name_and_version(p)
+ logger.debug('Add to provided: %s, %s, %s', name, version, dist)
+ self.provided.setdefault(name, set()).add((version, dist))
+
+ def remove_distribution(self, dist):
+ """
+ Remove a distribution from the finder. This will update internal
+ information about who provides what.
+ :param dist: The distribution to remove.
+ """
+ logger.debug('removing distribution %s', dist)
+ name = dist.key
+ del self.dists_by_name[name]
+ del self.dists[(name, dist.version)]
+ for p in dist.provides:
+ name, version = parse_name_and_version(p)
+ logger.debug('Remove from provided: %s, %s, %s', name, version, dist)
+ s = self.provided[name]
+ s.remove((version, dist))
+ if not s:
+ del self.provided[name]
+
+ def get_matcher(self, reqt):
+ """
+ Get a version matcher for a requirement.
+ :param reqt: The requirement
+ :type reqt: str
+ :return: A version matcher (an instance of
+ :class:`distlib.version.Matcher`).
+ """
+ try:
+ matcher = self.scheme.matcher(reqt)
+ except UnsupportedVersionError: # pragma: no cover
+ # XXX compat-mode if cannot read the version
+ name = reqt.split()[0]
+ matcher = self.scheme.matcher(name)
+ return matcher
+
+ def find_providers(self, reqt):
+ """
+ Find the distributions which can fulfill a requirement.
+
+ :param reqt: The requirement.
+ :type reqt: str
+ :return: A set of distributions which can fulfill the requirement.
+ """
+ matcher = self.get_matcher(reqt)
+ name = matcher.key # case-insensitive
+ result = set()
+ provided = self.provided
+ if name in provided:
+ for version, provider in provided[name]:
+ try:
+ match = matcher.match(version)
+ except UnsupportedVersionError:
+ match = False
+
+ if match:
+ result.add(provider)
+ break
+ return result
+
+ def try_to_replace(self, provider, other, problems):
+ """
+ Attempt to replace one provider with another. This is typically used
+ when resolving dependencies from multiple sources, e.g. A requires
+ (B >= 1.0) while C requires (B >= 1.1).
+
+ For successful replacement, ``provider`` must meet all the requirements
+ which ``other`` fulfills.
+
+ :param provider: The provider we are trying to replace with.
+ :param other: The provider we're trying to replace.
+ :param problems: If False is returned, this will contain what
+ problems prevented replacement. This is currently
+ a tuple of the literal string 'cantreplace',
+ ``provider``, ``other`` and the set of requirements
+ that ``provider`` couldn't fulfill.
+ :return: True if we can replace ``other`` with ``provider``, else
+ False.
+ """
+ rlist = self.reqts[other]
+ unmatched = set()
+ for s in rlist:
+ matcher = self.get_matcher(s)
+ if not matcher.match(provider.version):
+ unmatched.add(s)
+ if unmatched:
+ # can't replace other with provider
+ problems.add(('cantreplace', provider, other,
+ frozenset(unmatched)))
+ result = False
+ else:
+ # can replace other with provider
+ self.remove_distribution(other)
+ del self.reqts[other]
+ for s in rlist:
+ self.reqts.setdefault(provider, set()).add(s)
+ self.add_distribution(provider)
+ result = True
+ return result
+
+ def find(self, requirement, meta_extras=None, prereleases=False):
+ """
+ Find a distribution and all distributions it depends on.
+
+ :param requirement: The requirement specifying the distribution to
+ find, or a Distribution instance.
+ :param meta_extras: A list of meta extras such as :test:, :build: and
+ so on.
+ :param prereleases: If ``True``, allow pre-release versions to be
+ returned - otherwise, don't return prereleases
+ unless they're all that's available.
+
+ Return a set of :class:`Distribution` instances and a set of
+ problems.
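+
+ A typical call (the project name and version are illustrative)::
+
+ >>> finder = DependencyFinder()
+ >>> dists, problems = finder.find('requests (== 2.25.1)')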
+ + The distributions returned should be such that they have the + :attr:`required` attribute set to ``True`` if they were + from the ``requirement`` passed to ``find()``, and they have the + :attr:`build_time_dependency` attribute set to ``True`` unless they + are post-installation dependencies of the ``requirement``. + + The problems should be a tuple consisting of the string + ``'unsatisfied'`` and the requirement which couldn't be satisfied + by any distribution known to the locator. + """ + + self.provided = {} + self.dists = {} + self.dists_by_name = {} + self.reqts = {} + + meta_extras = set(meta_extras or []) + if ':*:' in meta_extras: + meta_extras.remove(':*:') + # :meta: and :run: are implicitly included + meta_extras |= set([':test:', ':build:', ':dev:']) + + if isinstance(requirement, Distribution): + dist = odist = requirement + logger.debug('passed %s as requirement', odist) + else: + dist = odist = self.locator.locate(requirement, + prereleases=prereleases) + if dist is None: + raise DistlibException('Unable to locate %r' % requirement) + logger.debug('located %s', odist) + dist.requested = True + problems = set() + todo = set([dist]) + install_dists = set([odist]) + while todo: + dist = todo.pop() + name = dist.key # case-insensitive + if name not in self.dists_by_name: + self.add_distribution(dist) + else: + #import pdb; pdb.set_trace() + other = self.dists_by_name[name] + if other != dist: + self.try_to_replace(dist, other, problems) + + ireqts = dist.run_requires | dist.meta_requires + sreqts = dist.build_requires + ereqts = set() + if meta_extras and dist in install_dists: + for key in ('test', 'build', 'dev'): + e = ':%s:' % key + if e in meta_extras: + ereqts |= getattr(dist, '%s_requires' % key) + all_reqts = ireqts | sreqts | ereqts + for r in all_reqts: + providers = self.find_providers(r) + if not providers: + logger.debug('No providers found for %r', r) + provider = self.locator.locate(r, prereleases=prereleases) + # If no provider is found and we didn't consider + # prereleases, consider them now. + if provider is None and not prereleases: + provider = self.locator.locate(r, prereleases=True) + if provider is None: + logger.debug('Cannot satisfy %r', r) + problems.add(('unsatisfied', r)) + else: + n, v = provider.key, provider.version + if (n, v) not in self.dists: + todo.add(provider) + providers.add(provider) + if r in ireqts and dist in install_dists: + install_dists.add(provider) + logger.debug('Adding %s to install_dists', + provider.name_and_version) + for p in providers: + name = p.key + if name not in self.dists_by_name: + self.reqts.setdefault(p, set()).add(r) + else: + other = self.dists_by_name[name] + if other != p: + # see if other can be replaced by p + self.try_to_replace(p, other, problems) + + dists = set(self.dists.values()) + for dist in dists: + dist.build_time_dependency = dist not in install_dists + if dist.build_time_dependency: + logger.debug('%s is a build-time dependency only.', + dist.name_and_version) + logger.debug('find done for %s', odist) + return dists, problems diff --git a/venv/Lib/site-packages/distlib/manifest.py b/venv/Lib/site-packages/distlib/manifest.py new file mode 100644 index 00000000..ca0fe442 --- /dev/null +++ b/venv/Lib/site-packages/distlib/manifest.py @@ -0,0 +1,393 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2013 Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +""" +Class representing the list of files in a distribution. 
+
+Equivalent to distutils.filelist, but fixes some problems.
+"""
+import fnmatch
+import logging
+import os
+import re
+import sys
+
+from . import DistlibException
+from .compat import fsdecode
+from .util import convert_path
+
+
+__all__ = ['Manifest']
+
+logger = logging.getLogger(__name__)
+
+# a \ followed by some spaces + EOL
+_COLLAPSE_PATTERN = re.compile('\\\\w*\n', re.M)
+_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S)
+
+#
+# Due to the different results returned by fnmatch.translate, we need
+# to do slightly different processing for Python 2.7 and 3.2 ... this needed
+# to be brought in for Python 3.6 onwards.
+#
+_PYTHON_VERSION = sys.version_info[:2]
+
+class Manifest(object):
+ """A list of files built by exploring the filesystem and filtered by
+ applying various patterns to what we find there.
+ """
+
+ def __init__(self, base=None):
+ """
+ Initialise an instance.
+
+ :param base: The base directory to explore under.
+ """
+ self.base = os.path.abspath(os.path.normpath(base or os.getcwd()))
+ self.prefix = self.base + os.sep
+ self.allfiles = None
+ self.files = set()
+
+ #
+ # Public API
+ #
+
+ def findall(self):
+ """Find all files under the base and set ``allfiles`` to the absolute
+ pathnames of files found.
+ """
+ from stat import S_ISREG, S_ISDIR, S_ISLNK
+
+ self.allfiles = allfiles = []
+ root = self.base
+ stack = [root]
+ pop = stack.pop
+ push = stack.append
+
+ while stack:
+ root = pop()
+ names = os.listdir(root)
+
+ for name in names:
+ fullname = os.path.join(root, name)
+
+ # Avoid excess stat calls -- just one will do, thank you!
+ stat = os.stat(fullname)
+ mode = stat.st_mode
+ if S_ISREG(mode):
+ allfiles.append(fsdecode(fullname))
+ elif S_ISDIR(mode) and not S_ISLNK(mode):
+ push(fullname)
+
+ def add(self, item):
+ """
+ Add a file to the manifest.
+
+ :param item: The pathname to add. This can be relative to the base.
+ """
+ if not item.startswith(self.prefix):
+ item = os.path.join(self.base, item)
+ self.files.add(os.path.normpath(item))
+
+ def add_many(self, items):
+ """
+ Add a list of files to the manifest.
+
+ :param items: The pathnames to add. These can be relative to the base.
+ """
+ for item in items:
+ self.add(item)
+
+ def sorted(self, wantdirs=False):
+ """
+ Return sorted files in directory order
+ """
+
+ def add_dir(dirs, d):
+ dirs.add(d)
+ logger.debug('add_dir added %s', d)
+ if d != self.base:
+ parent, _ = os.path.split(d)
+ assert parent not in ('', '/')
+ add_dir(dirs, parent)
+
+ result = set(self.files) # make a copy!
+ if wantdirs:
+ dirs = set()
+ for f in result:
+ add_dir(dirs, os.path.dirname(f))
+ result |= dirs
+ return [os.path.join(*path_tuple) for path_tuple in
+ sorted(os.path.split(path) for path in result)]
+
+ def clear(self):
+ """Clear all collected files."""
+ self.files = set()
+ self.allfiles = []
+
+ def process_directive(self, directive):
+ """
+ Process a directive which either adds some files from ``allfiles`` to
+ ``files``, or removes some files from ``files``.
+
+ :param directive: The directive to process. This should be in a format
+ compatible with distutils ``MANIFEST.in`` files:
+
+ http://docs.python.org/distutils/sourcedist.html#commands
+ """
+ # Parse the line: split it up, make sure the right number of words
+ # is there, and return the relevant words. 'action' is always
+ # defined: it's the first word of the line. Which of the other
+ # three are defined depends on the action; it'll be either
+ # patterns, (dir and patterns), or (dirpattern).
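+ # For example (illustrative directives):
+ # 'include *.txt' -> action 'include', patterns ['*.txt']
+ # 'recursive-include pkg *.py' -> action 'recursive-include',
+ # dir 'pkg', patterns ['*.py']
+ # 'prune tests' -> action 'prune', dirpattern 'tests'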
+ action, patterns, thedir, dirpattern = self._parse_directive(directive)
+
+ # OK, now we know that the action is valid and we have the
+ # right number of words on the line for that action -- so we
+ # can proceed with minimal error-checking.
+ if action == 'include':
+ for pattern in patterns:
+ if not self._include_pattern(pattern, anchor=True):
+ logger.warning('no files found matching %r', pattern)
+
+ elif action == 'exclude':
+ for pattern in patterns:
+ found = self._exclude_pattern(pattern, anchor=True)
+ #if not found:
+ # logger.warning('no previously-included files '
+ # 'found matching %r', pattern)
+
+ elif action == 'global-include':
+ for pattern in patterns:
+ if not self._include_pattern(pattern, anchor=False):
+ logger.warning('no files found matching %r '
+ 'anywhere in distribution', pattern)
+
+ elif action == 'global-exclude':
+ for pattern in patterns:
+ found = self._exclude_pattern(pattern, anchor=False)
+ #if not found:
+ # logger.warning('no previously-included files '
+ # 'matching %r found anywhere in '
+ # 'distribution', pattern)
+
+ elif action == 'recursive-include':
+ for pattern in patterns:
+ if not self._include_pattern(pattern, prefix=thedir):
+ logger.warning('no files found matching %r '
+ 'under directory %r', pattern, thedir)
+
+ elif action == 'recursive-exclude':
+ for pattern in patterns:
+ found = self._exclude_pattern(pattern, prefix=thedir)
+ #if not found:
+ # logger.warning('no previously-included files '
+ # 'matching %r found under directory %r',
+ # pattern, thedir)
+
+ elif action == 'graft':
+ if not self._include_pattern(None, prefix=dirpattern):
+ logger.warning('no directories found matching %r',
+ dirpattern)
+
+ elif action == 'prune':
+ if not self._exclude_pattern(None, prefix=dirpattern):
+ logger.warning('no previously-included directories found '
+ 'matching %r', dirpattern)
+ else: # pragma: no cover
+ # This should never happen, as it should be caught in
+ # _parse_template_line
+ raise DistlibException(
+ 'invalid action %r' % action)
+
+ #
+ # Private API
+ #
+
+ def _parse_directive(self, directive):
+ """
+ Validate a directive.
+ :param directive: The directive to validate.
+ :return: A tuple of action, patterns, thedir, dir_patterns
+ """
+ words = directive.split()
+ if len(words) == 1 and words[0] not in ('include', 'exclude',
+ 'global-include',
+ 'global-exclude',
+ 'recursive-include',
+ 'recursive-exclude',
+ 'graft', 'prune'):
+ # no action given, let's use the default 'include'
+ words.insert(0, 'include')
+
+ action = words[0]
+ patterns = thedir = dir_pattern = None
+
+ if action in ('include', 'exclude',
+ 'global-include', 'global-exclude'):
+ if len(words) < 2:
+ raise DistlibException(
+ '%r expects <pattern1> <pattern2> ...' % action)
+
+ patterns = [convert_path(word) for word in words[1:]]
+
+ elif action in ('recursive-include', 'recursive-exclude'):
+ if len(words) < 3:
+ raise DistlibException(
+ '%r expects <dir> <pattern1> <pattern2> ...' % action)
+
+ thedir = convert_path(words[1])
+ patterns = [convert_path(word) for word in words[2:]]
+
+ elif action in ('graft', 'prune'):
+ if len(words) != 2:
+ raise DistlibException(
+ '%r expects a single <dir_pattern>' % action)
+
+ dir_pattern = convert_path(words[1])
+
+ else:
+ raise DistlibException('unknown action %r' % action)
+
+ return action, patterns, thedir, dir_pattern
+
+ def _include_pattern(self, pattern, anchor=True, prefix=None,
+ is_regex=False):
+ """Select strings (presumably filenames) from 'self.allfiles' that
+ match 'pattern', a Unix-style wildcard (glob) pattern.
+ + Patterns are not quite the same as implemented by the 'fnmatch' + module: '*' and '?' match non-special characters, where "special" + is platform-dependent: slash on Unix; colon, slash, and backslash on + DOS/Windows; and colon on Mac OS. + + If 'anchor' is true (the default), then the pattern match is more + stringent: "*.py" will match "foo.py" but not "foo/bar.py". If + 'anchor' is false, both of these will match. + + If 'prefix' is supplied, then only filenames starting with 'prefix' + (itself a pattern) and ending with 'pattern', with anything in between + them, will match. 'anchor' is ignored in this case. + + If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and + 'pattern' is assumed to be either a string containing a regex or a + regex object -- no translation is done, the regex is just compiled + and used as-is. + + Selected strings will be added to self.files. + + Return True if files are found. + """ + # XXX docstring lying about what the special chars are? + found = False + pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex) + + # delayed loading of allfiles list + if self.allfiles is None: + self.findall() + + for name in self.allfiles: + if pattern_re.search(name): + self.files.add(name) + found = True + return found + + def _exclude_pattern(self, pattern, anchor=True, prefix=None, + is_regex=False): + """Remove strings (presumably filenames) from 'files' that match + 'pattern'. + + Other parameters are the same as for 'include_pattern()', above. + The list 'self.files' is modified in place. Return True if files are + found. + + This API is public to allow e.g. exclusion of SCM subdirs, e.g. when + packaging source distributions + """ + found = False + pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex) + for f in list(self.files): + if pattern_re.search(f): + self.files.remove(f) + found = True + return found + + def _translate_pattern(self, pattern, anchor=True, prefix=None, + is_regex=False): + """Translate a shell-like wildcard pattern to a compiled regular + expression. + + Return the compiled regex. If 'is_regex' true, + then 'pattern' is directly compiled to a regex (if it's a string) + or just returned as-is (assumes it's a regex object). 
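+
+ For example (illustrative; assumes a base of '/base' on POSIX)::
+
+ >>> m = Manifest('/base')
+ >>> bool(m._translate_pattern('*.py').search('/base/foo.py'))
+ True
+ >>> bool(m._translate_pattern('*.py').search('/base/pkg/foo.py'))
+ False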
+ """
+ if is_regex:
+ if isinstance(pattern, str):
+ return re.compile(pattern)
+ else:
+ return pattern
+
+ if _PYTHON_VERSION > (3, 2):
+ # ditch start and end characters
+ start, _, end = self._glob_to_re('_').partition('_')
+
+ if pattern:
+ pattern_re = self._glob_to_re(pattern)
+ if _PYTHON_VERSION > (3, 2):
+ assert pattern_re.startswith(start) and pattern_re.endswith(end)
+ else:
+ pattern_re = ''
+
+ base = re.escape(os.path.join(self.base, ''))
+ if prefix is not None:
+ # ditch end of pattern character
+ if _PYTHON_VERSION <= (3, 2):
+ empty_pattern = self._glob_to_re('')
+ prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)]
+ else:
+ prefix_re = self._glob_to_re(prefix)
+ assert prefix_re.startswith(start) and prefix_re.endswith(end)
+ prefix_re = prefix_re[len(start): len(prefix_re) - len(end)]
+ sep = os.sep
+ if os.sep == '\\':
+ sep = r'\\'
+ if _PYTHON_VERSION <= (3, 2):
+ pattern_re = '^' + base + sep.join((prefix_re,
+ '.*' + pattern_re))
+ else:
+ pattern_re = pattern_re[len(start): len(pattern_re) - len(end)]
+ pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep,
+ pattern_re, end)
+ else: # no prefix -- respect anchor flag
+ if anchor:
+ if _PYTHON_VERSION <= (3, 2):
+ pattern_re = '^' + base + pattern_re
+ else:
+ pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):])
+
+ return re.compile(pattern_re)
+
+ def _glob_to_re(self, pattern):
+ """Translate a shell-like glob pattern to a regular expression.
+
+ Return a string containing the regex. Differs from
+ 'fnmatch.translate()' in that '*' does not match "special characters"
+ (which are platform-specific).
+ """
+ pattern_re = fnmatch.translate(pattern)
+
+ # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
+ # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
+ # and by extension they shouldn't match such "special characters" under
+ # any OS. So change all non-escaped dots in the RE to match any
+ # character except the special characters (currently: just os.sep).
+ sep = os.sep
+ if os.sep == '\\':
+ # we're using a regex to manipulate a regex, so we need
+ # to escape the backslash twice
+ sep = r'\\\\'
+ escaped = r'\1[^%s]' % sep
+ pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
+ return pattern_re
diff --git a/venv/Lib/site-packages/distlib/markers.py b/venv/Lib/site-packages/distlib/markers.py
new file mode 100644
--- /dev/null
+++ b/venv/Lib/site-packages/distlib/markers.py
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2017 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""
+Parser for the environment markers micro-language defined in PEP 508.
+"""
+
+# Note: In PEP 345, the micro-language was Python compatible, so the ast
+# module could be used to parse it. However, PEP 508 introduced operators
+# such as ~= and === which aren't in Python, necessitating a different
+# approach.
+
+import os
+import platform
+import sys
+
+from .compat import string_types
+from .util import in_venv, parse_marker
+
+__all__ = ['interpret']
+
+
+def _is_literal(o):
+ if not isinstance(o, string_types):
+ return False
+ return o[0] in '\'"'
+
+
+class Evaluator(object):
+ """
+ This class is used to evaluate marker expressions.
+ """
+
+ operations = {
+ '==': lambda x, y: x == y,
+ '===': lambda x, y: x == y,
+ '~=': lambda x, y: x == y or x > y,
+ '!=': lambda x, y: x != y,
+ '<': lambda x, y: x < y,
+ '<=': lambda x, y: x == y or x < y,
+ '>': lambda x, y: x > y,
+ '>=': lambda x, y: x == y or x > y,
+ 'and': lambda x, y: x and y,
+ 'or': lambda x, y: x or y,
+ 'in': lambda x, y: x in y,
+ 'not in': lambda x, y: x not in y,
+ }
+
+ def evaluate(self, expr, context):
+ """
+ Evaluate a marker expression returned by the :func:`parse_requirement`
+ function in the specified context.
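+
+ For example, with an already-parsed expression (the values shown
+ are illustrative)::
+
+ >>> e = Evaluator()
+ >>> e.evaluate({'op': '==', 'lhs': 'os_name', 'rhs': '"posix"'},
+ ... {'os_name': 'posix'})
+ True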
+ """ + if isinstance(expr, string_types): + if expr[0] in '\'"': + result = expr[1:-1] + else: + if expr not in context: + raise SyntaxError('unknown variable: %s' % expr) + result = context[expr] + else: + assert isinstance(expr, dict) + op = expr['op'] + if op not in self.operations: + raise NotImplementedError('op not implemented: %s' % op) + elhs = expr['lhs'] + erhs = expr['rhs'] + if _is_literal(expr['lhs']) and _is_literal(expr['rhs']): + raise SyntaxError('invalid comparison: %s %s %s' % (elhs, op, erhs)) + + lhs = self.evaluate(elhs, context) + rhs = self.evaluate(erhs, context) + result = self.operations[op](lhs, rhs) + return result + +def default_context(): + def format_full_version(info): + version = '%s.%s.%s' % (info.major, info.minor, info.micro) + kind = info.releaselevel + if kind != 'final': + version += kind[0] + str(info.serial) + return version + + if hasattr(sys, 'implementation'): + implementation_version = format_full_version(sys.implementation.version) + implementation_name = sys.implementation.name + else: + implementation_version = '0' + implementation_name = '' + + result = { + 'implementation_name': implementation_name, + 'implementation_version': implementation_version, + 'os_name': os.name, + 'platform_machine': platform.machine(), + 'platform_python_implementation': platform.python_implementation(), + 'platform_release': platform.release(), + 'platform_system': platform.system(), + 'platform_version': platform.version(), + 'platform_in_venv': str(in_venv()), + 'python_full_version': platform.python_version(), + 'python_version': platform.python_version()[:3], + 'sys_platform': sys.platform, + } + return result + +DEFAULT_CONTEXT = default_context() +del default_context + +evaluator = Evaluator() + +def interpret(marker, execution_context=None): + """ + Interpret a marker and return a result depending on environment. + + :param marker: The marker to interpret. + :type marker: str + :param execution_context: The context used for name lookup. + :type execution_context: mapping + """ + try: + expr, rest = parse_marker(marker) + except Exception as e: + raise SyntaxError('Unable to interpret marker syntax: %s: %s' % (marker, e)) + if rest and rest[0] != '#': + raise SyntaxError('unexpected trailing data in marker: %s: %s' % (marker, rest)) + context = dict(DEFAULT_CONTEXT) + if execution_context: + context.update(execution_context) + return evaluator.evaluate(expr, context) diff --git a/venv/Lib/site-packages/distlib/metadata.py b/venv/Lib/site-packages/distlib/metadata.py new file mode 100644 index 00000000..6d5e2360 --- /dev/null +++ b/venv/Lib/site-packages/distlib/metadata.py @@ -0,0 +1,1056 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""Implementation of the Metadata for Python packages PEPs. + +Supports all metadata formats (1.0, 1.1, 1.2, 1.3/2.1 and withdrawn 2.0). +""" +from __future__ import unicode_literals + +import codecs +from email import message_from_file +import json +import logging +import re + + +from . 
import DistlibException, __version__ +from .compat import StringIO, string_types, text_type +from .markers import interpret +from .util import extract_by_key, get_extras +from .version import get_scheme, PEP440_VERSION_RE + +logger = logging.getLogger(__name__) + + +class MetadataMissingError(DistlibException): + """A required metadata is missing""" + + +class MetadataConflictError(DistlibException): + """Attempt to read or write metadata fields that are conflictual.""" + + +class MetadataUnrecognizedVersionError(DistlibException): + """Unknown metadata version number.""" + + +class MetadataInvalidError(DistlibException): + """A metadata value is invalid""" + +# public API of this module +__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION'] + +# Encoding used for the PKG-INFO files +PKG_INFO_ENCODING = 'utf-8' + +# preferred version. Hopefully will be changed +# to 1.2 once PEP 345 is supported everywhere +PKG_INFO_PREFERRED_VERSION = '1.1' + +_LINE_PREFIX_1_2 = re.compile('\n \\|') +_LINE_PREFIX_PRE_1_2 = re.compile('\n ') +_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'License') + +_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'License', 'Classifier', 'Download-URL', 'Obsoletes', + 'Provides', 'Requires') + +_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier', + 'Download-URL') + +_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'Maintainer', 'Maintainer-email', 'License', + 'Classifier', 'Download-URL', 'Obsoletes-Dist', + 'Project-URL', 'Provides-Dist', 'Requires-Dist', + 'Requires-Python', 'Requires-External') + +_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python', + 'Obsoletes-Dist', 'Requires-External', 'Maintainer', + 'Maintainer-email', 'Project-URL') + +_426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'Maintainer', 'Maintainer-email', 'License', + 'Classifier', 'Download-URL', 'Obsoletes-Dist', + 'Project-URL', 'Provides-Dist', 'Requires-Dist', + 'Requires-Python', 'Requires-External', 'Private-Version', + 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension', + 'Provides-Extra') + +_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', + 'Setup-Requires-Dist', 'Extension') + +# See issue #106: Sometimes 'Requires' and 'Provides' occur wrongly in +# the metadata. Include them in the tuple literal below to allow them +# (for now). 
+_566_FIELDS = _426_FIELDS + ('Description-Content-Type', + 'Requires', 'Provides') + +_566_MARKERS = ('Description-Content-Type',) + +_ALL_FIELDS = set() +_ALL_FIELDS.update(_241_FIELDS) +_ALL_FIELDS.update(_314_FIELDS) +_ALL_FIELDS.update(_345_FIELDS) +_ALL_FIELDS.update(_426_FIELDS) +_ALL_FIELDS.update(_566_FIELDS) + +EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''') + + +def _version2fieldlist(version): + if version == '1.0': + return _241_FIELDS + elif version == '1.1': + return _314_FIELDS + elif version == '1.2': + return _345_FIELDS + elif version in ('1.3', '2.1'): + return _345_FIELDS + _566_FIELDS + elif version == '2.0': + return _426_FIELDS + raise MetadataUnrecognizedVersionError(version) + + +def _best_version(fields): + """Detect the best version depending on the fields used.""" + def _has_marker(keys, markers): + for marker in markers: + if marker in keys: + return True + return False + + keys = [] + for key, value in fields.items(): + if value in ([], 'UNKNOWN', None): + continue + keys.append(key) + + possible_versions = ['1.0', '1.1', '1.2', '1.3', '2.0', '2.1'] + + # first let's try to see if a field is not part of one of the version + for key in keys: + if key not in _241_FIELDS and '1.0' in possible_versions: + possible_versions.remove('1.0') + logger.debug('Removed 1.0 due to %s', key) + if key not in _314_FIELDS and '1.1' in possible_versions: + possible_versions.remove('1.1') + logger.debug('Removed 1.1 due to %s', key) + if key not in _345_FIELDS and '1.2' in possible_versions: + possible_versions.remove('1.2') + logger.debug('Removed 1.2 due to %s', key) + if key not in _566_FIELDS and '1.3' in possible_versions: + possible_versions.remove('1.3') + logger.debug('Removed 1.3 due to %s', key) + if key not in _566_FIELDS and '2.1' in possible_versions: + if key != 'Description': # In 2.1, description allowed after headers + possible_versions.remove('2.1') + logger.debug('Removed 2.1 due to %s', key) + if key not in _426_FIELDS and '2.0' in possible_versions: + possible_versions.remove('2.0') + logger.debug('Removed 2.0 due to %s', key) + + # possible_version contains qualified versions + if len(possible_versions) == 1: + return possible_versions[0] # found ! 
+ elif len(possible_versions) == 0: + logger.debug('Out of options - unknown metadata set: %s', fields) + raise MetadataConflictError('Unknown metadata set') + + # let's see if one unique marker is found + is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS) + is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS) + is_2_1 = '2.1' in possible_versions and _has_marker(keys, _566_MARKERS) + is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS) + if int(is_1_1) + int(is_1_2) + int(is_2_1) + int(is_2_0) > 1: + raise MetadataConflictError('You used incompatible 1.1/1.2/2.0/2.1 fields') + + # we have the choice, 1.0, or 1.2, or 2.0 + # - 1.0 has a broken Summary field but works with all tools + # - 1.1 is to avoid + # - 1.2 fixes Summary but has little adoption + # - 2.0 adds more features and is very new + if not is_1_1 and not is_1_2 and not is_2_1 and not is_2_0: + # we couldn't find any specific marker + if PKG_INFO_PREFERRED_VERSION in possible_versions: + return PKG_INFO_PREFERRED_VERSION + if is_1_1: + return '1.1' + if is_1_2: + return '1.2' + if is_2_1: + return '2.1' + + return '2.0' + +# This follows the rules about transforming keys as described in +# https://www.python.org/dev/peps/pep-0566/#id17 +_ATTR2FIELD = { + name.lower().replace("-", "_"): name for name in _ALL_FIELDS +} +_FIELD2ATTR = {field: attr for attr, field in _ATTR2FIELD.items()} + +_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist') +_VERSIONS_FIELDS = ('Requires-Python',) +_VERSION_FIELDS = ('Version',) +_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes', + 'Requires', 'Provides', 'Obsoletes-Dist', + 'Provides-Dist', 'Requires-Dist', 'Requires-External', + 'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist', + 'Provides-Extra', 'Extension') +_LISTTUPLEFIELDS = ('Project-URL',) + +_ELEMENTSFIELD = ('Keywords',) + +_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description') + +_MISSING = object() + +_FILESAFE = re.compile('[^A-Za-z0-9.]+') + + +def _get_name_and_version(name, version, for_filename=False): + """Return the distribution name with version. + + If for_filename is true, return a filename-escaped form.""" + if for_filename: + # For both name and version any runs of non-alphanumeric or '.' + # characters are replaced with a single '-'. Additionally any + # spaces in the version string become '.' + name = _FILESAFE.sub('-', name) + version = _FILESAFE.sub('-', version.replace(' ', '.')) + return '%s-%s' % (name, version) + + +class LegacyMetadata(object): + """The legacy metadata of a release. + + Supports versions 1.0, 1.1, 1.2, 2.0 and 1.3/2.1 (auto-detected). 
You can + instantiate the class with one of these arguments (or none): + - *path*, the path to a metadata file + - *fileobj* give a file-like object with metadata as content + - *mapping* is a dict-like object + - *scheme* is a version scheme name + """ + # TODO document the mapping API and UNKNOWN default key + + def __init__(self, path=None, fileobj=None, mapping=None, + scheme='default'): + if [path, fileobj, mapping].count(None) < 2: + raise TypeError('path, fileobj and mapping are exclusive') + self._fields = {} + self.requires_files = [] + self._dependencies = None + self.scheme = scheme + if path is not None: + self.read(path) + elif fileobj is not None: + self.read_file(fileobj) + elif mapping is not None: + self.update(mapping) + self.set_metadata_version() + + def set_metadata_version(self): + self._fields['Metadata-Version'] = _best_version(self._fields) + + def _write_field(self, fileobj, name, value): + fileobj.write('%s: %s\n' % (name, value)) + + def __getitem__(self, name): + return self.get(name) + + def __setitem__(self, name, value): + return self.set(name, value) + + def __delitem__(self, name): + field_name = self._convert_name(name) + try: + del self._fields[field_name] + except KeyError: + raise KeyError(name) + + def __contains__(self, name): + return (name in self._fields or + self._convert_name(name) in self._fields) + + def _convert_name(self, name): + if name in _ALL_FIELDS: + return name + name = name.replace('-', '_').lower() + return _ATTR2FIELD.get(name, name) + + def _default_value(self, name): + if name in _LISTFIELDS or name in _ELEMENTSFIELD: + return [] + return 'UNKNOWN' + + def _remove_line_prefix(self, value): + if self.metadata_version in ('1.0', '1.1'): + return _LINE_PREFIX_PRE_1_2.sub('\n', value) + else: + return _LINE_PREFIX_1_2.sub('\n', value) + + def __getattr__(self, name): + if name in _ATTR2FIELD: + return self[name] + raise AttributeError(name) + + # + # Public API + # + +# dependencies = property(_get_dependencies, _set_dependencies) + + def get_fullname(self, filesafe=False): + """Return the distribution name with version. 
+ + If filesafe is true, return a filename-escaped form.""" + return _get_name_and_version(self['Name'], self['Version'], filesafe) + + def is_field(self, name): + """return True if name is a valid metadata key""" + name = self._convert_name(name) + return name in _ALL_FIELDS + + def is_multi_field(self, name): + name = self._convert_name(name) + return name in _LISTFIELDS + + def read(self, filepath): + """Read the metadata values from a file path.""" + fp = codecs.open(filepath, 'r', encoding='utf-8') + try: + self.read_file(fp) + finally: + fp.close() + + def read_file(self, fileob): + """Read the metadata values from a file object.""" + msg = message_from_file(fileob) + self._fields['Metadata-Version'] = msg['metadata-version'] + + # When reading, get all the fields we can + for field in _ALL_FIELDS: + if field not in msg: + continue + if field in _LISTFIELDS: + # we can have multiple lines + values = msg.get_all(field) + if field in _LISTTUPLEFIELDS and values is not None: + values = [tuple(value.split(',')) for value in values] + self.set(field, values) + else: + # single line + value = msg[field] + if value is not None and value != 'UNKNOWN': + self.set(field, value) + + # PEP 566 specifies that the body be used for the description, if + # available + body = msg.get_payload() + self["Description"] = body if body else self["Description"] + # logger.debug('Attempting to set metadata for %s', self) + # self.set_metadata_version() + + def write(self, filepath, skip_unknown=False): + """Write the metadata fields to filepath.""" + fp = codecs.open(filepath, 'w', encoding='utf-8') + try: + self.write_file(fp, skip_unknown) + finally: + fp.close() + + def write_file(self, fileobject, skip_unknown=False): + """Write the PKG-INFO format data to a file object.""" + self.set_metadata_version() + + for field in _version2fieldlist(self['Metadata-Version']): + values = self.get(field) + if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']): + continue + if field in _ELEMENTSFIELD: + self._write_field(fileobject, field, ','.join(values)) + continue + if field not in _LISTFIELDS: + if field == 'Description': + if self.metadata_version in ('1.0', '1.1'): + values = values.replace('\n', '\n ') + else: + values = values.replace('\n', '\n |') + values = [values] + + if field in _LISTTUPLEFIELDS: + values = [','.join(value) for value in values] + + for value in values: + self._write_field(fileobject, field, value) + + def update(self, other=None, **kwargs): + """Set metadata values from the given iterable `other` and kwargs. + + Behavior is like `dict.update`: If `other` has a ``keys`` method, + they are looped over and ``self[key]`` is assigned ``other[key]``. + Else, ``other`` is an iterable of ``(key, value)`` iterables. + + Keys that don't match a metadata field or that have an empty value are + dropped. 
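+
+ For example (field values are illustrative)::
+
+ >>> md = LegacyMetadata()
+ >>> md.update({'name': 'FooApp', 'version': '1.0'})
+ >>> md['Name']
+ 'FooApp'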
+ """ + def _set(key, value): + if key in _ATTR2FIELD and value: + self.set(self._convert_name(key), value) + + if not other: + # other is None or empty container + pass + elif hasattr(other, 'keys'): + for k in other.keys(): + _set(k, other[k]) + else: + for k, v in other: + _set(k, v) + + if kwargs: + for k, v in kwargs.items(): + _set(k, v) + + def set(self, name, value): + """Control then set a metadata field.""" + name = self._convert_name(name) + + if ((name in _ELEMENTSFIELD or name == 'Platform') and + not isinstance(value, (list, tuple))): + if isinstance(value, string_types): + value = [v.strip() for v in value.split(',')] + else: + value = [] + elif (name in _LISTFIELDS and + not isinstance(value, (list, tuple))): + if isinstance(value, string_types): + value = [value] + else: + value = [] + + if logger.isEnabledFor(logging.WARNING): + project_name = self['Name'] + + scheme = get_scheme(self.scheme) + if name in _PREDICATE_FIELDS and value is not None: + for v in value: + # check that the values are valid + if not scheme.is_valid_matcher(v.split(';')[0]): + logger.warning( + "'%s': '%s' is not valid (field '%s')", + project_name, v, name) + # FIXME this rejects UNKNOWN, is that right? + elif name in _VERSIONS_FIELDS and value is not None: + if not scheme.is_valid_constraint_list(value): + logger.warning("'%s': '%s' is not a valid version (field '%s')", + project_name, value, name) + elif name in _VERSION_FIELDS and value is not None: + if not scheme.is_valid_version(value): + logger.warning("'%s': '%s' is not a valid version (field '%s')", + project_name, value, name) + + if name in _UNICODEFIELDS: + if name == 'Description': + value = self._remove_line_prefix(value) + + self._fields[name] = value + + def get(self, name, default=_MISSING): + """Get a metadata field.""" + name = self._convert_name(name) + if name not in self._fields: + if default is _MISSING: + default = self._default_value(name) + return default + if name in _UNICODEFIELDS: + value = self._fields[name] + return value + elif name in _LISTFIELDS: + value = self._fields[name] + if value is None: + return [] + res = [] + for val in value: + if name not in _LISTTUPLEFIELDS: + res.append(val) + else: + # That's for Project-URL + res.append((val[0], val[1])) + return res + + elif name in _ELEMENTSFIELD: + value = self._fields[name] + if isinstance(value, string_types): + return value.split(',') + return self._fields[name] + + def check(self, strict=False): + """Check if the metadata is compliant. 
If strict is True then raise if + no Name or Version are provided""" + self.set_metadata_version() + + # XXX should check the versions (if the file was loaded) + missing, warnings = [], [] + + for attr in ('Name', 'Version'): # required by PEP 345 + if attr not in self: + missing.append(attr) + + if strict and missing != []: + msg = 'missing required metadata: %s' % ', '.join(missing) + raise MetadataMissingError(msg) + + for attr in ('Home-page', 'Author'): + if attr not in self: + missing.append(attr) + + # checking metadata 1.2 (XXX needs to check 1.1, 1.0) + if self['Metadata-Version'] != '1.2': + return missing, warnings + + scheme = get_scheme(self.scheme) + + def are_valid_constraints(value): + for v in value: + if not scheme.is_valid_matcher(v.split(';')[0]): + return False + return True + + for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints), + (_VERSIONS_FIELDS, + scheme.is_valid_constraint_list), + (_VERSION_FIELDS, + scheme.is_valid_version)): + for field in fields: + value = self.get(field, None) + if value is not None and not controller(value): + warnings.append("Wrong value for '%s': %s" % (field, value)) + + return missing, warnings + + def todict(self, skip_missing=False): + """Return fields as a dict. + + Field names will be converted to use the underscore-lowercase style + instead of hyphen-mixed case (i.e. home_page instead of Home-page). + This is as per https://www.python.org/dev/peps/pep-0566/#id17. + """ + self.set_metadata_version() + + fields = _version2fieldlist(self['Metadata-Version']) + + data = {} + + for field_name in fields: + if not skip_missing or field_name in self._fields: + key = _FIELD2ATTR[field_name] + if key != 'project_url': + data[key] = self[field_name] + else: + data[key] = [','.join(u) for u in self[field_name]] + + return data + + def add_requirements(self, requirements): + if self['Metadata-Version'] == '1.1': + # we can't have 1.1 metadata *and* Setuptools requires + for field in ('Obsoletes', 'Requires', 'Provides'): + if field in self: + del self[field] + self['Requires-Dist'] += requirements + + # Mapping API + # TODO could add iter* variants + + def keys(self): + return list(_version2fieldlist(self['Metadata-Version'])) + + def __iter__(self): + for key in self.keys(): + yield key + + def values(self): + return [self[key] for key in self.keys()] + + def items(self): + return [(key, self[key]) for key in self.keys()] + + def __repr__(self): + return '<%s %s %s>' % (self.__class__.__name__, self.name, + self.version) + + +METADATA_FILENAME = 'pydist.json' +WHEEL_METADATA_FILENAME = 'metadata.json' +LEGACY_METADATA_FILENAME = 'METADATA' + + +class Metadata(object): + """ + The metadata of a release. This implementation uses 2.0 (JSON) + metadata where possible. If not possible, it wraps a LegacyMetadata + instance which handles the key-value metadata format. 
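+
+ For example, constructing an instance from a 2.0 mapping (the field
+ values are illustrative)::
+
+ >>> md = Metadata(mapping={'metadata_version': '2.0',
+ ... 'name': 'FooApp', 'version': '1.0',
+ ... 'summary': 'An example'})
+ >>> md.name
+ 'FooApp'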
+ """ + + METADATA_VERSION_MATCHER = re.compile(r'^\d+(\.\d+)*$') + + NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I) + + VERSION_MATCHER = PEP440_VERSION_RE + + SUMMARY_MATCHER = re.compile('.{1,2047}') + + METADATA_VERSION = '2.0' + + GENERATOR = 'distlib (%s)' % __version__ + + MANDATORY_KEYS = { + 'name': (), + 'version': (), + 'summary': ('legacy',), + } + + INDEX_KEYS = ('name version license summary description author ' + 'author_email keywords platform home_page classifiers ' + 'download_url') + + DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires ' + 'dev_requires provides meta_requires obsoleted_by ' + 'supports_environments') + + SYNTAX_VALIDATORS = { + 'metadata_version': (METADATA_VERSION_MATCHER, ()), + 'name': (NAME_MATCHER, ('legacy',)), + 'version': (VERSION_MATCHER, ('legacy',)), + 'summary': (SUMMARY_MATCHER, ('legacy',)), + } + + __slots__ = ('_legacy', '_data', 'scheme') + + def __init__(self, path=None, fileobj=None, mapping=None, + scheme='default'): + if [path, fileobj, mapping].count(None) < 2: + raise TypeError('path, fileobj and mapping are exclusive') + self._legacy = None + self._data = None + self.scheme = scheme + #import pdb; pdb.set_trace() + if mapping is not None: + try: + self._validate_mapping(mapping, scheme) + self._data = mapping + except MetadataUnrecognizedVersionError: + self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme) + self.validate() + else: + data = None + if path: + with open(path, 'rb') as f: + data = f.read() + elif fileobj: + data = fileobj.read() + if data is None: + # Initialised with no args - to be added + self._data = { + 'metadata_version': self.METADATA_VERSION, + 'generator': self.GENERATOR, + } + else: + if not isinstance(data, text_type): + data = data.decode('utf-8') + try: + self._data = json.loads(data) + self._validate_mapping(self._data, scheme) + except ValueError: + # Note: MetadataUnrecognizedVersionError does not + # inherit from ValueError (it's a DistlibException, + # which should not inherit from ValueError). 
+ # The ValueError comes from the json.load - if that + # succeeds and we get a validation error, we want + # that to propagate + self._legacy = LegacyMetadata(fileobj=StringIO(data), + scheme=scheme) + self.validate() + + common_keys = set(('name', 'version', 'license', 'keywords', 'summary')) + + none_list = (None, list) + none_dict = (None, dict) + + mapped_keys = { + 'run_requires': ('Requires-Dist', list), + 'build_requires': ('Setup-Requires-Dist', list), + 'dev_requires': none_list, + 'test_requires': none_list, + 'meta_requires': none_list, + 'extras': ('Provides-Extra', list), + 'modules': none_list, + 'namespaces': none_list, + 'exports': none_dict, + 'commands': none_dict, + 'classifiers': ('Classifier', list), + 'source_url': ('Download-URL', None), + 'metadata_version': ('Metadata-Version', None), + } + + del none_list, none_dict + + def __getattribute__(self, key): + common = object.__getattribute__(self, 'common_keys') + mapped = object.__getattribute__(self, 'mapped_keys') + if key in mapped: + lk, maker = mapped[key] + if self._legacy: + if lk is None: + result = None if maker is None else maker() + else: + result = self._legacy.get(lk) + else: + value = None if maker is None else maker() + if key not in ('commands', 'exports', 'modules', 'namespaces', + 'classifiers'): + result = self._data.get(key, value) + else: + # special cases for PEP 459 + sentinel = object() + result = sentinel + d = self._data.get('extensions') + if d: + if key == 'commands': + result = d.get('python.commands', value) + elif key == 'classifiers': + d = d.get('python.details') + if d: + result = d.get(key, value) + else: + d = d.get('python.exports') + if not d: + d = self._data.get('python.exports') + if d: + result = d.get(key, value) + if result is sentinel: + result = value + elif key not in common: + result = object.__getattribute__(self, key) + elif self._legacy: + result = self._legacy.get(key) + else: + result = self._data.get(key) + return result + + def _validate_value(self, key, value, scheme=None): + if key in self.SYNTAX_VALIDATORS: + pattern, exclusions = self.SYNTAX_VALIDATORS[key] + if (scheme or self.scheme) not in exclusions: + m = pattern.match(value) + if not m: + raise MetadataInvalidError("'%s' is an invalid value for " + "the '%s' property" % (value, + key)) + + def __setattr__(self, key, value): + self._validate_value(key, value) + common = object.__getattribute__(self, 'common_keys') + mapped = object.__getattribute__(self, 'mapped_keys') + if key in mapped: + lk, _ = mapped[key] + if self._legacy: + if lk is None: + raise NotImplementedError + self._legacy[lk] = value + elif key not in ('commands', 'exports', 'modules', 'namespaces', + 'classifiers'): + self._data[key] = value + else: + # special cases for PEP 459 + d = self._data.setdefault('extensions', {}) + if key == 'commands': + d['python.commands'] = value + elif key == 'classifiers': + d = d.setdefault('python.details', {}) + d[key] = value + else: + d = d.setdefault('python.exports', {}) + d[key] = value + elif key not in common: + object.__setattr__(self, key, value) + else: + if key == 'keywords': + if isinstance(value, string_types): + value = value.strip() + if value: + value = value.split() + else: + value = [] + if self._legacy: + self._legacy[key] = value + else: + self._data[key] = value + + @property + def name_and_version(self): + return _get_name_and_version(self.name, self.version, True) + + @property + def provides(self): + if self._legacy: + result = self._legacy['Provides-Dist'] + else: + result = 
self._data.setdefault('provides', []) + s = '%s (%s)' % (self.name, self.version) + if s not in result: + result.append(s) + return result + + @provides.setter + def provides(self, value): + if self._legacy: + self._legacy['Provides-Dist'] = value + else: + self._data['provides'] = value + + def get_requirements(self, reqts, extras=None, env=None): + """ + Base method to get dependencies, given a set of extras + to satisfy and an optional environment context. + :param reqts: A list of sometimes-wanted dependencies, + perhaps dependent on extras and environment. + :param extras: A list of optional components being requested. + :param env: An optional environment for marker evaluation. + """ + if self._legacy: + result = reqts + else: + result = [] + extras = get_extras(extras or [], self.extras) + for d in reqts: + if 'extra' not in d and 'environment' not in d: + # unconditional + include = True + else: + if 'extra' not in d: + # Not extra-dependent - only environment-dependent + include = True + else: + include = d.get('extra') in extras + if include: + # Not excluded because of extras, check environment + marker = d.get('environment') + if marker: + include = interpret(marker, env) + if include: + result.extend(d['requires']) + for key in ('build', 'dev', 'test'): + e = ':%s:' % key + if e in extras: + extras.remove(e) + # A recursive call, but it should terminate since 'test' + # has been removed from the extras + reqts = self._data.get('%s_requires' % key, []) + result.extend(self.get_requirements(reqts, extras=extras, + env=env)) + return result + + @property + def dictionary(self): + if self._legacy: + return self._from_legacy() + return self._data + + @property + def dependencies(self): + if self._legacy: + raise NotImplementedError + else: + return extract_by_key(self._data, self.DEPENDENCY_KEYS) + + @dependencies.setter + def dependencies(self, value): + if self._legacy: + raise NotImplementedError + else: + self._data.update(value) + + def _validate_mapping(self, mapping, scheme): + if mapping.get('metadata_version') != self.METADATA_VERSION: + raise MetadataUnrecognizedVersionError() + missing = [] + for key, exclusions in self.MANDATORY_KEYS.items(): + if key not in mapping: + if scheme not in exclusions: + missing.append(key) + if missing: + msg = 'Missing metadata items: %s' % ', '.join(missing) + raise MetadataMissingError(msg) + for k, v in mapping.items(): + self._validate_value(k, v, scheme) + + def validate(self): + if self._legacy: + missing, warnings = self._legacy.check(True) + if missing or warnings: + logger.warning('Metadata: missing: %s, warnings: %s', + missing, warnings) + else: + self._validate_mapping(self._data, self.scheme) + + def todict(self): + if self._legacy: + return self._legacy.todict(True) + else: + result = extract_by_key(self._data, self.INDEX_KEYS) + return result + + def _from_legacy(self): + assert self._legacy and not self._data + result = { + 'metadata_version': self.METADATA_VERSION, + 'generator': self.GENERATOR, + } + lmd = self._legacy.todict(True) # skip missing ones + for k in ('name', 'version', 'license', 'summary', 'description', + 'classifier'): + if k in lmd: + if k == 'classifier': + nk = 'classifiers' + else: + nk = k + result[nk] = lmd[k] + kw = lmd.get('Keywords', []) + if kw == ['']: + kw = [] + result['keywords'] = kw + keys = (('requires_dist', 'run_requires'), + ('setup_requires_dist', 'build_requires')) + for ok, nk in keys: + if ok in lmd and lmd[ok]: + result[nk] = [{'requires': lmd[ok]}] + result['provides'] = 
self.provides + author = {} + maintainer = {} + return result + + LEGACY_MAPPING = { + 'name': 'Name', + 'version': 'Version', + ('extensions', 'python.details', 'license'): 'License', + 'summary': 'Summary', + 'description': 'Description', + ('extensions', 'python.project', 'project_urls', 'Home'): 'Home-page', + ('extensions', 'python.project', 'contacts', 0, 'name'): 'Author', + ('extensions', 'python.project', 'contacts', 0, 'email'): 'Author-email', + 'source_url': 'Download-URL', + ('extensions', 'python.details', 'classifiers'): 'Classifier', + } + + def _to_legacy(self): + def process_entries(entries): + reqts = set() + for e in entries: + extra = e.get('extra') + env = e.get('environment') + rlist = e['requires'] + for r in rlist: + if not env and not extra: + reqts.add(r) + else: + marker = '' + if extra: + marker = 'extra == "%s"' % extra + if env: + if marker: + marker = '(%s) and %s' % (env, marker) + else: + marker = env + reqts.add(';'.join((r, marker))) + return reqts + + assert self._data and not self._legacy + result = LegacyMetadata() + nmd = self._data + # import pdb; pdb.set_trace() + for nk, ok in self.LEGACY_MAPPING.items(): + if not isinstance(nk, tuple): + if nk in nmd: + result[ok] = nmd[nk] + else: + d = nmd + found = True + for k in nk: + try: + d = d[k] + except (KeyError, IndexError): + found = False + break + if found: + result[ok] = d + r1 = process_entries(self.run_requires + self.meta_requires) + r2 = process_entries(self.build_requires + self.dev_requires) + if self.extras: + result['Provides-Extra'] = sorted(self.extras) + result['Requires-Dist'] = sorted(r1) + result['Setup-Requires-Dist'] = sorted(r2) + # TODO: any other fields wanted + return result + + def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True): + if [path, fileobj].count(None) != 1: + raise ValueError('Exactly one of path and fileobj is needed') + self.validate() + if legacy: + if self._legacy: + legacy_md = self._legacy + else: + legacy_md = self._to_legacy() + if path: + legacy_md.write(path, skip_unknown=skip_unknown) + else: + legacy_md.write_file(fileobj, skip_unknown=skip_unknown) + else: + if self._legacy: + d = self._from_legacy() + else: + d = self._data + if fileobj: + json.dump(d, fileobj, ensure_ascii=True, indent=2, + sort_keys=True) + else: + with codecs.open(path, 'w', 'utf-8') as f: + json.dump(d, f, ensure_ascii=True, indent=2, + sort_keys=True) + + def add_requirements(self, requirements): + if self._legacy: + self._legacy.add_requirements(requirements) + else: + run_requires = self._data.setdefault('run_requires', []) + always = None + for entry in run_requires: + if 'environment' not in entry and 'extra' not in entry: + always = entry + break + if always is None: + always = { 'requires': requirements } + run_requires.insert(0, always) + else: + rset = set(always['requires']) | set(requirements) + always['requires'] = sorted(rset) + + def __repr__(self): + name = self.name or '(no name)' + version = self.version or 'no version' + return '<%s %s %s (%s)>' % (self.__class__.__name__, + self.metadata_version, name, version) diff --git a/venv/Lib/site-packages/distlib/resources.py b/venv/Lib/site-packages/distlib/resources.py new file mode 100644 index 00000000..18840167 --- /dev/null +++ b/venv/Lib/site-packages/distlib/resources.py @@ -0,0 +1,355 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2017 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. 
+# +from __future__ import unicode_literals + +import bisect +import io +import logging +import os +import pkgutil +import shutil +import sys +import types +import zipimport + +from . import DistlibException +from .util import cached_property, get_cache_base, path_to_cache_dir, Cache + +logger = logging.getLogger(__name__) + + +cache = None # created when needed + + +class ResourceCache(Cache): + def __init__(self, base=None): + if base is None: + # Use native string to avoid issues on 2.x: see Python #20140. + base = os.path.join(get_cache_base(), str('resource-cache')) + super(ResourceCache, self).__init__(base) + + def is_stale(self, resource, path): + """ + Is the cache stale for the given resource? + + :param resource: The :class:`Resource` being cached. + :param path: The path of the resource in the cache. + :return: True if the cache is stale. + """ + # Cache invalidation is a hard problem :-) + return True + + def get(self, resource): + """ + Get a resource into the cache, + + :param resource: A :class:`Resource` instance. + :return: The pathname of the resource in the cache. + """ + prefix, path = resource.finder.get_cache_info(resource) + if prefix is None: + result = path + else: + result = os.path.join(self.base, self.prefix_to_dir(prefix), path) + dirname = os.path.dirname(result) + if not os.path.isdir(dirname): + os.makedirs(dirname) + if not os.path.exists(result): + stale = True + else: + stale = self.is_stale(resource, path) + if stale: + # write the bytes of the resource to the cache location + with open(result, 'wb') as f: + f.write(resource.bytes) + return result + + +class ResourceBase(object): + def __init__(self, finder, name): + self.finder = finder + self.name = name + + +class Resource(ResourceBase): + """ + A class representing an in-package resource, such as a data file. This is + not normally instantiated by user code, but rather by a + :class:`ResourceFinder` which manages the resource. + """ + is_container = False # Backwards compatibility + + def as_stream(self): + """ + Get the resource as a stream. + + This is not a property to make it obvious that it returns a new stream + each time. + """ + return self.finder.get_stream(self) + + @cached_property + def file_path(self): + global cache + if cache is None: + cache = ResourceCache() + return cache.get(self) + + @cached_property + def bytes(self): + return self.finder.get_bytes(self) + + @cached_property + def size(self): + return self.finder.get_size(self) + + +class ResourceContainer(ResourceBase): + is_container = True # Backwards compatibility + + @cached_property + def resources(self): + return self.finder.get_resources(self) + + +class ResourceFinder(object): + """ + Resource finder for file system resources. 
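+
+ Instances are normally obtained via the module-level :func:`finder`
+ function rather than constructed directly (the package name here is
+ illustrative)::
+
+ >>> f = finder('mypackage')
+ >>> r = f.find('data/config.json')
+ >>> data = r.bytes if r else None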
+ """ + + if sys.platform.startswith('java'): + skipped_extensions = ('.pyc', '.pyo', '.class') + else: + skipped_extensions = ('.pyc', '.pyo') + + def __init__(self, module): + self.module = module + self.loader = getattr(module, '__loader__', None) + self.base = os.path.dirname(getattr(module, '__file__', '')) + + def _adjust_path(self, path): + return os.path.realpath(path) + + def _make_path(self, resource_name): + # Issue #50: need to preserve type of path on Python 2.x + # like os.path._get_sep + if isinstance(resource_name, bytes): # should only happen on 2.x + sep = b'/' + else: + sep = '/' + parts = resource_name.split(sep) + parts.insert(0, self.base) + result = os.path.join(*parts) + return self._adjust_path(result) + + def _find(self, path): + return os.path.exists(path) + + def get_cache_info(self, resource): + return None, resource.path + + def find(self, resource_name): + path = self._make_path(resource_name) + if not self._find(path): + result = None + else: + if self._is_directory(path): + result = ResourceContainer(self, resource_name) + else: + result = Resource(self, resource_name) + result.path = path + return result + + def get_stream(self, resource): + return open(resource.path, 'rb') + + def get_bytes(self, resource): + with open(resource.path, 'rb') as f: + return f.read() + + def get_size(self, resource): + return os.path.getsize(resource.path) + + def get_resources(self, resource): + def allowed(f): + return (f != '__pycache__' and not + f.endswith(self.skipped_extensions)) + return set([f for f in os.listdir(resource.path) if allowed(f)]) + + def is_container(self, resource): + return self._is_directory(resource.path) + + _is_directory = staticmethod(os.path.isdir) + + def iterator(self, resource_name): + resource = self.find(resource_name) + if resource is not None: + todo = [resource] + while todo: + resource = todo.pop(0) + yield resource + if resource.is_container: + rname = resource.name + for name in resource.resources: + if not rname: + new_name = name + else: + new_name = '/'.join([rname, name]) + child = self.find(new_name) + if child.is_container: + todo.append(child) + else: + yield child + + +class ZipResourceFinder(ResourceFinder): + """ + Resource finder for resources in .zip files. 
+ """ + def __init__(self, module): + super(ZipResourceFinder, self).__init__(module) + archive = self.loader.archive + self.prefix_len = 1 + len(archive) + # PyPy doesn't have a _files attr on zipimporter, and you can't set one + if hasattr(self.loader, '_files'): + self._files = self.loader._files + else: + self._files = zipimport._zip_directory_cache[archive] + self.index = sorted(self._files) + + def _adjust_path(self, path): + return path + + def _find(self, path): + path = path[self.prefix_len:] + if path in self._files: + result = True + else: + if path and path[-1] != os.sep: + path = path + os.sep + i = bisect.bisect(self.index, path) + try: + result = self.index[i].startswith(path) + except IndexError: + result = False + if not result: + logger.debug('_find failed: %r %r', path, self.loader.prefix) + else: + logger.debug('_find worked: %r %r', path, self.loader.prefix) + return result + + def get_cache_info(self, resource): + prefix = self.loader.archive + path = resource.path[1 + len(prefix):] + return prefix, path + + def get_bytes(self, resource): + return self.loader.get_data(resource.path) + + def get_stream(self, resource): + return io.BytesIO(self.get_bytes(resource)) + + def get_size(self, resource): + path = resource.path[self.prefix_len:] + return self._files[path][3] + + def get_resources(self, resource): + path = resource.path[self.prefix_len:] + if path and path[-1] != os.sep: + path += os.sep + plen = len(path) + result = set() + i = bisect.bisect(self.index, path) + while i < len(self.index): + if not self.index[i].startswith(path): + break + s = self.index[i][plen:] + result.add(s.split(os.sep, 1)[0]) # only immediate children + i += 1 + return result + + def _is_directory(self, path): + path = path[self.prefix_len:] + if path and path[-1] != os.sep: + path += os.sep + i = bisect.bisect(self.index, path) + try: + result = self.index[i].startswith(path) + except IndexError: + result = False + return result + +_finder_registry = { + type(None): ResourceFinder, + zipimport.zipimporter: ZipResourceFinder +} + +try: + # In Python 3.6, _frozen_importlib -> _frozen_importlib_external + try: + import _frozen_importlib_external as _fi + except ImportError: + import _frozen_importlib as _fi + _finder_registry[_fi.SourceFileLoader] = ResourceFinder + _finder_registry[_fi.FileFinder] = ResourceFinder + del _fi +except (ImportError, AttributeError): + pass + + +def register_finder(loader, finder_maker): + _finder_registry[type(loader)] = finder_maker + +_finder_cache = {} + + +def finder(package): + """ + Return a resource finder for a package. + :param package: The name of the package. + :return: A :class:`ResourceFinder` instance for the package. + """ + if package in _finder_cache: + result = _finder_cache[package] + else: + if package not in sys.modules: + __import__(package) + module = sys.modules[package] + path = getattr(module, '__path__', None) + if path is None: + raise DistlibException('You cannot get a finder for a module, ' + 'only for a package') + loader = getattr(module, '__loader__', None) + finder_maker = _finder_registry.get(type(loader)) + if finder_maker is None: + raise DistlibException('Unable to locate finder for %r' % package) + result = finder_maker(module) + _finder_cache[package] = result + return result + + +_dummy_module = types.ModuleType(str('__dummy__')) + + +def finder_for_path(path): + """ + Return a resource finder for a path, which should represent a container. + + :param path: The path. 
+ :return: A :class:`ResourceFinder` instance for the path. + """ + result = None + # calls any path hooks, gets importer into cache + pkgutil.get_importer(path) + loader = sys.path_importer_cache.get(path) + finder = _finder_registry.get(type(loader)) + if finder: + module = _dummy_module + module.__file__ = os.path.join(path, '') + module.__loader__ = loader + result = finder(module) + return result diff --git a/venv/Lib/site-packages/distlib/scripts.py b/venv/Lib/site-packages/distlib/scripts.py new file mode 100644 index 00000000..03f8f21e --- /dev/null +++ b/venv/Lib/site-packages/distlib/scripts.py @@ -0,0 +1,419 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2015 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from io import BytesIO +import logging +import os +import re +import struct +import sys + +from .compat import sysconfig, detect_encoding, ZipFile +from .resources import finder +from .util import (FileOperator, get_export_entry, convert_path, + get_executable, in_venv) + +logger = logging.getLogger(__name__) + +_DEFAULT_MANIFEST = ''' + + + + + + + + + + + + +'''.strip() + +# check if Python is called on the first line with this expression +FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$') +SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*- +import re +import sys +from %(module)s import %(import_name)s +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(%(func)s()) +''' + + +def enquote_executable(executable): + if ' ' in executable: + # make sure we quote only the executable in case of env + # for example /usr/bin/env "/dir with spaces/bin/jython" + # instead of "/usr/bin/env /dir with spaces/bin/jython" + # otherwise whole + if executable.startswith('/usr/bin/env '): + env, _executable = executable.split(' ', 1) + if ' ' in _executable and not _executable.startswith('"'): + executable = '%s "%s"' % (env, _executable) + else: + if not executable.startswith('"'): + executable = '"%s"' % executable + return executable + +# Keep the old name around (for now), as there is at least one project using it! +_enquote_executable = enquote_executable + +class ScriptMaker(object): + """ + A class to copy or create scripts from source scripts or callable + specifications. + """ + script_template = SCRIPT_TEMPLATE + + executable = None # for shebangs + + def __init__(self, source_dir, target_dir, add_launchers=True, + dry_run=False, fileop=None): + self.source_dir = source_dir + self.target_dir = target_dir + self.add_launchers = add_launchers + self.force = False + self.clobber = False + # It only makes sense to set mode bits on POSIX. + self.set_mode = (os.name == 'posix') or (os.name == 'java' and + os._name == 'posix') + self.variants = set(('', 'X.Y')) + self._fileop = fileop or FileOperator(dry_run) + + self._is_nt = os.name == 'nt' or ( + os.name == 'java' and os._name == 'nt') + self.version_info = sys.version_info + + def _get_alternate_executable(self, executable, options): + if options.get('gui', False) and self._is_nt: # pragma: no cover + dn, fn = os.path.split(executable) + fn = fn.replace('python', 'pythonw') + executable = os.path.join(dn, fn) + return executable + + if sys.platform.startswith('java'): # pragma: no cover + def _is_shell(self, executable): + """ + Determine if the specified executable is a script + (contains a #! line) + """ + try: + with open(executable) as fp: + return fp.read(2) == '#!' 
+ except (OSError, IOError): + logger.warning('Failed to open %s', executable) + return False + + def _fix_jython_executable(self, executable): + if self._is_shell(executable): + # Workaround for Jython is not needed on Linux systems. + import java + + if java.lang.System.getProperty('os.name') == 'Linux': + return executable + elif executable.lower().endswith('jython.exe'): + # Use wrapper exe for Jython on Windows + return executable + return '/usr/bin/env %s' % executable + + def _build_shebang(self, executable, post_interp): + """ + Build a shebang line. In the simple case (on Windows, or a shebang line + which is not too long or contains spaces) use a simple formulation for + the shebang. Otherwise, use /bin/sh as the executable, with a contrived + shebang which allows the script to run either under Python or sh, using + suitable quoting. Thanks to Harald Nordgren for his input. + + See also: http://www.in-ulm.de/~mascheck/various/shebang/#length + https://hg.mozilla.org/mozilla-central/file/tip/mach + """ + if os.name != 'posix': + simple_shebang = True + else: + # Add 3 for '#!' prefix and newline suffix. + shebang_length = len(executable) + len(post_interp) + 3 + if sys.platform == 'darwin': + max_shebang_length = 512 + else: + max_shebang_length = 127 + simple_shebang = ((b' ' not in executable) and + (shebang_length <= max_shebang_length)) + + if simple_shebang: + result = b'#!' + executable + post_interp + b'\n' + else: + result = b'#!/bin/sh\n' + result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n' + result += b"' '''" + return result + + def _get_shebang(self, encoding, post_interp=b'', options=None): + enquote = True + if self.executable: + executable = self.executable + enquote = False # assume this will be taken care of + elif not sysconfig.is_python_build(): + executable = get_executable() + elif in_venv(): # pragma: no cover + executable = os.path.join(sysconfig.get_path('scripts'), + 'python%s' % sysconfig.get_config_var('EXE')) + else: # pragma: no cover + executable = os.path.join( + sysconfig.get_config_var('BINDIR'), + 'python%s%s' % (sysconfig.get_config_var('VERSION'), + sysconfig.get_config_var('EXE'))) + if options: + executable = self._get_alternate_executable(executable, options) + + if sys.platform.startswith('java'): # pragma: no cover + executable = self._fix_jython_executable(executable) + + # Normalise case for Windows - COMMENTED OUT + # executable = os.path.normcase(executable) + # N.B. The normalising operation above has been commented out: See + # issue #124. Although paths in Windows are generally case-insensitive, + # they aren't always. For example, a path containing a ẞ (which is a + # LATIN CAPITAL LETTER SHARP S - U+1E9E) is normcased to ß (which is a + # LATIN SMALL LETTER SHARP S' - U+00DF). The two are not considered by + # Windows as equivalent in path names. + + # If the user didn't specify an executable, it may be necessary to + # cater for executable paths with spaces (not uncommon on Windows) + if enquote: + executable = enquote_executable(executable) + # Issue #51: don't use fsencode, since we later try to + # check that the shebang is decodable using utf-8. 
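+ # _build_shebang works on bytes throughout, so encode before
+ # concatenation.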
+ executable = executable.encode('utf-8') + # in case of IronPython, play safe and enable frames support + if (sys.platform == 'cli' and '-X:Frames' not in post_interp + and '-X:FullFrames' not in post_interp): # pragma: no cover + post_interp += b' -X:Frames' + shebang = self._build_shebang(executable, post_interp) + # Python parser starts to read a script using UTF-8 until + # it gets a #coding:xxx cookie. The shebang has to be the + # first line of a file, the #coding:xxx cookie cannot be + # written before. So the shebang has to be decodable from + # UTF-8. + try: + shebang.decode('utf-8') + except UnicodeDecodeError: # pragma: no cover + raise ValueError( + 'The shebang (%r) is not decodable from utf-8' % shebang) + # If the script is encoded to a custom encoding (use a + # #coding:xxx cookie), the shebang has to be decodable from + # the script encoding too. + if encoding != 'utf-8': + try: + shebang.decode(encoding) + except UnicodeDecodeError: # pragma: no cover + raise ValueError( + 'The shebang (%r) is not decodable ' + 'from the script encoding (%r)' % (shebang, encoding)) + return shebang + + def _get_script_text(self, entry): + return self.script_template % dict(module=entry.prefix, + import_name=entry.suffix.split('.')[0], + func=entry.suffix) + + manifest = _DEFAULT_MANIFEST + + def get_manifest(self, exename): + base = os.path.basename(exename) + return self.manifest % base + + def _write_script(self, names, shebang, script_bytes, filenames, ext): + use_launcher = self.add_launchers and self._is_nt + linesep = os.linesep.encode('utf-8') + if not shebang.endswith(linesep): + shebang += linesep + if not use_launcher: + script_bytes = shebang + script_bytes + else: # pragma: no cover + if ext == 'py': + launcher = self._get_launcher('t') + else: + launcher = self._get_launcher('w') + stream = BytesIO() + with ZipFile(stream, 'w') as zf: + zf.writestr('__main__.py', script_bytes) + zip_data = stream.getvalue() + script_bytes = launcher + shebang + zip_data + for name in names: + outname = os.path.join(self.target_dir, name) + if use_launcher: # pragma: no cover + n, e = os.path.splitext(outname) + if e.startswith('.py'): + outname = n + outname = '%s.exe' % outname + try: + self._fileop.write_binary_file(outname, script_bytes) + except Exception: + # Failed writing an executable - it might be in use. + logger.warning('Failed to write executable - trying to ' + 'use .deleteme logic') + dfname = '%s.deleteme' % outname + if os.path.exists(dfname): + os.remove(dfname) # Not allowed to fail here + os.rename(outname, dfname) # nor here + self._fileop.write_binary_file(outname, script_bytes) + logger.debug('Able to replace executable using ' + '.deleteme logic') + try: + os.remove(dfname) + except Exception: + pass # still in use - ignore error + else: + if self._is_nt and not outname.endswith('.' 
+ ext): # pragma: no cover + outname = '%s.%s' % (outname, ext) + if os.path.exists(outname) and not self.clobber: + logger.warning('Skipping existing file %s', outname) + continue + self._fileop.write_binary_file(outname, script_bytes) + if self.set_mode: + self._fileop.set_executable_mode([outname]) + filenames.append(outname) + + def _make_script(self, entry, filenames, options=None): + post_interp = b'' + if options: + args = options.get('interpreter_args', []) + if args: + args = ' %s' % ' '.join(args) + post_interp = args.encode('utf-8') + shebang = self._get_shebang('utf-8', post_interp, options=options) + script = self._get_script_text(entry).encode('utf-8') + name = entry.name + scriptnames = set() + if '' in self.variants: + scriptnames.add(name) + if 'X' in self.variants: + scriptnames.add('%s%s' % (name, self.version_info[0])) + if 'X.Y' in self.variants: + scriptnames.add('%s-%s.%s' % (name, self.version_info[0], + self.version_info[1])) + if options and options.get('gui', False): + ext = 'pyw' + else: + ext = 'py' + self._write_script(scriptnames, shebang, script, filenames, ext) + + def _copy_script(self, script, filenames): + adjust = False + script = os.path.join(self.source_dir, convert_path(script)) + outname = os.path.join(self.target_dir, os.path.basename(script)) + if not self.force and not self._fileop.newer(script, outname): + logger.debug('not copying %s (up-to-date)', script) + return + + # Always open the file, but ignore failures in dry-run mode -- + # that way, we'll get accurate feedback if we can read the + # script. + try: + f = open(script, 'rb') + except IOError: # pragma: no cover + if not self.dry_run: + raise + f = None + else: + first_line = f.readline() + if not first_line: # pragma: no cover + logger.warning('%s: %s is an empty file (skipping)', + self.get_command_name(), script) + return + + match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n')) + if match: + adjust = True + post_interp = match.group(1) or b'' + + if not adjust: + if f: + f.close() + self._fileop.copy_file(script, outname) + if self.set_mode: + self._fileop.set_executable_mode([outname]) + filenames.append(outname) + else: + logger.info('copying and adjusting %s -> %s', script, + self.target_dir) + if not self._fileop.dry_run: + encoding, lines = detect_encoding(f.readline) + f.seek(0) + shebang = self._get_shebang(encoding, post_interp) + if b'pythonw' in first_line: # pragma: no cover + ext = 'pyw' + else: + ext = 'py' + n = os.path.basename(outname) + self._write_script([n], shebang, f.read(), filenames, ext) + if f: + f.close() + + @property + def dry_run(self): + return self._fileop.dry_run + + @dry_run.setter + def dry_run(self, value): + self._fileop.dry_run = value + + if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'): # pragma: no cover + # Executable launcher support. + # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/ + + def _get_launcher(self, kind): + if struct.calcsize('P') == 8: # 64-bit + bits = '64' + else: + bits = '32' + name = '%s%s.exe' % (kind, bits) + # Issue 31: don't hardcode an absolute package name, but + # determine it relative to the current package + distlib_package = __name__.rsplit('.', 1)[0] + resource = finder(distlib_package).find(name) + if not resource: + msg = ('Unable to find resource %s in package %s' % (name, + distlib_package)) + raise ValueError(msg) + return resource.bytes + + # Public API follows + + def make(self, specification, options=None): + """ + Make a script. 
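+
+ An illustrative call (the names used are examples only)::
+
+ maker = ScriptMaker(None, '/tmp/scripts')
+ maker.make('foo = mypkg.cli:main')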
+ + :param specification: The specification, which is either a valid export + entry specification (to make a script from a + callable) or a filename (to make a script by + copying from a source location). + :param options: A dictionary of options controlling script generation. + :return: A list of all absolute pathnames written to. + """ + filenames = [] + entry = get_export_entry(specification) + if entry is None: + self._copy_script(specification, filenames) + else: + self._make_script(entry, filenames, options=options) + return filenames + + def make_multiple(self, specifications, options=None): + """ + Take a list of specifications and make scripts from them, + :param specifications: A list of specifications. + :return: A list of all absolute pathnames written to, + """ + filenames = [] + for specification in specifications: + filenames.extend(self.make(specification, options)) + return filenames diff --git a/venv/Lib/site-packages/distlib/t32.exe b/venv/Lib/site-packages/distlib/t32.exe new file mode 100644 index 00000000..8932a18e Binary files /dev/null and b/venv/Lib/site-packages/distlib/t32.exe differ diff --git a/venv/Lib/site-packages/distlib/t64.exe b/venv/Lib/site-packages/distlib/t64.exe new file mode 100644 index 00000000..325b8057 Binary files /dev/null and b/venv/Lib/site-packages/distlib/t64.exe differ diff --git a/venv/Lib/site-packages/distlib/util.py b/venv/Lib/site-packages/distlib/util.py new file mode 100644 index 00000000..01324eae --- /dev/null +++ b/venv/Lib/site-packages/distlib/util.py @@ -0,0 +1,1761 @@ +# +# Copyright (C) 2012-2017 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +import codecs +from collections import deque +import contextlib +import csv +from glob import iglob as std_iglob +import io +import json +import logging +import os +import py_compile +import re +import socket +try: + import ssl +except ImportError: # pragma: no cover + ssl = None +import subprocess +import sys +import tarfile +import tempfile +import textwrap + +try: + import threading +except ImportError: # pragma: no cover + import dummy_threading as threading +import time + +from . import DistlibException +from .compat import (string_types, text_type, shutil, raw_input, StringIO, + cache_from_source, urlopen, urljoin, httplib, xmlrpclib, + splittype, HTTPHandler, BaseConfigurator, valid_ident, + Container, configparser, URLError, ZipFile, fsdecode, + unquote, urlparse) + +logger = logging.getLogger(__name__) + +# +# Requirement parsing code as per PEP 508 +# + +IDENTIFIER = re.compile(r'^([\w\.-]+)\s*') +VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*') +COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*') +MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*') +OR = re.compile(r'^or\b\s*') +AND = re.compile(r'^and\b\s*') +NON_SPACE = re.compile(r'(\S+)\s*') +STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)') + + +def parse_marker(marker_string): + """ + Parse a marker string and return a dictionary containing a marker expression. + + The dictionary will contain keys "op", "lhs" and "rhs" for non-terminals in + the expression grammar, or strings. A string contained in quotes is to be + interpreted as a literal string, and a string not contained in quotes is a + variable (such as os_name). 
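+
+ For example (illustrative), parsing 'python_version >= "3.6"' produces
+ the expression {'op': '>=', 'lhs': 'python_version', 'rhs': '"3.6"'};
+ the quotes on the right-hand side mark it as a string literal.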
+ """ + def marker_var(remaining): + # either identifier, or literal string + m = IDENTIFIER.match(remaining) + if m: + result = m.groups()[0] + remaining = remaining[m.end():] + elif not remaining: + raise SyntaxError('unexpected end of input') + else: + q = remaining[0] + if q not in '\'"': + raise SyntaxError('invalid expression: %s' % remaining) + oq = '\'"'.replace(q, '') + remaining = remaining[1:] + parts = [q] + while remaining: + # either a string chunk, or oq, or q to terminate + if remaining[0] == q: + break + elif remaining[0] == oq: + parts.append(oq) + remaining = remaining[1:] + else: + m = STRING_CHUNK.match(remaining) + if not m: + raise SyntaxError('error in string literal: %s' % remaining) + parts.append(m.groups()[0]) + remaining = remaining[m.end():] + else: + s = ''.join(parts) + raise SyntaxError('unterminated string: %s' % s) + parts.append(q) + result = ''.join(parts) + remaining = remaining[1:].lstrip() # skip past closing quote + return result, remaining + + def marker_expr(remaining): + if remaining and remaining[0] == '(': + result, remaining = marker(remaining[1:].lstrip()) + if remaining[0] != ')': + raise SyntaxError('unterminated parenthesis: %s' % remaining) + remaining = remaining[1:].lstrip() + else: + lhs, remaining = marker_var(remaining) + while remaining: + m = MARKER_OP.match(remaining) + if not m: + break + op = m.groups()[0] + remaining = remaining[m.end():] + rhs, remaining = marker_var(remaining) + lhs = {'op': op, 'lhs': lhs, 'rhs': rhs} + result = lhs + return result, remaining + + def marker_and(remaining): + lhs, remaining = marker_expr(remaining) + while remaining: + m = AND.match(remaining) + if not m: + break + remaining = remaining[m.end():] + rhs, remaining = marker_expr(remaining) + lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs} + return lhs, remaining + + def marker(remaining): + lhs, remaining = marker_and(remaining) + while remaining: + m = OR.match(remaining) + if not m: + break + remaining = remaining[m.end():] + rhs, remaining = marker_and(remaining) + lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs} + return lhs, remaining + + return marker(marker_string) + + +def parse_requirement(req): + """ + Parse a requirement passed in as a string. Return a Container + whose attributes contain the various parts of the requirement. + """ + remaining = req.strip() + if not remaining or remaining.startswith('#'): + return None + m = IDENTIFIER.match(remaining) + if not m: + raise SyntaxError('name expected: %s' % remaining) + distname = m.groups()[0] + remaining = remaining[m.end():] + extras = mark_expr = versions = uri = None + if remaining and remaining[0] == '[': + i = remaining.find(']', 1) + if i < 0: + raise SyntaxError('unterminated extra: %s' % remaining) + s = remaining[1:i] + remaining = remaining[i + 1:].lstrip() + extras = [] + while s: + m = IDENTIFIER.match(s) + if not m: + raise SyntaxError('malformed extra: %s' % s) + extras.append(m.groups()[0]) + s = s[m.end():] + if not s: + break + if s[0] != ',': + raise SyntaxError('comma expected in extras: %s' % s) + s = s[1:].lstrip() + if not extras: + extras = None + if remaining: + if remaining[0] == '@': + # it's a URI + remaining = remaining[1:].lstrip() + m = NON_SPACE.match(remaining) + if not m: + raise SyntaxError('invalid URI: %s' % remaining) + uri = m.groups()[0] + t = urlparse(uri) + # there are issues with Python and URL parsing, so this test + # is a bit crude. See bpo-20271, bpo-23505. 
Python doesn't + # always parse invalid URLs correctly - it should raise + # exceptions for malformed URLs + if not (t.scheme and t.netloc): + raise SyntaxError('Invalid URL: %s' % uri) + remaining = remaining[m.end():].lstrip() + else: + + def get_versions(ver_remaining): + """ + Return a list of operator, version tuples if any are + specified, else None. + """ + m = COMPARE_OP.match(ver_remaining) + versions = None + if m: + versions = [] + while True: + op = m.groups()[0] + ver_remaining = ver_remaining[m.end():] + m = VERSION_IDENTIFIER.match(ver_remaining) + if not m: + raise SyntaxError('invalid version: %s' % ver_remaining) + v = m.groups()[0] + versions.append((op, v)) + ver_remaining = ver_remaining[m.end():] + if not ver_remaining or ver_remaining[0] != ',': + break + ver_remaining = ver_remaining[1:].lstrip() + m = COMPARE_OP.match(ver_remaining) + if not m: + raise SyntaxError('invalid constraint: %s' % ver_remaining) + if not versions: + versions = None + return versions, ver_remaining + + if remaining[0] != '(': + versions, remaining = get_versions(remaining) + else: + i = remaining.find(')', 1) + if i < 0: + raise SyntaxError('unterminated parenthesis: %s' % remaining) + s = remaining[1:i] + remaining = remaining[i + 1:].lstrip() + # As a special diversion from PEP 508, allow a version number + # a.b.c in parentheses as a synonym for ~= a.b.c (because this + # is allowed in earlier PEPs) + if COMPARE_OP.match(s): + versions, _ = get_versions(s) + else: + m = VERSION_IDENTIFIER.match(s) + if not m: + raise SyntaxError('invalid constraint: %s' % s) + v = m.groups()[0] + s = s[m.end():].lstrip() + if s: + raise SyntaxError('invalid constraint: %s' % s) + versions = [('~=', v)] + + if remaining: + if remaining[0] != ';': + raise SyntaxError('invalid requirement: %s' % remaining) + remaining = remaining[1:].lstrip() + + mark_expr, remaining = parse_marker(remaining) + + if remaining and remaining[0] != '#': + raise SyntaxError('unexpected trailing data: %s' % remaining) + + if not versions: + rs = distname + else: + rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions])) + return Container(name=distname, extras=extras, constraints=versions, + marker=mark_expr, url=uri, requirement=rs) + + +def get_resources_dests(resources_root, rules): + """Find destinations for resources files""" + + def get_rel_path(root, path): + # normalizes and returns a lstripped-/-separated path + root = root.replace(os.path.sep, '/') + path = path.replace(os.path.sep, '/') + assert path.startswith(root) + return path[len(root):].lstrip('/') + + destinations = {} + for base, suffix, dest in rules: + prefix = os.path.join(resources_root, base) + for abs_base in iglob(prefix): + abs_glob = os.path.join(abs_base, suffix) + for abs_path in iglob(abs_glob): + resource_file = get_rel_path(resources_root, abs_path) + if dest is None: # remove the entry if it was here + destinations.pop(resource_file, None) + else: + rel_path = get_rel_path(abs_base, abs_path) + rel_dest = dest.replace(os.path.sep, '/').rstrip('/') + destinations[resource_file] = rel_dest + '/' + rel_path + return destinations + + +def in_venv(): + if hasattr(sys, 'real_prefix'): + # virtualenv venvs + result = True + else: + # PEP 405 venvs + result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix) + return result + + +def get_executable(): +# The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as +# changes to the stub launcher mean that sys.executable always points +# to the stub on OS X +# if sys.platform == 
'darwin' and ('__PYVENV_LAUNCHER__' +# in os.environ): +# result = os.environ['__PYVENV_LAUNCHER__'] +# else: +# result = sys.executable +# return result + result = os.path.normcase(sys.executable) + if not isinstance(result, text_type): + result = fsdecode(result) + return result + + +def proceed(prompt, allowed_chars, error_prompt=None, default=None): + p = prompt + while True: + s = raw_input(p) + p = prompt + if not s and default: + s = default + if s: + c = s[0].lower() + if c in allowed_chars: + break + if error_prompt: + p = '%c: %s\n%s' % (c, error_prompt, prompt) + return c + + +def extract_by_key(d, keys): + if isinstance(keys, string_types): + keys = keys.split() + result = {} + for key in keys: + if key in d: + result[key] = d[key] + return result + +def read_exports(stream): + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getreader('utf-8')(stream) + # Try to load as JSON, falling back on legacy format + data = stream.read() + stream = StringIO(data) + try: + jdata = json.load(stream) + result = jdata['extensions']['python.exports']['exports'] + for group, entries in result.items(): + for k, v in entries.items(): + s = '%s = %s' % (k, v) + entry = get_export_entry(s) + assert entry is not None + entries[k] = entry + return result + except Exception: + stream.seek(0, 0) + + def read_stream(cp, stream): + if hasattr(cp, 'read_file'): + cp.read_file(stream) + else: + cp.readfp(stream) + + cp = configparser.ConfigParser() + try: + read_stream(cp, stream) + except configparser.MissingSectionHeaderError: + stream.close() + data = textwrap.dedent(data) + stream = StringIO(data) + read_stream(cp, stream) + + result = {} + for key in cp.sections(): + result[key] = entries = {} + for name, value in cp.items(key): + s = '%s = %s' % (name, value) + entry = get_export_entry(s) + assert entry is not None + #entry.dist = self + entries[name] = entry + return result + + +def write_exports(exports, stream): + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getwriter('utf-8')(stream) + cp = configparser.ConfigParser() + for k, v in exports.items(): + # TODO check k, v for valid values + cp.add_section(k) + for entry in v.values(): + if entry.suffix is None: + s = entry.prefix + else: + s = '%s:%s' % (entry.prefix, entry.suffix) + if entry.flags: + s = '%s [%s]' % (s, ', '.join(entry.flags)) + cp.set(k, entry.name, s) + cp.write(stream) + + +@contextlib.contextmanager +def tempdir(): + td = tempfile.mkdtemp() + try: + yield td + finally: + shutil.rmtree(td) + +@contextlib.contextmanager +def chdir(d): + cwd = os.getcwd() + try: + os.chdir(d) + yield + finally: + os.chdir(cwd) + + +@contextlib.contextmanager +def socket_timeout(seconds=15): + cto = socket.getdefaulttimeout() + try: + socket.setdefaulttimeout(seconds) + yield + finally: + socket.setdefaulttimeout(cto) + + +class cached_property(object): + def __init__(self, func): + self.func = func + #for attr in ('__name__', '__module__', '__doc__'): + # setattr(self, attr, getattr(func, attr, None)) + + def __get__(self, obj, cls=None): + if obj is None: + return self + value = self.func(obj) + object.__setattr__(obj, self.func.__name__, value) + #obj.__dict__[self.func.__name__] = value = self.func(obj) + return value + +def convert_path(pathname): + """Return 'pathname' as a name that will work on the native filesystem. + + The path is split on '/' and put back together again using the current + directory separator. 
Needed because filenames in the setup script are + always supplied in Unix style, and have to be converted to the local + convention before we can actually use them in the filesystem. Raises + ValueError on non-Unix-ish systems if 'pathname' either starts or + ends with a slash. + """ + if os.sep == '/': + return pathname + if not pathname: + return pathname + if pathname[0] == '/': + raise ValueError("path '%s' cannot be absolute" % pathname) + if pathname[-1] == '/': + raise ValueError("path '%s' cannot end with '/'" % pathname) + + paths = pathname.split('/') + while os.curdir in paths: + paths.remove(os.curdir) + if not paths: + return os.curdir + return os.path.join(*paths) + + +class FileOperator(object): + def __init__(self, dry_run=False): + self.dry_run = dry_run + self.ensured = set() + self._init_record() + + def _init_record(self): + self.record = False + self.files_written = set() + self.dirs_created = set() + + def record_as_written(self, path): + if self.record: + self.files_written.add(path) + + def newer(self, source, target): + """Tell if the target is newer than the source. + + Returns true if 'source' exists and is more recently modified than + 'target', or if 'source' exists and 'target' doesn't. + + Returns false if both exist and 'target' is the same age or younger + than 'source'. Raise PackagingFileError if 'source' does not exist. + + Note that this test is not very accurate: files created in the same + second will have the same "age". + """ + if not os.path.exists(source): + raise DistlibException("file '%r' does not exist" % + os.path.abspath(source)) + if not os.path.exists(target): + return True + + return os.stat(source).st_mtime > os.stat(target).st_mtime + + def copy_file(self, infile, outfile, check=True): + """Copy a file respecting dry-run and force flags. + """ + self.ensure_dir(os.path.dirname(outfile)) + logger.info('Copying %s to %s', infile, outfile) + if not self.dry_run: + msg = None + if check: + if os.path.islink(outfile): + msg = '%s is a symlink' % outfile + elif os.path.exists(outfile) and not os.path.isfile(outfile): + msg = '%s is a non-regular file' % outfile + if msg: + raise ValueError(msg + ' which would be overwritten') + shutil.copyfile(infile, outfile) + self.record_as_written(outfile) + + def copy_stream(self, instream, outfile, encoding=None): + assert not os.path.isdir(outfile) + self.ensure_dir(os.path.dirname(outfile)) + logger.info('Copying stream %s to %s', instream, outfile) + if not self.dry_run: + if encoding is None: + outstream = open(outfile, 'wb') + else: + outstream = codecs.open(outfile, 'w', encoding=encoding) + try: + shutil.copyfileobj(instream, outstream) + finally: + outstream.close() + self.record_as_written(outfile) + + def write_binary_file(self, path, data): + self.ensure_dir(os.path.dirname(path)) + if not self.dry_run: + if os.path.exists(path): + os.remove(path) + with open(path, 'wb') as f: + f.write(data) + self.record_as_written(path) + + def write_text_file(self, path, data, encoding): + self.write_binary_file(path, data.encode(encoding)) + + def set_mode(self, bits, mask, files): + if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'): + # Set the executable bits (owner, group, and world) on + # all the files specified. 
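+ # The new mode ORs the requested bits into each file's current
+ # mode, then clips the result with the mask.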
+ for f in files: + if self.dry_run: + logger.info("changing mode of %s", f) + else: + mode = (os.stat(f).st_mode | bits) & mask + logger.info("changing mode of %s to %o", f, mode) + os.chmod(f, mode) + + set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f) + + def ensure_dir(self, path): + path = os.path.abspath(path) + if path not in self.ensured and not os.path.exists(path): + self.ensured.add(path) + d, f = os.path.split(path) + self.ensure_dir(d) + logger.info('Creating %s' % path) + if not self.dry_run: + os.mkdir(path) + if self.record: + self.dirs_created.add(path) + + def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_invalidation=False): + dpath = cache_from_source(path, not optimize) + logger.info('Byte-compiling %s to %s', path, dpath) + if not self.dry_run: + if force or self.newer(path, dpath): + if not prefix: + diagpath = None + else: + assert path.startswith(prefix) + diagpath = path[len(prefix):] + compile_kwargs = {} + if hashed_invalidation and hasattr(py_compile, 'PycInvalidationMode'): + compile_kwargs['invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH + py_compile.compile(path, dpath, diagpath, True, **compile_kwargs) # raise error + self.record_as_written(dpath) + return dpath + + def ensure_removed(self, path): + if os.path.exists(path): + if os.path.isdir(path) and not os.path.islink(path): + logger.debug('Removing directory tree at %s', path) + if not self.dry_run: + shutil.rmtree(path) + if self.record: + if path in self.dirs_created: + self.dirs_created.remove(path) + else: + if os.path.islink(path): + s = 'link' + else: + s = 'file' + logger.debug('Removing %s %s', s, path) + if not self.dry_run: + os.remove(path) + if self.record: + if path in self.files_written: + self.files_written.remove(path) + + def is_writable(self, path): + result = False + while not result: + if os.path.exists(path): + result = os.access(path, os.W_OK) + break + parent = os.path.dirname(path) + if parent == path: + break + path = parent + return result + + def commit(self): + """ + Commit recorded changes, turn off recording, return + changes. 
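+
+ The return value is a (files_written, dirs_created) pair of sets.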
+ """ + assert self.record + result = self.files_written, self.dirs_created + self._init_record() + return result + + def rollback(self): + if not self.dry_run: + for f in list(self.files_written): + if os.path.exists(f): + os.remove(f) + # dirs should all be empty now, except perhaps for + # __pycache__ subdirs + # reverse so that subdirs appear before their parents + dirs = sorted(self.dirs_created, reverse=True) + for d in dirs: + flist = os.listdir(d) + if flist: + assert flist == ['__pycache__'] + sd = os.path.join(d, flist[0]) + os.rmdir(sd) + os.rmdir(d) # should fail if non-empty + self._init_record() + +def resolve(module_name, dotted_path): + if module_name in sys.modules: + mod = sys.modules[module_name] + else: + mod = __import__(module_name) + if dotted_path is None: + result = mod + else: + parts = dotted_path.split('.') + result = getattr(mod, parts.pop(0)) + for p in parts: + result = getattr(result, p) + return result + + +class ExportEntry(object): + def __init__(self, name, prefix, suffix, flags): + self.name = name + self.prefix = prefix + self.suffix = suffix + self.flags = flags + + @cached_property + def value(self): + return resolve(self.prefix, self.suffix) + + def __repr__(self): # pragma: no cover + return '' % (self.name, self.prefix, + self.suffix, self.flags) + + def __eq__(self, other): + if not isinstance(other, ExportEntry): + result = False + else: + result = (self.name == other.name and + self.prefix == other.prefix and + self.suffix == other.suffix and + self.flags == other.flags) + return result + + __hash__ = object.__hash__ + + +ENTRY_RE = re.compile(r'''(?P(\w|[-.+])+) + \s*=\s*(?P(\w+)([:\.]\w+)*) + \s*(\[\s*(?P[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])? + ''', re.VERBOSE) + +def get_export_entry(specification): + m = ENTRY_RE.search(specification) + if not m: + result = None + if '[' in specification or ']' in specification: + raise DistlibException("Invalid specification " + "'%s'" % specification) + else: + d = m.groupdict() + name = d['name'] + path = d['callable'] + colons = path.count(':') + if colons == 0: + prefix, suffix = path, None + else: + if colons != 1: + raise DistlibException("Invalid specification " + "'%s'" % specification) + prefix, suffix = path.split(':') + flags = d['flags'] + if flags is None: + if '[' in specification or ']' in specification: + raise DistlibException("Invalid specification " + "'%s'" % specification) + flags = [] + else: + flags = [f.strip() for f in flags.split(',')] + result = ExportEntry(name, prefix, suffix, flags) + return result + + +def get_cache_base(suffix=None): + """ + Return the default base location for distlib caches. If the directory does + not exist, it is created. Use the suffix provided for the base directory, + and default to '.distlib' if it isn't provided. + + On Windows, if LOCALAPPDATA is defined in the environment, then it is + assumed to be a directory, and will be the parent directory of the result. + On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home + directory - using os.expanduser('~') - will be the parent directory of + the result. + + The result is just the directory '.distlib' in the parent directory as + determined above, or with the name specified with ``suffix``. 
+ """ + if suffix is None: + suffix = '.distlib' + if os.name == 'nt' and 'LOCALAPPDATA' in os.environ: + result = os.path.expandvars('$localappdata') + else: + # Assume posix, or old Windows + result = os.path.expanduser('~') + # we use 'isdir' instead of 'exists', because we want to + # fail if there's a file with that name + if os.path.isdir(result): + usable = os.access(result, os.W_OK) + if not usable: + logger.warning('Directory exists but is not writable: %s', result) + else: + try: + os.makedirs(result) + usable = True + except OSError: + logger.warning('Unable to create %s', result, exc_info=True) + usable = False + if not usable: + result = tempfile.mkdtemp() + logger.warning('Default location unusable, using %s', result) + return os.path.join(result, suffix) + + +def path_to_cache_dir(path): + """ + Convert an absolute path to a directory name for use in a cache. + + The algorithm used is: + + #. On Windows, any ``':'`` in the drive is replaced with ``'---'``. + #. Any occurrence of ``os.sep`` is replaced with ``'--'``. + #. ``'.cache'`` is appended. + """ + d, p = os.path.splitdrive(os.path.abspath(path)) + if d: + d = d.replace(':', '---') + p = p.replace(os.sep, '--') + return d + p + '.cache' + + +def ensure_slash(s): + if not s.endswith('/'): + return s + '/' + return s + + +def parse_credentials(netloc): + username = password = None + if '@' in netloc: + prefix, netloc = netloc.rsplit('@', 1) + if ':' not in prefix: + username = prefix + else: + username, password = prefix.split(':', 1) + if username: + username = unquote(username) + if password: + password = unquote(password) + return username, password, netloc + + +def get_process_umask(): + result = os.umask(0o22) + os.umask(result) + return result + +def is_string_sequence(seq): + result = True + i = None + for i, s in enumerate(seq): + if not isinstance(s, string_types): + result = False + break + assert i is not None + return result + +PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-' + '([a-z0-9_.+-]+)', re.I) +PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)') + + +def split_filename(filename, project_name=None): + """ + Extract name, version, python version from a filename (no extension) + + Return name, version, pyver or None + """ + result = None + pyver = None + filename = unquote(filename).replace(' ', '-') + m = PYTHON_VERSION.search(filename) + if m: + pyver = m.group(1) + filename = filename[:m.start()] + if project_name and len(filename) > len(project_name) + 1: + m = re.match(re.escape(project_name) + r'\b', filename) + if m: + n = m.end() + result = filename[:n], filename[n + 1:], pyver + if result is None: + m = PROJECT_NAME_AND_VERSION.match(filename) + if m: + result = m.group(1), m.group(3), pyver + return result + +# Allow spaces in name because of legacy dists like "Twisted Core" +NAME_VERSION_RE = re.compile(r'(?P[\w .-]+)\s*' + r'\(\s*(?P[^\s)]+)\)$') + +def parse_name_and_version(p): + """ + A utility method used to get name and version from a string. + + From e.g. a Provides-Dist value. + + :param p: A value in a form 'foo (1.0)' + :return: The name and version as a tuple. 
+ """ + m = NAME_VERSION_RE.match(p) + if not m: + raise DistlibException('Ill-formed name/version string: \'%s\'' % p) + d = m.groupdict() + return d['name'].strip().lower(), d['ver'] + +def get_extras(requested, available): + result = set() + requested = set(requested or []) + available = set(available or []) + if '*' in requested: + requested.remove('*') + result |= available + for r in requested: + if r == '-': + result.add(r) + elif r.startswith('-'): + unwanted = r[1:] + if unwanted not in available: + logger.warning('undeclared extra: %s' % unwanted) + if unwanted in result: + result.remove(unwanted) + else: + if r not in available: + logger.warning('undeclared extra: %s' % r) + result.add(r) + return result +# +# Extended metadata functionality +# + +def _get_external_data(url): + result = {} + try: + # urlopen might fail if it runs into redirections, + # because of Python issue #13696. Fixed in locators + # using a custom redirect handler. + resp = urlopen(url) + headers = resp.info() + ct = headers.get('Content-Type') + if not ct.startswith('application/json'): + logger.debug('Unexpected response for JSON request: %s', ct) + else: + reader = codecs.getreader('utf-8')(resp) + #data = reader.read().decode('utf-8') + #result = json.loads(data) + result = json.load(reader) + except Exception as e: + logger.exception('Failed to get external data for %s: %s', url, e) + return result + +_external_data_base_url = 'https://www.red-dove.com/pypi/projects/' + +def get_project_data(name): + url = '%s/%s/project.json' % (name[0].upper(), name) + url = urljoin(_external_data_base_url, url) + result = _get_external_data(url) + return result + +def get_package_data(name, version): + url = '%s/%s/package-%s.json' % (name[0].upper(), name, version) + url = urljoin(_external_data_base_url, url) + return _get_external_data(url) + + +class Cache(object): + """ + A class implementing a cache for resources that need to live in the file system + e.g. shared libraries. This class was moved from resources to here because it + could be used by other modules, e.g. the wheel module. + """ + + def __init__(self, base): + """ + Initialise an instance. + + :param base: The base directory where the cache should be located. + """ + # we use 'isdir' instead of 'exists', because we want to + # fail if there's a file with that name + if not os.path.isdir(base): # pragma: no cover + os.makedirs(base) + if (os.stat(base).st_mode & 0o77) != 0: + logger.warning('Directory \'%s\' is not private', base) + self.base = os.path.abspath(os.path.normpath(base)) + + def prefix_to_dir(self, prefix): + """ + Converts a resource prefix to a directory name in the cache. + """ + return path_to_cache_dir(prefix) + + def clear(self): + """ + Clear the cache. + """ + not_removed = [] + for fn in os.listdir(self.base): + fn = os.path.join(self.base, fn) + try: + if os.path.islink(fn) or os.path.isfile(fn): + os.remove(fn) + elif os.path.isdir(fn): + shutil.rmtree(fn) + except Exception: + not_removed.append(fn) + return not_removed + + +class EventMixin(object): + """ + A very simple publish/subscribe system. + """ + def __init__(self): + self._subscribers = {} + + def add(self, event, subscriber, append=True): + """ + Add a subscriber for an event. + + :param event: The name of an event. + :param subscriber: The subscriber to be added (and called when the + event is published). + :param append: Whether to append or prepend the subscriber to an + existing subscriber list for the event. 
+ """ + subs = self._subscribers + if event not in subs: + subs[event] = deque([subscriber]) + else: + sq = subs[event] + if append: + sq.append(subscriber) + else: + sq.appendleft(subscriber) + + def remove(self, event, subscriber): + """ + Remove a subscriber for an event. + + :param event: The name of an event. + :param subscriber: The subscriber to be removed. + """ + subs = self._subscribers + if event not in subs: + raise ValueError('No subscribers: %r' % event) + subs[event].remove(subscriber) + + def get_subscribers(self, event): + """ + Return an iterator for the subscribers for an event. + :param event: The event to return subscribers for. + """ + return iter(self._subscribers.get(event, ())) + + def publish(self, event, *args, **kwargs): + """ + Publish a event and return a list of values returned by its + subscribers. + + :param event: The event to publish. + :param args: The positional arguments to pass to the event's + subscribers. + :param kwargs: The keyword arguments to pass to the event's + subscribers. + """ + result = [] + for subscriber in self.get_subscribers(event): + try: + value = subscriber(event, *args, **kwargs) + except Exception: + logger.exception('Exception during event publication') + value = None + result.append(value) + logger.debug('publish %s: args = %s, kwargs = %s, result = %s', + event, args, kwargs, result) + return result + +# +# Simple sequencing +# +class Sequencer(object): + def __init__(self): + self._preds = {} + self._succs = {} + self._nodes = set() # nodes with no preds/succs + + def add_node(self, node): + self._nodes.add(node) + + def remove_node(self, node, edges=False): + if node in self._nodes: + self._nodes.remove(node) + if edges: + for p in set(self._preds.get(node, ())): + self.remove(p, node) + for s in set(self._succs.get(node, ())): + self.remove(node, s) + # Remove empties + for k, v in list(self._preds.items()): + if not v: + del self._preds[k] + for k, v in list(self._succs.items()): + if not v: + del self._succs[k] + + def add(self, pred, succ): + assert pred != succ + self._preds.setdefault(succ, set()).add(pred) + self._succs.setdefault(pred, set()).add(succ) + + def remove(self, pred, succ): + assert pred != succ + try: + preds = self._preds[succ] + succs = self._succs[pred] + except KeyError: # pragma: no cover + raise ValueError('%r not a successor of anything' % succ) + try: + preds.remove(pred) + succs.remove(succ) + except KeyError: # pragma: no cover + raise ValueError('%r not a successor of %r' % (succ, pred)) + + def is_step(self, step): + return (step in self._preds or step in self._succs or + step in self._nodes) + + def get_steps(self, final): + if not self.is_step(final): + raise ValueError('Unknown: %r' % final) + result = [] + todo = [] + seen = set() + todo.append(final) + while todo: + step = todo.pop(0) + if step in seen: + # if a step was already seen, + # move it to the end (so it will appear earlier + # when reversed on return) ... 
but not for the + # final step, as that would be confusing for + # users + if step != final: + result.remove(step) + result.append(step) + else: + seen.add(step) + result.append(step) + preds = self._preds.get(step, ()) + todo.extend(preds) + return reversed(result) + + @property + def strong_connections(self): + #http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm + index_counter = [0] + stack = [] + lowlinks = {} + index = {} + result = [] + + graph = self._succs + + def strongconnect(node): + # set the depth index for this node to the smallest unused index + index[node] = index_counter[0] + lowlinks[node] = index_counter[0] + index_counter[0] += 1 + stack.append(node) + + # Consider successors + try: + successors = graph[node] + except Exception: + successors = [] + for successor in successors: + if successor not in lowlinks: + # Successor has not yet been visited + strongconnect(successor) + lowlinks[node] = min(lowlinks[node],lowlinks[successor]) + elif successor in stack: + # the successor is in the stack and hence in the current + # strongly connected component (SCC) + lowlinks[node] = min(lowlinks[node],index[successor]) + + # If `node` is a root node, pop the stack and generate an SCC + if lowlinks[node] == index[node]: + connected_component = [] + + while True: + successor = stack.pop() + connected_component.append(successor) + if successor == node: break + component = tuple(connected_component) + # storing the result + result.append(component) + + for node in graph: + if node not in lowlinks: + strongconnect(node) + + return result + + @property + def dot(self): + result = ['digraph G {'] + for succ in self._preds: + preds = self._preds[succ] + for pred in preds: + result.append(' %s -> %s;' % (pred, succ)) + for node in self._nodes: + result.append(' %s;' % node) + result.append('}') + return '\n'.join(result) + +# +# Unarchiving functionality for zip, tar, tgz, tbz, whl +# + +ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', + '.tgz', '.tbz', '.whl') + +def unarchive(archive_filename, dest_dir, format=None, check=True): + + def check_path(path): + if not isinstance(path, text_type): + path = path.decode('utf-8') + p = os.path.abspath(os.path.join(dest_dir, path)) + if not p.startswith(dest_dir) or p[plen] != os.sep: + raise ValueError('path outside destination: %r' % p) + + dest_dir = os.path.abspath(dest_dir) + plen = len(dest_dir) + archive = None + if format is None: + if archive_filename.endswith(('.zip', '.whl')): + format = 'zip' + elif archive_filename.endswith(('.tar.gz', '.tgz')): + format = 'tgz' + mode = 'r:gz' + elif archive_filename.endswith(('.tar.bz2', '.tbz')): + format = 'tbz' + mode = 'r:bz2' + elif archive_filename.endswith('.tar'): + format = 'tar' + mode = 'r' + else: # pragma: no cover + raise ValueError('Unknown format for %r' % archive_filename) + try: + if format == 'zip': + archive = ZipFile(archive_filename, 'r') + if check: + names = archive.namelist() + for name in names: + check_path(name) + else: + archive = tarfile.open(archive_filename, mode) + if check: + names = archive.getnames() + for name in names: + check_path(name) + if format != 'zip' and sys.version_info[0] < 3: + # See Python issue 17153. If the dest path contains Unicode, + # tarfile extraction fails on Python 2.x if a member path name + # contains non-ASCII characters - it leads to an implicit + # bytes -> unicode conversion using ASCII to decode. 
+ for tarinfo in archive.getmembers(): + if not isinstance(tarinfo.name, text_type): + tarinfo.name = tarinfo.name.decode('utf-8') + archive.extractall(dest_dir) + + finally: + if archive: + archive.close() + + +def zip_dir(directory): + """zip a directory tree into a BytesIO object""" + result = io.BytesIO() + dlen = len(directory) + with ZipFile(result, "w") as zf: + for root, dirs, files in os.walk(directory): + for name in files: + full = os.path.join(root, name) + rel = root[dlen:] + dest = os.path.join(rel, name) + zf.write(full, dest) + return result + +# +# Simple progress bar +# + +UNITS = ('', 'K', 'M', 'G','T','P') + + +class Progress(object): + unknown = 'UNKNOWN' + + def __init__(self, minval=0, maxval=100): + assert maxval is None or maxval >= minval + self.min = self.cur = minval + self.max = maxval + self.started = None + self.elapsed = 0 + self.done = False + + def update(self, curval): + assert self.min <= curval + assert self.max is None or curval <= self.max + self.cur = curval + now = time.time() + if self.started is None: + self.started = now + else: + self.elapsed = now - self.started + + def increment(self, incr): + assert incr >= 0 + self.update(self.cur + incr) + + def start(self): + self.update(self.min) + return self + + def stop(self): + if self.max is not None: + self.update(self.max) + self.done = True + + @property + def maximum(self): + return self.unknown if self.max is None else self.max + + @property + def percentage(self): + if self.done: + result = '100 %' + elif self.max is None: + result = ' ?? %' + else: + v = 100.0 * (self.cur - self.min) / (self.max - self.min) + result = '%3d %%' % v + return result + + def format_duration(self, duration): + if (duration <= 0) and self.max is None or self.cur == self.min: + result = '??:??:??' 
+ #elif duration < 1: + # result = '--:--:--' + else: + result = time.strftime('%H:%M:%S', time.gmtime(duration)) + return result + + @property + def ETA(self): + if self.done: + prefix = 'Done' + t = self.elapsed + #import pdb; pdb.set_trace() + else: + prefix = 'ETA ' + if self.max is None: + t = -1 + elif self.elapsed == 0 or (self.cur == self.min): + t = 0 + else: + #import pdb; pdb.set_trace() + t = float(self.max - self.min) + t /= self.cur - self.min + t = (t - 1) * self.elapsed + return '%s: %s' % (prefix, self.format_duration(t)) + + @property + def speed(self): + if self.elapsed == 0: + result = 0.0 + else: + result = (self.cur - self.min) / self.elapsed + for unit in UNITS: + if result < 1000: + break + result /= 1000.0 + return '%d %sB/s' % (result, unit) + +# +# Glob functionality +# + +RICH_GLOB = re.compile(r'\{([^}]*)\}') +_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]') +_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$') + + +def iglob(path_glob): + """Extended globbing function that supports ** and {opt1,opt2,opt3}.""" + if _CHECK_RECURSIVE_GLOB.search(path_glob): + msg = """invalid glob %r: recursive glob "**" must be used alone""" + raise ValueError(msg % path_glob) + if _CHECK_MISMATCH_SET.search(path_glob): + msg = """invalid glob %r: mismatching set marker '{' or '}'""" + raise ValueError(msg % path_glob) + return _iglob(path_glob) + + +def _iglob(path_glob): + rich_path_glob = RICH_GLOB.split(path_glob, 1) + if len(rich_path_glob) > 1: + assert len(rich_path_glob) == 3, rich_path_glob + prefix, set, suffix = rich_path_glob + for item in set.split(','): + for path in _iglob(''.join((prefix, item, suffix))): + yield path + else: + if '**' not in path_glob: + for item in std_iglob(path_glob): + yield item + else: + prefix, radical = path_glob.split('**', 1) + if prefix == '': + prefix = '.' 
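+ # a bare trailing '**' should match everything below the prefix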
+        if radical == '':
+            radical = '*'
+        else:
+            # we support both '/' and '\\' as path separators here
+            radical = radical.lstrip('/')
+            radical = radical.lstrip('\\')
+        for path, dir, files in os.walk(prefix):
+            path = os.path.normpath(path)
+            for fn in _iglob(os.path.join(path, radical)):
+                yield fn
+
+if ssl:
+    from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname,
+                         CertificateError)
+
+
+#
+# HTTPSConnection which verifies certificates/matches domains
+#
+
+    class HTTPSConnection(httplib.HTTPSConnection):
+        ca_certs = None           # set this to the path to the certs file (.pem)
+        check_domain = True       # only used if ca_certs is not None
+
+        # noinspection PyPropertyAccess
+        def connect(self):
+            sock = socket.create_connection((self.host, self.port), self.timeout)
+            if getattr(self, '_tunnel_host', False):
+                self.sock = sock
+                self._tunnel()
+
+            if not hasattr(ssl, 'SSLContext'):
+                # For 2.x
+                if self.ca_certs:
+                    cert_reqs = ssl.CERT_REQUIRED
+                else:
+                    cert_reqs = ssl.CERT_NONE
+                self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
+                                            cert_reqs=cert_reqs,
+                                            ssl_version=ssl.PROTOCOL_SSLv23,
+                                            ca_certs=self.ca_certs)
+            else:  # pragma: no cover
+                context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+                if hasattr(ssl, 'OP_NO_SSLv2'):
+                    context.options |= ssl.OP_NO_SSLv2
+                if self.cert_file:
+                    context.load_cert_chain(self.cert_file, self.key_file)
+                kwargs = {}
+                if self.ca_certs:
+                    context.verify_mode = ssl.CERT_REQUIRED
+                    context.load_verify_locations(cafile=self.ca_certs)
+                if getattr(ssl, 'HAS_SNI', False):
+                    kwargs['server_hostname'] = self.host
+                self.sock = context.wrap_socket(sock, **kwargs)
+            if self.ca_certs and self.check_domain:
+                try:
+                    match_hostname(self.sock.getpeercert(), self.host)
+                    logger.debug('Host verified: %s', self.host)
+                except CertificateError:  # pragma: no cover
+                    self.sock.shutdown(socket.SHUT_RDWR)
+                    self.sock.close()
+                    raise
+
+    class HTTPSHandler(BaseHTTPSHandler):
+        def __init__(self, ca_certs, check_domain=True):
+            BaseHTTPSHandler.__init__(self)
+            self.ca_certs = ca_certs
+            self.check_domain = check_domain
+
+        def _conn_maker(self, *args, **kwargs):
+            """
+            This is called to create a connection instance. Normally you'd
+            pass a connection class to do_open, but it doesn't actually check for
+            a class, and just expects a callable. As long as we behave just as a
+            constructor would have, we should be OK. If it ever changes so that
+            we *must* pass a class, we'll create an UnsafeHTTPSConnection class
+            which just sets check_domain to False in the class definition, and
+            choose which one to pass to do_open.
+            """
+            result = HTTPSConnection(*args, **kwargs)
+            if self.ca_certs:
+                result.ca_certs = self.ca_certs
+                result.check_domain = self.check_domain
+            return result
+
+        def https_open(self, req):
+            try:
+                return self.do_open(self._conn_maker, req)
+            except URLError as e:
+                if 'certificate verify failed' in str(e.reason):
+                    raise CertificateError('Unable to verify server certificate '
+                                           'for %s' % req.host)
+                else:
+                    raise
+
+    #
+    # To prevent mixing HTTP traffic with HTTPS (examples: a Man-In-The-
+    # Middle proxy using HTTP listens on port 443, or an index mistakenly serves
+    # HTML containing an http://xyz link when it should be https://xyz),
+    # you can use the following handler class, which does not allow HTTP traffic.
+    #
+    # It works by inheriting from HTTPHandler - so build_opener won't add a
+    # handler for HTTP itself.
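+    # (an opener built with build_opener(HTTPSOnlyHandler(ca_certs)) will thus
+    # fail loudly on any plain http:// URL instead of silently fetching it)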
+ # + class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler): + def http_open(self, req): + raise URLError('Unexpected HTTP request on what should be a secure ' + 'connection: %s' % req) + +# +# XML-RPC with timeouts +# + +_ver_info = sys.version_info[:2] + +if _ver_info == (2, 6): + class HTTP(httplib.HTTP): + def __init__(self, host='', port=None, **kwargs): + if port == 0: # 0 means use port 0, not the default port + port = None + self._setup(self._connection_class(host, port, **kwargs)) + + + if ssl: + class HTTPS(httplib.HTTPS): + def __init__(self, host='', port=None, **kwargs): + if port == 0: # 0 means use port 0, not the default port + port = None + self._setup(self._connection_class(host, port, **kwargs)) + + +class Transport(xmlrpclib.Transport): + def __init__(self, timeout, use_datetime=0): + self.timeout = timeout + xmlrpclib.Transport.__init__(self, use_datetime) + + def make_connection(self, host): + h, eh, x509 = self.get_host_info(host) + if _ver_info == (2, 6): + result = HTTP(h, timeout=self.timeout) + else: + if not self._connection or host != self._connection[0]: + self._extra_headers = eh + self._connection = host, httplib.HTTPConnection(h) + result = self._connection[1] + return result + +if ssl: + class SafeTransport(xmlrpclib.SafeTransport): + def __init__(self, timeout, use_datetime=0): + self.timeout = timeout + xmlrpclib.SafeTransport.__init__(self, use_datetime) + + def make_connection(self, host): + h, eh, kwargs = self.get_host_info(host) + if not kwargs: + kwargs = {} + kwargs['timeout'] = self.timeout + if _ver_info == (2, 6): + result = HTTPS(host, None, **kwargs) + else: + if not self._connection or host != self._connection[0]: + self._extra_headers = eh + self._connection = host, httplib.HTTPSConnection(h, None, + **kwargs) + result = self._connection[1] + return result + + +class ServerProxy(xmlrpclib.ServerProxy): + def __init__(self, uri, **kwargs): + self.timeout = timeout = kwargs.pop('timeout', None) + # The above classes only come into play if a timeout + # is specified + if timeout is not None: + scheme, _ = splittype(uri) + use_datetime = kwargs.get('use_datetime', 0) + if scheme == 'https': + tcls = SafeTransport + else: + tcls = Transport + kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime) + self.transport = t + xmlrpclib.ServerProxy.__init__(self, uri, **kwargs) + +# +# CSV functionality. This is provided because on 2.x, the csv module can't +# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files. +# + +def _csv_open(fn, mode, **kwargs): + if sys.version_info[0] < 3: + mode += 'b' + else: + kwargs['newline'] = '' + # Python 3 determines encoding from locale. 
Force 'utf-8' + # file encoding to match other forced utf-8 encoding + kwargs['encoding'] = 'utf-8' + return open(fn, mode, **kwargs) + + +class CSVBase(object): + defaults = { + 'delimiter': str(','), # The strs are used because we need native + 'quotechar': str('"'), # str in the csv API (2.x won't take + 'lineterminator': str('\n') # Unicode) + } + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.stream.close() + + +class CSVReader(CSVBase): + def __init__(self, **kwargs): + if 'stream' in kwargs: + stream = kwargs['stream'] + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getreader('utf-8')(stream) + self.stream = stream + else: + self.stream = _csv_open(kwargs['path'], 'r') + self.reader = csv.reader(self.stream, **self.defaults) + + def __iter__(self): + return self + + def next(self): + result = next(self.reader) + if sys.version_info[0] < 3: + for i, item in enumerate(result): + if not isinstance(item, text_type): + result[i] = item.decode('utf-8') + return result + + __next__ = next + +class CSVWriter(CSVBase): + def __init__(self, fn, **kwargs): + self.stream = _csv_open(fn, 'w') + self.writer = csv.writer(self.stream, **self.defaults) + + def writerow(self, row): + if sys.version_info[0] < 3: + r = [] + for item in row: + if isinstance(item, text_type): + item = item.encode('utf-8') + r.append(item) + row = r + self.writer.writerow(row) + +# +# Configurator functionality +# + +class Configurator(BaseConfigurator): + + value_converters = dict(BaseConfigurator.value_converters) + value_converters['inc'] = 'inc_convert' + + def __init__(self, config, base=None): + super(Configurator, self).__init__(config) + self.base = base or os.getcwd() + + def configure_custom(self, config): + def convert(o): + if isinstance(o, (list, tuple)): + result = type(o)([convert(i) for i in o]) + elif isinstance(o, dict): + if '()' in o: + result = self.configure_custom(o) + else: + result = {} + for k in o: + result[k] = convert(o[k]) + else: + result = self.convert(o) + return result + + c = config.pop('()') + if not callable(c): + c = self.resolve(c) + props = config.pop('.', None) + # Check for valid identifiers + args = config.pop('[]', ()) + if args: + args = tuple([convert(o) for o in args]) + items = [(k, convert(config[k])) for k in config if valid_ident(k)] + kwargs = dict(items) + result = c(*args, **kwargs) + if props: + for n, v in props.items(): + setattr(result, n, convert(v)) + return result + + def __getitem__(self, key): + result = self.config[key] + if isinstance(result, dict) and '()' in result: + self.config[key] = result = self.configure_custom(result) + return result + + def inc_convert(self, value): + """Default converter for the inc:// protocol.""" + if not os.path.isabs(value): + value = os.path.join(self.base, value) + with codecs.open(value, 'r', encoding='utf-8') as f: + result = json.load(f) + return result + + +class SubprocessMixin(object): + """ + Mixin for running subprocesses and capturing their output + """ + def __init__(self, verbose=False, progress=None): + self.verbose = verbose + self.progress = progress + + def reader(self, stream, context): + """ + Read lines from a subprocess' output stream and either pass to a progress + callable (if specified) or write progress information to sys.stderr. 
+ """ + progress = self.progress + verbose = self.verbose + while True: + s = stream.readline() + if not s: + break + if progress is not None: + progress(s, context) + else: + if not verbose: + sys.stderr.write('.') + else: + sys.stderr.write(s.decode('utf-8')) + sys.stderr.flush() + stream.close() + + def run_command(self, cmd, **kwargs): + p = subprocess.Popen(cmd, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, **kwargs) + t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout')) + t1.start() + t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr')) + t2.start() + p.wait() + t1.join() + t2.join() + if self.progress is not None: + self.progress('done.', 'main') + elif self.verbose: + sys.stderr.write('done.\n') + return p + + +def normalize_name(name): + """Normalize a python package name a la PEP 503""" + # https://www.python.org/dev/peps/pep-0503/#normalized-names + return re.sub('[-_.]+', '-', name).lower() diff --git a/venv/Lib/site-packages/distlib/version.py b/venv/Lib/site-packages/distlib/version.py new file mode 100644 index 00000000..3eebe18e --- /dev/null +++ b/venv/Lib/site-packages/distlib/version.py @@ -0,0 +1,736 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2017 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +""" +Implementation of a flexible versioning scheme providing support for PEP-440, +setuptools-compatible and semantic versioning. +""" + +import logging +import re + +from .compat import string_types +from .util import parse_requirement + +__all__ = ['NormalizedVersion', 'NormalizedMatcher', + 'LegacyVersion', 'LegacyMatcher', + 'SemanticVersion', 'SemanticMatcher', + 'UnsupportedVersionError', 'get_scheme'] + +logger = logging.getLogger(__name__) + + +class UnsupportedVersionError(ValueError): + """This is an unsupported version.""" + pass + + +class Version(object): + def __init__(self, s): + self._string = s = s.strip() + self._parts = parts = self.parse(s) + assert isinstance(parts, tuple) + assert len(parts) > 0 + + def parse(self, s): + raise NotImplementedError('please implement in a subclass') + + def _check_compatible(self, other): + if type(self) != type(other): + raise TypeError('cannot compare %r and %r' % (self, other)) + + def __eq__(self, other): + self._check_compatible(other) + return self._parts == other._parts + + def __ne__(self, other): + return not self.__eq__(other) + + def __lt__(self, other): + self._check_compatible(other) + return self._parts < other._parts + + def __gt__(self, other): + return not (self.__lt__(other) or self.__eq__(other)) + + def __le__(self, other): + return self.__lt__(other) or self.__eq__(other) + + def __ge__(self, other): + return self.__gt__(other) or self.__eq__(other) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + def __hash__(self): + return hash(self._parts) + + def __repr__(self): + return "%s('%s')" % (self.__class__.__name__, self._string) + + def __str__(self): + return self._string + + @property + def is_prerelease(self): + raise NotImplementedError('Please implement in subclasses.') + + +class Matcher(object): + version_class = None + + # value is either a callable or the name of a method + _operators = { + '<': lambda v, c, p: v < c, + '>': lambda v, c, p: v > c, + '<=': lambda v, c, p: v == c or v < c, + '>=': lambda v, c, p: v == c or v > c, + '==': lambda v, c, p: v == c, + '===': lambda v, c, p: v == c, + # by default, compatible => >=. 
+ '~=': lambda v, c, p: v == c or v > c, + '!=': lambda v, c, p: v != c, + } + + # this is a method only to support alternative implementations + # via overriding + def parse_requirement(self, s): + return parse_requirement(s) + + def __init__(self, s): + if self.version_class is None: + raise ValueError('Please specify a version class') + self._string = s = s.strip() + r = self.parse_requirement(s) + if not r: + raise ValueError('Not valid: %r' % s) + self.name = r.name + self.key = self.name.lower() # for case-insensitive comparisons + clist = [] + if r.constraints: + # import pdb; pdb.set_trace() + for op, s in r.constraints: + if s.endswith('.*'): + if op not in ('==', '!='): + raise ValueError('\'.*\' not allowed for ' + '%r constraints' % op) + # Could be a partial version (e.g. for '2.*') which + # won't parse as a version, so keep it as a string + vn, prefix = s[:-2], True + # Just to check that vn is a valid version + self.version_class(vn) + else: + # Should parse as a version, so we can create an + # instance for the comparison + vn, prefix = self.version_class(s), False + clist.append((op, vn, prefix)) + self._parts = tuple(clist) + + def match(self, version): + """ + Check if the provided version matches the constraints. + + :param version: The version to match against this instance. + :type version: String or :class:`Version` instance. + """ + if isinstance(version, string_types): + version = self.version_class(version) + for operator, constraint, prefix in self._parts: + f = self._operators.get(operator) + if isinstance(f, string_types): + f = getattr(self, f) + if not f: + msg = ('%r not implemented ' + 'for %s' % (operator, self.__class__.__name__)) + raise NotImplementedError(msg) + if not f(version, constraint, prefix): + return False + return True + + @property + def exact_version(self): + result = None + if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='): + result = self._parts[0][1] + return result + + def _check_compatible(self, other): + if type(self) != type(other) or self.name != other.name: + raise TypeError('cannot compare %s and %s' % (self, other)) + + def __eq__(self, other): + self._check_compatible(other) + return self.key == other.key and self._parts == other._parts + + def __ne__(self, other): + return not self.__eq__(other) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + def __hash__(self): + return hash(self.key) + hash(self._parts) + + def __repr__(self): + return "%s(%r)" % (self.__class__.__name__, self._string) + + def __str__(self): + return self._string + + +PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?' + r'(\.(post)(\d+))?(\.(dev)(\d+))?' 
+ r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$') + + +def _pep_440_key(s): + s = s.strip() + m = PEP440_VERSION_RE.match(s) + if not m: + raise UnsupportedVersionError('Not a valid version: %s' % s) + groups = m.groups() + nums = tuple(int(v) for v in groups[1].split('.')) + while len(nums) > 1 and nums[-1] == 0: + nums = nums[:-1] + + if not groups[0]: + epoch = 0 + else: + epoch = int(groups[0]) + pre = groups[4:6] + post = groups[7:9] + dev = groups[10:12] + local = groups[13] + if pre == (None, None): + pre = () + else: + pre = pre[0], int(pre[1]) + if post == (None, None): + post = () + else: + post = post[0], int(post[1]) + if dev == (None, None): + dev = () + else: + dev = dev[0], int(dev[1]) + if local is None: + local = () + else: + parts = [] + for part in local.split('.'): + # to ensure that numeric compares as > lexicographic, avoid + # comparing them directly, but encode a tuple which ensures + # correct sorting + if part.isdigit(): + part = (1, int(part)) + else: + part = (0, part) + parts.append(part) + local = tuple(parts) + if not pre: + # either before pre-release, or final release and after + if not post and dev: + # before pre-release + pre = ('a', -1) # to sort before a0 + else: + pre = ('z',) # to sort after all pre-releases + # now look at the state of post and dev. + if not post: + post = ('_',) # sort before 'a' + if not dev: + dev = ('final',) + + #print('%s -> %s' % (s, m.groups())) + return epoch, nums, pre, post, dev, local + + +_normalized_key = _pep_440_key + + +class NormalizedVersion(Version): + """A rational version. + + Good: + 1.2 # equivalent to "1.2.0" + 1.2.0 + 1.2a1 + 1.2.3a2 + 1.2.3b1 + 1.2.3c1 + 1.2.3.4 + TODO: fill this out + + Bad: + 1 # minimum two numbers + 1.2a # release level must have a release serial + 1.2.3b + """ + def parse(self, s): + result = _normalized_key(s) + # _normalized_key loses trailing zeroes in the release + # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0 + # However, PEP 440 prefix matching needs it: for example, + # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0). + m = PEP440_VERSION_RE.match(s) # must succeed + groups = m.groups() + self._release_clause = tuple(int(v) for v in groups[1].split('.')) + return result + + PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev']) + + @property + def is_prerelease(self): + return any(t[0] in self.PREREL_TAGS for t in self._parts if t) + + +def _match_prefix(x, y): + x = str(x) + y = str(y) + if x == y: + return True + if not x.startswith(y): + return False + n = len(y) + return x[n] == '.' + + +class NormalizedMatcher(Matcher): + version_class = NormalizedVersion + + # value is either a callable or the name of a method + _operators = { + '~=': '_match_compatible', + '<': '_match_lt', + '>': '_match_gt', + '<=': '_match_le', + '>=': '_match_ge', + '==': '_match_eq', + '===': '_match_arbitrary', + '!=': '_match_ne', + } + + def _adjust_local(self, version, constraint, prefix): + if prefix: + strip_local = '+' not in constraint and version._parts[-1] + else: + # both constraint and version are + # NormalizedVersion instances. + # If constraint does not have a local component, + # ensure the version doesn't, either. 
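+            # e.g. matching '1.0+local.1' against '== 1.0' strips '+local.1'
+            # from the version before the comparison is made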
+            strip_local = not constraint._parts[-1] and version._parts[-1]
+        if strip_local:
+            s = version._string.split('+', 1)[0]
+            version = self.version_class(s)
+        return version, constraint
+
+    def _match_lt(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        if version >= constraint:
+            return False
+        release_clause = constraint._release_clause
+        pfx = '.'.join([str(i) for i in release_clause])
+        return not _match_prefix(version, pfx)
+
+    def _match_gt(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        if version <= constraint:
+            return False
+        release_clause = constraint._release_clause
+        pfx = '.'.join([str(i) for i in release_clause])
+        return not _match_prefix(version, pfx)
+
+    def _match_le(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        return version <= constraint
+
+    def _match_ge(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        return version >= constraint
+
+    def _match_eq(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        if not prefix:
+            result = (version == constraint)
+        else:
+            result = _match_prefix(version, constraint)
+        return result
+
+    def _match_arbitrary(self, version, constraint, prefix):
+        return str(version) == str(constraint)
+
+    def _match_ne(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        if not prefix:
+            result = (version != constraint)
+        else:
+            result = not _match_prefix(version, constraint)
+        return result
+
+    def _match_compatible(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        if version == constraint:
+            return True
+        if version < constraint:
+            return False
+#        if not prefix:
+#            return True
+        release_clause = constraint._release_clause
+        if len(release_clause) > 1:
+            release_clause = release_clause[:-1]
+        pfx = '.'.join([str(i) for i in release_clause])
+        return _match_prefix(version, pfx)
+
+_REPLACEMENTS = (
+    (re.compile('[.+-]$'), ''),                     # remove trailing puncts
+    (re.compile(r'^[.](\d)'), r'0.\1'),             # .N -> 0.N at start
+    (re.compile('^[.-]'), ''),                      # remove leading puncts
+    (re.compile(r'^\((.*)\)$'), r'\1'),             # remove parentheses
+    (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'),    # remove leading v(ersion)
+    (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'),        # remove leading r(ev)
+    (re.compile('[.]{2,}'), '.'),                   # multiple runs of '.'
+    (re.compile(r'\b(alfa|apha)\b'), 'alpha'),      # misspelt alpha
+    (re.compile(r'\b(pre-alpha|prealpha)\b'),
+        'pre.alpha'),                               # standardise
+    (re.compile(r'\(beta\)$'), 'beta'),             # remove parentheses
+)
+
+_SUFFIX_REPLACEMENTS = (
+    (re.compile('^[:~._+-]+'), ''),                 # remove leading puncts
+    (re.compile('[,*")([\\]]'), ''),                # remove unwanted chars
+    (re.compile('[~:+_ -]'), '.'),                  # replace illegal chars
+    (re.compile('[.]{2,}'), '.'),                   # multiple runs of '.'
+    (re.compile(r'\.$'), ''),                       # trailing '.'
+)
+
+_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)')
+
+
+def _suggest_semantic_version(s):
+    """
+    Try to suggest a semantic form for a version for which
+    _suggest_normalized_version couldn't come up with anything.
+    """
+    result = s.strip().lower()
+    for pat, repl in _REPLACEMENTS:
+        result = pat.sub(repl, result)
+    if not result:
+        result = '0.0.0'
+
+    # Now look for numeric prefix, and separate it out from
+    # the rest.
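+    # e.g. a cleaned-up '2.1.5b3' splits into the numeric prefix '2.1.5'
+    # and the suffix 'b3'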
+ #import pdb; pdb.set_trace() + m = _NUMERIC_PREFIX.match(result) + if not m: + prefix = '0.0.0' + suffix = result + else: + prefix = m.groups()[0].split('.') + prefix = [int(i) for i in prefix] + while len(prefix) < 3: + prefix.append(0) + if len(prefix) == 3: + suffix = result[m.end():] + else: + suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():] + prefix = prefix[:3] + prefix = '.'.join([str(i) for i in prefix]) + suffix = suffix.strip() + if suffix: + #import pdb; pdb.set_trace() + # massage the suffix. + for pat, repl in _SUFFIX_REPLACEMENTS: + suffix = pat.sub(repl, suffix) + + if not suffix: + result = prefix + else: + sep = '-' if 'dev' in suffix else '+' + result = prefix + sep + suffix + if not is_semver(result): + result = None + return result + + +def _suggest_normalized_version(s): + """Suggest a normalized version close to the given version string. + + If you have a version string that isn't rational (i.e. NormalizedVersion + doesn't like it) then you might be able to get an equivalent (or close) + rational version from this function. + + This does a number of simple normalizations to the given string, based + on observation of versions currently in use on PyPI. Given a dump of + those version during PyCon 2009, 4287 of them: + - 2312 (53.93%) match NormalizedVersion without change + with the automatic suggestion + - 3474 (81.04%) match when using this suggestion method + + @param s {str} An irrational version string. + @returns A rational version string, or None, if couldn't determine one. + """ + try: + _normalized_key(s) + return s # already rational + except UnsupportedVersionError: + pass + + rs = s.lower() + + # part of this could use maketrans + for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'), + ('beta', 'b'), ('rc', 'c'), ('-final', ''), + ('-pre', 'c'), + ('-release', ''), ('.release', ''), ('-stable', ''), + ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''), + ('final', '')): + rs = rs.replace(orig, repl) + + # if something ends with dev or pre, we add a 0 + rs = re.sub(r"pre$", r"pre0", rs) + rs = re.sub(r"dev$", r"dev0", rs) + + # if we have something like "b-2" or "a.2" at the end of the + # version, that is probably beta, alpha, etc + # let's remove the dash or dot + rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs) + + # 1.0-dev-r371 -> 1.0.dev371 + # 0.1-dev-r79 -> 0.1.dev79 + rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs) + + # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1 + rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs) + + # Clean: v0.3, v1.0 + if rs.startswith('v'): + rs = rs[1:] + + # Clean leading '0's on numbers. + #TODO: unintended side-effect on, e.g., "2003.05.09" + # PyPI stats: 77 (~2%) better + rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs) + + # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers + # zero. 
+ # PyPI stats: 245 (7.56%) better + rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs) + + # the 'dev-rNNN' tag is a dev tag + rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs) + + # clean the - when used as a pre delimiter + rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs) + + # a terminal "dev" or "devel" can be changed into ".dev0" + rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs) + + # a terminal "dev" can be changed into ".dev0" + rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs) + + # a terminal "final" or "stable" can be removed + rs = re.sub(r"(final|stable)$", "", rs) + + # The 'r' and the '-' tags are post release tags + # 0.4a1.r10 -> 0.4a1.post10 + # 0.9.33-17222 -> 0.9.33.post17222 + # 0.9.33-r17222 -> 0.9.33.post17222 + rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs) + + # Clean 'r' instead of 'dev' usage: + # 0.9.33+r17222 -> 0.9.33.dev17222 + # 1.0dev123 -> 1.0.dev123 + # 1.0.git123 -> 1.0.dev123 + # 1.0.bzr123 -> 1.0.dev123 + # 0.1a0dev.123 -> 0.1a0.dev123 + # PyPI stats: ~150 (~4%) better + rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs) + + # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage: + # 0.2.pre1 -> 0.2c1 + # 0.2-c1 -> 0.2c1 + # 1.0preview123 -> 1.0c123 + # PyPI stats: ~21 (0.62%) better + rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs) + + # Tcl/Tk uses "px" for their post release markers + rs = re.sub(r"p(\d+)$", r".post\1", rs) + + try: + _normalized_key(rs) + except UnsupportedVersionError: + rs = None + return rs + +# +# Legacy version processing (distribute-compatible) +# + +_VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I) +_VERSION_REPLACE = { + 'pre': 'c', + 'preview': 'c', + '-': 'final-', + 'rc': 'c', + 'dev': '@', + '': None, + '.': None, +} + + +def _legacy_key(s): + def get_parts(s): + result = [] + for p in _VERSION_PART.split(s.lower()): + p = _VERSION_REPLACE.get(p, p) + if p: + if '0' <= p[:1] <= '9': + p = p.zfill(8) + else: + p = '*' + p + result.append(p) + result.append('*final') + return result + + result = [] + for p in get_parts(s): + if p.startswith('*'): + if p < '*final': + while result and result[-1] == '*final-': + result.pop() + while result and result[-1] == '00000000': + result.pop() + result.append(p) + return tuple(result) + + +class LegacyVersion(Version): + def parse(self, s): + return _legacy_key(s) + + @property + def is_prerelease(self): + result = False + for x in self._parts: + if (isinstance(x, string_types) and x.startswith('*') and + x < '*final'): + result = True + break + return result + + +class LegacyMatcher(Matcher): + version_class = LegacyVersion + + _operators = dict(Matcher._operators) + _operators['~='] = '_match_compatible' + + numeric_re = re.compile(r'^(\d+(\.\d+)*)') + + def _match_compatible(self, version, constraint, prefix): + if version < constraint: + return False + m = self.numeric_re.match(str(constraint)) + if not m: + logger.warning('Cannot compute compatible match for version %s ' + ' and constraint %s', version, constraint) + return True + s = m.groups()[0] + if '.' in s: + s = s.rsplit('.', 1)[0] + return _match_prefix(version, s) + +# +# Semantic versioning +# + +_SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)' + r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?' 
+ r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I) + + +def is_semver(s): + return _SEMVER_RE.match(s) + + +def _semantic_key(s): + def make_tuple(s, absent): + if s is None: + result = (absent,) + else: + parts = s[1:].split('.') + # We can't compare ints and strings on Python 3, so fudge it + # by zero-filling numeric values so simulate a numeric comparison + result = tuple([p.zfill(8) if p.isdigit() else p for p in parts]) + return result + + m = is_semver(s) + if not m: + raise UnsupportedVersionError(s) + groups = m.groups() + major, minor, patch = [int(i) for i in groups[:3]] + # choose the '|' and '*' so that versions sort correctly + pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*') + return (major, minor, patch), pre, build + + +class SemanticVersion(Version): + def parse(self, s): + return _semantic_key(s) + + @property + def is_prerelease(self): + return self._parts[1][0] != '|' + + +class SemanticMatcher(Matcher): + version_class = SemanticVersion + + +class VersionScheme(object): + def __init__(self, key, matcher, suggester=None): + self.key = key + self.matcher = matcher + self.suggester = suggester + + def is_valid_version(self, s): + try: + self.matcher.version_class(s) + result = True + except UnsupportedVersionError: + result = False + return result + + def is_valid_matcher(self, s): + try: + self.matcher(s) + result = True + except UnsupportedVersionError: + result = False + return result + + def is_valid_constraint_list(self, s): + """ + Used for processing some metadata fields + """ + return self.is_valid_matcher('dummy_name (%s)' % s) + + def suggest(self, s): + if self.suggester is None: + result = None + else: + result = self.suggester(s) + return result + +_SCHEMES = { + 'normalized': VersionScheme(_normalized_key, NormalizedMatcher, + _suggest_normalized_version), + 'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda self, s: s), + 'semantic': VersionScheme(_semantic_key, SemanticMatcher, + _suggest_semantic_version), +} + +_SCHEMES['default'] = _SCHEMES['normalized'] + + +def get_scheme(name): + if name not in _SCHEMES: + raise ValueError('unknown scheme name: %r' % name) + return _SCHEMES[name] diff --git a/venv/Lib/site-packages/distlib/w32.exe b/venv/Lib/site-packages/distlib/w32.exe new file mode 100644 index 00000000..e6439e9e Binary files /dev/null and b/venv/Lib/site-packages/distlib/w32.exe differ diff --git a/venv/Lib/site-packages/distlib/w64.exe b/venv/Lib/site-packages/distlib/w64.exe new file mode 100644 index 00000000..46139dbf Binary files /dev/null and b/venv/Lib/site-packages/distlib/w64.exe differ diff --git a/venv/Lib/site-packages/distlib/wheel.py b/venv/Lib/site-packages/distlib/wheel.py new file mode 100644 index 00000000..1e2c7a02 --- /dev/null +++ b/venv/Lib/site-packages/distlib/wheel.py @@ -0,0 +1,1018 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2017 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from __future__ import unicode_literals + +import base64 +import codecs +import datetime +import distutils.util +from email import message_from_file +import hashlib +import imp +import json +import logging +import os +import posixpath +import re +import shutil +import sys +import tempfile +import zipfile + +from . 
import __version__, DistlibException
+from .compat import sysconfig, ZipFile, fsdecode, text_type, filter
+from .database import InstalledDistribution
+from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
+                       LEGACY_METADATA_FILENAME)
+from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache,
+                   cached_property, get_cache_base, read_exports, tempdir)
+from .version import NormalizedVersion, UnsupportedVersionError
+
+logger = logging.getLogger(__name__)
+
+cache = None    # created when needed
+
+if hasattr(sys, 'pypy_version_info'):  # pragma: no cover
+    IMP_PREFIX = 'pp'
+elif sys.platform.startswith('java'):  # pragma: no cover
+    IMP_PREFIX = 'jy'
+elif sys.platform == 'cli':  # pragma: no cover
+    IMP_PREFIX = 'ip'
+else:
+    IMP_PREFIX = 'cp'
+
+VER_SUFFIX = sysconfig.get_config_var('py_version_nodot')
+if not VER_SUFFIX:   # pragma: no cover
+    VER_SUFFIX = '%s%s' % sys.version_info[:2]
+PYVER = 'py' + VER_SUFFIX
+IMPVER = IMP_PREFIX + VER_SUFFIX
+
+ARCH = distutils.util.get_platform().replace('-', '_').replace('.', '_')
+
+ABI = sysconfig.get_config_var('SOABI')
+if ABI and ABI.startswith('cpython-'):
+    ABI = ABI.replace('cpython-', 'cp')
+else:
+    def _derive_abi():
+        parts = ['cp', VER_SUFFIX]
+        if sysconfig.get_config_var('Py_DEBUG'):
+            parts.append('d')
+        if sysconfig.get_config_var('WITH_PYMALLOC'):
+            parts.append('m')
+        if sysconfig.get_config_var('Py_UNICODE_SIZE') == 4:
+            parts.append('u')
+        return ''.join(parts)
+    ABI = _derive_abi()
+    del _derive_abi
+
+FILENAME_RE = re.compile(r'''
+(?P<nm>[^-]+)
+-(?P<vn>\d+[^-]*)
+(-(?P<bn>\d+[^-]*))?
+-(?P<py>\w+\d+(\.\w+\d+)*)
+-(?P<bi>\w+)
+-(?P<ar>\w+(\.\w+)*)
+\.whl$
+''', re.IGNORECASE | re.VERBOSE)
+
+NAME_VERSION_RE = re.compile(r'''
+(?P<nm>[^-]+)
+-(?P<vn>\d+[^-]*)
+(-(?P<bn>\d+[^-]*))?$
+''', re.IGNORECASE | re.VERBOSE)
+
+SHEBANG_RE = re.compile(br'\s*#![^\r\n]*')
+SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$')
+SHEBANG_PYTHON = b'#!python'
+SHEBANG_PYTHONW = b'#!pythonw'
+
+if os.sep == '/':
+    to_posix = lambda o: o
+else:
+    to_posix = lambda o: o.replace(os.sep, '/')
+
+
+class Mounter(object):
+    def __init__(self):
+        self.impure_wheels = {}
+        self.libs = {}
+
+    def add(self, pathname, extensions):
+        self.impure_wheels[pathname] = extensions
+        self.libs.update(extensions)
+
+    def remove(self, pathname):
+        extensions = self.impure_wheels.pop(pathname)
+        for k, v in extensions:
+            if k in self.libs:
+                del self.libs[k]
+
+    def find_module(self, fullname, path=None):
+        if fullname in self.libs:
+            result = self
+        else:
+            result = None
+        return result
+
+    def load_module(self, fullname):
+        if fullname in sys.modules:
+            result = sys.modules[fullname]
+        else:
+            if fullname not in self.libs:
+                raise ImportError('unable to find extension for %s' % fullname)
+            result = imp.load_dynamic(fullname, self.libs[fullname])
+            result.__loader__ = self
+            parts = fullname.rsplit('.', 1)
+            if len(parts) > 1:
+                result.__package__ = parts[0]
+        return result
+
+_hook = Mounter()
+
+
+class Wheel(object):
+    """
+    Class to build and install from Wheel files (PEP 427).
+    """
+
+    wheel_version = (1, 1)
+    hash_kind = 'sha256'
+
+    def __init__(self, filename=None, sign=False, verify=False):
+        """
+        Initialise an instance using a (valid) filename.
+ """ + self.sign = sign + self.should_verify = verify + self.buildver = '' + self.pyver = [PYVER] + self.abi = ['none'] + self.arch = ['any'] + self.dirname = os.getcwd() + if filename is None: + self.name = 'dummy' + self.version = '0.1' + self._filename = self.filename + else: + m = NAME_VERSION_RE.match(filename) + if m: + info = m.groupdict('') + self.name = info['nm'] + # Reinstate the local version separator + self.version = info['vn'].replace('_', '-') + self.buildver = info['bn'] + self._filename = self.filename + else: + dirname, filename = os.path.split(filename) + m = FILENAME_RE.match(filename) + if not m: + raise DistlibException('Invalid name or ' + 'filename: %r' % filename) + if dirname: + self.dirname = os.path.abspath(dirname) + self._filename = filename + info = m.groupdict('') + self.name = info['nm'] + self.version = info['vn'] + self.buildver = info['bn'] + self.pyver = info['py'].split('.') + self.abi = info['bi'].split('.') + self.arch = info['ar'].split('.') + + @property + def filename(self): + """ + Build and return a filename from the various components. + """ + if self.buildver: + buildver = '-' + self.buildver + else: + buildver = '' + pyver = '.'.join(self.pyver) + abi = '.'.join(self.abi) + arch = '.'.join(self.arch) + # replace - with _ as a local version separator + version = self.version.replace('-', '_') + return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, + pyver, abi, arch) + + @property + def exists(self): + path = os.path.join(self.dirname, self.filename) + return os.path.isfile(path) + + @property + def tags(self): + for pyver in self.pyver: + for abi in self.abi: + for arch in self.arch: + yield pyver, abi, arch + + @cached_property + def metadata(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + wrapper = codecs.getreader('utf-8') + with ZipFile(pathname, 'r') as zf: + wheel_metadata = self.get_wheel_metadata(zf) + wv = wheel_metadata['Wheel-Version'].split('.', 1) + file_version = tuple([int(i) for i in wv]) + # if file_version < (1, 1): + # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, + # LEGACY_METADATA_FILENAME] + # else: + # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME] + fns = [WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME] + result = None + for fn in fns: + try: + metadata_filename = posixpath.join(info_dir, fn) + with zf.open(metadata_filename) as bf: + wf = wrapper(bf) + result = Metadata(fileobj=wf) + if result: + break + except KeyError: + pass + if not result: + raise ValueError('Invalid wheel, because metadata is ' + 'missing: looked in %s' % ', '.join(fns)) + return result + + def get_wheel_metadata(self, zf): + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + metadata_filename = posixpath.join(info_dir, 'WHEEL') + with zf.open(metadata_filename) as bf: + wf = codecs.getreader('utf-8')(bf) + message = message_from_file(wf) + return dict(message) + + @cached_property + def info(self): + pathname = os.path.join(self.dirname, self.filename) + with ZipFile(pathname, 'r') as zf: + result = self.get_wheel_metadata(zf) + return result + + def process_shebang(self, data): + m = SHEBANG_RE.match(data) + if m: + end = m.end() + shebang, data_after_shebang = data[:end], data[end:] + # Preserve any arguments after the interpreter + if b'pythonw' in shebang.lower(): + shebang_python = SHEBANG_PYTHONW + else: + shebang_python = SHEBANG_PYTHON + m = SHEBANG_DETAIL_RE.match(shebang) 
+ if m: + args = b' ' + m.groups()[-1] + else: + args = b'' + shebang = shebang_python + args + data = shebang + data_after_shebang + else: + cr = data.find(b'\r') + lf = data.find(b'\n') + if cr < 0 or cr > lf: + term = b'\n' + else: + if data[cr:cr + 2] == b'\r\n': + term = b'\r\n' + else: + term = b'\r' + data = SHEBANG_PYTHON + term + data + return data + + def get_hash(self, data, hash_kind=None): + if hash_kind is None: + hash_kind = self.hash_kind + try: + hasher = getattr(hashlib, hash_kind) + except AttributeError: + raise DistlibException('Unsupported hash algorithm: %r' % hash_kind) + result = hasher(data).digest() + result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii') + return hash_kind, result + + def write_record(self, records, record_path, base): + records = list(records) # make a copy, as mutated + p = to_posix(os.path.relpath(record_path, base)) + records.append((p, '', '')) + with CSVWriter(record_path) as writer: + for row in records: + writer.writerow(row) + + def write_records(self, info, libdir, archive_paths): + records = [] + distinfo, info_dir = info + hasher = getattr(hashlib, self.hash_kind) + for ap, p in archive_paths: + with open(p, 'rb') as f: + data = f.read() + digest = '%s=%s' % self.get_hash(data) + size = os.path.getsize(p) + records.append((ap, digest, size)) + + p = os.path.join(distinfo, 'RECORD') + self.write_record(records, p, libdir) + ap = to_posix(os.path.join(info_dir, 'RECORD')) + archive_paths.append((ap, p)) + + def build_zip(self, pathname, archive_paths): + with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf: + for ap, p in archive_paths: + logger.debug('Wrote %s to %s in wheel', p, ap) + zf.write(p, ap) + + def build(self, paths, tags=None, wheel_version=None): + """ + Build a wheel from files in specified paths, and use any specified tags + when determining the name of the wheel. + """ + if tags is None: + tags = {} + + libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0] + if libkey == 'platlib': + is_pure = 'false' + default_pyver = [IMPVER] + default_abi = [ABI] + default_arch = [ARCH] + else: + is_pure = 'true' + default_pyver = [PYVER] + default_abi = ['none'] + default_arch = ['any'] + + self.pyver = tags.get('pyver', default_pyver) + self.abi = tags.get('abi', default_abi) + self.arch = tags.get('arch', default_arch) + + libdir = paths[libkey] + + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + archive_paths = [] + + # First, stuff which is not in site-packages + for key in ('data', 'headers', 'scripts'): + if key not in paths: + continue + path = paths[key] + if os.path.isdir(path): + for root, dirs, files in os.walk(path): + for fn in files: + p = fsdecode(os.path.join(root, fn)) + rp = os.path.relpath(p, path) + ap = to_posix(os.path.join(data_dir, key, rp)) + archive_paths.append((ap, p)) + if key == 'scripts' and not p.endswith('.exe'): + with open(p, 'rb') as f: + data = f.read() + data = self.process_shebang(data) + with open(p, 'wb') as f: + f.write(data) + + # Now, stuff which is in site-packages, other than the + # distinfo stuff. 
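+        # (the .dist-info directory is located during the walk below and
+        # handled separately, once the ordinary modules have been archived)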
+        path = libdir
+        distinfo = None
+        for root, dirs, files in os.walk(path):
+            if root == path:
+                # At the top level only, save distinfo for later
+                # and skip it for now
+                for i, dn in enumerate(dirs):
+                    dn = fsdecode(dn)
+                    if dn.endswith('.dist-info'):
+                        distinfo = os.path.join(root, dn)
+                        del dirs[i]
+                        break
+                assert distinfo, '.dist-info directory expected, not found'
+
+            for fn in files:
+                # comment out next suite to leave .pyc files in
+                if fsdecode(fn).endswith(('.pyc', '.pyo')):
+                    continue
+                p = os.path.join(root, fn)
+                rp = to_posix(os.path.relpath(p, path))
+                archive_paths.append((rp, p))
+
+        # Now distinfo. Assumed to be flat, i.e. os.listdir is enough.
+        files = os.listdir(distinfo)
+        for fn in files:
+            if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'):
+                p = fsdecode(os.path.join(distinfo, fn))
+                ap = to_posix(os.path.join(info_dir, fn))
+                archive_paths.append((ap, p))
+
+        wheel_metadata = [
+            'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version),
+            'Generator: distlib %s' % __version__,
+            'Root-Is-Purelib: %s' % is_pure,
+        ]
+        for pyver, abi, arch in self.tags:
+            wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch))
+        p = os.path.join(distinfo, 'WHEEL')
+        with open(p, 'w') as f:
+            f.write('\n'.join(wheel_metadata))
+        ap = to_posix(os.path.join(info_dir, 'WHEEL'))
+        archive_paths.append((ap, p))
+
+        # sort the entries by archive path. Not needed by any spec, but it
+        # keeps the archive listing and RECORD tidier than they would otherwise
+        # be. Use the number of path segments to keep directory entries together,
+        # and keep the dist-info stuff at the end.
+        def sorter(t):
+            ap = t[0]
+            n = ap.count('/')
+            if '.dist-info' in ap:
+                n += 10000
+            return (n, ap)
+        archive_paths = sorted(archive_paths, key=sorter)
+
+        # Now, at last, RECORD.
+        # Paths in here are archive paths - nothing else makes sense.
+        self.write_records((distinfo, info_dir), libdir, archive_paths)
+        # Now, ready to build the zip file
+        pathname = os.path.join(self.dirname, self.filename)
+        self.build_zip(pathname, archive_paths)
+        return pathname
+
+    def skip_entry(self, arcname):
+        """
+        Determine whether an archive entry should be skipped when verifying
+        or installing.
+        """
+        # The signature file won't be in RECORD,
+        # and we don't currently do anything with it.
+        # We also skip directories, as they won't be in RECORD
+        # either. See:
+        #
+        # https://github.com/pypa/wheel/issues/294
+        # https://github.com/pypa/wheel/issues/287
+        # https://github.com/pypa/wheel/pull/289
+        #
+        return arcname.endswith(('/', '/RECORD.jws'))
+
+    def install(self, paths, maker, **kwargs):
+        """
+        Install a wheel to the specified paths. If kwarg ``warner`` is
+        specified, it should be a callable, which will be called with two
+        tuples indicating the wheel version of this software and the wheel
+        version in the file, if there is a discrepancy in the versions.
+        This can be used to issue any warnings or to raise any exceptions.
+        If kwarg ``lib_only`` is True, only the purelib/platlib files are
+        installed, and the headers, scripts, data and dist-info metadata are
+        not written. If kwarg ``bytecode_hashed_invalidation`` is True, written
+        bytecode will try to use file-hash based invalidation (PEP-552) on
+        supported interpreter versions (CPython 2.7+).
+
+        The return value is a :class:`InstalledDistribution` instance unless
+        ``options.lib_only`` is True, in which case the return value is ``None``.
+ """ + + dry_run = maker.dry_run + warner = kwargs.get('warner') + lib_only = kwargs.get('lib_only', False) + bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False) + + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME) + wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') + record_name = posixpath.join(info_dir, 'RECORD') + + wrapper = codecs.getreader('utf-8') + + with ZipFile(pathname, 'r') as zf: + with zf.open(wheel_metadata_name) as bwf: + wf = wrapper(bwf) + message = message_from_file(wf) + wv = message['Wheel-Version'].split('.', 1) + file_version = tuple([int(i) for i in wv]) + if (file_version != self.wheel_version) and warner: + warner(self.wheel_version, file_version) + + if message['Root-Is-Purelib'] == 'true': + libdir = paths['purelib'] + else: + libdir = paths['platlib'] + + records = {} + with zf.open(record_name) as bf: + with CSVReader(stream=bf) as reader: + for row in reader: + p = row[0] + records[p] = row + + data_pfx = posixpath.join(data_dir, '') + info_pfx = posixpath.join(info_dir, '') + script_pfx = posixpath.join(data_dir, 'scripts', '') + + # make a new instance rather than a copy of maker's, + # as we mutate it + fileop = FileOperator(dry_run=dry_run) + fileop.record = True # so we can rollback if needed + + bc = not sys.dont_write_bytecode # Double negatives. Lovely! + + outfiles = [] # for RECORD writing + + # for script copying/shebang processing + workdir = tempfile.mkdtemp() + # set target dir later + # we default add_launchers to False, as the + # Python Launcher should be used instead + maker.source_dir = workdir + maker.target_dir = None + try: + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + if self.skip_entry(u_arcname): + continue + row = records[u_arcname] + if row[2] and str(zinfo.file_size) != row[2]: + raise DistlibException('size mismatch for ' + '%s' % u_arcname) + if row[1]: + kind, value = row[1].split('=', 1) + with zf.open(arcname) as bf: + data = bf.read() + _, digest = self.get_hash(data, kind) + if digest != value: + raise DistlibException('digest mismatch for ' + '%s' % arcname) + + if lib_only and u_arcname.startswith((info_pfx, data_pfx)): + logger.debug('lib_only: skipping %s', u_arcname) + continue + is_script = (u_arcname.startswith(script_pfx) + and not u_arcname.endswith('.exe')) + + if u_arcname.startswith(data_pfx): + _, where, rp = u_arcname.split('/', 2) + outfile = os.path.join(paths[where], convert_path(rp)) + else: + # meant for site-packages. 
+ if u_arcname in (wheel_metadata_name, record_name): + continue + outfile = os.path.join(libdir, convert_path(u_arcname)) + if not is_script: + with zf.open(arcname) as bf: + fileop.copy_stream(bf, outfile) + outfiles.append(outfile) + # Double check the digest of the written file + if not dry_run and row[1]: + with open(outfile, 'rb') as bf: + data = bf.read() + _, newdigest = self.get_hash(data, kind) + if newdigest != digest: + raise DistlibException('digest mismatch ' + 'on write for ' + '%s' % outfile) + if bc and outfile.endswith('.py'): + try: + pyc = fileop.byte_compile(outfile, + hashed_invalidation=bc_hashed_invalidation) + outfiles.append(pyc) + except Exception: + # Don't give up if byte-compilation fails, + # but log it and perhaps warn the user + logger.warning('Byte-compilation failed', + exc_info=True) + else: + fn = os.path.basename(convert_path(arcname)) + workname = os.path.join(workdir, fn) + with zf.open(arcname) as bf: + fileop.copy_stream(bf, workname) + + dn, fn = os.path.split(outfile) + maker.target_dir = dn + filenames = maker.make(fn) + fileop.set_executable_mode(filenames) + outfiles.extend(filenames) + + if lib_only: + logger.debug('lib_only: returning None') + dist = None + else: + # Generate scripts + + # Try to get pydist.json so we can see if there are + # any commands to generate. If this fails (e.g. because + # of a legacy wheel), log a warning but don't give up. + commands = None + file_version = self.info['Wheel-Version'] + if file_version == '1.0': + # Use legacy info + ep = posixpath.join(info_dir, 'entry_points.txt') + try: + with zf.open(ep) as bwf: + epdata = read_exports(bwf) + commands = {} + for key in ('console', 'gui'): + k = '%s_scripts' % key + if k in epdata: + commands['wrap_%s' % key] = d = {} + for v in epdata[k].values(): + s = '%s:%s' % (v.prefix, v.suffix) + if v.flags: + s += ' [%s]' % ','.join(v.flags) + d[v.name] = s + except Exception: + logger.warning('Unable to read legacy script ' + 'metadata, so cannot generate ' + 'scripts') + else: + try: + with zf.open(metadata_name) as bwf: + wf = wrapper(bwf) + commands = json.load(wf).get('extensions') + if commands: + commands = commands.get('python.commands') + except Exception: + logger.warning('Unable to read JSON metadata, so ' + 'cannot generate scripts') + if commands: + console_scripts = commands.get('wrap_console', {}) + gui_scripts = commands.get('wrap_gui', {}) + if console_scripts or gui_scripts: + script_dir = paths.get('scripts', '') + if not os.path.isdir(script_dir): + raise ValueError('Valid script path not ' + 'specified') + maker.target_dir = script_dir + for k, v in console_scripts.items(): + script = '%s = %s' % (k, v) + filenames = maker.make(script) + fileop.set_executable_mode(filenames) + + if gui_scripts: + options = {'gui': True } + for k, v in gui_scripts.items(): + script = '%s = %s' % (k, v) + filenames = maker.make(script, options) + fileop.set_executable_mode(filenames) + + p = os.path.join(libdir, info_dir) + dist = InstalledDistribution(p) + + # Write SHARED + paths = dict(paths) # don't change passed in dict + del paths['purelib'] + del paths['platlib'] + paths['lib'] = libdir + p = dist.write_shared_locations(paths, dry_run) + if p: + outfiles.append(p) + + # Write RECORD + dist.write_installed_files(outfiles, paths['prefix'], + dry_run) + return dist + except Exception: # pragma: no cover + logger.exception('installation failed.') + fileop.rollback() + raise + finally: + shutil.rmtree(workdir) + + def _get_dylib_cache(self): + global cache + if 
cache is None: + # Use native string to avoid issues on 2.x: see Python #20140. + base = os.path.join(get_cache_base(), str('dylib-cache'), + '%s.%s' % sys.version_info[:2]) + cache = Cache(base) + return cache + + def _get_extensions(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + arcname = posixpath.join(info_dir, 'EXTENSIONS') + wrapper = codecs.getreader('utf-8') + result = [] + with ZipFile(pathname, 'r') as zf: + try: + with zf.open(arcname) as bf: + wf = wrapper(bf) + extensions = json.load(wf) + cache = self._get_dylib_cache() + prefix = cache.prefix_to_dir(pathname) + cache_base = os.path.join(cache.base, prefix) + if not os.path.isdir(cache_base): + os.makedirs(cache_base) + for name, relpath in extensions.items(): + dest = os.path.join(cache_base, convert_path(relpath)) + if not os.path.exists(dest): + extract = True + else: + file_time = os.stat(dest).st_mtime + file_time = datetime.datetime.fromtimestamp(file_time) + info = zf.getinfo(relpath) + wheel_time = datetime.datetime(*info.date_time) + extract = wheel_time > file_time + if extract: + zf.extract(relpath, cache_base) + result.append((name, dest)) + except KeyError: + pass + return result + + def is_compatible(self): + """ + Determine if a wheel is compatible with the running system. + """ + return is_compatible(self) + + def is_mountable(self): + """ + Determine if a wheel is asserted as mountable by its metadata. + """ + return True # for now - metadata details TBD + + def mount(self, append=False): + pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) + if not self.is_compatible(): + msg = 'Wheel %s not compatible with this Python.' % pathname + raise DistlibException(msg) + if not self.is_mountable(): + msg = 'Wheel %s is marked as not mountable.' 
% pathname
+            raise DistlibException(msg)
+        if pathname in sys.path:
+            logger.debug('%s already in path', pathname)
+        else:
+            if append:
+                sys.path.append(pathname)
+            else:
+                sys.path.insert(0, pathname)
+            extensions = self._get_extensions()
+            if extensions:
+                if _hook not in sys.meta_path:
+                    sys.meta_path.append(_hook)
+                _hook.add(pathname, extensions)
+
+    def unmount(self):
+        pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
+        if pathname not in sys.path:
+            logger.debug('%s not in path', pathname)
+        else:
+            sys.path.remove(pathname)
+            if pathname in _hook.impure_wheels:
+                _hook.remove(pathname)
+            if not _hook.impure_wheels:
+                if _hook in sys.meta_path:
+                    sys.meta_path.remove(_hook)
+
+    def verify(self):
+        pathname = os.path.join(self.dirname, self.filename)
+        name_ver = '%s-%s' % (self.name, self.version)
+        data_dir = '%s.data' % name_ver
+        info_dir = '%s.dist-info' % name_ver
+
+        metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
+        wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
+        record_name = posixpath.join(info_dir, 'RECORD')
+
+        wrapper = codecs.getreader('utf-8')
+
+        with ZipFile(pathname, 'r') as zf:
+            with zf.open(wheel_metadata_name) as bwf:
+                wf = wrapper(bwf)
+                message = message_from_file(wf)
+            wv = message['Wheel-Version'].split('.', 1)
+            file_version = tuple([int(i) for i in wv])
+            # TODO version verification
+
+            records = {}
+            with zf.open(record_name) as bf:
+                with CSVReader(stream=bf) as reader:
+                    for row in reader:
+                        p = row[0]
+                        records[p] = row
+
+            for zinfo in zf.infolist():
+                arcname = zinfo.filename
+                if isinstance(arcname, text_type):
+                    u_arcname = arcname
+                else:
+                    u_arcname = arcname.decode('utf-8')
+                # See issue #115: some wheels have .. in their entries, but
+                # in the filename ... e.g. __main__..py ! So the check is
+                # updated to look for .. in the directory portions
+                p = u_arcname.split('/')
+                if '..' in p:
+                    raise DistlibException('invalid entry in '
+                                           'wheel: %r' % u_arcname)
+
+                if self.skip_entry(u_arcname):
+                    continue
+                row = records[u_arcname]
+                if row[2] and str(zinfo.file_size) != row[2]:
+                    raise DistlibException('size mismatch for '
+                                           '%s' % u_arcname)
+                if row[1]:
+                    kind, value = row[1].split('=', 1)
+                    with zf.open(arcname) as bf:
+                        data = bf.read()
+                    _, digest = self.get_hash(data, kind)
+                    if digest != value:
+                        raise DistlibException('digest mismatch for '
+                                               '%s' % arcname)
+
+    def update(self, modifier, dest_dir=None, **kwargs):
+        """
+        Update the contents of a wheel in a generic way. The modifier should
+        be a callable which expects a dictionary argument: its keys are
+        archive-entry paths, and its values are absolute filesystem paths
+        where the contents of the corresponding archive entries can be found.
+        The modifier is free to change the contents of the files pointed to,
+        add new entries and remove entries, before returning. This method will
+        extract the entire contents of the wheel to a temporary location, call
+        the modifier, and then use the passed (and possibly updated)
+        dictionary to write a new wheel. If ``dest_dir`` is specified, the new
+        wheel is written there -- otherwise, the original wheel is overwritten.
+
+        The modifier should return True if it updated the wheel, else False.
+        This method returns the same value the modifier returns.
+ """ + + def get_version(path_map, info_dir): + version = path = None + key = '%s/%s' % (info_dir, LEGACY_METADATA_FILENAME) + if key not in path_map: + key = '%s/PKG-INFO' % info_dir + if key in path_map: + path = path_map[key] + version = Metadata(path=path).version + return version, path + + def update_version(version, path): + updated = None + try: + v = NormalizedVersion(version) + i = version.find('-') + if i < 0: + updated = '%s+1' % version + else: + parts = [int(s) for s in version[i + 1:].split('.')] + parts[-1] += 1 + updated = '%s+%s' % (version[:i], + '.'.join(str(i) for i in parts)) + except UnsupportedVersionError: + logger.debug('Cannot update non-compliant (PEP-440) ' + 'version %r', version) + if updated: + md = Metadata(path=path) + md.version = updated + legacy = path.endswith(LEGACY_METADATA_FILENAME) + md.write(path=path, legacy=legacy) + logger.debug('Version updated from %r to %r', version, + updated) + + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + record_name = posixpath.join(info_dir, 'RECORD') + with tempdir() as workdir: + with ZipFile(pathname, 'r') as zf: + path_map = {} + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + if u_arcname == record_name: + continue + if '..' in u_arcname: + raise DistlibException('invalid entry in ' + 'wheel: %r' % u_arcname) + zf.extract(zinfo, workdir) + path = os.path.join(workdir, convert_path(u_arcname)) + path_map[u_arcname] = path + + # Remember the version. + original_version, _ = get_version(path_map, info_dir) + # Files extracted. Call the modifier. + modified = modifier(path_map, **kwargs) + if modified: + # Something changed - need to build a new wheel. + current_version, path = get_version(path_map, info_dir) + if current_version and (current_version == original_version): + # Add or update local version to signify changes. + update_version(current_version, path) + # Decide where the new wheel goes. + if dest_dir is None: + fd, newpath = tempfile.mkstemp(suffix='.whl', + prefix='wheel-update-', + dir=workdir) + os.close(fd) + else: + if not os.path.isdir(dest_dir): + raise DistlibException('Not a directory: %r' % dest_dir) + newpath = os.path.join(dest_dir, self.filename) + archive_paths = list(path_map.items()) + distinfo = os.path.join(workdir, info_dir) + info = distinfo, info_dir + self.write_records(info, workdir, archive_paths) + self.build_zip(newpath, archive_paths) + if dest_dir is None: + shutil.copyfile(newpath, pathname) + return modified + +def compatible_tags(): + """ + Return (pyver, abi, arch) tuples compatible with this Python. 
+ """ + versions = [VER_SUFFIX] + major = VER_SUFFIX[0] + for minor in range(sys.version_info[1] - 1, - 1, -1): + versions.append(''.join([major, str(minor)])) + + abis = [] + for suffix, _, _ in imp.get_suffixes(): + if suffix.startswith('.abi'): + abis.append(suffix.split('.', 2)[1]) + abis.sort() + if ABI != 'none': + abis.insert(0, ABI) + abis.append('none') + result = [] + + arches = [ARCH] + if sys.platform == 'darwin': + m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH) + if m: + name, major, minor, arch = m.groups() + minor = int(minor) + matches = [arch] + if arch in ('i386', 'ppc'): + matches.append('fat') + if arch in ('i386', 'ppc', 'x86_64'): + matches.append('fat3') + if arch in ('ppc64', 'x86_64'): + matches.append('fat64') + if arch in ('i386', 'x86_64'): + matches.append('intel') + if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'): + matches.append('universal') + while minor >= 0: + for match in matches: + s = '%s_%s_%s_%s' % (name, major, minor, match) + if s != ARCH: # already there + arches.append(s) + minor -= 1 + + # Most specific - our Python version, ABI and arch + for abi in abis: + for arch in arches: + result.append((''.join((IMP_PREFIX, versions[0])), abi, arch)) + + # where no ABI / arch dependency, but IMP_PREFIX dependency + for i, version in enumerate(versions): + result.append((''.join((IMP_PREFIX, version)), 'none', 'any')) + if i == 0: + result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any')) + + # no IMP_PREFIX, ABI or arch dependency + for i, version in enumerate(versions): + result.append((''.join(('py', version)), 'none', 'any')) + if i == 0: + result.append((''.join(('py', version[0])), 'none', 'any')) + return set(result) + + +COMPATIBLE_TAGS = compatible_tags() + +del compatible_tags + + +def is_compatible(wheel, tags=None): + if not isinstance(wheel, Wheel): + wheel = Wheel(wheel) # assume it's a filename + result = False + if tags is None: + tags = COMPATIBLE_TAGS + for ver, abi, arch in tags: + if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch: + result = True + break + return result diff --git a/venv/Lib/site-packages/filelock-3.0.12.dist-info/INSTALLER b/venv/Lib/site-packages/filelock-3.0.12.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/venv/Lib/site-packages/filelock-3.0.12.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/Lib/site-packages/filelock-3.0.12.dist-info/LICENSE b/venv/Lib/site-packages/filelock-3.0.12.dist-info/LICENSE new file mode 100644 index 00000000..cf1ab25d --- /dev/null +++ b/venv/Lib/site-packages/filelock-3.0.12.dist-info/LICENSE @@ -0,0 +1,24 @@ +This is free and unencumbered software released into the public domain. + +Anyone is free to copy, modify, publish, use, compile, sell, or +distribute this software, either in source code form or as a compiled +binary, for any purpose, commercial or non-commercial, and by any +means. + +In jurisdictions that recognize copyright laws, the author or authors +of this software dedicate any and all copyright interest in the +software to the public domain. We make this dedication for the benefit +of the public at large and to the detriment of our heirs and +successors. We intend this dedication to be an overt act of +relinquishment in perpetuity of all present and future rights to this +software under copyright law. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR +OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + +For more information, please refer to diff --git a/venv/Lib/site-packages/filelock-3.0.12.dist-info/METADATA b/venv/Lib/site-packages/filelock-3.0.12.dist-info/METADATA new file mode 100644 index 00000000..79d8d479 --- /dev/null +++ b/venv/Lib/site-packages/filelock-3.0.12.dist-info/METADATA @@ -0,0 +1,156 @@ +Metadata-Version: 2.1 +Name: filelock +Version: 3.0.12 +Summary: A platform independent file lock. +Home-page: https://github.com/benediktschmitt/py-filelock +Author: Benedikt Schmitt +Author-email: benedikt@benediktschmitt.de +License: Public Domain +Download-URL: https://github.com/benediktschmitt/py-filelock/archive/master.zip +Platform: UNKNOWN +Classifier: License :: Public Domain +Classifier: Development Status :: 5 - Production/Stable +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Intended Audience :: Developers +Classifier: Topic :: System +Classifier: Topic :: Internet +Classifier: Topic :: Software Development :: Libraries +Description-Content-Type: text/markdown + +# py-filelock + +![travis-ci](https://travis-ci.org/benediktschmitt/py-filelock.svg?branch=master) + +This package contains a single module, which implements a platform independent +file lock in Python, which provides a simple way of inter-process communication: + +```Python +from filelock import Timeout, FileLock + +lock = FileLock("high_ground.txt.lock") +with lock: + open("high_ground.txt", "a").write("You were the chosen one.") +``` + +**Don't use** a *FileLock* to lock the file you want to write to, instead create +a separate *.lock* file as shown above. + +![animated example](https://raw.githubusercontent.com/benediktschmitt/py-filelock/master/example/example.gif) + + +## Similar libraries + +Perhaps you are looking for something like + +* https://pypi.python.org/pypi/pid/2.1.1 +* https://docs.python.org/3.6/library/msvcrt.html#msvcrt.locking +* or https://docs.python.org/3/library/fcntl.html#fcntl.flock + + +## Installation + +*py-filelock* is available via PyPi: + +``` +$ pip3 install filelock +``` + + +## Documentation + +The documentation for the API is available on +[readthedocs.org](https://filelock.readthedocs.io/). + + +### Examples + +A *FileLock* is used to indicate another process of your application that a +resource or working +directory is currently used. 
To do so, create a *FileLock* first: + +```Python +from filelock import Timeout, FileLock + +file_path = "high_ground.txt" +lock_path = "high_ground.txt.lock" + +lock = FileLock(lock_path, timeout=1) +``` + +The lock object supports multiple ways for acquiring the lock, including the +ones used to acquire standard Python thread locks: + +```Python +with lock: + open(file_path, "a").write("Hello there!") + +lock.acquire() +try: + open(file_path, "a").write("General Kenobi!") +finally: + lock.release() +``` + +The *acquire()* method accepts also a *timeout* parameter. If the lock cannot be +acquired within *timeout* seconds, a *Timeout* exception is raised: + +```Python +try: + with lock.acquire(timeout=10): + open(file_path, "a").write("I have a bad feeling about this.") +except Timeout: + print("Another instance of this application currently holds the lock.") +``` + +The lock objects are recursive locks, which means that once acquired, they will +not block on successive lock requests: + +```Python +def cite1(): + with lock: + open(file_path, "a").write("I hate it when he does that.") + +def cite2(): + with lock: + open(file_path, "a").write("You don't want to sell me death sticks.") + +# The lock is acquired here. +with lock: + cite1() + cite2() + +# And released here. +``` + + +## FileLock vs SoftFileLock + +The *FileLock* is platform dependent while the *SoftFileLock* is not. Use the +*FileLock* if all instances of your application are running on the same host and +a *SoftFileLock* otherwise. + +The *SoftFileLock* only watches the existence of the lock file. This makes it +ultra portable, but also more prone to dead locks if the application crashes. +You can simply delete the lock file in such cases. + + +## Contributions + +Contributions are always welcome, please make sure they pass all tests before +creating a pull request. Never hesitate to open a new issue, although it may +take some time for me to respond. + + +## License + +This package is [public domain](./LICENSE.rst). 
+ + diff --git a/venv/Lib/site-packages/filelock-3.0.12.dist-info/RECORD b/venv/Lib/site-packages/filelock-3.0.12.dist-info/RECORD new file mode 100644 index 00000000..82f7cd70 --- /dev/null +++ b/venv/Lib/site-packages/filelock-3.0.12.dist-info/RECORD @@ -0,0 +1,8 @@ +__pycache__/filelock.cpython-36.pyc,, +filelock-3.0.12.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +filelock-3.0.12.dist-info/LICENSE,sha256=iNm062BXnBkew5HKBMFhMFctfu3EqG2qWL8oxuFMm80,1210 +filelock-3.0.12.dist-info/METADATA,sha256=gjzbv9nxtD-Rj2ysjUuG7SLZCHUQl5hMy68Jij8soPw,4343 +filelock-3.0.12.dist-info/RECORD,, +filelock-3.0.12.dist-info/WHEEL,sha256=S8S5VL-stOTSZDYxHyf0KP7eds0J72qrK0Evu3TfyAY,92 +filelock-3.0.12.dist-info/top_level.txt,sha256=NDrf9i5BNogz4hEdsr6Hi7Ws3TlSSKY4Q2Y9_-i2GwU,9 +filelock.py,sha256=5DQTtOaQq7-vgLkZzvOhqhVMh_umfydWgSA8Vuzmf8M,13229 diff --git a/venv/Lib/site-packages/filelock-3.0.12.dist-info/WHEEL b/venv/Lib/site-packages/filelock-3.0.12.dist-info/WHEEL new file mode 100644 index 00000000..c57a5970 --- /dev/null +++ b/venv/Lib/site-packages/filelock-3.0.12.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.4) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/Lib/site-packages/filelock-3.0.12.dist-info/top_level.txt b/venv/Lib/site-packages/filelock-3.0.12.dist-info/top_level.txt new file mode 100644 index 00000000..83c2e357 --- /dev/null +++ b/venv/Lib/site-packages/filelock-3.0.12.dist-info/top_level.txt @@ -0,0 +1 @@ +filelock diff --git a/venv/Lib/site-packages/filelock.py b/venv/Lib/site-packages/filelock.py new file mode 100644 index 00000000..4c981672 --- /dev/null +++ b/venv/Lib/site-packages/filelock.py @@ -0,0 +1,451 @@ +# This is free and unencumbered software released into the public domain. +# +# Anyone is free to copy, modify, publish, use, compile, sell, or +# distribute this software, either in source code form or as a compiled +# binary, for any purpose, commercial or non-commercial, and by any +# means. +# +# In jurisdictions that recognize copyright laws, the author or authors +# of this software dedicate any and all copyright interest in the +# software to the public domain. We make this dedication for the benefit +# of the public at large and to the detriment of our heirs and +# successors. We intend this dedication to be an overt act of +# relinquishment in perpetuity of all present and future rights to this +# software under copyright law. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR +# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. +# +# For more information, please refer to + +""" +A platform independent file lock that supports the with-statement. 
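+
+Typical use (mirrors the project README)::
+
+    from filelock import FileLock
+
+    with FileLock("my_file.txt.lock"):
+        pass  # exclusive access to the guarded resource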
+""" + + +# Modules +# ------------------------------------------------ +import logging +import os +import threading +import time +try: + import warnings +except ImportError: + warnings = None + +try: + import msvcrt +except ImportError: + msvcrt = None + +try: + import fcntl +except ImportError: + fcntl = None + + +# Backward compatibility +# ------------------------------------------------ +try: + TimeoutError +except NameError: + TimeoutError = OSError + + +# Data +# ------------------------------------------------ +__all__ = [ + "Timeout", + "BaseFileLock", + "WindowsFileLock", + "UnixFileLock", + "SoftFileLock", + "FileLock" +] + +__version__ = "3.0.12" + + +_logger = None +def logger(): + """Returns the logger instance used in this module.""" + global _logger + _logger = _logger or logging.getLogger(__name__) + return _logger + + +# Exceptions +# ------------------------------------------------ +class Timeout(TimeoutError): + """ + Raised when the lock could not be acquired in *timeout* + seconds. + """ + + def __init__(self, lock_file): + """ + """ + #: The path of the file lock. + self.lock_file = lock_file + return None + + def __str__(self): + temp = "The file lock '{}' could not be acquired."\ + .format(self.lock_file) + return temp + + +# Classes +# ------------------------------------------------ + +# This is a helper class which is returned by :meth:`BaseFileLock.acquire` +# and wraps the lock to make sure __enter__ is not called twice when entering +# the with statement. +# If we would simply return *self*, the lock would be acquired again +# in the *__enter__* method of the BaseFileLock, but not released again +# automatically. +# +# :seealso: issue #37 (memory leak) +class _Acquire_ReturnProxy(object): + + def __init__(self, lock): + self.lock = lock + return None + + def __enter__(self): + return self.lock + + def __exit__(self, exc_type, exc_value, traceback): + self.lock.release() + return None + + +class BaseFileLock(object): + """ + Implements the base class of a file lock. + """ + + def __init__(self, lock_file, timeout = -1): + """ + """ + # The path to the lock file. + self._lock_file = lock_file + + # The file descriptor for the *_lock_file* as it is returned by the + # os.open() function. + # This file lock is only NOT None, if the object currently holds the + # lock. + self._lock_file_fd = None + + # The default timeout value. + self.timeout = timeout + + # We use this lock primarily for the lock counter. + self._thread_lock = threading.Lock() + + # The lock counter is used for implementing the nested locking + # mechanism. Whenever the lock is acquired, the counter is increased and + # the lock is only released, when this value is 0 again. + self._lock_counter = 0 + return None + + @property + def lock_file(self): + """ + The path to the lock file. + """ + return self._lock_file + + @property + def timeout(self): + """ + You can set a default timeout for the filelock. It will be used as + fallback value in the acquire method, if no timeout value (*None*) is + given. + + If you want to disable the timeout, set it to a negative value. + + A timeout of 0 means, that there is exactly one attempt to acquire the + file lock. + + .. versionadded:: 2.0.0 + """ + return self._timeout + + @timeout.setter + def timeout(self, value): + """ + """ + self._timeout = float(value) + return None + + # Platform dependent locking + # -------------------------------------------- + + def _acquire(self): + """ + Platform dependent. 
If the file lock could be + acquired, self._lock_file_fd holds the file descriptor + of the lock file. + """ + raise NotImplementedError() + + def _release(self): + """ + Releases the lock and sets self._lock_file_fd to None. + """ + raise NotImplementedError() + + # Platform independent methods + # -------------------------------------------- + + @property + def is_locked(self): + """ + True, if the object holds the file lock. + + .. versionchanged:: 2.0.0 + + This was previously a method and is now a property. + """ + return self._lock_file_fd is not None + + def acquire(self, timeout=None, poll_intervall=0.05): + """ + Acquires the file lock or fails with a :exc:`Timeout` error. + + .. code-block:: python + + # You can use this method in the context manager (recommended) + with lock.acquire(): + pass + + # Or use an equivalent try-finally construct: + lock.acquire() + try: + pass + finally: + lock.release() + + :arg float timeout: + The maximum time waited for the file lock. + If ``timeout < 0``, there is no timeout and this method will + block until the lock could be acquired. + If ``timeout`` is None, the default :attr:`~timeout` is used. + + :arg float poll_intervall: + We check once in *poll_intervall* seconds if we can acquire the + file lock. + + :raises Timeout: + if the lock could not be acquired in *timeout* seconds. + + .. versionchanged:: 2.0.0 + + This method returns now a *proxy* object instead of *self*, + so that it can be used in a with statement without side effects. + """ + # Use the default timeout, if no timeout is provided. + if timeout is None: + timeout = self.timeout + + # Increment the number right at the beginning. + # We can still undo it, if something fails. + with self._thread_lock: + self._lock_counter += 1 + + lock_id = id(self) + lock_filename = self._lock_file + start_time = time.time() + try: + while True: + with self._thread_lock: + if not self.is_locked: + logger().debug('Attempting to acquire lock %s on %s', lock_id, lock_filename) + self._acquire() + + if self.is_locked: + logger().info('Lock %s acquired on %s', lock_id, lock_filename) + break + elif timeout >= 0 and time.time() - start_time > timeout: + logger().debug('Timeout on acquiring lock %s on %s', lock_id, lock_filename) + raise Timeout(self._lock_file) + else: + logger().debug( + 'Lock %s not acquired on %s, waiting %s seconds ...', + lock_id, lock_filename, poll_intervall + ) + time.sleep(poll_intervall) + except: + # Something did go wrong, so decrement the counter. + with self._thread_lock: + self._lock_counter = max(0, self._lock_counter - 1) + + raise + return _Acquire_ReturnProxy(lock = self) + + def release(self, force = False): + """ + Releases the file lock. + + Please note, that the lock is only completly released, if the lock + counter is 0. + + Also note, that the lock file itself is not automatically deleted. + + :arg bool force: + If true, the lock counter is ignored and the lock is released in + every case. 
+ """ + with self._thread_lock: + + if self.is_locked: + self._lock_counter -= 1 + + if self._lock_counter == 0 or force: + lock_id = id(self) + lock_filename = self._lock_file + + logger().debug('Attempting to release lock %s on %s', lock_id, lock_filename) + self._release() + self._lock_counter = 0 + logger().info('Lock %s released on %s', lock_id, lock_filename) + + return None + + def __enter__(self): + self.acquire() + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.release() + return None + + def __del__(self): + self.release(force = True) + return None + + +# Windows locking mechanism +# ~~~~~~~~~~~~~~~~~~~~~~~~~ + +class WindowsFileLock(BaseFileLock): + """ + Uses the :func:`msvcrt.locking` function to hard lock the lock file on + windows systems. + """ + + def _acquire(self): + open_mode = os.O_RDWR | os.O_CREAT | os.O_TRUNC + + try: + fd = os.open(self._lock_file, open_mode) + except OSError: + pass + else: + try: + msvcrt.locking(fd, msvcrt.LK_NBLCK, 1) + except (IOError, OSError): + os.close(fd) + else: + self._lock_file_fd = fd + return None + + def _release(self): + fd = self._lock_file_fd + self._lock_file_fd = None + msvcrt.locking(fd, msvcrt.LK_UNLCK, 1) + os.close(fd) + + try: + os.remove(self._lock_file) + # Probably another instance of the application + # that acquired the file lock. + except OSError: + pass + return None + +# Unix locking mechanism +# ~~~~~~~~~~~~~~~~~~~~~~ + +class UnixFileLock(BaseFileLock): + """ + Uses the :func:`fcntl.flock` to hard lock the lock file on unix systems. + """ + + def _acquire(self): + open_mode = os.O_RDWR | os.O_CREAT | os.O_TRUNC + fd = os.open(self._lock_file, open_mode) + + try: + fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB) + except (IOError, OSError): + os.close(fd) + else: + self._lock_file_fd = fd + return None + + def _release(self): + # Do not remove the lockfile: + # + # https://github.com/benediktschmitt/py-filelock/issues/31 + # https://stackoverflow.com/questions/17708885/flock-removing-locked-file-without-race-condition + fd = self._lock_file_fd + self._lock_file_fd = None + fcntl.flock(fd, fcntl.LOCK_UN) + os.close(fd) + return None + +# Soft lock +# ~~~~~~~~~ + +class SoftFileLock(BaseFileLock): + """ + Simply watches the existence of the lock file. + """ + + def _acquire(self): + open_mode = os.O_WRONLY | os.O_CREAT | os.O_EXCL | os.O_TRUNC + try: + fd = os.open(self._lock_file, open_mode) + except (IOError, OSError): + pass + else: + self._lock_file_fd = fd + return None + + def _release(self): + os.close(self._lock_file_fd) + self._lock_file_fd = None + + try: + os.remove(self._lock_file) + # The file is already deleted and that's what we want. + except OSError: + pass + return None + + +# Platform filelock +# ~~~~~~~~~~~~~~~~~ + +#: Alias for the lock, which should be used for the current platform. On +#: Windows, this is an alias for :class:`WindowsFileLock`, on Unix for +#: :class:`UnixFileLock` and otherwise for :class:`SoftFileLock`. 
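+#: Typical usage is therefore ``from filelock import FileLock``, letting
+#: this alias select the right implementation for the running platform.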
+FileLock = None + +if msvcrt: + FileLock = WindowsFileLock +elif fcntl: + FileLock = UnixFileLock +else: + FileLock = SoftFileLock + + if warnings is not None: + warnings.warn("only soft file lock is available") diff --git a/venv/Lib/site-packages/importlib_metadata-1.7.0.dist-info/INSTALLER b/venv/Lib/site-packages/importlib_metadata-1.7.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/venv/Lib/site-packages/importlib_metadata-1.7.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/Lib/site-packages/importlib_metadata-1.7.0.dist-info/LICENSE b/venv/Lib/site-packages/importlib_metadata-1.7.0.dist-info/LICENSE new file mode 100644 index 00000000..be7e092b --- /dev/null +++ b/venv/Lib/site-packages/importlib_metadata-1.7.0.dist-info/LICENSE @@ -0,0 +1,13 @@ +Copyright 2017-2019 Jason R. Coombs, Barry Warsaw + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/venv/Lib/site-packages/importlib_metadata-1.7.0.dist-info/METADATA b/venv/Lib/site-packages/importlib_metadata-1.7.0.dist-info/METADATA new file mode 100644 index 00000000..4341d832 --- /dev/null +++ b/venv/Lib/site-packages/importlib_metadata-1.7.0.dist-info/METADATA @@ -0,0 +1,66 @@ +Metadata-Version: 2.1 +Name: importlib-metadata +Version: 1.7.0 +Summary: Read metadata from Python packages +Home-page: http://importlib-metadata.readthedocs.io/ +Author: Barry Warsaw +Author-email: barry@python.org +License: Apache Software License +Platform: UNKNOWN +Classifier: Development Status :: 3 - Alpha +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Topic :: Software Development :: Libraries +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 2 +Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7 +Requires-Dist: zipp (>=0.5) +Requires-Dist: pathlib2 ; python_version < "3" +Requires-Dist: contextlib2 ; python_version < "3" +Requires-Dist: configparser (>=3.5) ; python_version < "3" +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: rst.linker ; extra == 'docs' +Provides-Extra: testing +Requires-Dist: packaging ; extra == 'testing' +Requires-Dist: pep517 ; extra == 'testing' +Requires-Dist: importlib-resources (>=1.3) ; (python_version < "3.9") and extra == 'testing' + +========================= + ``importlib_metadata`` +========================= + +``importlib_metadata`` is a library to access the metadata for a Python +package. It is intended to be ported to Python 3.8. + + +Usage +===== + +See the `online documentation `_ +for usage details. + +`Finder authors +`_ can +also add support for custom package installers. See the above documentation +for details. + + +Caveats +======= + +This project primarily supports third-party packages installed by PyPA +tools (or other conforming packages). It does not support: + +- Packages in the stdlib. +- Packages installed without metadata. 
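+
+To check whether a given package is supported -- that is, whether its
+metadata is visible to this library -- a quick probe (the package name and
+output here are only examples; results depend on what is installed)::
+
+    >>> from importlib_metadata import version
+    >>> version('wheel')
+    '0.32.3'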
+ +Project details +=============== + + * Project home: https://gitlab.com/python-devs/importlib_metadata + * Report bugs at: https://gitlab.com/python-devs/importlib_metadata/issues + * Code hosting: https://gitlab.com/python-devs/importlib_metadata.git + * Documentation: http://importlib_metadata.readthedocs.io/ + + diff --git a/venv/Lib/site-packages/importlib_metadata-1.7.0.dist-info/RECORD b/venv/Lib/site-packages/importlib_metadata-1.7.0.dist-info/RECORD new file mode 100644 index 00000000..912145d4 --- /dev/null +++ b/venv/Lib/site-packages/importlib_metadata-1.7.0.dist-info/RECORD @@ -0,0 +1,33 @@ +importlib_metadata-1.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +importlib_metadata-1.7.0.dist-info/LICENSE,sha256=wNe6dAchmJ1VvVB8D9oTc-gHHadCuaSBAev36sYEM6U,571 +importlib_metadata-1.7.0.dist-info/METADATA,sha256=AvM2AcUhNbF_2Yyo8ttyVBCh_qGbRHaRE3MVgrHYDVw,2144 +importlib_metadata-1.7.0.dist-info/RECORD,, +importlib_metadata-1.7.0.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110 +importlib_metadata-1.7.0.dist-info/top_level.txt,sha256=CO3fD9yylANiXkrMo4qHLV_mqXL2sC5JFKgt1yWAT-A,19 +importlib_metadata/__init__.py,sha256=phnrEcGP-8cF-_ZZ5peJL4cUVAANOK0CpSWC-0-IVAs,18961 +importlib_metadata/__pycache__/__init__.cpython-36.pyc,, +importlib_metadata/__pycache__/_compat.cpython-36.pyc,, +importlib_metadata/_compat.py,sha256=DnM55BbJKFCcZmJOkArmyO76-0g7pA6HEfzSYWXN88k,4417 +importlib_metadata/docs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_metadata/docs/__pycache__/__init__.cpython-36.pyc,, +importlib_metadata/docs/__pycache__/conf.cpython-36.pyc,, +importlib_metadata/docs/changelog.rst,sha256=6EZfl84T0SQHzAXNlTiTegG0cBTa9wiMt0od0ht2n_8,8739 +importlib_metadata/docs/conf.py,sha256=m-b6Mju5gFkpSHh-lyJ4iwqf_8t4LjYYFRumtutQSZc,5578 +importlib_metadata/docs/index.rst,sha256=rbXrDkLAKLIDccqME5u9CCMEfMKprqzQOkIOuwOnfz4,1907 +importlib_metadata/docs/using.rst,sha256=k_L4Hwwsf10ap9xWejyC-_gLz_WtvRfDOzuJA3o7Zw0,9504 +importlib_metadata/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_metadata/tests/__pycache__/__init__.cpython-36.pyc,, +importlib_metadata/tests/__pycache__/fixtures.cpython-36.pyc,, +importlib_metadata/tests/__pycache__/test_api.cpython-36.pyc,, +importlib_metadata/tests/__pycache__/test_integration.cpython-36.pyc,, +importlib_metadata/tests/__pycache__/test_main.cpython-36.pyc,, +importlib_metadata/tests/__pycache__/test_zip.cpython-36.pyc,, +importlib_metadata/tests/data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_metadata/tests/data/__pycache__/__init__.cpython-36.pyc,, +importlib_metadata/tests/data/example-21.12-py3-none-any.whl,sha256=I-kYufETid-tDYyR8f1OFJ3t5u_Io23k0cbQxJTUN4I,1455 +importlib_metadata/tests/data/example-21.12-py3.6.egg,sha256=-EeugFAijkdUO9xyQHTZkQwZoFXK0_QxICBj6R5AAJo,1497 +importlib_metadata/tests/fixtures.py,sha256=Ua_PqyqBhFqkkNGFsXtgMah6vXKQjeqKo1KhhzYdn-w,5752 +importlib_metadata/tests/test_api.py,sha256=YMAGTsRENrtvpw2CSLmRndJMBeT4q_M0GSe-QsnnMZ4,5544 +importlib_metadata/tests/test_integration.py,sha256=ykJpwjSkVwvWHG4gUw4RUrZzU_7JKX8vZyPf_kFIrLE,1579 +importlib_metadata/tests/test_main.py,sha256=dcsDqyxTRtard2j5ysDDvVwfK6vvXdRtZCaQ0QljSR8,9026 +importlib_metadata/tests/test_zip.py,sha256=lOCNPyfJSm9nz0-2RQndM7OQV-_gRjJzyRnvMqXqRSI,2675 diff --git a/venv/Lib/site-packages/importlib_metadata-1.7.0.dist-info/WHEEL b/venv/Lib/site-packages/importlib_metadata-1.7.0.dist-info/WHEEL new file mode 100644 index 
00000000..ef99c6cf --- /dev/null +++ b/venv/Lib/site-packages/importlib_metadata-1.7.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.34.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/Lib/site-packages/importlib_metadata-1.7.0.dist-info/top_level.txt b/venv/Lib/site-packages/importlib_metadata-1.7.0.dist-info/top_level.txt new file mode 100644 index 00000000..bbb07547 --- /dev/null +++ b/venv/Lib/site-packages/importlib_metadata-1.7.0.dist-info/top_level.txt @@ -0,0 +1 @@ +importlib_metadata diff --git a/venv/Lib/site-packages/importlib_metadata/__init__.py b/venv/Lib/site-packages/importlib_metadata/__init__.py new file mode 100644 index 00000000..b01e7e36 --- /dev/null +++ b/venv/Lib/site-packages/importlib_metadata/__init__.py @@ -0,0 +1,623 @@ +from __future__ import unicode_literals, absolute_import + +import io +import os +import re +import abc +import csv +import sys +import zipp +import operator +import functools +import itertools +import posixpath +import collections + +from ._compat import ( + install, + NullFinder, + ConfigParser, + suppress, + map, + FileNotFoundError, + IsADirectoryError, + NotADirectoryError, + PermissionError, + pathlib, + ModuleNotFoundError, + MetaPathFinder, + email_message_from_string, + PyPy_repr, + unique_ordered, + str, + ) +from importlib import import_module +from itertools import starmap + + +__metaclass__ = type + + +__all__ = [ + 'Distribution', + 'DistributionFinder', + 'PackageNotFoundError', + 'distribution', + 'distributions', + 'entry_points', + 'files', + 'metadata', + 'requires', + 'version', + ] + + +class PackageNotFoundError(ModuleNotFoundError): + """The package was not found.""" + + def __str__(self): + tmpl = "No package metadata was found for {self.name}" + return tmpl.format(**locals()) + + @property + def name(self): + name, = self.args + return name + + +class EntryPoint( + PyPy_repr, + collections.namedtuple('EntryPointBase', 'name value group')): + """An entry point as defined by Python packaging conventions. + + See `the packaging docs on entry points + `_ + for more information. + """ + + pattern = re.compile( + r'(?P[\w.]+)\s*' + r'(:\s*(?P[\w.]+))?\s*' + r'(?P\[.*\])?\s*$' + ) + """ + A regular expression describing the syntax for an entry point, + which might look like: + + - module + - package.module + - package.module:attribute + - package.module:object.attribute + - package.module:attr [extra1, extra2] + + Other combinations are possible as well. + + The expression is lenient about whitespace around the ':', + following the attr, and following any extras. + """ + + def load(self): + """Load the entry point from its definition. If only a module + is indicated by the value, return that module. Otherwise, + return the named object. 
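+
+        For example (names illustrative), a value of ``'pkg.mod:obj.attr'``
+        imports ``pkg.mod`` and then resolves ``obj.attr`` on that module.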
+ """ + match = self.pattern.match(self.value) + module = import_module(match.group('module')) + attrs = filter(None, (match.group('attr') or '').split('.')) + return functools.reduce(getattr, attrs, module) + + @property + def module(self): + match = self.pattern.match(self.value) + return match.group('module') + + @property + def attr(self): + match = self.pattern.match(self.value) + return match.group('attr') + + @property + def extras(self): + match = self.pattern.match(self.value) + return list(re.finditer(r'\w+', match.group('extras') or '')) + + @classmethod + def _from_config(cls, config): + return [ + cls(name, value, group) + for group in config.sections() + for name, value in config.items(group) + ] + + @classmethod + def _from_text(cls, text): + config = ConfigParser(delimiters='=') + # case sensitive: https://stackoverflow.com/q/1611799/812183 + config.optionxform = str + try: + config.read_string(text) + except AttributeError: # pragma: nocover + # Python 2 has no read_string + config.readfp(io.StringIO(text)) + return EntryPoint._from_config(config) + + def __iter__(self): + """ + Supply iter so one may construct dicts of EntryPoints easily. + """ + return iter((self.name, self)) + + def __reduce__(self): + return ( + self.__class__, + (self.name, self.value, self.group), + ) + + +class PackagePath(pathlib.PurePosixPath): + """A reference to a path in a package""" + + def read_text(self, encoding='utf-8'): + with self.locate().open(encoding=encoding) as stream: + return stream.read() + + def read_binary(self): + with self.locate().open('rb') as stream: + return stream.read() + + def locate(self): + """Return a path-like object for this path""" + return self.dist.locate_file(self) + + +class FileHash: + def __init__(self, spec): + self.mode, _, self.value = spec.partition('=') + + def __repr__(self): + return ''.format(self.mode, self.value) + + +class Distribution: + """A Python distribution package.""" + + @abc.abstractmethod + def read_text(self, filename): + """Attempt to load metadata file given by the name. + + :param filename: The name of the file in the distribution info. + :return: The text if found, otherwise None. + """ + + @abc.abstractmethod + def locate_file(self, path): + """ + Given a path to a file in this distribution, return a path + to it. + """ + + @classmethod + def from_name(cls, name): + """Return the Distribution for the given package name. + + :param name: The name of the distribution package to search for. + :return: The Distribution instance (or subclass thereof) for the named + package, if found. + :raises PackageNotFoundError: When the named package's distribution + metadata cannot be found. + """ + for resolver in cls._discover_resolvers(): + dists = resolver(DistributionFinder.Context(name=name)) + dist = next(iter(dists), None) + if dist is not None: + return dist + else: + raise PackageNotFoundError(name) + + @classmethod + def discover(cls, **kwargs): + """Return an iterable of Distribution objects for all packages. + + Pass a ``context`` or pass keyword arguments for constructing + a context. + + :context: A ``DistributionFinder.Context`` object. + :return: Iterable of Distribution objects for all packages. 
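+
+        Illustrative use: ``[d.metadata['Name'] for d in
+        Distribution.discover()]`` lists the names of all discovered
+        packages.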
+ """ + context = kwargs.pop('context', None) + if context and kwargs: + raise ValueError("cannot accept context and kwargs") + context = context or DistributionFinder.Context(**kwargs) + return itertools.chain.from_iterable( + resolver(context) + for resolver in cls._discover_resolvers() + ) + + @staticmethod + def at(path): + """Return a Distribution for the indicated metadata path + + :param path: a string or path-like object + :return: a concrete Distribution instance for the path + """ + return PathDistribution(pathlib.Path(path)) + + @staticmethod + def _discover_resolvers(): + """Search the meta_path for resolvers.""" + declared = ( + getattr(finder, 'find_distributions', None) + for finder in sys.meta_path + ) + return filter(None, declared) + + @classmethod + def _local(cls, root='.'): + from pep517 import build, meta + system = build.compat_system(root) + builder = functools.partial( + meta.build, + source_dir=root, + system=system, + ) + return PathDistribution(zipp.Path(meta.build_as_zip(builder))) + + @property + def metadata(self): + """Return the parsed metadata for this Distribution. + + The returned object will have keys that name the various bits of + metadata. See PEP 566 for details. + """ + text = ( + self.read_text('METADATA') + or self.read_text('PKG-INFO') + # This last clause is here to support old egg-info files. Its + # effect is to just end up using the PathDistribution's self._path + # (which points to the egg-info file) attribute unchanged. + or self.read_text('') + ) + return email_message_from_string(text) + + @property + def version(self): + """Return the 'Version' metadata for the distribution package.""" + return self.metadata['Version'] + + @property + def entry_points(self): + return EntryPoint._from_text(self.read_text('entry_points.txt')) + + @property + def files(self): + """Files in this distribution. + + :return: List of PackagePath for this distribution or None + + Result is `None` if the metadata file that enumerates files + (i.e. RECORD for dist-info or SOURCES.txt for egg-info) is + missing. + Result may be empty if the metadata exists but is empty. + """ + file_lines = self._read_files_distinfo() or self._read_files_egginfo() + + def make_file(name, hash=None, size_str=None): + result = PackagePath(name) + result.hash = FileHash(hash) if hash else None + result.size = int(size_str) if size_str else None + result.dist = self + return result + + return file_lines and list(starmap(make_file, csv.reader(file_lines))) + + def _read_files_distinfo(self): + """ + Read the lines of RECORD + """ + text = self.read_text('RECORD') + return text and text.splitlines() + + def _read_files_egginfo(self): + """ + SOURCES.txt might contain literal commas, so wrap each line + in quotes. 
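+
+        (Quoting makes ``csv.reader`` treat each line as a single
+        ``name`` field, so ``hash`` and ``size`` default to ``None``.)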
+ """ + text = self.read_text('SOURCES.txt') + return text and map('"{}"'.format, text.splitlines()) + + @property + def requires(self): + """Generated requirements specified for this Distribution""" + reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs() + return reqs and list(reqs) + + def _read_dist_info_reqs(self): + return self.metadata.get_all('Requires-Dist') + + def _read_egg_info_reqs(self): + source = self.read_text('requires.txt') + return source and self._deps_from_requires_text(source) + + @classmethod + def _deps_from_requires_text(cls, source): + section_pairs = cls._read_sections(source.splitlines()) + sections = { + section: list(map(operator.itemgetter('line'), results)) + for section, results in + itertools.groupby(section_pairs, operator.itemgetter('section')) + } + return cls._convert_egg_info_reqs_to_simple_reqs(sections) + + @staticmethod + def _read_sections(lines): + section = None + for line in filter(None, lines): + section_match = re.match(r'\[(.*)\]$', line) + if section_match: + section = section_match.group(1) + continue + yield locals() + + @staticmethod + def _convert_egg_info_reqs_to_simple_reqs(sections): + """ + Historically, setuptools would solicit and store 'extra' + requirements, including those with environment markers, + in separate sections. More modern tools expect each + dependency to be defined separately, with any relevant + extras and environment markers attached directly to that + requirement. This method converts the former to the + latter. See _test_deps_from_requires_text for an example. + """ + def make_condition(name): + return name and 'extra == "{name}"'.format(name=name) + + def parse_condition(section): + section = section or '' + extra, sep, markers = section.partition(':') + if extra and markers: + markers = '({markers})'.format(markers=markers) + conditions = list(filter(None, [markers, make_condition(extra)])) + return '; ' + ' and '.join(conditions) if conditions else '' + + for section, deps in sections.items(): + for dep in deps: + yield dep + parse_condition(section) + + +class DistributionFinder(MetaPathFinder): + """ + A MetaPathFinder capable of discovering installed distributions. + """ + + class Context: + """ + Keyword arguments presented by the caller to + ``distributions()`` or ``Distribution.discover()`` + to narrow the scope of a search for distributions + in all DistributionFinders. + + Each DistributionFinder may expect any parameters + and should attempt to honor the canonical + parameters defined below when appropriate. + """ + + name = None + """ + Specific name for which a distribution finder should match. + A name of ``None`` matches all distributions. + """ + + def __init__(self, **kwargs): + vars(self).update(kwargs) + + @property + def path(self): + """ + The path that a distribution finder should search. + + Typically refers to Python package paths and defaults + to ``sys.path``. + """ + return vars(self).get('path', sys.path) + + @abc.abstractmethod + def find_distributions(self, context=Context()): + """ + Find distributions. + + Return an iterable of all Distribution instances capable of + loading the metadata for packages matching the ``context``, + a DistributionFinder.Context instance. + """ + + +class FastPath: + """ + Micro-optimized class for searching a path for + children. 
+ """ + + def __init__(self, root): + self.root = str(root) + self.base = os.path.basename(self.root).lower() + + def joinpath(self, child): + return pathlib.Path(self.root, child) + + def children(self): + with suppress(Exception): + return os.listdir(self.root or '') + with suppress(Exception): + return self.zip_children() + return [] + + def zip_children(self): + zip_path = zipp.Path(self.root) + names = zip_path.root.namelist() + self.joinpath = zip_path.joinpath + + return unique_ordered( + child.split(posixpath.sep, 1)[0] + for child in names + ) + + def is_egg(self, search): + base = self.base + return ( + base == search.versionless_egg_name + or base.startswith(search.prefix) + and base.endswith('.egg')) + + def search(self, name): + for child in self.children(): + n_low = child.lower() + if (n_low in name.exact_matches + or n_low.startswith(name.prefix) + and n_low.endswith(name.suffixes) + # legacy case: + or self.is_egg(name) and n_low == 'egg-info'): + yield self.joinpath(child) + + +class Prepared: + """ + A prepared search for metadata on a possibly-named package. + """ + normalized = '' + prefix = '' + suffixes = '.dist-info', '.egg-info' + exact_matches = [''][:0] + versionless_egg_name = '' + + def __init__(self, name): + self.name = name + if name is None: + return + self.normalized = name.lower().replace('-', '_') + self.prefix = self.normalized + '-' + self.exact_matches = [ + self.normalized + suffix for suffix in self.suffixes] + self.versionless_egg_name = self.normalized + '.egg' + + +@install +class MetadataPathFinder(NullFinder, DistributionFinder): + """A degenerate finder for distribution packages on the file system. + + This finder supplies only a find_distributions() method for versions + of Python that do not have a PathFinder find_distributions(). + """ + + def find_distributions(self, context=DistributionFinder.Context()): + """ + Find distributions. + + Return an iterable of all Distribution instances capable of + loading the metadata for packages matching ``context.name`` + (or all names if ``None`` indicated) along the paths in the list + of directories ``context.path``. + """ + found = self._search_paths(context.name, context.path) + return map(PathDistribution, found) + + @classmethod + def _search_paths(cls, name, paths): + """Find metadata directories in paths heuristically.""" + return itertools.chain.from_iterable( + path.search(Prepared(name)) + for path in map(FastPath, paths) + ) + + +class PathDistribution(Distribution): + def __init__(self, path): + """Construct a distribution from a path to the metadata directory. + + :param path: A pathlib.Path or similar object supporting + .joinpath(), __div__, .parent, and .read_text(). + """ + self._path = path + + def read_text(self, filename): + with suppress(FileNotFoundError, IsADirectoryError, KeyError, + NotADirectoryError, PermissionError): + return self._path.joinpath(filename).read_text(encoding='utf-8') + read_text.__doc__ = Distribution.read_text.__doc__ + + def locate_file(self, path): + return self._path.parent / path + + +def distribution(distribution_name): + """Get the ``Distribution`` instance for the named package. + + :param distribution_name: The name of the distribution package as a string. + :return: A ``Distribution`` instance (or subclass thereof). + """ + return Distribution.from_name(distribution_name) + + +def distributions(**kwargs): + """Get all ``Distribution`` instances in the current environment. + + :return: An iterable of ``Distribution`` instances. 
+ """ + return Distribution.discover(**kwargs) + + +def metadata(distribution_name): + """Get the metadata for the named package. + + :param distribution_name: The name of the distribution package to query. + :return: An email.Message containing the parsed metadata. + """ + return Distribution.from_name(distribution_name).metadata + + +def version(distribution_name): + """Get the version string for the named package. + + :param distribution_name: The name of the distribution package to query. + :return: The version string for the package as defined in the package's + "Version" metadata key. + """ + return distribution(distribution_name).version + + +def entry_points(): + """Return EntryPoint objects for all installed packages. + + :return: EntryPoint objects for all installed packages. + """ + eps = itertools.chain.from_iterable( + dist.entry_points for dist in distributions()) + by_group = operator.attrgetter('group') + ordered = sorted(eps, key=by_group) + grouped = itertools.groupby(ordered, by_group) + return { + group: tuple(eps) + for group, eps in grouped + } + + +def files(distribution_name): + """Return a list of files for the named package. + + :param distribution_name: The name of the distribution package to query. + :return: List of files composing the distribution. + """ + return distribution(distribution_name).files + + +def requires(distribution_name): + """ + Return a list of requirements for the named package. + + :return: An iterator of requirements, suitable for + packaging.requirement.Requirement. + """ + return distribution(distribution_name).requires + + +__version__ = version(__name__) diff --git a/venv/Lib/site-packages/importlib_metadata/_compat.py b/venv/Lib/site-packages/importlib_metadata/_compat.py new file mode 100644 index 00000000..303d4a22 --- /dev/null +++ b/venv/Lib/site-packages/importlib_metadata/_compat.py @@ -0,0 +1,152 @@ +from __future__ import absolute_import, unicode_literals + +import io +import abc +import sys +import email + + +if sys.version_info > (3,): # pragma: nocover + import builtins + from configparser import ConfigParser + import contextlib + FileNotFoundError = builtins.FileNotFoundError + IsADirectoryError = builtins.IsADirectoryError + NotADirectoryError = builtins.NotADirectoryError + PermissionError = builtins.PermissionError + map = builtins.map + from itertools import filterfalse +else: # pragma: nocover + from backports.configparser import ConfigParser + from itertools import imap as map # type: ignore + from itertools import ifilterfalse as filterfalse + import contextlib2 as contextlib + FileNotFoundError = IOError, OSError + IsADirectoryError = IOError, OSError + NotADirectoryError = IOError, OSError + PermissionError = IOError, OSError + +str = type('') + +suppress = contextlib.suppress + +if sys.version_info > (3, 5): # pragma: nocover + import pathlib +else: # pragma: nocover + import pathlib2 as pathlib + +try: + ModuleNotFoundError = builtins.FileNotFoundError +except (NameError, AttributeError): # pragma: nocover + ModuleNotFoundError = ImportError # type: ignore + + +if sys.version_info >= (3,): # pragma: nocover + from importlib.abc import MetaPathFinder +else: # pragma: nocover + class MetaPathFinder(object): + __metaclass__ = abc.ABCMeta + + +__metaclass__ = type +__all__ = [ + 'install', 'NullFinder', 'MetaPathFinder', 'ModuleNotFoundError', + 'pathlib', 'ConfigParser', 'map', 'suppress', 'FileNotFoundError', + 'NotADirectoryError', 'email_message_from_string', + ] + + +def install(cls): + """ + Class decorator 
for installation on sys.meta_path. + + Adds the backport DistributionFinder to sys.meta_path and + attempts to disable the finder functionality of the stdlib + DistributionFinder. + """ + sys.meta_path.append(cls()) + disable_stdlib_finder() + return cls + + +def disable_stdlib_finder(): + """ + Give the backport primacy for discovering path-based distributions + by monkey-patching the stdlib O_O. + + See #91 for more background for rationale on this sketchy + behavior. + """ + def matches(finder): + return ( + getattr(finder, '__module__', None) == '_frozen_importlib_external' + and hasattr(finder, 'find_distributions') + ) + for finder in filter(matches, sys.meta_path): # pragma: nocover + del finder.find_distributions + + +class NullFinder: + """ + A "Finder" (aka "MetaClassFinder") that never finds any modules, + but may find distributions. + """ + @staticmethod + def find_spec(*args, **kwargs): + return None + + # In Python 2, the import system requires finders + # to have a find_module() method, but this usage + # is deprecated in Python 3 in favor of find_spec(). + # For the purposes of this finder (i.e. being present + # on sys.meta_path but having no other import + # system functionality), the two methods are identical. + find_module = find_spec + + +def py2_message_from_string(text): # nocoverpy3 + # Work around https://bugs.python.org/issue25545 where + # email.message_from_string cannot handle Unicode on Python 2. + io_buffer = io.StringIO(text) + return email.message_from_file(io_buffer) + + +email_message_from_string = ( + py2_message_from_string + if sys.version_info < (3,) else + email.message_from_string + ) + + +class PyPy_repr: + """ + Override repr for EntryPoint objects on PyPy to avoid __iter__ access. + Ref #97, #102. + """ + affected = hasattr(sys, 'pypy_version_info') + + def __compat_repr__(self): # pragma: nocover + def make_param(name): + value = getattr(self, name) + return '{name}={value!r}'.format(**locals()) + params = ', '.join(map(make_param, self._fields)) + return 'EntryPoint({params})'.format(**locals()) + + if affected: # pragma: nocover + __repr__ = __compat_repr__ + del affected + + +# from itertools recipes +def unique_everseen(iterable): # pragma: nocover + "List unique elements, preserving order. Remember all elements ever seen." + seen = set() + seen_add = seen.add + + for element in filterfalse(seen.__contains__, iterable): + seen_add(element) + yield element + + +unique_ordered = ( + unique_everseen if sys.version_info < (3, 7) else dict.fromkeys) diff --git a/venv/Lib/site-packages/importlib_metadata/docs/__init__.py b/venv/Lib/site-packages/importlib_metadata/docs/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/importlib_metadata/docs/changelog.rst b/venv/Lib/site-packages/importlib_metadata/docs/changelog.rst new file mode 100644 index 00000000..0455e667 --- /dev/null +++ b/venv/Lib/site-packages/importlib_metadata/docs/changelog.rst @@ -0,0 +1,297 @@ +========================= + importlib_metadata NEWS +========================= + +v1.7.0 +====== + +* ``PathNotFoundError`` now has a custom ``__str__`` + mentioning "package metadata" being missing to help + guide users to the cause when the package is installed + but no metadata is present. Closes #124. + +v1.6.1 +====== + +* Added ``Distribution._local()`` as a provisional + demonstration of how to load metadata for a local + package. Implicitly requires that + `pep517 `_ is + installed. Ref #42. +* Ensure inputs to FastPath are Unicode. 
Closes #121. +* Tests now rely on ``importlib.resources.files`` (and + backport) instead of the older ``path`` function. +* Support any iterable from ``find_distributions``. + Closes #122. + +v1.6.0 +====== + +* Added ``module`` and ``attr`` attributes to ``EntryPoint`` + +v1.5.2 +====== + +* Fix redundant entries from ``FastPath.zip_children``. + Closes #117. + +v1.5.1 +====== + +* Improve reliability and consistency of compatibility + imports for contextlib and pathlib when running tests. + Closes #116. + +v1.5.0 +====== + +* Additional performance optimizations in FastPath now + saves an additional 20% on a typical call. +* Correct for issue where PyOxidizer finder has no + ``__module__`` attribute. Closes #110. + +v1.4.0 +====== + +* Through careful optimization, ``distribution()`` is + 3-4x faster. Thanks to Antony Lee for the + contribution. Closes #95. + +* When searching through ``sys.path``, if any error + occurs attempting to list a path entry, that entry + is skipped, making the system much more lenient + to errors. Closes #94. + +v1.3.0 +====== + +* Improve custom finders documentation. Closes #105. + +v1.2.0 +====== + +* Once again, drop support for Python 3.4. Ref #104. + +v1.1.3 +====== + +* Restored support for Python 3.4 due to improper version + compatibility declarations in the v1.1.0 and v1.1.1 + releases. Closes #104. + +v1.1.2 +====== + +* Repaired project metadata to correctly declare the + ``python_requires`` directive. Closes #103. + +v1.1.1 +====== + +* Fixed ``repr(EntryPoint)`` on PyPy 3 also. Closes #102. + +v1.1.0 +====== + +* Dropped support for Python 3.4. +* EntryPoints are now pickleable. Closes #96. +* Fixed ``repr(EntryPoint)`` on PyPy 2. Closes #97. + +v1.0.0 +====== + +* Project adopts semver for versioning. + +* Removed compatibility shim introduced in 0.23. + +* For better compatibility with the stdlib implementation and to + avoid the same distributions being discovered by the stdlib and + backport implementations, the backport now disables the + stdlib DistributionFinder during initialization (import time). + Closes #91 and closes #100. + +0.23 +==== +* Added a compatibility shim to prevent failures on beta releases + of Python before the signature changed to accept the + "context" parameter on find_distributions. This workaround + will have a limited lifespan, not to extend beyond release of + Python 3.8 final. + +0.22 +==== +* Renamed ``package`` parameter to ``distribution_name`` + as `recommended `_ + in the following functions: ``distribution``, ``metadata``, + ``version``, ``files``, and ``requires``. This + backward-incompatible change is expected to have little impact + as these functions are assumed to be primarily used with + positional parameters. + +0.21 +==== +* ``importlib.metadata`` now exposes the ``DistributionFinder`` + metaclass and references it in the docs for extending the + search algorithm. +* Add ``Distribution.at`` for constructing a Distribution object + from a known metadata directory on the file system. Closes #80. +* Distribution finders now receive a context object that + supplies ``.path`` and ``.name`` properties. This change + introduces a fundamental backward incompatibility for + any projects implementing a ``find_distributions`` method + on a ``MetaPathFinder``. This new layer of abstraction + allows this context to be supplied directly or constructed + on demand and opens the opportunity for a + ``find_distributions`` method to solicit additional + context from the caller. Closes #85. 
+ +0.20 +==== +* Clarify in the docs that calls to ``.files`` could return + ``None`` when the metadata is not present. Closes #69. +* Return all requirements and not just the first for dist-info + packages. Closes #67. + +0.19 +==== +* Restrain over-eager egg metadata resolution. +* Add support for entry points with colons in the name. Closes #75. + +0.18 +==== +* Parse entry points case sensitively. Closes #68 +* Add a version constraint on the backport configparser package. Closes #66 + +0.17 +==== +* Fix a permission problem in the tests on Windows. + +0.16 +==== +* Don't crash if there exists an EGG-INFO directory on sys.path. + +0.15 +==== +* Fix documentation. + +0.14 +==== +* Removed ``local_distribution`` function from the API. + **This backward-incompatible change removes this + behavior summarily**. Projects should remove their + reliance on this behavior. A replacement behavior is + under review in the `pep517 project + `_. Closes #42. + +0.13 +==== +* Update docstrings to match PEP 8. Closes #63. +* Merged modules into one module. Closes #62. + +0.12 +==== +* Add support for eggs. !65; Closes #19. + +0.11 +==== +* Support generic zip files (not just wheels). Closes #59 +* Support zip files with multiple distributions in them. Closes #60 +* Fully expose the public API in ``importlib_metadata.__all__``. + +0.10 +==== +* The ``Distribution`` ABC is now officially part of the public API. + Closes #37. +* Fixed support for older single file egg-info formats. Closes #43. +* Fixed a testing bug when ``$CWD`` has spaces in the path. Closes #50. +* Add Python 3.8 to the ``tox`` testing matrix. + +0.9 +=== +* Fixed issue where entry points without an attribute would raise an + Exception. Closes #40. +* Removed unused ``name`` parameter from ``entry_points()``. Closes #44. +* ``DistributionFinder`` classes must now be instantiated before + being placed on ``sys.meta_path``. + +0.8 +=== +* This library can now discover/enumerate all installed packages. **This + backward-incompatible change alters the protocol finders must + implement to support distribution package discovery.** Closes #24. +* The signature of ``find_distributions()`` on custom installer finders + should now accept two parameters, ``name`` and ``path`` and + these parameters must supply defaults. +* The ``entry_points()`` method no longer accepts a package name + but instead returns all entry points in a dictionary keyed by the + ``EntryPoint.group``. The ``resolve`` method has been removed. Instead, + call ``EntryPoint.load()``, which has the same semantics as + ``pkg_resources`` and ``entrypoints``. **This is a backward incompatible + change.** +* Metadata is now always returned as Unicode text regardless of + Python version. Closes #29. +* This library can now discover metadata for a 'local' package (found + in the current-working directory). Closes #27. +* Added ``files()`` function for resolving files from a distribution. +* Added a new ``requires()`` function, which returns the requirements + for a package suitable for parsing by + ``packaging.requirements.Requirement``. Closes #18. +* The top-level ``read_text()`` function has been removed. Use + ``PackagePath.read_text()`` on instances returned by the ``files()`` + function. **This is a backward incompatible change.** +* Release dates are now automatically injected into the changelog + based on SCM tags. + +0.7 +=== +* Fixed issue where packages with dashes in their names would + not be discovered. Closes #21. +* Distribution lookup is now case-insensitive. 
Closes #20. +* Wheel distributions can no longer be discovered by their module + name. Like Path distributions, they must be indicated by their + distribution package name. + +0.6 +=== +* Removed ``importlib_metadata.distribution`` function. Now + the public interface is primarily the utility functions exposed + in ``importlib_metadata.__all__``. Closes #14. +* Added two new utility functions ``read_text`` and + ``metadata``. + +0.5 +=== +* Updated README and removed details about Distribution + class, now considered private. Closes #15. +* Added test suite support for Python 3.4+. +* Fixed SyntaxErrors on Python 3.4 and 3.5. !12 +* Fixed errors on Windows joining Path elements. !15 + +0.4 +=== +* Housekeeping. + +0.3 +=== +* Added usage documentation. Closes #8 +* Add support for getting metadata from wheels on ``sys.path``. Closes #9 + +0.2 +=== +* Added ``importlib_metadata.entry_points()``. Closes #1 +* Added ``importlib_metadata.resolve()``. Closes #12 +* Add support for Python 2.7. Closes #4 + +0.1 +=== +* Initial release. + + +.. + Local Variables: + mode: change-log-mode + indent-tabs-mode: nil + sentence-end-double-space: t + fill-column: 78 + coding: utf-8 + End: diff --git a/venv/Lib/site-packages/importlib_metadata/docs/conf.py b/venv/Lib/site-packages/importlib_metadata/docs/conf.py new file mode 100644 index 00000000..129a7a4e --- /dev/null +++ b/venv/Lib/site-packages/importlib_metadata/docs/conf.py @@ -0,0 +1,185 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# +# importlib_metadata documentation build configuration file, created by +# sphinx-quickstart on Thu Nov 30 10:21:00 2017. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +# import os +# import sys +# sys.path.insert(0, os.path.abspath('.')) + + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'rst.linker', + 'sphinx.ext.autodoc', + 'sphinx.ext.coverage', + 'sphinx.ext.doctest', + 'sphinx.ext.intersphinx', + 'sphinx.ext.viewcode', + ] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = 'importlib_metadata' +copyright = '2017-2019, Jason R. Coombs, Barry Warsaw' +author = 'Jason R. Coombs, Barry Warsaw' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = '0.1' +# The full version, including alpha/beta/rc tags. 
+release = '0.1' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This patterns also effect to html_static_path and html_extra_path +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = False + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'default' + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# This is required for the alabaster theme +# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars +html_sidebars = { + '**': [ + 'relations.html', # needs 'show_related': True theme option to display + 'searchbox.html', + ] + } + + +# -- Options for HTMLHelp output ------------------------------------------ + +# Output file base name for HTML help builder. +htmlhelp_basename = 'importlib_metadatadoc' + + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', + } + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'importlib_metadata.tex', + 'importlib\\_metadata Documentation', + 'Brett Cannon, Barry Warsaw', 'manual'), + ] + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'importlib_metadata', 'importlib_metadata Documentation', + [author], 1) + ] + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'importlib_metadata', 'importlib_metadata Documentation', + author, 'importlib_metadata', 'One line description of project.', + 'Miscellaneous'), + ] + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = {
+    'python': ('https://docs.python.org/3', None),
+    'importlib_resources': (
+        'https://importlib-resources.readthedocs.io/en/latest/', None
+        ),
+    }
+
+
+# For rst.linker, inject release dates into changelog.rst
+link_files = {
+    'changelog.rst': dict(
+        replace=[
+            dict(
+                pattern=r'^(?m)((?P<version>v?\d+(\.\d+){1,2}))\n[-=]+\n',
+                with_scm='{text}\n{rev[timestamp]:%Y-%m-%d}\n\n',
+                ),
+            ],
+        ),
+    }
diff --git a/venv/Lib/site-packages/importlib_metadata/docs/index.rst b/venv/Lib/site-packages/importlib_metadata/docs/index.rst
new file mode 100644
index 00000000..530197cf
--- /dev/null
+++ b/venv/Lib/site-packages/importlib_metadata/docs/index.rst
@@ -0,0 +1,50 @@
+===============================
+ Welcome to importlib_metadata
+===============================
+
+``importlib_metadata`` is a library which provides an API for accessing an
+installed package's metadata (see :pep:`566`), such as its entry points or its
+top-level name. This functionality intends to replace most uses of
+``pkg_resources`` `entry point API`_ and `metadata API`_. Along with
+:mod:`importlib.resources` in Python 3.7 and newer (backported as
+``importlib_resources`` for older versions of Python), this can eliminate the
+need to use the older and less efficient ``pkg_resources`` package.
+
+``importlib_metadata`` is a backport of Python 3.8's standard library
+``importlib.metadata`` module for Python 2.7, and 3.4 through 3.7. Users of
+Python 3.8 and beyond are encouraged to use the standard library module.
+When imported on Python 3.8 and later, ``importlib_metadata`` replaces the
+DistributionFinder behavior from the stdlib, but leaves the API intact.
+Developers looking for detailed API descriptions should refer to the Python
+3.8 standard library documentation.
+
+The documentation here includes a general :ref:`usage <using>` guide.
+
+
+.. toctree::
+   :maxdepth: 2
+   :caption: Contents:
+
+   using.rst
+   changelog (links).rst
+
+
+Project details
+===============
+
+ * Project home: https://gitlab.com/python-devs/importlib_metadata
+ * Report bugs at: https://gitlab.com/python-devs/importlib_metadata/issues
+ * Code hosting: https://gitlab.com/python-devs/importlib_metadata.git
+ * Documentation: http://importlib_metadata.readthedocs.io/
+
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
+
+.. _`entry point API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#entry-points
+.. _`metadata API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#metadata-api
diff --git a/venv/Lib/site-packages/importlib_metadata/docs/using.rst b/venv/Lib/site-packages/importlib_metadata/docs/using.rst
new file mode 100644
index 00000000..11965147
--- /dev/null
+++ b/venv/Lib/site-packages/importlib_metadata/docs/using.rst
@@ -0,0 +1,260 @@
+.. _using:
+
+=================================
+ Using :mod:`!importlib_metadata`
+=================================
+
+``importlib_metadata`` is a library that provides access to installed
+package metadata. Built in part on Python's import system, this library
+intends to replace similar functionality in the `entry point
+API`_ and `metadata API`_ of ``pkg_resources``. Along with
+:mod:`importlib.resources` in Python 3.7
+and newer (backported as ``importlib_resources`` for older versions of
+Python), this can eliminate the need to use the older and less efficient
+``pkg_resources`` package.
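+
+For readers migrating from ``pkg_resources``, the rough correspondence
+between the two APIs is sketched below; this is an approximation for
+orientation (assuming ``wheel`` is installed, as in the examples that
+follow), not an exhaustive mapping::
+
+    import pkg_resources                                 # old
+    import importlib_metadata                            # new
+
+    pkg_resources.get_distribution('wheel').version      # importlib_metadata.version('wheel')
+    pkg_resources.get_distribution('wheel')              # importlib_metadata.distribution('wheel')
+    pkg_resources.iter_entry_points('console_scripts')   # importlib_metadata.entry_points()['console_scripts']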
+ +By "installed package" we generally mean a third-party package installed into +Python's ``site-packages`` directory via tools such as `pip +`_. Specifically, +it means a package with either a discoverable ``dist-info`` or ``egg-info`` +directory, and metadata defined by :pep:`566` or its older specifications. +By default, package metadata can live on the file system or in zip archives on +:data:`sys.path`. Through an extension mechanism, the metadata can live almost +anywhere. + + +Overview +======== + +Let's say you wanted to get the version string for a package you've installed +using ``pip``. We start by creating a virtual environment and installing +something into it:: + + $ python3 -m venv example + $ source example/bin/activate + (example) $ pip install importlib_metadata + (example) $ pip install wheel + +You can get the version string for ``wheel`` by running the following:: + + (example) $ python + >>> from importlib_metadata import version + >>> version('wheel') + '0.32.3' + +You can also get the set of entry points keyed by group, such as +``console_scripts``, ``distutils.commands`` and others. Each group contains a +sequence of :ref:`EntryPoint ` objects. + +You can get the :ref:`metadata for a distribution `:: + + >>> list(metadata('wheel')) + ['Metadata-Version', 'Name', 'Version', 'Summary', 'Home-page', 'Author', 'Author-email', 'Maintainer', 'Maintainer-email', 'License', 'Project-URL', 'Project-URL', 'Project-URL', 'Keywords', 'Platform', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Requires-Python', 'Provides-Extra', 'Requires-Dist', 'Requires-Dist'] + +You can also get a :ref:`distribution's version number `, list its +:ref:`constituent files `, and get a list of the distribution's +:ref:`requirements`. + + +Functional API +============== + +This package provides the following functionality via its public API. + + +.. _entry-points: + +Entry points +------------ + +The ``entry_points()`` function returns a dictionary of all entry points, +keyed by group. Entry points are represented by ``EntryPoint`` instances; +each ``EntryPoint`` has a ``.name``, ``.group``, and ``.value`` attributes and +a ``.load()`` method to resolve the value. There are also ``.module``, +``.attr``, and ``.extras`` attributes for getting the components of the +``.value`` attribute:: + + >>> eps = entry_points() + >>> list(eps) + ['console_scripts', 'distutils.commands', 'distutils.setup_keywords', 'egg_info.writers', 'setuptools.installation'] + >>> scripts = eps['console_scripts'] + >>> wheel = [ep for ep in scripts if ep.name == 'wheel'][0] + >>> wheel + EntryPoint(name='wheel', value='wheel.cli:main', group='console_scripts') + >>> wheel.module + 'wheel.cli' + >>> wheel.attr + 'main' + >>> wheel.extras + [] + >>> main = wheel.load() + >>> main + + +The ``group`` and ``name`` are arbitrary values defined by the package author +and usually a client will wish to resolve all entry points for a particular +group. Read `the setuptools docs +`_ +for more information on entry points, their definition, and usage. + + +.. 
+
+Distribution metadata
+---------------------
+
+Every distribution includes some metadata, which you can extract using the
+``metadata()`` function::
+
+    >>> wheel_metadata = metadata('wheel')
+
+The keys of the returned data structure [#f1]_ name the metadata keywords, and
+their values are returned unparsed from the distribution metadata::
+
+    >>> wheel_metadata['Requires-Python']
+    '>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*'
+
+
+.. _version:
+
+Distribution versions
+---------------------
+
+The ``version()`` function is the quickest way to get a distribution's version
+number, as a string::
+
+    >>> version('wheel')
+    '0.32.3'
+
+
+.. _files:
+
+Distribution files
+------------------
+
+You can also get the full set of files contained within a distribution. The
+``files()`` function takes a distribution package name and returns all of the
+files installed by this distribution. Each file object returned is a
+``PackagePath``, a :class:`pathlib.Path` derived object with additional ``dist``,
+``size``, and ``hash`` properties as indicated by the metadata. For example::
+
+    >>> util = [p for p in files('wheel') if 'util.py' in str(p)][0]
+    >>> util
+    PackagePath('wheel/util.py')
+    >>> util.size
+    859
+    >>> util.dist
+    <PathDistribution object at 0x...>
+    >>> util.hash
+    <FileHash mode: sha256 value: ...>
+
+Once you have the file, you can also read its contents::
+
+    >>> print(util.read_text())
+    import base64
+    import sys
+    ...
+    def as_bytes(s):
+        if isinstance(s, text_type):
+            return s.encode('utf-8')
+        return s
+
+In the case where the metadata file listing files
+(RECORD or SOURCES.txt) is missing, ``files()`` will
+return ``None``. The caller may wish to wrap calls to
+``files()`` in ``always_iterable`` or otherwise guard
+against this condition if the target distribution is
+not known to have the metadata present.
+
+.. _requirements:
+
+Distribution requirements
+-------------------------
+
+To get the full set of requirements for a distribution, use the ``requires()``
+function::
+
+    >>> requires('wheel')
+    ["pytest (>=3.0.0) ; extra == 'test'", "pytest-cov ; extra == 'test'"]
+
+
+Distributions
+=============
+
+While the above API is the most common and convenient usage, you can get all
+of that information from the ``Distribution`` class. A ``Distribution`` is an
+abstract object that represents the metadata for a Python package. You can
+get the ``Distribution`` instance::
+
+    >>> from importlib_metadata import distribution
+    >>> dist = distribution('wheel')
+
+Thus, an alternative way to get the version number is through the
+``Distribution`` instance::
+
+    >>> dist.version
+    '0.32.3'
+
+There are all kinds of additional metadata available on the ``Distribution``
+instance::
+
+    >>> dist.metadata['Requires-Python']
+    '>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*'
+    >>> dist.metadata['License']
+    'MIT'
+
+The full set of available metadata is not described here. See :pep:`566`
+for additional details.
+
+
+Extending the search algorithm
+==============================
+
+Because package metadata is not available through :data:`sys.path` searches, or
+package loaders directly, the metadata for a package is found through import
+system `finders`_. To find a distribution package's metadata,
+``importlib.metadata`` queries the list of :term:`meta path finders <finder>` on
+:data:`sys.meta_path`.
+
+By default ``importlib_metadata`` installs a finder for distribution packages
+found on the file system. This finder doesn't actually find any *packages*,
+but it can find the packages' metadata.
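+
+To make this concrete before the interfaces are described below, here is a
+minimal sketch of a custom metadata source: a ``Distribution`` subclass that
+serves metadata from an in-memory dict, plus a finder that yields it. All
+names here are hypothetical illustrations, not part of the API; the abstract
+interfaces involved are described next::
+
+    import sys
+    import importlib_metadata
+
+    class InMemoryDistribution(importlib_metadata.Distribution):
+        """Serve metadata for a single fake package from a dict."""
+        _data = {'METADATA': 'Name: demo-pkg\nVersion: 1.0\n'}
+
+        def read_text(self, filename):
+            return self._data.get(filename)
+
+        def locate_file(self, path):
+            return path  # nothing on disk to locate
+
+    class InMemoryFinder:
+        def find_module(self, name):
+            # Stub out the import protocol (cf. NullFinder in the tests).
+            return None
+
+        def find_distributions(
+                self,
+                context=importlib_metadata.DistributionFinder.Context()):
+            if context.name in (None, 'demo-pkg'):
+                yield InMemoryDistribution()
+
+    sys.meta_path.append(InMemoryFinder())
+    print(importlib_metadata.version('demo-pkg'))  # -> 1.0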
+ +The abstract class :py:class:`importlib.abc.MetaPathFinder` defines the +interface expected of finders by Python's import system. +``importlib_metadata`` extends this protocol by looking for an optional +``find_distributions`` callable on the finders from +:data:`sys.meta_path` and presents this extended interface as the +``DistributionFinder`` abstract base class, which defines this abstract +method:: + + @abc.abstractmethod + def find_distributions(context=DistributionFinder.Context()): + """Return an iterable of all Distribution instances capable of + loading the metadata for packages for the indicated ``context``. + """ + +The ``DistributionFinder.Context`` object provides ``.path`` and ``.name`` +properties indicating the path to search and name to match and may +supply other relevant context. + +What this means in practice is that to support finding distribution package +metadata in locations other than the file system, subclass +``Distribution`` and implement the abstract methods. Then from +a custom finder, return instances of this derived ``Distribution`` in the +``find_distributions()`` method. + + +.. _`entry point API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#entry-points +.. _`metadata API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#metadata-api +.. _`finders`: https://docs.python.org/3/reference/import.html#finders-and-loaders + + +.. rubric:: Footnotes + +.. [#f1] Technically, the returned distribution metadata object is an + :class:`email.message.EmailMessage` + instance, but this is an implementation detail, and not part of the + stable API. You should only use dictionary-like methods and syntax + to access the metadata contents. diff --git a/venv/Lib/site-packages/importlib_metadata/tests/__init__.py b/venv/Lib/site-packages/importlib_metadata/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/importlib_metadata/tests/data/__init__.py b/venv/Lib/site-packages/importlib_metadata/tests/data/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/importlib_metadata/tests/data/example-21.12-py3-none-any.whl b/venv/Lib/site-packages/importlib_metadata/tests/data/example-21.12-py3-none-any.whl new file mode 100644 index 00000000..641ab07f Binary files /dev/null and b/venv/Lib/site-packages/importlib_metadata/tests/data/example-21.12-py3-none-any.whl differ diff --git a/venv/Lib/site-packages/importlib_metadata/tests/data/example-21.12-py3.6.egg b/venv/Lib/site-packages/importlib_metadata/tests/data/example-21.12-py3.6.egg new file mode 100644 index 00000000..cdb298a1 Binary files /dev/null and b/venv/Lib/site-packages/importlib_metadata/tests/data/example-21.12-py3.6.egg differ diff --git a/venv/Lib/site-packages/importlib_metadata/tests/fixtures.py b/venv/Lib/site-packages/importlib_metadata/tests/fixtures.py new file mode 100644 index 00000000..20982fa1 --- /dev/null +++ b/venv/Lib/site-packages/importlib_metadata/tests/fixtures.py @@ -0,0 +1,232 @@ +from __future__ import unicode_literals + +import os +import sys +import shutil +import tempfile +import textwrap +import test.support + +from .._compat import pathlib, contextlib + + +__metaclass__ = type + + +@contextlib.contextmanager +def tempdir(): + tmpdir = tempfile.mkdtemp() + try: + yield pathlib.Path(tmpdir) + finally: + shutil.rmtree(tmpdir) + + +@contextlib.contextmanager +def save_cwd(): + orig = os.getcwd() + try: + yield + finally: + os.chdir(orig) + + +@contextlib.contextmanager 
+def tempdir_as_cwd(): + with tempdir() as tmp: + with save_cwd(): + os.chdir(str(tmp)) + yield tmp + + +@contextlib.contextmanager +def install_finder(finder): + sys.meta_path.append(finder) + try: + yield + finally: + sys.meta_path.remove(finder) + + +class Fixtures: + def setUp(self): + self.fixtures = contextlib.ExitStack() + self.addCleanup(self.fixtures.close) + + +class SiteDir(Fixtures): + def setUp(self): + super(SiteDir, self).setUp() + self.site_dir = self.fixtures.enter_context(tempdir()) + + +class OnSysPath(Fixtures): + @staticmethod + @contextlib.contextmanager + def add_sys_path(dir): + sys.path[:0] = [str(dir)] + try: + yield + finally: + sys.path.remove(str(dir)) + + def setUp(self): + super(OnSysPath, self).setUp() + self.fixtures.enter_context(self.add_sys_path(self.site_dir)) + + +class DistInfoPkg(OnSysPath, SiteDir): + files = { + "distinfo_pkg-1.0.0.dist-info": { + "METADATA": """ + Name: distinfo-pkg + Author: Steven Ma + Version: 1.0.0 + Requires-Dist: wheel >= 1.0 + Requires-Dist: pytest; extra == 'test' + """, + "RECORD": "mod.py,sha256=abc,20\n", + "entry_points.txt": """ + [entries] + main = mod:main + ns:sub = mod:main + """ + }, + "mod.py": """ + def main(): + print("hello world") + """, + } + + def setUp(self): + super(DistInfoPkg, self).setUp() + build_files(DistInfoPkg.files, self.site_dir) + + +class DistInfoPkgOffPath(SiteDir): + def setUp(self): + super(DistInfoPkgOffPath, self).setUp() + build_files(DistInfoPkg.files, self.site_dir) + + +class EggInfoPkg(OnSysPath, SiteDir): + files = { + "egginfo_pkg.egg-info": { + "PKG-INFO": """ + Name: egginfo-pkg + Author: Steven Ma + License: Unknown + Version: 1.0.0 + Classifier: Intended Audience :: Developers + Classifier: Topic :: Software Development :: Libraries + """, + "SOURCES.txt": """ + mod.py + egginfo_pkg.egg-info/top_level.txt + """, + "entry_points.txt": """ + [entries] + main = mod:main + """, + "requires.txt": """ + wheel >= 1.0; python_version >= "2.7" + [test] + pytest + """, + "top_level.txt": "mod\n" + }, + "mod.py": """ + def main(): + print("hello world") + """, + } + + def setUp(self): + super(EggInfoPkg, self).setUp() + build_files(EggInfoPkg.files, prefix=self.site_dir) + + +class EggInfoFile(OnSysPath, SiteDir): + files = { + "egginfo_file.egg-info": """ + Metadata-Version: 1.0 + Name: egginfo_file + Version: 0.1 + Summary: An example package + Home-page: www.example.com + Author: Eric Haffa-Vee + Author-email: eric@example.coms + License: UNKNOWN + Description: UNKNOWN + Platform: UNKNOWN + """, + } + + def setUp(self): + super(EggInfoFile, self).setUp() + build_files(EggInfoFile.files, prefix=self.site_dir) + + +class LocalPackage: + files = { + "setup.py": """ + import setuptools + setuptools.setup(name="local-pkg", version="2.0.1") + """, + } + + def setUp(self): + self.fixtures = contextlib.ExitStack() + self.addCleanup(self.fixtures.close) + self.fixtures.enter_context(tempdir_as_cwd()) + build_files(self.files) + + +def build_files(file_defs, prefix=pathlib.Path()): + """Build a set of files/directories, as described by the + + file_defs dictionary. Each key/value pair in the dictionary is + interpreted as a filename/contents pair. If the contents value is a + dictionary, a directory is created, and the dictionary interpreted + as the files within it, recursively. 
+ + For example: + + {"README.txt": "A README file", + "foo": { + "__init__.py": "", + "bar": { + "__init__.py": "", + }, + "baz.py": "# Some code", + } + } + """ + for name, contents in file_defs.items(): + full_name = prefix / name + if isinstance(contents, dict): + full_name.mkdir() + build_files(contents, prefix=full_name) + else: + if isinstance(contents, bytes): + with full_name.open('wb') as f: + f.write(contents) + else: + with full_name.open('w') as f: + f.write(DALS(contents)) + + +class FileBuilder: + def unicode_filename(self): + return test.support.FS_NONASCII or \ + self.skip("File system does not support non-ascii.") + + +def DALS(str): + "Dedent and left-strip" + return textwrap.dedent(str).lstrip() + + +class NullFinder: + def find_module(self, name): + pass diff --git a/venv/Lib/site-packages/importlib_metadata/tests/test_api.py b/venv/Lib/site-packages/importlib_metadata/tests/test_api.py new file mode 100644 index 00000000..aa346ddb --- /dev/null +++ b/venv/Lib/site-packages/importlib_metadata/tests/test_api.py @@ -0,0 +1,176 @@ +import re +import textwrap +import unittest + +from . import fixtures +from .. import ( + Distribution, PackageNotFoundError, __version__, distribution, + entry_points, files, metadata, requires, version, + ) + +try: + from collections.abc import Iterator +except ImportError: + from collections import Iterator # noqa: F401 + +try: + from builtins import str as text +except ImportError: + from __builtin__ import unicode as text + + +class APITests( + fixtures.EggInfoPkg, + fixtures.DistInfoPkg, + fixtures.EggInfoFile, + unittest.TestCase): + + version_pattern = r'\d+\.\d+(\.\d)?' + + def test_retrieves_version_of_self(self): + pkg_version = version('egginfo-pkg') + assert isinstance(pkg_version, text) + assert re.match(self.version_pattern, pkg_version) + + def test_retrieves_version_of_distinfo_pkg(self): + pkg_version = version('distinfo-pkg') + assert isinstance(pkg_version, text) + assert re.match(self.version_pattern, pkg_version) + + def test_for_name_does_not_exist(self): + with self.assertRaises(PackageNotFoundError): + distribution('does-not-exist') + + def test_for_top_level(self): + self.assertEqual( + distribution('egginfo-pkg').read_text('top_level.txt').strip(), + 'mod') + + def test_read_text(self): + top_level = [ + path for path in files('egginfo-pkg') + if path.name == 'top_level.txt' + ][0] + self.assertEqual(top_level.read_text(), 'mod\n') + + def test_entry_points(self): + entries = dict(entry_points()['entries']) + ep = entries['main'] + self.assertEqual(ep.value, 'mod:main') + self.assertEqual(ep.extras, []) + + def test_metadata_for_this_package(self): + md = metadata('egginfo-pkg') + assert md['author'] == 'Steven Ma' + assert md['LICENSE'] == 'Unknown' + assert md['Name'] == 'egginfo-pkg' + classifiers = md.get_all('Classifier') + assert 'Topic :: Software Development :: Libraries' in classifiers + + def test_importlib_metadata_version(self): + assert re.match(self.version_pattern, __version__) + + @staticmethod + def _test_files(files): + root = files[0].root + for file in files: + assert file.root == root + assert not file.hash or file.hash.value + assert not file.hash or file.hash.mode == 'sha256' + assert not file.size or file.size >= 0 + assert file.locate().exists() + assert isinstance(file.read_binary(), bytes) + if file.name.endswith('.py'): + file.read_text() + + def test_file_hash_repr(self): + try: + assertRegex = self.assertRegex + except AttributeError: + # Python 2 + assertRegex = self.assertRegexpMatches 
+
+        util = [
+            p for p in files('distinfo-pkg')
+            if p.name == 'mod.py'
+            ][0]
+        assertRegex(
+            repr(util.hash),
+            '<FileHash mode: sha256 value: .*>')
+
+    def test_files_dist_info(self):
+        self._test_files(files('distinfo-pkg'))
+
+    def test_files_egg_info(self):
+        self._test_files(files('egginfo-pkg'))
+
+    def test_version_egg_info_file(self):
+        self.assertEqual(version('egginfo-file'), '0.1')
+
+    def test_requires_egg_info_file(self):
+        requirements = requires('egginfo-file')
+        self.assertIsNone(requirements)
+
+    def test_requires_egg_info(self):
+        deps = requires('egginfo-pkg')
+        assert len(deps) == 2
+        assert any(
+            dep == 'wheel >= 1.0; python_version >= "2.7"'
+            for dep in deps
+            )
+
+    def test_requires_dist_info(self):
+        deps = requires('distinfo-pkg')
+        assert len(deps) == 2
+        assert all(deps)
+        assert 'wheel >= 1.0' in deps
+        assert "pytest; extra == 'test'" in deps
+
+    def test_more_complex_deps_requires_text(self):
+        requires = textwrap.dedent("""
+            dep1
+            dep2
+
+            [:python_version < "3"]
+            dep3
+
+            [extra1]
+            dep4
+
+            [extra2:python_version < "3"]
+            dep5
+            """)
+        deps = sorted(Distribution._deps_from_requires_text(requires))
+        expected = [
+            'dep1',
+            'dep2',
+            'dep3; python_version < "3"',
+            'dep4; extra == "extra1"',
+            'dep5; (python_version < "3") and extra == "extra2"',
+            ]
+        # It's important that the environment marker expression be
+        # wrapped in parentheses to avoid the following 'and' binding more
+        # tightly than some other part of the environment expression.
+
+        assert deps == expected
+
+
+class OffSysPathTests(fixtures.DistInfoPkgOffPath, unittest.TestCase):
+    def test_find_distributions_specified_path(self):
+        dists = Distribution.discover(path=[str(self.site_dir)])
+        assert any(
+            dist.metadata['Name'] == 'distinfo-pkg'
+            for dist in dists
+            )
+
+    def test_distribution_at_pathlib(self):
+        """Demonstrate how to load metadata directly from a directory.
+        """
+        dist_info_path = self.site_dir / 'distinfo_pkg-1.0.0.dist-info'
+        dist = Distribution.at(dist_info_path)
+        assert dist.version == '1.0.0'
+
+    def test_distribution_at_str(self):
+        dist_info_path = self.site_dir / 'distinfo_pkg-1.0.0.dist-info'
+        dist = Distribution.at(str(dist_info_path))
+        assert dist.version == '1.0.0'
diff --git a/venv/Lib/site-packages/importlib_metadata/tests/test_integration.py b/venv/Lib/site-packages/importlib_metadata/tests/test_integration.py
new file mode 100644
index 00000000..cbb940bd
--- /dev/null
+++ b/venv/Lib/site-packages/importlib_metadata/tests/test_integration.py
@@ -0,0 +1,54 @@
+# coding: utf-8
+
+from __future__ import unicode_literals
+
+import unittest
+import packaging.requirements
+import packaging.version
+
+from . import fixtures
+from .. import (
+    Distribution,
+    _compat,
+    version,
+    )
+
+
+class IntegrationTests(fixtures.DistInfoPkg, unittest.TestCase):
+
+    def test_package_spec_installed(self):
+        """
+        Illustrate the recommended procedure to determine if
+        a specified version of a package is installed.
+ """ + def is_installed(package_spec): + req = packaging.requirements.Requirement(package_spec) + return version(req.name) in req.specifier + + assert is_installed('distinfo-pkg==1.0') + assert is_installed('distinfo-pkg>=1.0,<2.0') + assert not is_installed('distinfo-pkg<1.0') + + +class FinderTests(fixtures.Fixtures, unittest.TestCase): + + def test_finder_without_module(self): + class ModuleFreeFinder(fixtures.NullFinder): + """ + A finder without an __module__ attribute + """ + def __getattribute__(self, name): + if name == '__module__': + raise AttributeError(name) + return super().__getattribute__(name) + + self.fixtures.enter_context( + fixtures.install_finder(ModuleFreeFinder())) + _compat.disable_stdlib_finder() + + +class LocalProjectTests(fixtures.LocalPackage, unittest.TestCase): + def test_find_local(self): + dist = Distribution._local() + assert dist.metadata['Name'] == 'local-pkg' + assert dist.version == '2.0.1' diff --git a/venv/Lib/site-packages/importlib_metadata/tests/test_main.py b/venv/Lib/site-packages/importlib_metadata/tests/test_main.py new file mode 100644 index 00000000..4ffdd5d6 --- /dev/null +++ b/venv/Lib/site-packages/importlib_metadata/tests/test_main.py @@ -0,0 +1,285 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import re +import json +import pickle +import textwrap +import unittest +import importlib +import importlib_metadata +import pyfakefs.fake_filesystem_unittest as ffs + +from . import fixtures +from .. import ( + Distribution, EntryPoint, MetadataPathFinder, + PackageNotFoundError, distributions, + entry_points, metadata, version, + ) + +try: + from builtins import str as text +except ImportError: + from __builtin__ import unicode as text + + +class BasicTests(fixtures.DistInfoPkg, unittest.TestCase): + version_pattern = r'\d+\.\d+(\.\d)?' + + def test_retrieves_version_of_self(self): + dist = Distribution.from_name('distinfo-pkg') + assert isinstance(dist.version, text) + assert re.match(self.version_pattern, dist.version) + + def test_for_name_does_not_exist(self): + with self.assertRaises(PackageNotFoundError): + Distribution.from_name('does-not-exist') + + def test_package_not_found_mentions_metadata(self): + """ + When a package is not found, that could indicate that the + packgae is not installed or that it is installed without + metadata. Ensure the exception mentions metadata to help + guide users toward the cause. See #124. + """ + with self.assertRaises(PackageNotFoundError) as ctx: + Distribution.from_name('does-not-exist') + + assert "metadata" in str(ctx.exception) + + def test_new_style_classes(self): + self.assertIsInstance(Distribution, type) + self.assertIsInstance(MetadataPathFinder, type) + + +class ImportTests(fixtures.DistInfoPkg, unittest.TestCase): + def test_import_nonexistent_module(self): + # Ensure that the MetadataPathFinder does not crash an import of a + # non-existent module. 
+ with self.assertRaises(ImportError): + importlib.import_module('does_not_exist') + + def test_resolve(self): + entries = dict(entry_points()['entries']) + ep = entries['main'] + self.assertEqual(ep.load().__name__, "main") + + def test_entrypoint_with_colon_in_name(self): + entries = dict(entry_points()['entries']) + ep = entries['ns:sub'] + self.assertEqual(ep.value, 'mod:main') + + def test_resolve_without_attr(self): + ep = EntryPoint( + name='ep', + value='importlib_metadata', + group='grp', + ) + assert ep.load() is importlib_metadata + + +class NameNormalizationTests( + fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase): + @staticmethod + def pkg_with_dashes(site_dir): + """ + Create minimal metadata for a package with dashes + in the name (and thus underscores in the filename). + """ + metadata_dir = site_dir / 'my_pkg.dist-info' + metadata_dir.mkdir() + metadata = metadata_dir / 'METADATA' + with metadata.open('w') as strm: + strm.write('Version: 1.0\n') + return 'my-pkg' + + def test_dashes_in_dist_name_found_as_underscores(self): + """ + For a package with a dash in the name, the dist-info metadata + uses underscores in the name. Ensure the metadata loads. + """ + pkg_name = self.pkg_with_dashes(self.site_dir) + assert version(pkg_name) == '1.0' + + @staticmethod + def pkg_with_mixed_case(site_dir): + """ + Create minimal metadata for a package with mixed case + in the name. + """ + metadata_dir = site_dir / 'CherryPy.dist-info' + metadata_dir.mkdir() + metadata = metadata_dir / 'METADATA' + with metadata.open('w') as strm: + strm.write('Version: 1.0\n') + return 'CherryPy' + + def test_dist_name_found_as_any_case(self): + """ + Ensure the metadata loads when queried with any case. + """ + pkg_name = self.pkg_with_mixed_case(self.site_dir) + assert version(pkg_name) == '1.0' + assert version(pkg_name.lower()) == '1.0' + assert version(pkg_name.upper()) == '1.0' + + +class NonASCIITests(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase): + @staticmethod + def pkg_with_non_ascii_description(site_dir): + """ + Create minimal metadata for a package with non-ASCII in + the description. + """ + metadata_dir = site_dir / 'portend.dist-info' + metadata_dir.mkdir() + metadata = metadata_dir / 'METADATA' + with metadata.open('w', encoding='utf-8') as fp: + fp.write('Description: pôrˈtend\n') + return 'portend' + + @staticmethod + def pkg_with_non_ascii_description_egg_info(site_dir): + """ + Create minimal metadata for an egg-info package with + non-ASCII in the description. 
+ """ + metadata_dir = site_dir / 'portend.dist-info' + metadata_dir.mkdir() + metadata = metadata_dir / 'METADATA' + with metadata.open('w', encoding='utf-8') as fp: + fp.write(textwrap.dedent(""" + Name: portend + + pôrˈtend + """).lstrip()) + return 'portend' + + def test_metadata_loads(self): + pkg_name = self.pkg_with_non_ascii_description(self.site_dir) + meta = metadata(pkg_name) + assert meta['Description'] == 'pôrˈtend' + + def test_metadata_loads_egg_info(self): + pkg_name = self.pkg_with_non_ascii_description_egg_info(self.site_dir) + meta = metadata(pkg_name) + assert meta.get_payload() == 'pôrˈtend\n' + + +class DiscoveryTests(fixtures.EggInfoPkg, + fixtures.DistInfoPkg, + unittest.TestCase): + + def test_package_discovery(self): + dists = list(distributions()) + assert all( + isinstance(dist, Distribution) + for dist in dists + ) + assert any( + dist.metadata['Name'] == 'egginfo-pkg' + for dist in dists + ) + assert any( + dist.metadata['Name'] == 'distinfo-pkg' + for dist in dists + ) + + def test_invalid_usage(self): + with self.assertRaises(ValueError): + list(distributions(context='something', name='else')) + + +class DirectoryTest(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase): + def test_egg_info(self): + # make an `EGG-INFO` directory that's unrelated + self.site_dir.joinpath('EGG-INFO').mkdir() + # used to crash with `IsADirectoryError` + with self.assertRaises(PackageNotFoundError): + version('unknown-package') + + def test_egg(self): + egg = self.site_dir.joinpath('foo-3.6.egg') + egg.mkdir() + with self.add_sys_path(egg): + with self.assertRaises(PackageNotFoundError): + version('foo') + + +class MissingSysPath(fixtures.OnSysPath, unittest.TestCase): + site_dir = '/does-not-exist' + + def test_discovery(self): + """ + Discovering distributions should succeed even if + there is an invalid path on sys.path. + """ + importlib_metadata.distributions() + + +class InaccessibleSysPath(fixtures.OnSysPath, ffs.TestCase): + site_dir = '/access-denied' + + def setUp(self): + super(InaccessibleSysPath, self).setUp() + self.setUpPyfakefs() + self.fs.create_dir(self.site_dir, perm_bits=000) + + def test_discovery(self): + """ + Discovering distributions should succeed even if + there is an invalid path on sys.path. + """ + list(importlib_metadata.distributions()) + + +class TestEntryPoints(unittest.TestCase): + def __init__(self, *args): + super(TestEntryPoints, self).__init__(*args) + self.ep = importlib_metadata.EntryPoint('name', 'value', 'group') + + def test_entry_point_pickleable(self): + revived = pickle.loads(pickle.dumps(self.ep)) + assert revived == self.ep + + def test_immutable(self): + """EntryPoints should be immutable""" + with self.assertRaises(AttributeError): + self.ep.name = 'badactor' + + def test_repr(self): + assert 'EntryPoint' in repr(self.ep) + assert 'name=' in repr(self.ep) + assert "'name'" in repr(self.ep) + + def test_hashable(self): + """EntryPoints should be hashable""" + hash(self.ep) + + def test_json_dump(self): + """ + json should not expect to be able to dump an EntryPoint + """ + with self.assertRaises(Exception): + json.dumps(self.ep) + + def test_module(self): + assert self.ep.module == 'value' + + def test_attr(self): + assert self.ep.attr is None + + +class FileSystem( + fixtures.OnSysPath, fixtures.SiteDir, fixtures.FileBuilder, + unittest.TestCase): + def test_unicode_dir_on_sys_path(self): + """ + Ensure a Unicode subdirectory of a directory on sys.path + does not crash. 
+ """ + fixtures.build_files( + {self.unicode_filename(): {}}, + prefix=self.site_dir, + ) + list(distributions()) diff --git a/venv/Lib/site-packages/importlib_metadata/tests/test_zip.py b/venv/Lib/site-packages/importlib_metadata/tests/test_zip.py new file mode 100644 index 00000000..4aae933d --- /dev/null +++ b/venv/Lib/site-packages/importlib_metadata/tests/test_zip.py @@ -0,0 +1,80 @@ +import sys +import unittest + +from .. import ( + distribution, entry_points, files, PackageNotFoundError, + version, distributions, + ) + +try: + from importlib import resources + getattr(resources, 'files') + getattr(resources, 'as_file') +except (ImportError, AttributeError): + import importlib_resources as resources + +try: + from contextlib import ExitStack +except ImportError: + from contextlib2 import ExitStack + + +class TestZip(unittest.TestCase): + root = 'importlib_metadata.tests.data' + + def _fixture_on_path(self, filename): + pkg_file = resources.files(self.root).joinpath(filename) + file = self.resources.enter_context(resources.as_file(pkg_file)) + assert file.name.startswith('example-'), file.name + sys.path.insert(0, str(file)) + self.resources.callback(sys.path.pop, 0) + + def setUp(self): + # Find the path to the example-*.whl so we can add it to the front of + # sys.path, where we'll then try to find the metadata thereof. + self.resources = ExitStack() + self.addCleanup(self.resources.close) + self._fixture_on_path('example-21.12-py3-none-any.whl') + + def test_zip_version(self): + self.assertEqual(version('example'), '21.12') + + def test_zip_version_does_not_match(self): + with self.assertRaises(PackageNotFoundError): + version('definitely-not-installed') + + def test_zip_entry_points(self): + scripts = dict(entry_points()['console_scripts']) + entry_point = scripts['example'] + self.assertEqual(entry_point.value, 'example:main') + entry_point = scripts['Example'] + self.assertEqual(entry_point.value, 'example:main') + + def test_missing_metadata(self): + self.assertIsNone(distribution('example').read_text('does not exist')) + + def test_case_insensitive(self): + self.assertEqual(version('Example'), '21.12') + + def test_files(self): + for file in files('example'): + path = str(file.dist.locate_file(file)) + assert '.whl/' in path, path + + def test_one_distribution(self): + dists = list(distributions(path=sys.path[:1])) + assert len(dists) == 1 + + +class TestEgg(TestZip): + def setUp(self): + # Find the path to the example-*.egg so we can add it to the front of + # sys.path, where we'll then try to find the metadata thereof. 
+ self.resources = ExitStack() + self.addCleanup(self.resources.close) + self._fixture_on_path('example-21.12-py3.6.egg') + + def test_files(self): + for file in files('example'): + path = str(file.dist.locate_file(file)) + assert '.egg/' in path, path diff --git a/venv/Lib/site-packages/importlib_resources-3.0.0.dist-info/INSTALLER b/venv/Lib/site-packages/importlib_resources-3.0.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/venv/Lib/site-packages/importlib_resources-3.0.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/Lib/site-packages/importlib_resources-3.0.0.dist-info/LICENSE b/venv/Lib/site-packages/importlib_resources-3.0.0.dist-info/LICENSE new file mode 100644 index 00000000..378b991a --- /dev/null +++ b/venv/Lib/site-packages/importlib_resources-3.0.0.dist-info/LICENSE @@ -0,0 +1,13 @@ +Copyright 2017-2019 Brett Cannon, Barry Warsaw + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/venv/Lib/site-packages/importlib_resources-3.0.0.dist-info/METADATA b/venv/Lib/site-packages/importlib_resources-3.0.0.dist-info/METADATA new file mode 100644 index 00000000..ff57e061 --- /dev/null +++ b/venv/Lib/site-packages/importlib_resources-3.0.0.dist-info/METADATA @@ -0,0 +1,53 @@ +Metadata-Version: 2.1 +Name: importlib-resources +Version: 3.0.0 +Summary: Read resources from Python packages +Home-page: http://importlib-resources.readthedocs.io/ +Author: Barry Warsaw +Author-email: barry@python.org +License: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Topic :: Software Development :: Libraries +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7 +Requires-Dist: pathlib2 ; python_version < "3" +Requires-Dist: contextlib2 ; python_version < "3" +Requires-Dist: singledispatch ; python_version < "3.4" +Requires-Dist: typing ; python_version < "3.5" +Requires-Dist: zipp (>=0.4) ; python_version < "3.8" +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: rst.linker ; extra == 'docs' +Requires-Dist: jaraco.packaging ; extra == 'docs' + +========================= + ``importlib_resources`` +========================= + +``importlib_resources`` is a backport of Python standard library +`importlib.resources +`_ +module for Python 2.7, and 3.4 through 3.8. Users of Python 3.9 and beyond +should use the standard library module, since for these versions, +``importlib_resources`` just delegates to that module. + +The key goal of this module is to replace parts of `pkg_resources +`_ with a +solution in Python's stdlib that relies on well-defined APIs. This makes +reading resources included in packages easier, with more stable and consistent +semantics. 
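+
+As a brief sketch of that usage, built on the ``read_text`` and ``path``
+functions this package exposes (``mypkg`` and ``data.txt`` are hypothetical
+names, not part of any real package)::
+
+    import importlib_resources
+
+    # Read a packaged resource without assuming it is a real file on
+    # disk; it may equally live inside a zip archive on sys.path.
+    text = importlib_resources.read_text('mypkg', 'data.txt')
+
+    # When a concrete filesystem path is required (for an API that only
+    # accepts paths), materialize one for the duration of the block:
+    with importlib_resources.path('mypkg', 'data.txt') as p:
+        print(p.read_text())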
+ + +Project details +=============== + + * Project home: https://gitlab.com/python-devs/importlib_resources + * Report bugs at: https://gitlab.com/python-devs/importlib_resources/issues + * Code hosting: https://gitlab.com/python-devs/importlib_resources.git + * Documentation: https://importlib-resources.readthedocs.io/ + + diff --git a/venv/Lib/site-packages/importlib_resources-3.0.0.dist-info/RECORD b/venv/Lib/site-packages/importlib_resources-3.0.0.dist-info/RECORD new file mode 100644 index 00000000..0975141b --- /dev/null +++ b/venv/Lib/site-packages/importlib_resources-3.0.0.dist-info/RECORD @@ -0,0 +1,66 @@ +importlib_resources-3.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +importlib_resources-3.0.0.dist-info/LICENSE,sha256=uWRjFdYGataJX2ziXk048ItUglQmjng3GWBALaWA36U,568 +importlib_resources-3.0.0.dist-info/METADATA,sha256=BIalQpWJG-Av5ZUNQXdFsv6M8s8EdYiymu6GaoEL1Rk,2100 +importlib_resources-3.0.0.dist-info/RECORD,, +importlib_resources-3.0.0.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110 +importlib_resources-3.0.0.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20 +importlib_resources/__init__.py,sha256=hswDmLAH0IUlLWwmdHXPN2mgus2bk5IwDP-BFzg7VKo,977 +importlib_resources/__pycache__/__init__.cpython-36.pyc,, +importlib_resources/__pycache__/_common.cpython-36.pyc,, +importlib_resources/__pycache__/_compat.cpython-36.pyc,, +importlib_resources/__pycache__/_py2.cpython-36.pyc,, +importlib_resources/__pycache__/_py3.cpython-36.pyc,, +importlib_resources/__pycache__/abc.cpython-36.pyc,, +importlib_resources/__pycache__/readers.cpython-36.pyc,, +importlib_resources/__pycache__/trees.cpython-36.pyc,, +importlib_resources/_common.py,sha256=jXVqgKZ1bt8IbZiErvjIeb69BjqsMSCSt9AwV4bHnE8,3157 +importlib_resources/_compat.py,sha256=5nvS1tAZIl_6VqrFSScNVW2wtBGaRXGxcpDXMskruoA,3233 +importlib_resources/_py2.py,sha256=G9M5mv1ILl8NARGdNX0v9_F_Hb4HUKCS-FCNK63Ajvw,4146 +importlib_resources/_py3.py,sha256=2wJYfjLG8nd9mT1HLBtX96m6zlu9-Tocte9wFl9q_bY,5474 +importlib_resources/abc.py,sha256=6PX4Nprv39YnAht3NymhHIuSso0ocAKqDJZf-A6BgIw,3894 +importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/readers.py,sha256=S0DsGQB1li6w5USiZQtiy-5HXe4UAxt-zmKo8QlAxsI,1155 +importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/__pycache__/__init__.cpython-36.pyc,, +importlib_resources/tests/__pycache__/test_files.cpython-36.pyc,, +importlib_resources/tests/__pycache__/test_open.cpython-36.pyc,, +importlib_resources/tests/__pycache__/test_path.cpython-36.pyc,, +importlib_resources/tests/__pycache__/test_read.cpython-36.pyc,, +importlib_resources/tests/__pycache__/test_resource.cpython-36.pyc,, +importlib_resources/tests/__pycache__/util.cpython-36.pyc,, +importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data01/__pycache__/__init__.cpython-36.pyc,, +importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 +importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-36.pyc,, +importlib_resources/tests/data01/subdirectory/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 +importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44 
+importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20 +importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data02/__pycache__/__init__.cpython-36.pyc,, +importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data02/one/__pycache__/__init__.cpython-36.pyc,, +importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13 +importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data02/two/__pycache__/__init__.cpython-36.pyc,, +importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13 +importlib_resources/tests/data03/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data03/__pycache__/__init__.cpython-36.pyc,, +importlib_resources/tests/data03/namespace/portion1/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data03/namespace/portion1/__pycache__/__init__.cpython-36.pyc,, +importlib_resources/tests/data03/namespace/portion2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data03/namespace/portion2/__pycache__/__init__.cpython-36.pyc,, +importlib_resources/tests/data03/namespace/resource1.txt,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/test_files.py,sha256=91rf4C74_aJsKNSt-a-03slVpY9QSAuCbogFWnsaPjE,1017 +importlib_resources/tests/test_open.py,sha256=yDXmTGXQspByj6WU0prnoVwab1yWWEA3fwz_XIx7TQU,2288 +importlib_resources/tests/test_path.py,sha256=GnUOu-338o9offnC8xwbXjH9JIQJpD7JujgQkGB106Q,1548 +importlib_resources/tests/test_read.py,sha256=DpA7tzxSQlU0_YQuWibB3E5PDL9fQUdzeKoEUGnAx78,2046 +importlib_resources/tests/test_resource.py,sha256=X77DzU2BRoM6d59iEh74zDHHw3pKOBGLCg3lP3dH4BI,6467 +importlib_resources/tests/util.py,sha256=f0RZU-RkEkybJjXRd7C5HcWMsoLFRWJL4FIUF1CJ2wo,6980 +importlib_resources/tests/zipdata01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/zipdata01/__pycache__/__init__.cpython-36.pyc,, +importlib_resources/tests/zipdata01/ziptestdata.zip,sha256=AYf51fj80OKCRis93v2DlZjt5rM-VQOPptSHJbFtkXw,1131 +importlib_resources/tests/zipdata02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/zipdata02/__pycache__/__init__.cpython-36.pyc,, +importlib_resources/tests/zipdata02/ziptestdata.zip,sha256=e6HXvTEObXvJcNxyX5I8tu5M8_6mSN8ALahHfqE7ADA,698 +importlib_resources/trees.py,sha256=U3FlQSI5--eF4AdzOjBvW4xnjL21OFX8ivk82Quwv_M,117 diff --git a/venv/Lib/site-packages/importlib_resources-3.0.0.dist-info/WHEEL b/venv/Lib/site-packages/importlib_resources-3.0.0.dist-info/WHEEL new file mode 100644 index 00000000..ef99c6cf --- /dev/null +++ b/venv/Lib/site-packages/importlib_resources-3.0.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.34.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/Lib/site-packages/importlib_resources-3.0.0.dist-info/top_level.txt b/venv/Lib/site-packages/importlib_resources-3.0.0.dist-info/top_level.txt new file mode 100644 index 00000000..58ad1bd3 --- /dev/null +++ b/venv/Lib/site-packages/importlib_resources-3.0.0.dist-info/top_level.txt @@ -0,0 +1 @@ +importlib_resources diff --git 
a/venv/Lib/site-packages/importlib_resources/__init__.py b/venv/Lib/site-packages/importlib_resources/__init__.py new file mode 100644 index 00000000..f122f95e --- /dev/null +++ b/venv/Lib/site-packages/importlib_resources/__init__.py @@ -0,0 +1,53 @@ +"""Read resources contained within a package.""" + +import sys + +from ._common import ( + as_file, files, + ) + +# For compatibility. Ref #88. +# Also requires hook-importlib_resources.py (Ref #101). +__import__('importlib_resources.trees') + + +__all__ = [ + 'Package', + 'Resource', + 'ResourceReader', + 'as_file', + 'contents', + 'files', + 'is_resource', + 'open_binary', + 'open_text', + 'path', + 'read_binary', + 'read_text', + ] + + +if sys.version_info >= (3,): + from importlib_resources._py3 import ( + Package, + Resource, + contents, + is_resource, + open_binary, + open_text, + path, + read_binary, + read_text, + ) + from importlib_resources.abc import ResourceReader +else: + from importlib_resources._py2 import ( + contents, + is_resource, + open_binary, + open_text, + path, + read_binary, + read_text, + ) + del __all__[:3] diff --git a/venv/Lib/site-packages/importlib_resources/_common.py b/venv/Lib/site-packages/importlib_resources/_common.py new file mode 100644 index 00000000..f54c78d7 --- /dev/null +++ b/venv/Lib/site-packages/importlib_resources/_common.py @@ -0,0 +1,121 @@ +from __future__ import absolute_import + +import os +import tempfile +import contextlib +import types +import importlib + +from ._compat import ( + Path, FileNotFoundError, + singledispatch, package_spec, + ) + +if False: # TYPE_CHECKING + from typing import Union, Any, Optional + from .abc import ResourceReader + Package = Union[types.ModuleType, str] + + +def files(package): + """ + Get a Traversable resource from a package + """ + return from_package(get_package(package)) + + +def normalize_path(path): + # type: (Any) -> str + """Normalize a path by ensuring it is a string. + + If the resulting string contains path separators, an exception is raised. + """ + str_path = str(path) + parent, file_name = os.path.split(str_path) + if parent: + raise ValueError('{!r} must be only a file name'.format(path)) + return file_name + + +def get_resource_reader(package): + # type: (types.ModuleType) -> Optional[ResourceReader] + """ + Return the package's loader if it's a ResourceReader. + """ + # We can't use + # a issubclass() check here because apparently abc.'s __subclasscheck__() + # hook wants to create a weak reference to the object, but + # zipimport.zipimporter does not support weak references, resulting in a + # TypeError. That seems terrible. + spec = package.__spec__ + reader = getattr(spec.loader, 'get_resource_reader', None) + if reader is None: + return None + return reader(spec.name) + + +def resolve(cand): + # type: (Package) -> types.ModuleType + return ( + cand if isinstance(cand, types.ModuleType) + else importlib.import_module(cand) + ) + + +def get_package(package): + # type: (Package) -> types.ModuleType + """Take a package name or module object and return the module. + + Raise an exception if the resolved module is not a package. + """ + resolved = resolve(package) + if package_spec(resolved).submodule_search_locations is None: + raise TypeError('{!r} is not a package'.format(package)) + return resolved + + +def from_package(package): + """ + Return a Traversable object for the given package. 
+ + """ + spec = package_spec(package) + reader = spec.loader.get_resource_reader(spec.name) + return reader.files() + + +@contextlib.contextmanager +def _tempfile(reader, suffix=''): + # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try' + # blocks due to the need to close the temporary file to work on Windows + # properly. + fd, raw_path = tempfile.mkstemp(suffix=suffix) + try: + os.write(fd, reader()) + os.close(fd) + yield Path(raw_path) + finally: + try: + os.remove(raw_path) + except FileNotFoundError: + pass + + +@singledispatch +@contextlib.contextmanager +def as_file(path): + """ + Given a Traversable object, return that object as a + path on the local file system in a context manager. + """ + with _tempfile(path.read_bytes, suffix=path.name) as local: + yield local + + +@as_file.register(Path) +@contextlib.contextmanager +def _(path): + """ + Degenerate behavior for pathlib.Path objects. + """ + yield path diff --git a/venv/Lib/site-packages/importlib_resources/_compat.py b/venv/Lib/site-packages/importlib_resources/_compat.py new file mode 100644 index 00000000..dbfc6796 --- /dev/null +++ b/venv/Lib/site-packages/importlib_resources/_compat.py @@ -0,0 +1,127 @@ +from __future__ import absolute_import +import sys + +# flake8: noqa + +if sys.version_info > (3,5): + from pathlib import Path, PurePath +else: + from pathlib2 import Path, PurePath # type: ignore + + +if sys.version_info > (3,): + from contextlib import suppress +else: + from contextlib2 import suppress # type: ignore + + +try: + from functools import singledispatch +except ImportError: + from singledispatch import singledispatch # type: ignore + + +try: + from abc import ABC # type: ignore +except ImportError: + from abc import ABCMeta + + class ABC(object): # type: ignore + __metaclass__ = ABCMeta + + +try: + FileNotFoundError = FileNotFoundError # type: ignore +except NameError: + FileNotFoundError = OSError # type: ignore + + +try: + from zipfile import Path as ZipPath # type: ignore +except ImportError: + from zipp import Path as ZipPath # type: ignore + + +try: + from typing import runtime_checkable # type: ignore +except ImportError: + def runtime_checkable(cls): # type: ignore + return cls + + +try: + from typing import Protocol # type: ignore +except ImportError: + Protocol = ABC # type: ignore + + +__metaclass__ = type + + +class PackageSpec: + def __init__(self, **kwargs): + vars(self).update(kwargs) + + +class TraversableResourcesAdapter: + def __init__(self, spec): + self.spec = spec + self.loader = LoaderAdapter(spec) + + def __getattr__(self, name): + return getattr(self.spec, name) + + +class LoaderAdapter: + """ + Adapt loaders to provide TraversableResources and other + compatibility. + """ + def __init__(self, spec): + self.spec = spec + + @property + def path(self): + # Python < 3 + return self.spec.origin + + def get_resource_reader(self, name): + # Python < 3.9 + from . 
import readers + + def _zip_reader(spec): + with suppress(AttributeError): + return readers.ZipReader(spec.loader, spec.name) + + def _available_reader(spec): + with suppress(AttributeError): + return spec.loader.get_resource_reader(spec.name) + + def _native_reader(spec): + reader = _available_reader(spec) + return reader if hasattr(reader, 'files') else None + + return ( + # native reader if it supplies 'files' + _native_reader(self.spec) or + # local ZipReader if a zip module + _zip_reader(self.spec) or + # local FileReader + readers.FileReader(self) + ) + + +def package_spec(package): + """ + Construct a minimal package spec suitable for + matching the interfaces this library relies upon + in later Python versions. + """ + spec = getattr(package, '__spec__', None) or \ + PackageSpec( + origin=package.__file__, + loader=getattr(package, '__loader__', None), + name=package.__name__, + submodule_search_locations=getattr(package, '__path__', None), + ) + return TraversableResourcesAdapter(spec) diff --git a/venv/Lib/site-packages/importlib_resources/_py2.py b/venv/Lib/site-packages/importlib_resources/_py2.py new file mode 100644 index 00000000..dd8c7d62 --- /dev/null +++ b/venv/Lib/site-packages/importlib_resources/_py2.py @@ -0,0 +1,107 @@ +import os +import errno + +from . import _common +from ._compat import FileNotFoundError +from io import BytesIO, TextIOWrapper, open as io_open + + +def open_binary(package, resource): + """Return a file-like object opened for binary reading of the resource.""" + resource = _common.normalize_path(resource) + package = _common.get_package(package) + # Using pathlib doesn't work well here due to the lack of 'strict' argument + # for pathlib.Path.resolve() prior to Python 3.6. + package_path = os.path.dirname(package.__file__) + relative_path = os.path.join(package_path, resource) + full_path = os.path.abspath(relative_path) + try: + return io_open(full_path, 'rb') + except IOError: + # This might be a package in a zip file. zipimport provides a loader + # with a functioning get_data() method, however we have to strip the + # archive (i.e. the .zip file's name) off the front of the path. This + # is because the zipimport loader in Python 2 doesn't actually follow + # PEP 302. It should allow the full path, but actually requires that + # the path be relative to the zip file. + try: + loader = package.__loader__ + full_path = relative_path[len(loader.archive)+1:] + data = loader.get_data(full_path) + except (IOError, AttributeError): + package_name = package.__name__ + message = '{!r} resource not found in {!r}'.format( + resource, package_name) + raise FileNotFoundError(message) + return BytesIO(data) + + +def open_text(package, resource, encoding='utf-8', errors='strict'): + """Return a file-like object opened for text reading of the resource.""" + return TextIOWrapper( + open_binary(package, resource), encoding=encoding, errors=errors) + + +def read_binary(package, resource): + """Return the binary contents of the resource.""" + with open_binary(package, resource) as fp: + return fp.read() + + +def read_text(package, resource, encoding='utf-8', errors='strict'): + """Return the decoded string of the resource. + + The decoding-related arguments have the same semantics as those of + bytes.decode(). + """ + with open_text(package, resource, encoding, errors) as fp: + return fp.read() + + +def path(package, resource): + """A context manager providing a file path object to the resource. 
+ + If the resource does not already exist on its own on the file system, + a temporary file will be created. If the file was created, the file + will be deleted upon exiting the context manager (no exception is + raised if the file was deleted prior to the context manager + exiting). + """ + path = _common.files(package).joinpath(_common.normalize_path(resource)) + if not path.is_file(): + raise FileNotFoundError(path) + return _common.as_file(path) + + +def is_resource(package, name): + """True if name is a resource inside package. + + Directories are *not* resources. + """ + package = _common.get_package(package) + _common.normalize_path(name) + try: + package_contents = set(contents(package)) + except OSError as error: + if error.errno not in (errno.ENOENT, errno.ENOTDIR): + # We won't hit this in the Python 2 tests, so it'll appear + # uncovered. We could mock os.listdir() to return a non-ENOENT or + # ENOTDIR, but then we'd have to depend on another external + # library since Python 2 doesn't have unittest.mock. It's not + # worth it. + raise # pragma: nocover + return False + if name not in package_contents: + return False + return (_common.from_package(package) / name).is_file() + + +def contents(package): + """Return an iterable of entries in `package`. + + Note that not all entries are resources. Specifically, directories are + not considered resources. Use `is_resource()` on each entry returned here + to check if it is a resource or not. + """ + package = _common.get_package(package) + return list(item.name for item in _common.from_package(package).iterdir()) diff --git a/venv/Lib/site-packages/importlib_resources/_py3.py b/venv/Lib/site-packages/importlib_resources/_py3.py new file mode 100644 index 00000000..5998f215 --- /dev/null +++ b/venv/Lib/site-packages/importlib_resources/_py3.py @@ -0,0 +1,150 @@ +import os +import sys + +from . import _common +from contextlib import contextmanager, suppress +from importlib.abc import ResourceLoader +from io import BytesIO, TextIOWrapper +from pathlib import Path +from types import ModuleType +from typing import Iterable, Iterator, Optional, Set, Union # noqa: F401 +from typing import cast +from typing.io import BinaryIO, TextIO + +if False: # TYPE_CHECKING + from typing import ContextManager + +Package = Union[ModuleType, str] +if sys.version_info >= (3, 6): + Resource = Union[str, os.PathLike] # pragma: <=35 +else: + Resource = str # pragma: >=36 + + +def open_binary(package: Package, resource: Resource) -> BinaryIO: + """Return a file-like object opened for binary reading of the resource.""" + resource = _common.normalize_path(resource) + package = _common.get_package(package) + reader = _common.get_resource_reader(package) + if reader is not None: + return reader.open_resource(resource) + # Using pathlib doesn't work well here due to the lack of 'strict' + # argument for pathlib.Path.resolve() prior to Python 3.6. + absolute_package_path = os.path.abspath( + package.__spec__.origin or 'non-existent file') + package_path = os.path.dirname(absolute_package_path) + full_path = os.path.join(package_path, resource) + try: + return open(full_path, mode='rb') + except OSError: + # Just assume the loader is a resource loader; all the relevant + # importlib.machinery loaders are and an AttributeError for + # get_data() will make it clear what is needed from the loader. 
+ loader = cast(ResourceLoader, package.__spec__.loader) + data = None + if hasattr(package.__spec__.loader, 'get_data'): + with suppress(OSError): + data = loader.get_data(full_path) + if data is None: + package_name = package.__spec__.name + message = '{!r} resource not found in {!r}'.format( + resource, package_name) + raise FileNotFoundError(message) + return BytesIO(data) + + +def open_text(package: Package, + resource: Resource, + encoding: str = 'utf-8', + errors: str = 'strict') -> TextIO: + """Return a file-like object opened for text reading of the resource.""" + return TextIOWrapper( + open_binary(package, resource), encoding=encoding, errors=errors) + + +def read_binary(package: Package, resource: Resource) -> bytes: + """Return the binary contents of the resource.""" + with open_binary(package, resource) as fp: + return fp.read() + + +def read_text(package: Package, + resource: Resource, + encoding: str = 'utf-8', + errors: str = 'strict') -> str: + """Return the decoded string of the resource. + + The decoding-related arguments have the same semantics as those of + bytes.decode(). + """ + with open_text(package, resource, encoding, errors) as fp: + return fp.read() + + +def path( + package: Package, resource: Resource, + ) -> 'ContextManager[Path]': + """A context manager providing a file path object to the resource. + + If the resource does not already exist on its own on the file system, + a temporary file will be created. If the file was created, the file + will be deleted upon exiting the context manager (no exception is + raised if the file was deleted prior to the context manager + exiting). + """ + reader = _common.get_resource_reader(_common.get_package(package)) + return ( + _path_from_reader(reader, resource) + if reader else + _common.as_file( + _common.files(package).joinpath(_common.normalize_path(resource))) + ) + + +@contextmanager +def _path_from_reader(reader, resource): + norm_resource = _common.normalize_path(resource) + with suppress(FileNotFoundError): + yield Path(reader.resource_path(norm_resource)) + return + opener_reader = reader.open_resource(norm_resource) + with _common._tempfile(opener_reader.read, suffix=norm_resource) as res: + yield res + + +def is_resource(package: Package, name: str) -> bool: + """True if `name` is a resource inside `package`. + + Directories are *not* resources. + """ + package = _common.get_package(package) + _common.normalize_path(name) + reader = _common.get_resource_reader(package) + if reader is not None: + return reader.is_resource(name) + package_contents = set(contents(package)) + if name not in package_contents: + return False + return (_common.from_package(package) / name).is_file() + + +def contents(package: Package) -> Iterable[str]: + """Return an iterable of entries in `package`. + + Note that not all entries are resources. Specifically, directories are + not considered resources. Use `is_resource()` on each entry returned here + to check if it is a resource or not. + """ + package = _common.get_package(package) + reader = _common.get_resource_reader(package) + if reader is not None: + return reader.contents() + # Is the package a namespace package? By definition, namespace packages + # cannot have resources. 
+    namespace = (
+        package.__spec__.origin is None or
+        package.__spec__.origin == 'namespace'
+        )
+    if namespace or not package.__spec__.has_location:
+        return ()
+    return list(item.name for item in _common.from_package(package).iterdir())
diff --git a/venv/Lib/site-packages/importlib_resources/abc.py b/venv/Lib/site-packages/importlib_resources/abc.py
new file mode 100644
index 00000000..18bc4ef8
--- /dev/null
+++ b/venv/Lib/site-packages/importlib_resources/abc.py
@@ -0,0 +1,142 @@
+from __future__ import absolute_import
+
+import abc
+
+from ._compat import ABC, FileNotFoundError, runtime_checkable, Protocol
+
+# Use mypy's comment syntax for Python 2 compatibility
+try:
+    from typing import BinaryIO, Iterable, Text
+except ImportError:
+    pass
+
+
+class ResourceReader(ABC):
+    """Abstract base class for loaders to provide resource reading support."""
+
+    @abc.abstractmethod
+    def open_resource(self, resource):
+        # type: (Text) -> BinaryIO
+        """Return an opened, file-like object for binary reading.
+
+        The 'resource' argument is expected to represent only a file name.
+        If the resource cannot be found, FileNotFoundError is raised.
+        """
+        # This deliberately raises FileNotFoundError instead of
+        # NotImplementedError so that if this method is accidentally called,
+        # it'll still do the right thing.
+        raise FileNotFoundError
+
+    @abc.abstractmethod
+    def resource_path(self, resource):
+        # type: (Text) -> Text
+        """Return the file system path to the specified resource.
+
+        The 'resource' argument is expected to represent only a file name.
+        If the resource does not exist on the file system, raise
+        FileNotFoundError.
+        """
+        # This deliberately raises FileNotFoundError instead of
+        # NotImplementedError so that if this method is accidentally called,
+        # it'll still do the right thing.
+        raise FileNotFoundError
+
+    @abc.abstractmethod
+    def is_resource(self, path):
+        # type: (Text) -> bool
+        """Return True if the named 'path' is a resource.
+
+        Files are resources, directories are not.
+        """
+        raise FileNotFoundError
+
+    @abc.abstractmethod
+    def contents(self):
+        # type: () -> Iterable[str]
+        """Return an iterable of entries in `package`."""
+        raise FileNotFoundError
+
+
+@runtime_checkable
+class Traversable(Protocol):
+    """
+    An object with a subset of pathlib.Path methods suitable for
+    traversing directories and opening files.
+    """
+
+    @abc.abstractmethod
+    def iterdir(self):
+        """
+        Yield Traversable objects in self
+        """
+
+    @abc.abstractmethod
+    def read_bytes(self):
+        """
+        Read contents of self as bytes
+        """
+
+    @abc.abstractmethod
+    def read_text(self, encoding=None):
+        """
+        Read contents of self as text
+        """
+
+    @abc.abstractmethod
+    def is_dir(self):
+        """
+        Return True if self is a dir
+        """
+
+    @abc.abstractmethod
+    def is_file(self):
+        """
+        Return True if self is a file
+        """
+
+    @abc.abstractmethod
+    def joinpath(self, child):
+        """
+        Return Traversable child in self
+        """
+
+    @abc.abstractmethod
+    def __truediv__(self, child):
+        """
+        Return Traversable child in self
+        """
+
+    @abc.abstractmethod
+    def open(self, mode='r', *args, **kwargs):
+        """
+        mode may be 'r' or 'rb' to open as text or binary. Return a handle
+        suitable for reading (same as pathlib.Path.open).
+
+        When opening as text, accepts encoding parameters such as those
+        accepted by io.TextIOWrapper.
+        """
+
+    @abc.abstractproperty
+    def name(self):
+        # type: () -> str
+        """
+        The base name of this object without any parent references.
+ """ + + +class TraversableResources(ResourceReader): + @abc.abstractmethod + def files(self): + """Return a Traversable object for the loaded package.""" + + def open_resource(self, resource): + return self.files().joinpath(resource).open('rb') + + def resource_path(self, resource): + raise FileNotFoundError(resource) + + def is_resource(self, path): + return self.files().joinpath(path).is_file() + + def contents(self): + return (item.name for item in self.files().iterdir()) diff --git a/venv/Lib/site-packages/importlib_resources/py.typed b/venv/Lib/site-packages/importlib_resources/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/importlib_resources/readers.py b/venv/Lib/site-packages/importlib_resources/readers.py new file mode 100644 index 00000000..0e0b17ab --- /dev/null +++ b/venv/Lib/site-packages/importlib_resources/readers.py @@ -0,0 +1,41 @@ +from . import abc + +from ._compat import Path, ZipPath + + +class FileReader(abc.TraversableResources): + def __init__(self, loader): + self.path = Path(loader.path).parent + + def resource_path(self, resource): + """ + Return the file system path to prevent + `resources.path()` from creating a temporary + copy. + """ + return str(self.path.joinpath(resource)) + + def files(self): + return self.path + + +class ZipReader(abc.TraversableResources): + def __init__(self, loader, module): + _, _, name = module.rpartition('.') + prefix = loader.prefix.replace('\\', '/') + name + '/' + self.path = ZipPath(loader.archive, prefix) + + def open_resource(self, resource): + try: + return super().open_resource(resource) + except KeyError as exc: + raise FileNotFoundError(exc.args[0]) + + def is_resource(self, path): + # workaround for `zipfile.Path.is_file` returning true + # for non-existent paths. 
+ target = self.files().joinpath(path) + return target.is_file() and target.exists() + + def files(self): + return self.path diff --git a/venv/Lib/site-packages/importlib_resources/tests/__init__.py b/venv/Lib/site-packages/importlib_resources/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/importlib_resources/tests/data01/__init__.py b/venv/Lib/site-packages/importlib_resources/tests/data01/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/importlib_resources/tests/data01/binary.file b/venv/Lib/site-packages/importlib_resources/tests/data01/binary.file new file mode 100644 index 00000000..eaf36c1d Binary files /dev/null and b/venv/Lib/site-packages/importlib_resources/tests/data01/binary.file differ diff --git a/venv/Lib/site-packages/importlib_resources/tests/data01/subdirectory/__init__.py b/venv/Lib/site-packages/importlib_resources/tests/data01/subdirectory/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/importlib_resources/tests/data01/subdirectory/binary.file b/venv/Lib/site-packages/importlib_resources/tests/data01/subdirectory/binary.file new file mode 100644 index 00000000..eaf36c1d Binary files /dev/null and b/venv/Lib/site-packages/importlib_resources/tests/data01/subdirectory/binary.file differ diff --git a/venv/Lib/site-packages/importlib_resources/tests/data01/utf-16.file b/venv/Lib/site-packages/importlib_resources/tests/data01/utf-16.file new file mode 100644 index 00000000..2cb77229 Binary files /dev/null and b/venv/Lib/site-packages/importlib_resources/tests/data01/utf-16.file differ diff --git a/venv/Lib/site-packages/importlib_resources/tests/data01/utf-8.file b/venv/Lib/site-packages/importlib_resources/tests/data01/utf-8.file new file mode 100644 index 00000000..1c0132ad --- /dev/null +++ b/venv/Lib/site-packages/importlib_resources/tests/data01/utf-8.file @@ -0,0 +1 @@ +Hello, UTF-8 world! 
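The modules above (``_common.py``, ``abc.py``, ``readers.py``) implement the
``files()`` API that the test packages below exercise: ``files()`` resolves a
package to a ``Traversable``, and ``as_file()`` materializes a resource as a
real file system path when one is needed. A minimal usage sketch, assuming a
hypothetical installed package ``mypkg`` that ships a ``config.json``
resource::

    import json

    import importlib_resources

    # files() returns a Traversable backed by FileReader or ZipReader,
    # depending on whether mypkg lives on disk or inside a zip archive.
    resource = importlib_resources.files('mypkg').joinpath('config.json')
    config = json.loads(resource.read_text(encoding='utf-8'))

    # as_file() yields a concrete file system path, extracting to a
    # temporary file only when the resource is not already on disk.
    with importlib_resources.as_file(resource) as path:
        print(path)

The temporary-file branch is the ``_tempfile()`` helper in ``_common.py``;
for resources that are already ``pathlib.Path`` objects, the registered
``as_file`` overload simply yields the path unchanged.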
diff --git a/venv/Lib/site-packages/importlib_resources/tests/data02/__init__.py b/venv/Lib/site-packages/importlib_resources/tests/data02/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/importlib_resources/tests/data02/one/__init__.py b/venv/Lib/site-packages/importlib_resources/tests/data02/one/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/importlib_resources/tests/data02/one/resource1.txt b/venv/Lib/site-packages/importlib_resources/tests/data02/one/resource1.txt new file mode 100644 index 00000000..61a813e4 --- /dev/null +++ b/venv/Lib/site-packages/importlib_resources/tests/data02/one/resource1.txt @@ -0,0 +1 @@ +one resource diff --git a/venv/Lib/site-packages/importlib_resources/tests/data02/two/__init__.py b/venv/Lib/site-packages/importlib_resources/tests/data02/two/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/importlib_resources/tests/data02/two/resource2.txt b/venv/Lib/site-packages/importlib_resources/tests/data02/two/resource2.txt new file mode 100644 index 00000000..a80ce46e --- /dev/null +++ b/venv/Lib/site-packages/importlib_resources/tests/data02/two/resource2.txt @@ -0,0 +1 @@ +two resource diff --git a/venv/Lib/site-packages/importlib_resources/tests/data03/__init__.py b/venv/Lib/site-packages/importlib_resources/tests/data03/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/importlib_resources/tests/data03/namespace/portion1/__init__.py b/venv/Lib/site-packages/importlib_resources/tests/data03/namespace/portion1/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/importlib_resources/tests/data03/namespace/portion2/__init__.py b/venv/Lib/site-packages/importlib_resources/tests/data03/namespace/portion2/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/importlib_resources/tests/data03/namespace/resource1.txt b/venv/Lib/site-packages/importlib_resources/tests/data03/namespace/resource1.txt new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/importlib_resources/tests/test_files.py b/venv/Lib/site-packages/importlib_resources/tests/test_files.py new file mode 100644 index 00000000..5a9cf9c7 --- /dev/null +++ b/venv/Lib/site-packages/importlib_resources/tests/test_files.py @@ -0,0 +1,39 @@ +import typing +import unittest + +import importlib_resources as resources +from importlib_resources.abc import Traversable +from . import data01 +from . 
import util + + +class FilesTests: + def test_read_bytes(self): + files = resources.files(self.data) + actual = files.joinpath('utf-8.file').read_bytes() + assert actual == b'Hello, UTF-8 world!\n' + + def test_read_text(self): + files = resources.files(self.data) + actual = files.joinpath('utf-8.file').read_text() + assert actual == 'Hello, UTF-8 world!\n' + + @unittest.skipUnless( + hasattr(typing, 'runtime_checkable'), + "Only suitable when typing supports runtime_checkable", + ) + def test_traversable(self): + assert isinstance(resources.files(self.data), Traversable) + + +class OpenDiskTests(FilesTests, unittest.TestCase): + def setUp(self): + self.data = data01 + + +class OpenZipTests(FilesTests, util.ZipSetup, unittest.TestCase): + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/venv/Lib/site-packages/importlib_resources/tests/test_open.py b/venv/Lib/site-packages/importlib_resources/tests/test_open.py new file mode 100644 index 00000000..8a3429f2 --- /dev/null +++ b/venv/Lib/site-packages/importlib_resources/tests/test_open.py @@ -0,0 +1,73 @@ +import unittest + +import importlib_resources as resources +from . import data01 +from . import util +from .._compat import FileNotFoundError + + +class CommonBinaryTests(util.CommonTests, unittest.TestCase): + def execute(self, package, path): + with resources.open_binary(package, path): + pass + + +class CommonTextTests(util.CommonTests, unittest.TestCase): + def execute(self, package, path): + with resources.open_text(package, path): + pass + + +class OpenTests: + def test_open_binary(self): + with resources.open_binary(self.data, 'utf-8.file') as fp: + result = fp.read() + self.assertEqual(result, b'Hello, UTF-8 world!\n') + + def test_open_text_default_encoding(self): + with resources.open_text(self.data, 'utf-8.file') as fp: + result = fp.read() + self.assertEqual(result, 'Hello, UTF-8 world!\n') + + def test_open_text_given_encoding(self): + with resources.open_text( + self.data, 'utf-16.file', 'utf-16', 'strict') as fp: + result = fp.read() + self.assertEqual(result, 'Hello, UTF-16 world!\n') + + def test_open_text_with_errors(self): + # Raises UnicodeError without the 'errors' argument. + with resources.open_text( + self.data, 'utf-16.file', 'utf-8', 'strict') as fp: + self.assertRaises(UnicodeError, fp.read) + with resources.open_text( + self.data, 'utf-16.file', 'utf-8', 'ignore') as fp: + result = fp.read() + self.assertEqual( + result, + 'H\x00e\x00l\x00l\x00o\x00,\x00 ' + '\x00U\x00T\x00F\x00-\x001\x006\x00 ' + '\x00w\x00o\x00r\x00l\x00d\x00!\x00\n\x00') + + def test_open_binary_FileNotFoundError(self): + self.assertRaises( + FileNotFoundError, + resources.open_binary, self.data, 'does-not-exist') + + def test_open_text_FileNotFoundError(self): + self.assertRaises( + FileNotFoundError, + resources.open_text, self.data, 'does-not-exist') + + +class OpenDiskTests(OpenTests, unittest.TestCase): + def setUp(self): + self.data = data01 + + +class OpenZipTests(OpenTests, util.ZipSetup, unittest.TestCase): + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/venv/Lib/site-packages/importlib_resources/tests/test_path.py b/venv/Lib/site-packages/importlib_resources/tests/test_path.py new file mode 100644 index 00000000..943b3752 --- /dev/null +++ b/venv/Lib/site-packages/importlib_resources/tests/test_path.py @@ -0,0 +1,51 @@ +import unittest + +import importlib_resources as resources +from . import data01 +from . 
import util + + +class CommonTests(util.CommonTests, unittest.TestCase): + + def execute(self, package, path): + with resources.path(package, path): + pass + + +class PathTests: + + def test_reading(self): + # Path should be readable. + # Test also implicitly verifies the returned object is a pathlib.Path + # instance. + with resources.path(self.data, 'utf-8.file') as path: + self.assertTrue(path.name.endswith("utf-8.file"), repr(path)) + # pathlib.Path.read_text() was introduced in Python 3.5. + with path.open('r', encoding='utf-8') as file: + text = file.read() + self.assertEqual('Hello, UTF-8 world!\n', text) + + +class PathDiskTests(PathTests, unittest.TestCase): + data = data01 + + def test_natural_path(self): + """ + Guarantee the internal implementation detail that + file-system-backed resources do not get the tempdir + treatment. + """ + with resources.path(self.data, 'utf-8.file') as path: + assert 'data' in str(path) + + +class PathZipTests(PathTests, util.ZipSetup, unittest.TestCase): + def test_remove_in_context_manager(self): + # It is not an error if the file that was temporarily stashed on the + # file system is removed inside the `with` stanza. + with resources.path(self.data, 'utf-8.file') as path: + path.unlink() + + +if __name__ == '__main__': + unittest.main() diff --git a/venv/Lib/site-packages/importlib_resources/tests/test_read.py b/venv/Lib/site-packages/importlib_resources/tests/test_read.py new file mode 100644 index 00000000..ee94d8ad --- /dev/null +++ b/venv/Lib/site-packages/importlib_resources/tests/test_read.py @@ -0,0 +1,63 @@ +import unittest +import importlib_resources as resources + +from . import data01 +from . import util +from importlib import import_module + + +class CommonBinaryTests(util.CommonTests, unittest.TestCase): + def execute(self, package, path): + resources.read_binary(package, path) + + +class CommonTextTests(util.CommonTests, unittest.TestCase): + def execute(self, package, path): + resources.read_text(package, path) + + +class ReadTests: + def test_read_binary(self): + result = resources.read_binary(self.data, 'binary.file') + self.assertEqual(result, b'\0\1\2\3') + + def test_read_text_default_encoding(self): + result = resources.read_text(self.data, 'utf-8.file') + self.assertEqual(result, 'Hello, UTF-8 world!\n') + + def test_read_text_given_encoding(self): + result = resources.read_text( + self.data, 'utf-16.file', encoding='utf-16') + self.assertEqual(result, 'Hello, UTF-16 world!\n') + + def test_read_text_with_errors(self): + # Raises UnicodeError without the 'errors' argument. 
+ self.assertRaises( + UnicodeError, resources.read_text, self.data, 'utf-16.file') + result = resources.read_text(self.data, 'utf-16.file', errors='ignore') + self.assertEqual( + result, + 'H\x00e\x00l\x00l\x00o\x00,\x00 ' + '\x00U\x00T\x00F\x00-\x001\x006\x00 ' + '\x00w\x00o\x00r\x00l\x00d\x00!\x00\n\x00') + + +class ReadDiskTests(ReadTests, unittest.TestCase): + data = data01 + + +class ReadZipTests(ReadTests, util.ZipSetup, unittest.TestCase): + def test_read_submodule_resource(self): + submodule = import_module('ziptestdata.subdirectory') + result = resources.read_binary( + submodule, 'binary.file') + self.assertEqual(result, b'\0\1\2\3') + + def test_read_submodule_resource_by_name(self): + result = resources.read_binary( + 'ziptestdata.subdirectory', 'binary.file') + self.assertEqual(result, b'\0\1\2\3') + + +if __name__ == '__main__': + unittest.main() diff --git a/venv/Lib/site-packages/importlib_resources/tests/test_resource.py b/venv/Lib/site-packages/importlib_resources/tests/test_resource.py new file mode 100644 index 00000000..8c5a72cb --- /dev/null +++ b/venv/Lib/site-packages/importlib_resources/tests/test_resource.py @@ -0,0 +1,170 @@ +import sys +import unittest +import importlib_resources as resources + +from . import data01 +from . import zipdata01, zipdata02 +from . import util +from importlib import import_module + + +class ResourceTests: + # Subclasses are expected to set the `data` attribute. + + def test_is_resource_good_path(self): + self.assertTrue(resources.is_resource(self.data, 'binary.file')) + + def test_is_resource_missing(self): + self.assertFalse(resources.is_resource(self.data, 'not-a-file')) + + def test_is_resource_subresource_directory(self): + # Directories are not resources. + self.assertFalse(resources.is_resource(self.data, 'subdirectory')) + + def test_contents(self): + contents = set(resources.contents(self.data)) + # There may be cruft in the directory listing of the data directory. + # Under Python 3 we could have a __pycache__ directory, and under + # Python 2 we could have .pyc files. These are both artifacts of the + # test suite importing these modules and writing these caches. They + # aren't germane to this test, so just filter them out. 
+ contents.discard('__pycache__') + contents.discard('__init__.pyc') + contents.discard('__init__.pyo') + self.assertEqual(contents, { + '__init__.py', + 'subdirectory', + 'utf-8.file', + 'binary.file', + 'utf-16.file', + }) + + +class ResourceDiskTests(ResourceTests, unittest.TestCase): + def setUp(self): + self.data = data01 + + +class ResourceZipTests(ResourceTests, util.ZipSetup, unittest.TestCase): + pass + + +@unittest.skipIf(sys.version_info < (3,), 'No ResourceReader in Python 2') +class ResourceLoaderTests(unittest.TestCase): + def test_resource_contents(self): + package = util.create_package( + file=data01, path=data01.__file__, contents=['A', 'B', 'C']) + self.assertEqual( + set(resources.contents(package)), + {'A', 'B', 'C'}) + + def test_resource_is_resource(self): + package = util.create_package( + file=data01, path=data01.__file__, + contents=['A', 'B', 'C', 'D/E', 'D/F']) + self.assertTrue(resources.is_resource(package, 'B')) + + def test_resource_directory_is_not_resource(self): + package = util.create_package( + file=data01, path=data01.__file__, + contents=['A', 'B', 'C', 'D/E', 'D/F']) + self.assertFalse(resources.is_resource(package, 'D')) + + def test_resource_missing_is_not_resource(self): + package = util.create_package( + file=data01, path=data01.__file__, + contents=['A', 'B', 'C', 'D/E', 'D/F']) + self.assertFalse(resources.is_resource(package, 'Z')) + + +class ResourceCornerCaseTests(unittest.TestCase): + def test_package_has_no_reader_fallback(self): + # Test odd ball packages which: + # 1. Do not have a ResourceReader as a loader + # 2. Are not on the file system + # 3. Are not in a zip file + module = util.create_package( + file=data01, path=data01.__file__, contents=['A', 'B', 'C']) + # Give the module a dummy loader. + module.__loader__ = object() + # Give the module a dummy origin. + module.__file__ = '/path/which/shall/not/be/named' + if sys.version_info >= (3,): + module.__spec__.loader = module.__loader__ + module.__spec__.origin = module.__file__ + self.assertFalse(resources.is_resource(module, 'A')) + + +class ResourceFromZipsTest01(util.ZipSetupBase, unittest.TestCase): + ZIP_MODULE = zipdata01 # type: ignore + + def test_is_submodule_resource(self): + submodule = import_module('ziptestdata.subdirectory') + self.assertTrue( + resources.is_resource(submodule, 'binary.file')) + + def test_read_submodule_resource_by_name(self): + self.assertTrue( + resources.is_resource('ziptestdata.subdirectory', 'binary.file')) + + def test_submodule_contents(self): + submodule = import_module('ziptestdata.subdirectory') + self.assertEqual( + set(resources.contents(submodule)), + {'__init__.py', 'binary.file'}) + + def test_submodule_contents_by_name(self): + self.assertEqual( + set(resources.contents('ziptestdata.subdirectory')), + {'__init__.py', 'binary.file'}) + + +class ResourceFromZipsTest02(util.ZipSetupBase, unittest.TestCase): + ZIP_MODULE = zipdata02 # type: ignore + + def test_unrelated_contents(self): + # https://gitlab.com/python-devs/importlib_resources/issues/44 + # + # Here we have a zip file with two unrelated subpackages. The bug + # reports that getting the contents of a resource returns unrelated + # files. 
+ self.assertEqual( + set(resources.contents('ziptestdata.one')), + {'__init__.py', 'resource1.txt'}) + self.assertEqual( + set(resources.contents('ziptestdata.two')), + {'__init__.py', 'resource2.txt'}) + + +@unittest.skipIf(sys.version_info < (3,), 'No namespace packages in Python 2') +class NamespaceTest(unittest.TestCase): + def test_namespaces_cannot_have_resources(self): + contents = resources.contents( + 'importlib_resources.tests.data03.namespace') + self.assertFalse(list(contents)) + # Even though there is a file in the namespace directory, it is not + # considered a resource, since namespace packages can't have them. + self.assertFalse(resources.is_resource( + 'importlib_resources.tests.data03.namespace', + 'resource1.txt')) + # We should get an exception if we try to read it or open it. + self.assertRaises( + FileNotFoundError, + resources.open_text, + 'importlib_resources.tests.data03.namespace', 'resource1.txt') + self.assertRaises( + FileNotFoundError, + resources.open_binary, + 'importlib_resources.tests.data03.namespace', 'resource1.txt') + self.assertRaises( + FileNotFoundError, + resources.read_text, + 'importlib_resources.tests.data03.namespace', 'resource1.txt') + self.assertRaises( + FileNotFoundError, + resources.read_binary, + 'importlib_resources.tests.data03.namespace', 'resource1.txt') + + +if __name__ == '__main__': + unittest.main() diff --git a/venv/Lib/site-packages/importlib_resources/tests/util.py b/venv/Lib/site-packages/importlib_resources/tests/util.py new file mode 100644 index 00000000..8c26496d --- /dev/null +++ b/venv/Lib/site-packages/importlib_resources/tests/util.py @@ -0,0 +1,213 @@ +import abc +import importlib +import io +import sys +import types +import unittest + +from . import data01 +from . import zipdata01 +from .._compat import ABC, Path, PurePath, FileNotFoundError +from ..abc import ResourceReader + +try: + from test.support import modules_setup, modules_cleanup +except ImportError: + # Python 2.7. + def modules_setup(): + return sys.modules.copy(), + + def modules_cleanup(oldmodules): + # Encoders/decoders are registered permanently within the internal + # codec cache. If we destroy the corresponding modules their + # globals will be set to None which will trip up the cached functions. + encodings = [(k, v) for k, v in sys.modules.items() + if k.startswith('encodings.')] + sys.modules.clear() + sys.modules.update(encodings) + # XXX: This kind of problem can affect more than just encodings. In + # particular extension modules (such as _ssl) don't cope with reloading + # properly. Really, test modules should be cleaning out the test + # specific modules they know they added (ala test_runpy) rather than + # relying on this function (as test_importhooks and test_pkg do + # currently). Implicitly imported *real* modules should be left alone + # (see issue 10556). 
+        sys.modules.update(oldmodules)
+
+
+try:
+    from importlib.machinery import ModuleSpec
+except ImportError:
+    ModuleSpec = None  # type: ignore
+
+
+def create_package(file, path, is_package=True, contents=()):
+    class Reader(ResourceReader):
+        def get_resource_reader(self, package):
+            return self
+
+        def open_resource(self, path):
+            self._path = path
+            if isinstance(file, Exception):
+                raise file
+            else:
+                return file
+
+        def resource_path(self, path_):
+            self._path = path_
+            if isinstance(path, Exception):
+                raise path
+            else:
+                return path
+
+        def is_resource(self, path_):
+            self._path = path_
+            if isinstance(path, Exception):
+                raise path
+            for entry in contents:
+                parts = entry.split('/')
+                if len(parts) == 1 and parts[0] == path_:
+                    return True
+            return False
+
+        def contents(self):
+            if isinstance(path, Exception):
+                raise path
+            # There's no yield from in baseball, er, Python 2.
+            for entry in contents:
+                yield entry
+
+    name = 'testingpackage'
+    # Unfortunately importlib.util.module_from_spec() was not introduced until
+    # Python 3.5.
+    module = types.ModuleType(name)
+    if ModuleSpec is None:
+        # Python 2.
+        module.__name__ = name
+        module.__file__ = 'does-not-exist'
+        if is_package:
+            module.__path__ = []
+    else:
+        # Python 3.
+        loader = Reader()
+        spec = ModuleSpec(
+            name, loader,
+            origin='does-not-exist',
+            is_package=is_package)
+        module.__spec__ = spec
+        module.__loader__ = loader
+    return module
+
+
+class CommonTests(ABC):
+
+    @abc.abstractmethod
+    def execute(self, package, path):
+        raise NotImplementedError
+
+    def test_package_name(self):
+        # Passing in the package name should succeed.
+        self.execute(data01.__name__, 'utf-8.file')
+
+    def test_package_object(self):
+        # Passing in the package itself should succeed.
+        self.execute(data01, 'utf-8.file')
+
+    def test_string_path(self):
+        # Passing in a string for the path should succeed.
+        path = 'utf-8.file'
+        self.execute(data01, path)
+
+    @unittest.skipIf(sys.version_info < (3, 6), 'requires os.PathLike support')
+    def test_pathlib_path(self):
+        # Passing in a pathlib.PurePath object for the path should succeed.
+        path = PurePath('utf-8.file')
+        self.execute(data01, path)
+
+    def test_absolute_path(self):
+        # An absolute path is a ValueError.
+        path = Path(__file__)
+        full_path = path.parent/'utf-8.file'
+        with self.assertRaises(ValueError):
+            self.execute(data01, full_path)
+
+    def test_relative_path(self):
+        # A relative path is a ValueError.
+        with self.assertRaises(ValueError):
+            self.execute(data01, '../data01/utf-8.file')
+
+    def test_importing_module_as_side_effect(self):
+        # The anchor package can already be imported.
+        del sys.modules[data01.__name__]
+        self.execute(data01.__name__, 'utf-8.file')
+
+    def test_non_package_by_name(self):
+        # The anchor package cannot be a module.
+        with self.assertRaises(TypeError):
+            self.execute(__name__, 'utf-8.file')
+
+    def test_non_package_by_package(self):
+        # The anchor package cannot be a module.
+ with self.assertRaises(TypeError): + module = sys.modules['importlib_resources.tests.util'] + self.execute(module, 'utf-8.file') + + @unittest.skipIf(sys.version_info < (3,), 'No ResourceReader in Python 2') + def test_resource_opener(self): + bytes_data = io.BytesIO(b'Hello, world!') + package = create_package(file=bytes_data, path=FileNotFoundError()) + self.execute(package, 'utf-8.file') + self.assertEqual(package.__loader__._path, 'utf-8.file') + + @unittest.skipIf(sys.version_info < (3,), 'No ResourceReader in Python 2') + def test_resource_path(self): + bytes_data = io.BytesIO(b'Hello, world!') + path = __file__ + package = create_package(file=bytes_data, path=path) + self.execute(package, 'utf-8.file') + self.assertEqual(package.__loader__._path, 'utf-8.file') + + def test_useless_loader(self): + package = create_package(file=FileNotFoundError(), + path=FileNotFoundError()) + with self.assertRaises(FileNotFoundError): + self.execute(package, 'utf-8.file') + + +class ZipSetupBase: + ZIP_MODULE = None + + @classmethod + def setUpClass(cls): + data_path = Path(cls.ZIP_MODULE.__file__) + data_dir = data_path.parent + cls._zip_path = str(data_dir / 'ziptestdata.zip') + sys.path.append(cls._zip_path) + cls.data = importlib.import_module('ziptestdata') + + @classmethod + def tearDownClass(cls): + try: + sys.path.remove(cls._zip_path) + except ValueError: + pass + + try: + del sys.path_importer_cache[cls._zip_path] + del sys.modules[cls.data.__name__] + except KeyError: + pass + + try: + del cls.data + del cls._zip_path + except AttributeError: + pass + + def setUp(self): + modules = modules_setup() + self.addCleanup(modules_cleanup, *modules) + + +class ZipSetup(ZipSetupBase): + ZIP_MODULE = zipdata01 # type: ignore diff --git a/venv/Lib/site-packages/importlib_resources/tests/zipdata01/__init__.py b/venv/Lib/site-packages/importlib_resources/tests/zipdata01/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/importlib_resources/tests/zipdata01/ziptestdata.zip b/venv/Lib/site-packages/importlib_resources/tests/zipdata01/ziptestdata.zip new file mode 100644 index 00000000..12f7872c Binary files /dev/null and b/venv/Lib/site-packages/importlib_resources/tests/zipdata01/ziptestdata.zip differ diff --git a/venv/Lib/site-packages/importlib_resources/tests/zipdata02/__init__.py b/venv/Lib/site-packages/importlib_resources/tests/zipdata02/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/importlib_resources/tests/zipdata02/ziptestdata.zip b/venv/Lib/site-packages/importlib_resources/tests/zipdata02/ziptestdata.zip new file mode 100644 index 00000000..9ee00586 Binary files /dev/null and b/venv/Lib/site-packages/importlib_resources/tests/zipdata02/ziptestdata.zip differ diff --git a/venv/Lib/site-packages/importlib_resources/trees.py b/venv/Lib/site-packages/importlib_resources/trees.py new file mode 100644 index 00000000..ba42bb55 --- /dev/null +++ b/venv/Lib/site-packages/importlib_resources/trees.py @@ -0,0 +1,6 @@ +# for compatibility with 1.1, continue to expose as_file here. 
+ +from ._common import as_file + + +__all__ = ['as_file'] diff --git a/venv/Lib/site-packages/pbr-5.4.5.dist-info/AUTHORS b/venv/Lib/site-packages/pbr-5.4.5.dist-info/AUTHORS new file mode 100644 index 00000000..f25c4589 --- /dev/null +++ b/venv/Lib/site-packages/pbr-5.4.5.dist-info/AUTHORS @@ -0,0 +1,150 @@ +A40351 +Akihiro Motoki +Alex Gaynor +Alexander Makarov +Alfredo Moralejo +Andreas Jaeger +Andreas Jaeger +Andrew Bogott +Angus Salkeld +Anthony Young +Attila Fazekas +Ben Nemec +Bhuvan Arumugam +Brandon LeBlanc +Brant Knudson +Brian Waldon +Cao Xuan Hoang +Chang Bo Guo +ChangBo Guo(gcb) +Chris Dent +Christian Berendt +Chuck Short +Clark Boylan +Claudiu Popa +Corey Bryant +Dan Prince +Darragh Bailey +Davanum Srinivas +Dave Walker (Daviey) +David Ripton +David Stanek +Dennis Verspuij +Devananda van der Veen +Dirk Mueller +Doug Hellmann +Doug Hellmann +Doug Hellmann +Dougal Matthews +Elena Ezhova +Eoghan Glynn +Eric Windisch +Erik M. Bray +Eugene Kirpichov +Florian Wilhelm +Gaetan Semet +Gage Hugo +Gary Kotton +Giampaolo Lauria +Hervé Beraud +Ian Cordasco +Ian Wienand +Ian Y. Choi +Ionuț Arțăriși +James E. Blair +James Polley +Jason Kölker +Jason R. Coombs +Jay Pipes +Jeremy Stanley +Joe D'Andrea +Joe Gordon +Joe Gordon +Joe Heck +Johannes Erdfelt +Joshua Harlow +Joshua Harlow +Joshua Harlow +Julien Danjou +Kevin McCarthy +Khai Do +Laurence Miao +Lucian Petrut +Luo Gangyi +Marc Abramowitz +Mark McLoughlin +Mark Sienkiewicz +Martin Domke +Maru Newby +Masaki Matsushita +Matt Riedemann +Matthew Montgomery +Matthew Treinish +Matthew Treinish +Mehdi Abaakouk +Michael Basnight +Michael Still +Mike Heald +Moises Guimaraes de Medeiros +Monty Taylor +Nikhil Manchanda +Octavian Ciuhandu +Ondřej Nový +Paul Belanger +Rajaram Mallya +Rajath Agasthya +Ralf Haferkamp +Randall Nortman +Rick Harris +Robert Collins +Robert Myers +Roger Luethi +Ronald Bradford +Ruby Loo +Russell Bryant +Ryan Bourgeois +Ryan Petrello +Sachi King +Sascha Peilicke +Sean Dague +Sean Dague +Sean McGinnis +Sergey Lukjanov +Sorin Sbarnea +Stephen Finucane +Stephen Finucane +Steve Kowalik +Steve Martinelli +Steven Hardy +Thomas Bechtold +Thomas Goirand +Thomas Grainger +Thomas Herve +Thomas Leaman +Thomas Morin +Tim Burke +Tim Simpson +Timothy Chavez +Toilal +Vasudev Kamath +Vincent Untz +Vishvananda Ishaya +Wei Tie +Will Szumski +YAMAMOTO Takashi +Yaguang Tang +Yuriy Taraday +Zhongyue Luo +alexpilotti +cbjchen@cn.ibm.com +dineshbhor +jiansong +lifeless +melanie witt +melissaml +nizam +qingszhao +weiweigu +xuanyandong +zhangyangyang +zhangyanxian diff --git a/venv/Lib/site-packages/pbr-5.4.5.dist-info/INSTALLER b/venv/Lib/site-packages/pbr-5.4.5.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/venv/Lib/site-packages/pbr-5.4.5.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/Lib/site-packages/pbr-5.4.5.dist-info/LICENSE b/venv/Lib/site-packages/pbr-5.4.5.dist-info/LICENSE new file mode 100644 index 00000000..68c771a0 --- /dev/null +++ b/venv/Lib/site-packages/pbr-5.4.5.dist-info/LICENSE @@ -0,0 +1,176 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ diff --git a/venv/Lib/site-packages/pbr-5.4.5.dist-info/METADATA b/venv/Lib/site-packages/pbr-5.4.5.dist-info/METADATA new file mode 100644 index 00000000..b685152a --- /dev/null +++ b/venv/Lib/site-packages/pbr-5.4.5.dist-info/METADATA @@ -0,0 +1,73 @@ +Metadata-Version: 2.1 +Name: pbr +Version: 5.4.5 +Summary: Python Build Reasonableness +Home-page: https://docs.openstack.org/pbr/latest/ +Author: OpenStack +Author-email: openstack-discuss@lists.openstack.org +License: UNKNOWN +Project-URL: Bug Tracker, https://bugs.launchpad.net/pbr/ +Project-URL: Documentation, https://docs.openstack.org/pbr/ +Project-URL: Source Code, https://git.openstack.org/cgit/openstack-dev/pbr/ +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Environment :: OpenStack +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Information Technology +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Description-Content-Type: text/x-rst; charset=UTF-8 + +Introduction +============ + +.. image:: https://img.shields.io/pypi/v/pbr.svg + :target: https://pypi.python.org/pypi/pbr/ + :alt: Latest Version + +.. image:: https://img.shields.io/pypi/dm/pbr.svg + :target: https://pypi.python.org/pypi/pbr/ + :alt: Downloads + +PBR is a library that injects some useful and sensible default behaviors +into your setuptools run. It started off life as the chunks of code that +were copied between all of the `OpenStack`_ projects. Around the time that +OpenStack hit 18 different projects each with at least 3 active branches, +it seemed like a good time to make that code into a proper reusable library. + +PBR is only mildly configurable. The basic idea is that there's a decent +way to run things and if you do, you should reap the rewards, because then +it's simple and repeatable. If you want to do things differently, cool! But +you've already got the power of Python at your fingertips, so you don't +really need PBR. + +PBR builds on top of the work that `d2to1`_ started to provide for declarative +configuration. `d2to1`_ is itself an implementation of the ideas behind +`distutils2`_. Although `distutils2`_ is now abandoned in favor of work towards +`PEP 426`_ and Metadata 2.0, declarative config is still a great idea and +specifically important in trying to distribute setup code as a library +when that library itself will alter how the setup is processed. As Metadata +2.0 and other modern Python packaging PEPs come out, PBR aims to support +them as quickly as possible. + +* License: Apache License, Version 2.0 +* Documentation: https://docs.openstack.org/pbr/latest/ +* Source: https://git.openstack.org/cgit/openstack-dev/pbr +* Bugs: https://bugs.launchpad.net/pbr +* Change Log: https://docs.openstack.org/pbr/latest/user/history.html + +.. _d2to1: https://pypi.python.org/pypi/d2to1 +.. _distutils2: https://pypi.python.org/pypi/Distutils2 +.. _PEP 426: http://legacy.python.org/dev/peps/pep-0426/ +.. 
_OpenStack: https://www.openstack.org/ + + + diff --git a/venv/Lib/site-packages/pbr-5.4.5.dist-info/RECORD b/venv/Lib/site-packages/pbr-5.4.5.dist-info/RECORD new file mode 100644 index 00000000..587394d7 --- /dev/null +++ b/venv/Lib/site-packages/pbr-5.4.5.dist-info/RECORD @@ -0,0 +1,110 @@ +../../Scripts/pbr.exe,sha256=xSTRYFoiZWuU4uQQmfDGukcPDp85nNgJaPPwblM1joc,106363 +pbr-5.4.5.dist-info/AUTHORS,sha256=Wsbu3DWgLA9h0veSbQmLUhuog5Uqrj8tkQAApWQxghc,5598 +pbr-5.4.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pbr-5.4.5.dist-info/LICENSE,sha256=XfKg2H1sVi8OoRxoisUlMqoo10TKvHmU_wU39ks7MyA,10143 +pbr-5.4.5.dist-info/METADATA,sha256=oD8D24XnlowVjZMrRXJ8tt88Lql0l8dKAKpOdOwbJXw,3140 +pbr-5.4.5.dist-info/RECORD,, +pbr-5.4.5.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110 +pbr-5.4.5.dist-info/entry_points.txt,sha256=QHw3RnItVVy03jocQCjhoKUiyKjBwiPBhJszz-i5YMg,149 +pbr-5.4.5.dist-info/top_level.txt,sha256=X3Q9Vhf2YxJul564xso0UcL55u9D75jaBuGZedivUyE,4 +pbr/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pbr/__pycache__/__init__.cpython-36.pyc,, +pbr/__pycache__/builddoc.cpython-36.pyc,, +pbr/__pycache__/core.cpython-36.pyc,, +pbr/__pycache__/extra_files.cpython-36.pyc,, +pbr/__pycache__/find_package.cpython-36.pyc,, +pbr/__pycache__/git.cpython-36.pyc,, +pbr/__pycache__/options.cpython-36.pyc,, +pbr/__pycache__/packaging.cpython-36.pyc,, +pbr/__pycache__/pbr_json.cpython-36.pyc,, +pbr/__pycache__/sphinxext.cpython-36.pyc,, +pbr/__pycache__/testr_command.cpython-36.pyc,, +pbr/__pycache__/util.cpython-36.pyc,, +pbr/__pycache__/version.cpython-36.pyc,, +pbr/builddoc.py,sha256=SenZnqZuAW24hf8AWNb17PbY8_j3hT6gaB5vqbpV8n0,11915 +pbr/cmd/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pbr/cmd/__pycache__/__init__.cpython-36.pyc,, +pbr/cmd/__pycache__/main.cpython-36.pyc,, +pbr/cmd/main.py,sha256=UVyJSnoMiVoXLQzuGQ6e8C4fEJnou1gjwkyKi-ZSsDs,3695 +pbr/core.py,sha256=5XOb58Wgj_cfjSgXQa7Aume11Jjd5L3Xtz2FDUAE9Tc,5945 +pbr/extra_files.py,sha256=7raV9bomd_Z0adKFUa_qBN-ZMbtnlDbxoc9e0gFib7s,1096 +pbr/find_package.py,sha256=u8Xm8Z9CQYLBBBWNrHi7YUGS1vhetw1CdYNuv2RpvJI,1043 +pbr/git.py,sha256=azhqYP1EalleraiAjxK9ETkSeQwJtU6eaaOQONC2eyU,11580 +pbr/hooks/__init__.py,sha256=v6xtosYxcJsJqE3sVg1IFNUa-FIXpJsuT0zavgxdbUM,1086 +pbr/hooks/__pycache__/__init__.cpython-36.pyc,, +pbr/hooks/__pycache__/backwards.cpython-36.pyc,, +pbr/hooks/__pycache__/base.cpython-36.pyc,, +pbr/hooks/__pycache__/commands.cpython-36.pyc,, +pbr/hooks/__pycache__/files.cpython-36.pyc,, +pbr/hooks/__pycache__/metadata.cpython-36.pyc,, +pbr/hooks/backwards.py,sha256=uz1ofnisgwXuEz2QKDARknw_GkeayWObKDHi36ekS2A,1176 +pbr/hooks/base.py,sha256=BQLcBfFd-f151aSOOOY359rKYNb2LKOaetj4hF25XY4,1038 +pbr/hooks/commands.py,sha256=iTErrioU8avItyV-v2yozVe2GQa_RlPKaUIZrXNmSEw,2476 +pbr/hooks/files.py,sha256=XvKTUF533sfVf8krZ3BqjqG9DVMC65XX1nbrNk0LZDw,4745 +pbr/hooks/metadata.py,sha256=f3gcLX1TNYJF2OmaexyAe9oh2aXLsdxp84KL30DP8IQ,1076 +pbr/options.py,sha256=pppVIelMTpHKpUAp8mTPxLIQtwgdEwj3MFojE32Ywjo,2371 +pbr/packaging.py,sha256=4NimH34KeK2ZAnQz8_vDJrrAE3-pZR6HZOiHLaSDV8k,31282 +pbr/pbr_json.py,sha256=tENBo-oXejEG4sUBS4QeR8anwGCoPdu7QIeFmQgY7NA,1250 +pbr/sphinxext.py,sha256=u8LsHwE9dl5wLll9o5CcpeybVRz-FaMsDPk3E54x68c,3207 +pbr/testr_command.py,sha256=CT0EcDNUQuuJ6WUkiJM73Q_M5W5gw8fHV2jxrcQEF04,5867 +pbr/tests/__init__.py,sha256=XX97pKeZeZ2X2nnRGTlCIbnBxaVd9WBdBZCKi5VEeSg,985 +pbr/tests/__pycache__/__init__.cpython-36.pyc,, +pbr/tests/__pycache__/base.cpython-36.pyc,, 
+pbr/tests/__pycache__/test_commands.cpython-36.pyc,, +pbr/tests/__pycache__/test_core.cpython-36.pyc,, +pbr/tests/__pycache__/test_files.cpython-36.pyc,, +pbr/tests/__pycache__/test_hooks.cpython-36.pyc,, +pbr/tests/__pycache__/test_integration.cpython-36.pyc,, +pbr/tests/__pycache__/test_packaging.cpython-36.pyc,, +pbr/tests/__pycache__/test_pbr_json.cpython-36.pyc,, +pbr/tests/__pycache__/test_setup.cpython-36.pyc,, +pbr/tests/__pycache__/test_util.cpython-36.pyc,, +pbr/tests/__pycache__/test_version.cpython-36.pyc,, +pbr/tests/__pycache__/test_wsgi.cpython-36.pyc,, +pbr/tests/__pycache__/util.cpython-36.pyc,, +pbr/tests/base.py,sha256=a13w0grj7r6JYPThWR3aQmIatQjutkXEfEB9QcKxkE8,8871 +pbr/tests/test_commands.py,sha256=Nv853JWSsRvM-J-kkvTzI4M14FH1yW2ZpLA2bYI5B8U,3688 +pbr/tests/test_core.py,sha256=MsL6tp_qBUqdg8yyeryGqHCNjq8UgBGMt57R6w_R-0I,5419 +pbr/tests/test_files.py,sha256=dKQQViZdxdzZ7rcvVcPEopyiPeVUKSkuWINtyizjnWQ,5465 +pbr/tests/test_hooks.py,sha256=XjPb8B4s_uvr2ysH0wDpGaU0WnU8z6T-2pzReXDyE54,3007 +pbr/tests/test_integration.py,sha256=tvDy5z3JZ90OO2mXz2jIipYJFsrz85UuyKSw0y0Ex2Q,10892 +pbr/tests/test_packaging.py,sha256=pPYX5_TuHqNPSxq2GyQBrJMZjEctUUCuRfhdRCoudic,46477 +pbr/tests/test_pbr_json.py,sha256=o_pZ_UJzJGj-YsmQmSUKIUI0SLN8sE0s38OfUdNpP8Y,1162 +pbr/tests/test_setup.py,sha256=mVeFbWJF-YSrSdG4t8b3iLld-i6pYBu7SeU2oue3YLY,18462 +pbr/tests/test_util.py,sha256=Ka70GMQD62wb3r-LwY4fNzyhzwKc-iQtR-2nLtSz8j8,7311 +pbr/tests/test_version.py,sha256=1c-5s75lrfAADE1Bp7yVeBikcAN_TDs7vetLLtZSRSU,14100 +pbr/tests/test_wsgi.py,sha256=kbkIdxPS8eznH9ZesVWlJuMHRtlfFWIfbXiSXzimzm0,5741 +pbr/tests/testpackage/CHANGES.txt,sha256=N6vxDAYI6Mx42G7pUkCNmtrBQgBioFSEiX0QGhOcAJo,4020 +pbr/tests/testpackage/LICENSE.txt,sha256=60qMh5H2yqsc823ybbK29OLd2lJlewYP9_AqvGORCu8,1464 +pbr/tests/testpackage/MANIFEST.in,sha256=pdPDHyVjHsaqv-OZ5-uYNPNUH25PlPbtG7WSS9yEJd8,54 +pbr/tests/testpackage/README.txt,sha256=i2cNRAa9UCdPqilaZXEjWMQKIikAXyGdZ96BQz_gB70,6674 +pbr/tests/testpackage/__pycache__/setup.cpython-36.pyc,, +pbr/tests/testpackage/data_files/a.txt,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pbr/tests/testpackage/data_files/b.txt,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pbr/tests/testpackage/data_files/c.rst,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pbr/tests/testpackage/doc/source/__pycache__/conf.cpython-36.pyc,, +pbr/tests/testpackage/doc/source/conf.py,sha256=DUBiC-yg_nmQozdzEydiPnWauvNp76n0MX8y0dTq72s,1912 +pbr/tests/testpackage/doc/source/index.rst,sha256=4qvttWTQk9-UuzyS6s5EjSuhqlcxyhcQagBiJ0Pn2qM,479 +pbr/tests/testpackage/doc/source/installation.rst,sha256=JL_m5J7BX88Bq-hAP4xI9a6kt2EXxW76nK3YxndbcPQ,202 +pbr/tests/testpackage/doc/source/usage.rst,sha256=U5ZvmzuSYWEkaA3e1WhfN8-FpY3vFteakcR1vcl9IJo,83 +pbr/tests/testpackage/extra-file.txt,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pbr/tests/testpackage/git-extra-file.txt,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pbr/tests/testpackage/pbr_testpackage/__init__.py,sha256=LlPnJQqAYOmgTYrZqJZ9hT0hEBBeViqFGMijjRAXBF8,94 +pbr/tests/testpackage/pbr_testpackage/__pycache__/__init__.cpython-36.pyc,, +pbr/tests/testpackage/pbr_testpackage/__pycache__/_setup_hooks.cpython-36.pyc,, +pbr/tests/testpackage/pbr_testpackage/__pycache__/cmd.cpython-36.pyc,, +pbr/tests/testpackage/pbr_testpackage/__pycache__/extra.cpython-36.pyc,, +pbr/tests/testpackage/pbr_testpackage/__pycache__/wsgi.cpython-36.pyc,, +pbr/tests/testpackage/pbr_testpackage/_setup_hooks.py,sha256=3g7Cff_VRiM1ipAA4VgOCpUoNMYrxpfVvO_F7HIu-JY,2310 
+pbr/tests/testpackage/pbr_testpackage/cmd.py,sha256=T0eYtOjY-jvg21NSfVjTDQLkOyqrp3q3NcFkhA4LoiE,798 +pbr/tests/testpackage/pbr_testpackage/extra.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pbr/tests/testpackage/pbr_testpackage/package_data/1.txt,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pbr/tests/testpackage/pbr_testpackage/package_data/2.txt,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pbr/tests/testpackage/pbr_testpackage/wsgi.py,sha256=e3fHleqX_eDkjZIcgOkQ7fZYqZwTywMkLD2s0ouR0A8,1321 +pbr/tests/testpackage/setup.cfg,sha256=bG9-mX1jwVjtrW7V9EaRO8-wn-DDSmzZ3H87kb3xWrg,1737 +pbr/tests/testpackage/setup.py,sha256=GvzdcEFgIwgSO8wk8NzoJUUmoGnvrYRRQr3Kf9mbtuw,692 +pbr/tests/testpackage/src/testext.c,sha256=-fezBujL_5bvoKftDQSyxDcNhleYPR49npnnboy-P8U,673 +pbr/tests/testpackage/test-requirements.txt,sha256=hFOB6kveR9_ihI5A--BQuqU1e4bP1XAO6K2sswIVzeU,48 +pbr/tests/util.py,sha256=p9LBbCXovocRrGfuyfz887F2wzybCI1VtBs409N8XLg,2662 +pbr/util.py,sha256=5-__rqD_ojVL6vdrZox3JX5y38nQfYXQ1xIirhKjE2k,23745 +pbr/version.py,sha256=hRSU92m8tNVqUptF8lqI6enk62uXCi0VYm5TehvrRKs,18977 diff --git a/venv/Lib/site-packages/pbr-5.4.5.dist-info/WHEEL b/venv/Lib/site-packages/pbr-5.4.5.dist-info/WHEEL new file mode 100644 index 00000000..ef99c6cf --- /dev/null +++ b/venv/Lib/site-packages/pbr-5.4.5.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.34.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/Lib/site-packages/pbr-5.4.5.dist-info/entry_points.txt b/venv/Lib/site-packages/pbr-5.4.5.dist-info/entry_points.txt new file mode 100644 index 00000000..ba195784 --- /dev/null +++ b/venv/Lib/site-packages/pbr-5.4.5.dist-info/entry_points.txt @@ -0,0 +1,9 @@ +[console_scripts] +pbr = pbr.cmd.main:main + +[distutils.setup_keywords] +pbr = pbr.core:pbr + +[egg_info.writers] +pbr.json = pbr.pbr_json:write_pbr_json + diff --git a/venv/Lib/site-packages/pbr-5.4.5.dist-info/top_level.txt b/venv/Lib/site-packages/pbr-5.4.5.dist-info/top_level.txt new file mode 100644 index 00000000..1d45dc6e --- /dev/null +++ b/venv/Lib/site-packages/pbr-5.4.5.dist-info/top_level.txt @@ -0,0 +1 @@ +pbr diff --git a/venv/Lib/site-packages/pbr/__init__.py b/venv/Lib/site-packages/pbr/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/pbr/builddoc.py b/venv/Lib/site-packages/pbr/builddoc.py new file mode 100644 index 00000000..276eec67 --- /dev/null +++ b/venv/Lib/site-packages/pbr/builddoc.py @@ -0,0 +1,292 @@ +# Copyright 2011 OpenStack Foundation +# Copyright 2012-2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
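# ---------------------------------------------------------------------
# A minimal, illustrative sketch (not part of the pbr distribution) of
# how the entry points recorded above are consumed. The
# '[distutils.setup_keywords]' entry maps the 'pbr' setup() keyword to
# pbr.core:pbr, so a project opts in with a two-line setup.py; the real
# metadata then lives in that project's setup.cfg.
import setuptools

setuptools.setup(
    setup_requires=['pbr'],  # pulls pbr in before setup() keywords run
    pbr=True,                # True -> pbr reads the default 'setup.cfg'
)
# ---------------------------------------------------------------------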
+ +from distutils import log +import fnmatch +import os +import sys + +try: + import cStringIO +except ImportError: + import io as cStringIO + +try: + import sphinx + # NOTE(dhellmann): Newer versions of Sphinx have moved the apidoc + # module into sphinx.ext and the API is slightly different (the + # function expects sys.argv[1:] instead of sys.argv[:]. So, figure + # out where we can import it from and set a flag so we can invoke + # it properly. See this change in sphinx for details: + # https://github.com/sphinx-doc/sphinx/commit/87630c8ae8bff8c0e23187676e6343d8903003a6 + try: + from sphinx.ext import apidoc + apidoc_use_padding = False + except ImportError: + from sphinx import apidoc + apidoc_use_padding = True + from sphinx import application + from sphinx import setup_command +except Exception as e: + # NOTE(dhellmann): During the installation of docutils, setuptools + # tries to import pbr code to find the egg_info.writer hooks. That + # imports this module, which imports sphinx, which imports + # docutils, which is being installed. Because docutils uses 2to3 + # to convert its code during installation under python 3, the + # import fails, but it fails with an error other than ImportError + # (today it's a NameError on StandardError, an exception base + # class). Convert the exception type here so it can be caught in + # packaging.py where we try to determine if we can import and use + # sphinx by importing this module. See bug #1403510 for details. + raise ImportError(str(e)) +from pbr import git +from pbr import options +from pbr import version + + +_deprecated_options = ['autodoc_tree_index_modules', 'autodoc_index_modules', + 'autodoc_tree_excludes', 'autodoc_exclude_modules'] +_deprecated_envs = ['AUTODOC_TREE_INDEX_MODULES', 'AUTODOC_INDEX_MODULES'] +_rst_template = """%(heading)s +%(underline)s + +.. automodule:: %(module)s + :members: + :undoc-members: + :show-inheritance: +""" + + +def _find_modules(arg, dirname, files): + for filename in files: + if filename.endswith('.py') and filename != '__init__.py': + arg["%s.%s" % (dirname.replace('/', '.'), + filename[:-3])] = True + + +class LocalBuildDoc(setup_command.BuildDoc): + + builders = ['html'] + command_name = 'build_sphinx' + sphinx_initialized = False + + def _get_source_dir(self): + option_dict = self.distribution.get_option_dict('build_sphinx') + pbr_option_dict = self.distribution.get_option_dict('pbr') + _, api_doc_dir = pbr_option_dict.get('api_doc_dir', (None, 'api')) + if 'source_dir' in option_dict: + source_dir = os.path.join(option_dict['source_dir'][1], + api_doc_dir) + else: + source_dir = 'doc/source/' + api_doc_dir + if not os.path.exists(source_dir): + os.makedirs(source_dir) + return source_dir + + def generate_autoindex(self, excluded_modules=None): + log.info("[pbr] Autodocumenting from %s" + % os.path.abspath(os.curdir)) + modules = {} + source_dir = self._get_source_dir() + for pkg in self.distribution.packages: + if '.' not in pkg: + for dirpath, dirnames, files in os.walk(pkg): + _find_modules(modules, dirpath, files) + + def include(module): + return not any(fnmatch.fnmatch(module, pat) + for pat in excluded_modules) + + module_list = sorted(mod for mod in modules.keys() if include(mod)) + autoindex_filename = os.path.join(source_dir, 'autoindex.rst') + with open(autoindex_filename, 'w') as autoindex: + autoindex.write(""".. 
toctree:: + :maxdepth: 1 + +""") + for module in module_list: + output_filename = os.path.join(source_dir, + "%s.rst" % module) + heading = "The :mod:`%s` Module" % module + underline = "=" * len(heading) + values = dict(module=module, heading=heading, + underline=underline) + + log.info("[pbr] Generating %s" + % output_filename) + with open(output_filename, 'w') as output_file: + output_file.write(_rst_template % values) + autoindex.write(" %s.rst\n" % module) + + def _sphinx_tree(self): + source_dir = self._get_source_dir() + cmd = ['-H', 'Modules', '-o', source_dir, '.'] + if apidoc_use_padding: + cmd.insert(0, 'apidoc') + apidoc.main(cmd + self.autodoc_tree_excludes) + + def _sphinx_run(self): + if not self.verbose: + status_stream = cStringIO.StringIO() + else: + status_stream = sys.stdout + confoverrides = {} + if self.project: + confoverrides['project'] = self.project + if self.version: + confoverrides['version'] = self.version + if self.release: + confoverrides['release'] = self.release + if self.today: + confoverrides['today'] = self.today + if self.sphinx_initialized: + confoverrides['suppress_warnings'] = [ + 'app.add_directive', 'app.add_role', + 'app.add_generic_role', 'app.add_node', + 'image.nonlocal_uri', + ] + app = application.Sphinx( + self.source_dir, self.config_dir, + self.builder_target_dir, self.doctree_dir, + self.builder, confoverrides, status_stream, + freshenv=self.fresh_env, warningiserror=self.warning_is_error) + self.sphinx_initialized = True + + try: + app.build(force_all=self.all_files) + except Exception as err: + from docutils import utils + if isinstance(err, utils.SystemMessage): + sys.stderr.write('reST markup error:\n') + sys.stderr.write(err.args[0].encode('ascii', + 'backslashreplace')) + sys.stderr.write('\n') + else: + raise + + if self.link_index: + src = app.config.master_doc + app.builder.out_suffix + dst = app.builder.get_outfilename('index') + os.symlink(src, dst) + + def run(self): + option_dict = self.distribution.get_option_dict('pbr') + + # TODO(stephenfin): Remove this (and the entire file) when 5.0 is + # released + warn_opts = set(option_dict.keys()).intersection(_deprecated_options) + warn_env = list(filter(lambda x: os.getenv(x), _deprecated_envs)) + if warn_opts or warn_env: + msg = ('The autodoc and autodoc_tree features are deprecated in ' + '4.2 and will be removed in a future release. You should ' + 'use the sphinxcontrib-apidoc Sphinx extension instead. 
' + 'Refer to the pbr documentation for more information.') + if warn_opts: + msg += ' Deprecated options: %s' % list(warn_opts) + if warn_env: + msg += ' Deprecated environment variables: %s' % warn_env + + log.warn(msg) + + if git._git_is_installed(): + git.write_git_changelog(option_dict=option_dict) + git.generate_authors(option_dict=option_dict) + tree_index = options.get_boolean_option(option_dict, + 'autodoc_tree_index_modules', + 'AUTODOC_TREE_INDEX_MODULES') + auto_index = options.get_boolean_option(option_dict, + 'autodoc_index_modules', + 'AUTODOC_INDEX_MODULES') + if not os.getenv('SPHINX_DEBUG'): + # NOTE(afazekas): These options can be used together, + # but they do a very similar thing in a different way + if tree_index: + self._sphinx_tree() + if auto_index: + self.generate_autoindex( + set(option_dict.get( + "autodoc_exclude_modules", + [None, ""])[1].split())) + + self.finalize_options() + + is_multibuilder_sphinx = version.SemanticVersion.from_pip_string( + sphinx.__version__) >= version.SemanticVersion(1, 6) + + # TODO(stephenfin): Remove support for Sphinx < 1.6 in 4.0 + if not is_multibuilder_sphinx: + log.warn('[pbr] Support for Sphinx < 1.6 will be dropped in ' + 'pbr 4.0. Upgrade to Sphinx 1.6+') + + # TODO(stephenfin): Remove this at the next MAJOR version bump + if self.builders != ['html']: + log.warn("[pbr] Sphinx 1.6 added native support for " + "specifying multiple builders in the " + "'[sphinx_build] builder' configuration option, " + "found in 'setup.cfg'. As a result, the " + "'[sphinx_build] builders' option has been " + "deprecated and will be removed in pbr 4.0. Migrate " + "to the 'builder' configuration option.") + if is_multibuilder_sphinx: + self.builder = self.builders + + if is_multibuilder_sphinx: + # Sphinx >= 1.6 + return setup_command.BuildDoc.run(self) + + # Sphinx < 1.6 + for builder in self.builders: + self.builder = builder + self.finalize_options() + self._sphinx_run() + + def initialize_options(self): + # Not a new style class, super keyword does not work. + setup_command.BuildDoc.initialize_options(self) + + # NOTE(dstanek): exclude setup.py from the autodoc tree index + # builds because all projects will have an issue with it + self.autodoc_tree_excludes = ['setup.py'] + + def finalize_options(self): + from pbr import util + + # Not a new style class, super keyword does not work. + setup_command.BuildDoc.finalize_options(self) + + # Handle builder option from command line - override cfg + option_dict = self.distribution.get_option_dict('build_sphinx') + if 'command line' in option_dict.get('builder', [[]])[0]: + self.builders = option_dict['builder'][1] + # Allow builders to be configurable - as a comma separated list. 
+ if not isinstance(self.builders, list) and self.builders: + self.builders = self.builders.split(',') + + self.project = self.distribution.get_name() + self.version = self.distribution.get_version() + self.release = self.distribution.get_version() + + # NOTE(dstanek): check for autodoc tree exclusion overrides + # in the setup.cfg + opt = 'autodoc_tree_excludes' + option_dict = self.distribution.get_option_dict('pbr') + if opt in option_dict: + self.autodoc_tree_excludes = util.split_multiline( + option_dict[opt][1]) + + # handle Sphinx < 1.5.0 + if not hasattr(self, 'warning_is_error'): + self.warning_is_error = False diff --git a/venv/Lib/site-packages/pbr/cmd/__init__.py b/venv/Lib/site-packages/pbr/cmd/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/pbr/cmd/main.py b/venv/Lib/site-packages/pbr/cmd/main.py new file mode 100644 index 00000000..162304f7 --- /dev/null +++ b/venv/Lib/site-packages/pbr/cmd/main.py @@ -0,0 +1,119 @@ +# Copyright 2014 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import argparse +import json +import sys + +import pkg_resources + +import pbr.version + + +def _get_metadata(package_name): + try: + return json.loads( + pkg_resources.get_distribution( + package_name).get_metadata('pbr.json')) + except pkg_resources.DistributionNotFound: + raise Exception('Package {0} not installed'.format(package_name)) + except Exception: + return None + + +def get_sha(args): + sha = _get_info(args.name)['sha'] + if sha: + print(sha) + + +def get_info(args): + if args.short: + print("{version}".format(**_get_info(args.name))) + else: + print("{name}\t{version}\t{released}\t{sha}".format( + **_get_info(args.name))) + + +def _get_info(name): + metadata = _get_metadata(name) + version = pkg_resources.get_distribution(name).version + if metadata: + if metadata['is_release']: + released = 'released' + else: + released = 'pre-release' + sha = metadata['git_version'] + else: + version_parts = version.split('.') + if version_parts[-1].startswith('g'): + sha = version_parts[-1][1:] + released = 'pre-release' + else: + sha = "" + released = "released" + for part in version_parts: + if not part.isdigit(): + released = "pre-release" + return dict(name=name, version=version, sha=sha, released=released) + + +def freeze(args): + sorted_dists = sorted(pkg_resources.working_set, + key=lambda dist: dist.project_name.lower()) + for dist in sorted_dists: + info = _get_info(dist.project_name) + output = "{name}=={version}".format(**info) + if info['sha']: + output += " # git sha {sha}".format(**info) + print(output) + + +def main(): + parser = argparse.ArgumentParser( + description='pbr: Python Build Reasonableness') + parser.add_argument( + '-v', '--version', action='version', + version=str(pbr.version.VersionInfo('pbr'))) + + subparsers = parser.add_subparsers( + title='commands', description='valid commands', help='additional help', + dest='cmd') + subparsers.required = True + + cmd_sha 
= subparsers.add_parser('sha', help='print sha of package') + cmd_sha.set_defaults(func=get_sha) + cmd_sha.add_argument('name', help='package to print sha of') + + cmd_info = subparsers.add_parser( + 'info', help='print version info for package') + cmd_info.set_defaults(func=get_info) + cmd_info.add_argument('name', help='package to print info of') + cmd_info.add_argument('-s', '--short', action="store_true", + help='only display package version') + + cmd_freeze = subparsers.add_parser( + 'freeze', help='print version info for all installed packages') + cmd_freeze.set_defaults(func=freeze) + + args = parser.parse_args() + try: + args.func(args) + except Exception as e: + print(e) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/venv/Lib/site-packages/pbr/core.py b/venv/Lib/site-packages/pbr/core.py new file mode 100644 index 00000000..645a2ef1 --- /dev/null +++ b/venv/Lib/site-packages/pbr/core.py @@ -0,0 +1,145 @@ +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Copyright (C) 2013 Association of Universities for Research in Astronomy +# (AURA) +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# 3. The name of AURA and its representatives may not be used to +# endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED +# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS +# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR +# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +# DAMAGE. + +import logging +import os +import sys +import warnings + +from distutils import errors + +from pbr import util + + +if sys.version_info[0] == 3: + string_type = str + integer_types = (int,) +else: + string_type = basestring # noqa + integer_types = (int, long) # noqa + + +def pbr(dist, attr, value): + """Implements the actual pbr setup() keyword. + + When used, this should be the only keyword in your setup() aside from + `setup_requires`. 
+ + If given as a string, the value of pbr is assumed to be the relative path + to the setup.cfg file to use. Otherwise, if it evaluates to true, it + simply assumes that pbr should be used, and the default 'setup.cfg' is + used. + + This works by reading the setup.cfg file, parsing out the supported + metadata and command options, and using them to rebuild the + `DistributionMetadata` object and set the newly added command options. + + The reason for doing things this way is that a custom `Distribution` class + will not play nicely with setup_requires; however, this implementation may + not work well with distributions that do use a `Distribution` subclass. + """ + + if not value: + return + if isinstance(value, string_type): + path = os.path.abspath(value) + else: + path = os.path.abspath('setup.cfg') + if not os.path.exists(path): + raise errors.DistutilsFileError( + 'The setup.cfg file %s does not exist.' % path) + + # Converts the setup.cfg file to setup() arguments + try: + attrs = util.cfg_to_args(path, dist.script_args) + except Exception: + e = sys.exc_info()[1] + # NB: This will output to the console if no explicit logging has + # been set up - but that's fine, this is a fatal distutils error, so + # being pretty isn't the #1 goal... being diagnosable is. + logging.exception('Error parsing') + raise errors.DistutilsSetupError( + 'Error parsing %s: %s: %s' % (path, e.__class__.__name__, e)) + + # There are some metadata fields that are only supported by + # setuptools and not distutils, and hence are not in + # dist.metadata. We are OK to write these in. For gory details + # see + # https://github.com/pypa/setuptools/pull/1343 + _DISTUTILS_UNSUPPORTED_METADATA = ( + 'long_description_content_type', 'project_urls', 'provides_extras' + ) + + # Repeat some of the Distribution initialization code with the newly + # provided attrs + if attrs: + # Skips 'options' and 'licence' support which are rarely used; may + # add back in later if demanded + for key, val in attrs.items(): + if hasattr(dist.metadata, 'set_' + key): + getattr(dist.metadata, 'set_' + key)(val) + elif hasattr(dist.metadata, key): + setattr(dist.metadata, key, val) + elif hasattr(dist, key): + setattr(dist, key, val) + elif key in _DISTUTILS_UNSUPPORTED_METADATA: + setattr(dist.metadata, key, val) + else: + msg = 'Unknown distribution option: %s' % repr(key) + warnings.warn(msg) + + # Re-finalize the underlying Distribution + try: + super(dist.__class__, dist).finalize_options() + except TypeError: + # If dist is not declared as a new-style class (with object as + # a subclass) then super() will not work on it. This is the case + # for Python 2. In that case, fall back to doing this the ugly way + dist.__class__.__bases__[-1].finalize_options(dist) + + # This bit comes out of distribute/setuptools + if isinstance(dist.metadata.version, integer_types + (float,)): + # Some people apparently take "version number" too literally :) + dist.metadata.version = str(dist.metadata.version) diff --git a/venv/Lib/site-packages/pbr/extra_files.py b/venv/Lib/site-packages/pbr/extra_files.py new file mode 100644 index 00000000..a72db0c1 --- /dev/null +++ b/venv/Lib/site-packages/pbr/extra_files.py @@ -0,0 +1,35 @@ +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from distutils import errors +import os + +_extra_files = [] + + +def get_extra_files(): + global _extra_files + return _extra_files + + +def set_extra_files(extra_files): + # Let's do a sanity check + for filename in extra_files: + if not os.path.exists(filename): + raise errors.DistutilsFileError( + '%s from the extra_files option in setup.cfg does not ' + 'exist' % filename) + global _extra_files + _extra_files[:] = extra_files[:] diff --git a/venv/Lib/site-packages/pbr/find_package.py b/venv/Lib/site-packages/pbr/find_package.py new file mode 100644 index 00000000..717e93da --- /dev/null +++ b/venv/Lib/site-packages/pbr/find_package.py @@ -0,0 +1,29 @@ +# Copyright 2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import os + +import setuptools + + +def smart_find_packages(package_list): + """Run find_packages the way we intend.""" + packages = [] + for pkg in package_list.strip().split("\n"): + pkg_path = pkg.replace('.', os.path.sep) + packages.append(pkg) + packages.extend(['%s.%s' % (pkg, f) + for f in setuptools.find_packages(pkg_path)]) + return "\n".join(set(packages)) diff --git a/venv/Lib/site-packages/pbr/git.py b/venv/Lib/site-packages/pbr/git.py new file mode 100644 index 00000000..f1d7c501 --- /dev/null +++ b/venv/Lib/site-packages/pbr/git.py @@ -0,0 +1,338 @@ +# Copyright 2011 OpenStack Foundation +# Copyright 2012-2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
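# ---------------------------------------------------------------------
# An illustrative usage sketch (not part of pbr) for
# pbr.find_package.smart_find_packages, defined just above: each
# newline-separated package name is mapped to a directory,
# setuptools.find_packages() is run inside it, and the union is
# returned newline-joined. The directory layout assumed here is
# hypothetical.
from pbr import find_package

# For a tree containing pbr/, pbr/cmd/ and pbr/hooks/ packages, this
# prints 'pbr', 'pbr.cmd' and 'pbr.hooks' (order unspecified, since a
# set is used internally):
print(find_package.smart_find_packages("pbr"))
# ---------------------------------------------------------------------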
+ +from __future__ import unicode_literals + +import distutils.errors +from distutils import log +import errno +import io +import os +import re +import subprocess +import time + +import pkg_resources + +from pbr import options +from pbr import version + + +def _run_shell_command(cmd, throw_on_error=False, buffer=True, env=None): + if buffer: + out_location = subprocess.PIPE + err_location = subprocess.PIPE + else: + out_location = None + err_location = None + + newenv = os.environ.copy() + if env: + newenv.update(env) + + output = subprocess.Popen(cmd, + stdout=out_location, + stderr=err_location, + env=newenv) + out = output.communicate() + if output.returncode and throw_on_error: + raise distutils.errors.DistutilsError( + "%s returned %d" % (cmd, output.returncode)) + if len(out) == 0 or not out[0] or not out[0].strip(): + return '' + # Since we don't control the history, and forcing users to rebase arbitrary + # history to fix utf8 issues is harsh, decode with replace. + return out[0].strip().decode('utf-8', 'replace') + + +def _run_git_command(cmd, git_dir, **kwargs): + if not isinstance(cmd, (list, tuple)): + cmd = [cmd] + return _run_shell_command( + ['git', '--git-dir=%s' % git_dir] + cmd, **kwargs) + + +def _get_git_directory(): + try: + return _run_shell_command(['git', 'rev-parse', '--git-dir']) + except OSError as e: + if e.errno == errno.ENOENT: + # git not installed. + return '' + raise + + +def _git_is_installed(): + try: + # We cannot use 'which git' as it may not be available + # in some distributions, So just try 'git --version' + # to see if we run into trouble + _run_shell_command(['git', '--version']) + except OSError: + return False + return True + + +def _get_highest_tag(tags): + """Find the highest tag from a list. + + Pass in a list of tag strings and this will return the highest + (latest) as sorted by the pkg_resources version parser. + """ + return max(tags, key=pkg_resources.parse_version) + + +def _find_git_files(dirname='', git_dir=None): + """Behave like a file finder entrypoint plugin. + + We don't actually use the entrypoints system for this because it runs + at absurd times. We only want to do this when we are building an sdist. + """ + file_list = [] + if git_dir is None: + git_dir = _run_git_functions() + if git_dir: + log.info("[pbr] In git context, generating filelist from git") + file_list = _run_git_command(['ls-files', '-z'], git_dir) + # Users can fix utf8 issues locally with a single commit, so we are + # strict here. + file_list = file_list.split(b'\x00'.decode('utf-8')) + return [f for f in file_list if f] + + +def _get_raw_tag_info(git_dir): + describe = _run_git_command(['describe', '--always'], git_dir) + if "-" in describe: + return describe.rsplit("-", 2)[-2] + if "." in describe: + return 0 + return None + + +def get_is_release(git_dir): + return _get_raw_tag_info(git_dir) == 0 + + +def _run_git_functions(): + git_dir = None + if _git_is_installed(): + git_dir = _get_git_directory() + return git_dir or None + + +def get_git_short_sha(git_dir=None): + """Return the short sha for this repo, if it exists.""" + if not git_dir: + git_dir = _run_git_functions() + if git_dir: + return _run_git_command( + ['log', '-n1', '--pretty=format:%h'], git_dir) + return None + + +def _clean_changelog_message(msg): + """Cleans any instances of invalid sphinx wording. 
+ + This escapes/removes any instances of invalid characters + that can be interpreted by sphinx as a warning or error + when translating the Changelog into an HTML file for + documentation building within projects. + + * Escapes '_' which is interpreted as a link + * Escapes '*' which is interpreted as a new line + * Escapes '`' which is interpreted as a literal + """ + + msg = msg.replace('*', r'\*') + msg = msg.replace('_', r'\_') + msg = msg.replace('`', r'\`') + + return msg + + +def _iter_changelog(changelog): + """Convert a oneline log iterator to formatted strings. + + :param changelog: An iterator of one line log entries like + that given by _iter_log_oneline. + :return: An iterator over (release, formatted changelog) tuples. + """ + first_line = True + current_release = None + yield current_release, "CHANGES\n=======\n\n" + for hash, tags, msg in changelog: + if tags: + current_release = _get_highest_tag(tags) + underline = len(current_release) * '-' + if not first_line: + yield current_release, '\n' + yield current_release, ( + "%(tag)s\n%(underline)s\n\n" % + dict(tag=current_release, underline=underline)) + + if not msg.startswith("Merge "): + if msg.endswith("."): + msg = msg[:-1] + msg = _clean_changelog_message(msg) + yield current_release, "* %(msg)s\n" % dict(msg=msg) + first_line = False + + +def _iter_log_oneline(git_dir=None): + """Iterate over --oneline log entries if possible. + + This parses the output into a structured form but does not apply + presentation logic to the output - making it suitable for different + uses. + + :return: An iterator of (hash, tags_set, 1st_line) tuples, or None if + changelog generation is disabled / not available. + """ + if git_dir is None: + git_dir = _get_git_directory() + if not git_dir: + return [] + return _iter_log_inner(git_dir) + + +def _is_valid_version(candidate): + try: + version.SemanticVersion.from_pip_string(candidate) + return True + except ValueError: + return False + + +def _iter_log_inner(git_dir): + """Iterate over --oneline log entries. + + This parses the output into a structured form but does not apply + presentation logic to the output - making it suitable for different + uses. + + .. caution:: This function may yield a tag that does not actually exist + in the git object list, because tag names are rewritten so + that pre-release suffixes compliant with the SemVer + specification (e.g. 1.2.3-rc1) are also matched. + + :return: An iterator of (hash, tags_set, 1st_line) tuples. 
+ """ + log.info('[pbr] Generating ChangeLog') + log_cmd = ['log', '--decorate=full', '--format=%h%x00%s%x00%d'] + changelog = _run_git_command(log_cmd, git_dir) + for line in changelog.split('\n'): + line_parts = line.split('\x00') + if len(line_parts) != 3: + continue + sha, msg, refname = line_parts + tags = set() + + # refname can be: + # + # HEAD, tag: refs/tags/1.4.0, refs/remotes/origin/master, \ + # refs/heads/master + # refs/tags/1.3.4 + if "refs/tags/" in refname: + refname = refname.strip()[1:-1] # remove wrapping ()'s + # If we start with "tag: refs/tags/1.2b1, tag: refs/tags/1.2" + # The first split gives us "['', '1.2b1, tag:', '1.2']" + # Which is why we do the second split below on the comma + for tag_string in refname.split("refs/tags/")[1:]: + # git tag does not allow : or " " in tag names, so we split + # on ", " which is the separator between elements + candidate = tag_string.split(", ")[0].replace("-", ".") + if _is_valid_version(candidate): + tags.add(candidate) + + yield sha, tags, msg + + +def write_git_changelog(git_dir=None, dest_dir=os.path.curdir, + option_dict=None, changelog=None): + """Write a changelog based on the git changelog.""" + start = time.time() + if not option_dict: + option_dict = {} + should_skip = options.get_boolean_option(option_dict, 'skip_changelog', + 'SKIP_WRITE_GIT_CHANGELOG') + if should_skip: + return + if not changelog: + changelog = _iter_log_oneline(git_dir=git_dir) + if changelog: + changelog = _iter_changelog(changelog) + if not changelog: + return + + new_changelog = os.path.join(dest_dir, 'ChangeLog') + if os.path.exists(new_changelog) and not os.access(new_changelog, os.W_OK): + # If there's already a ChangeLog and it's not writable, just use it + log.info('[pbr] ChangeLog not written (file already' + ' exists and it is not writeable)') + return + + log.info('[pbr] Writing ChangeLog') + with io.open(new_changelog, "w", encoding="utf-8") as changelog_file: + for release, content in changelog: + changelog_file.write(content) + stop = time.time() + log.info('[pbr] ChangeLog complete (%0.1fs)' % (stop - start)) + + +def generate_authors(git_dir=None, dest_dir='.', option_dict=dict()): + """Create AUTHORS file using git commits.""" + should_skip = options.get_boolean_option(option_dict, 'skip_authors', + 'SKIP_GENERATE_AUTHORS') + if should_skip: + return + + start = time.time() + old_authors = os.path.join(dest_dir, 'AUTHORS.in') + new_authors = os.path.join(dest_dir, 'AUTHORS') + if os.path.exists(new_authors) and not os.access(new_authors, os.W_OK): + # If there's already an AUTHORS file and it's not writable, just use it + return + + log.info('[pbr] Generating AUTHORS') + ignore_emails = '((jenkins|zuul)@review|infra@lists|jenkins@openstack)' + if git_dir is None: + git_dir = _get_git_directory() + if git_dir: + authors = [] + + # don't include jenkins email address in AUTHORS file + git_log_cmd = ['log', '--format=%aN <%aE>'] + authors += _run_git_command(git_log_cmd, git_dir).split('\n') + authors = [a for a in authors if not re.search(ignore_emails, a)] + + # get all co-authors from commit messages + co_authors_out = _run_git_command('log', git_dir) + co_authors = re.findall('Co-authored-by:.+', co_authors_out, + re.MULTILINE) + co_authors = [signed.split(":", 1)[1].strip() + for signed in co_authors if signed] + + authors += co_authors + authors = sorted(set(authors)) + + with open(new_authors, 'wb') as new_authors_fh: + if os.path.exists(old_authors): + with open(old_authors, "rb") as old_authors_fh: + 
new_authors_fh.write(old_authors_fh.read()) + new_authors_fh.write(('\n'.join(authors) + '\n') + .encode('utf-8')) + stop = time.time() + log.info('[pbr] AUTHORS complete (%0.1fs)' % (stop - start)) diff --git a/venv/Lib/site-packages/pbr/hooks/__init__.py b/venv/Lib/site-packages/pbr/hooks/__init__.py new file mode 100644 index 00000000..f0056c0e --- /dev/null +++ b/venv/Lib/site-packages/pbr/hooks/__init__.py @@ -0,0 +1,28 @@ +# Copyright 2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from pbr.hooks import backwards +from pbr.hooks import commands +from pbr.hooks import files +from pbr.hooks import metadata + + +def setup_hook(config): + """Filter config parsed from a setup.cfg to inject our defaults.""" + metadata_config = metadata.MetadataConfig(config) + metadata_config.run() + backwards.BackwardsCompatConfig(config).run() + commands.CommandsConfig(config).run() + files.FilesConfig(config, metadata_config.get_name()).run() diff --git a/venv/Lib/site-packages/pbr/hooks/backwards.py b/venv/Lib/site-packages/pbr/hooks/backwards.py new file mode 100644 index 00000000..01f07ab8 --- /dev/null +++ b/venv/Lib/site-packages/pbr/hooks/backwards.py @@ -0,0 +1,33 @@ +# Copyright 2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from pbr.hooks import base +from pbr import packaging + + +class BackwardsCompatConfig(base.BaseConfig): + + section = 'backwards_compat' + + def hook(self): + self.config['include_package_data'] = 'True' + packaging.append_text_list( + self.config, 'dependency_links', + packaging.parse_dependency_links()) + packaging.append_text_list( + self.config, 'tests_require', + packaging.parse_requirements( + packaging.TEST_REQUIREMENTS_FILES, + strip_markers=True)) diff --git a/venv/Lib/site-packages/pbr/hooks/base.py b/venv/Lib/site-packages/pbr/hooks/base.py new file mode 100644 index 00000000..6672a362 --- /dev/null +++ b/venv/Lib/site-packages/pbr/hooks/base.py @@ -0,0 +1,34 @@ +# Copyright 2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + + +class BaseConfig(object): + + section = None + + def __init__(self, config): + self._global_config = config + self.config = self._global_config.get(self.section, dict()) + self.pbr_config = config.get('pbr', dict()) + + def run(self): + self.hook() + self.save() + + def hook(self): + pass + + def save(self): + self._global_config[self.section] = self.config diff --git a/venv/Lib/site-packages/pbr/hooks/commands.py b/venv/Lib/site-packages/pbr/hooks/commands.py new file mode 100644 index 00000000..aa4db704 --- /dev/null +++ b/venv/Lib/site-packages/pbr/hooks/commands.py @@ -0,0 +1,66 @@ +# Copyright 2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import os + +from setuptools.command import easy_install + +from pbr.hooks import base +from pbr import options +from pbr import packaging + + +class CommandsConfig(base.BaseConfig): + + section = 'global' + + def __init__(self, config): + super(CommandsConfig, self).__init__(config) + self.commands = self.config.get('commands', "") + + def save(self): + self.config['commands'] = self.commands + super(CommandsConfig, self).save() + + def add_command(self, command): + self.commands = "%s\n%s" % (self.commands, command) + + def hook(self): + self.add_command('pbr.packaging.LocalEggInfo') + self.add_command('pbr.packaging.LocalSDist') + self.add_command('pbr.packaging.LocalInstallScripts') + self.add_command('pbr.packaging.LocalDevelop') + self.add_command('pbr.packaging.LocalRPMVersion') + self.add_command('pbr.packaging.LocalDebVersion') + if os.name != 'nt': + easy_install.get_script_args = packaging.override_get_script_args + + if packaging.have_sphinx(): + self.add_command('pbr.builddoc.LocalBuildDoc') + + if os.path.exists('.testr.conf') and packaging.have_testr(): + # There is a .testr.conf file. We want to use it. 
+ self.add_command('pbr.packaging.TestrTest') + elif self.config.get('nosetests', False) and packaging.have_nose(): + # We seem to still have nose configured + self.add_command('pbr.packaging.NoseTest') + + use_egg = options.get_boolean_option( + self.pbr_config, 'use-egg', 'PBR_USE_EGG') + # We always want non-egg install unless explicitly requested + if 'manpages' in self.pbr_config or not use_egg: + self.add_command('pbr.packaging.LocalInstall') + else: + self.add_command('pbr.packaging.InstallWithGit') diff --git a/venv/Lib/site-packages/pbr/hooks/files.py b/venv/Lib/site-packages/pbr/hooks/files.py new file mode 100644 index 00000000..c44af7c4 --- /dev/null +++ b/venv/Lib/site-packages/pbr/hooks/files.py @@ -0,0 +1,126 @@ +# Copyright 2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import os +import shlex +import sys + +from pbr import find_package +from pbr.hooks import base + + +def get_manpath(): + manpath = 'share/man' + if os.path.exists(os.path.join(sys.prefix, 'man')): + # This works around a bug with install where it expects every node + # in the relative data directory to be an actual directory, since at + # least Debian derivatives (and probably other platforms as well) + # like to symlink Unixish /usr/local/man to /usr/local/share/man. + manpath = 'man' + return manpath + + +def get_man_section(section): + return os.path.join(get_manpath(), 'man%s' % section) + + +def unquote_path(path): + # unquote the full path, e.g: "'a/full/path'" becomes "a/full/path", also + # strip the quotes off individual path components because os.walk cannot + # handle paths like: "'i like spaces'/'another dir'", so we will pass it + # "i like spaces/another dir" instead. + + if os.name == 'nt': + # shlex cannot handle paths that contain backslashes, treating those + # as escape characters. 
+ path = path.replace("\\", "/") + return "".join(shlex.split(path)).replace("/", "\\") + + return "".join(shlex.split(path)) + + +class FilesConfig(base.BaseConfig): + + section = 'files' + + def __init__(self, config, name): + super(FilesConfig, self).__init__(config) + self.name = name + self.data_files = self.config.get('data_files', '') + + def save(self): + self.config['data_files'] = self.data_files + super(FilesConfig, self).save() + + def expand_globs(self): + finished = [] + for line in self.data_files.split("\n"): + if line.rstrip().endswith('*') and '=' in line: + (target, source_glob) = line.split('=') + source_prefix = source_glob.strip()[:-1] + target = target.strip() + if not target.endswith(os.path.sep): + target += os.path.sep + unquoted_prefix = unquote_path(source_prefix) + unquoted_target = unquote_path(target) + for (dirpath, dirnames, fnames) in os.walk(unquoted_prefix): + # As source_prefix is always matched, using replace with a + # a limit of one is always going to replace the path prefix + # and not accidentally replace some text in the middle of + # the path + new_prefix = dirpath.replace(unquoted_prefix, + unquoted_target, 1) + finished.append("'%s' = " % new_prefix) + finished.extend( + [" '%s'" % os.path.join(dirpath, f) for f in fnames]) + else: + finished.append(line) + + self.data_files = "\n".join(finished) + + def add_man_path(self, man_path): + self.data_files = "%s\n'%s' =" % (self.data_files, man_path) + + def add_man_page(self, man_page): + self.data_files = "%s\n '%s'" % (self.data_files, man_page) + + def get_man_sections(self): + man_sections = dict() + manpages = self.pbr_config['manpages'] + for manpage in manpages.split(): + section_number = manpage.strip()[-1] + section = man_sections.get(section_number, list()) + section.append(manpage.strip()) + man_sections[section_number] = section + return man_sections + + def hook(self): + packages = self.config.get('packages', self.name).strip() + expanded = [] + for pkg in packages.split("\n"): + if os.path.isdir(pkg.strip()): + expanded.append(find_package.smart_find_packages(pkg.strip())) + + self.config['packages'] = "\n".join(expanded) + + self.expand_globs() + + if 'manpages' in self.pbr_config: + man_sections = self.get_man_sections() + for (section, pages) in man_sections.items(): + manpath = get_man_section(section) + self.add_man_path(manpath) + for page in pages: + self.add_man_page(page) diff --git a/venv/Lib/site-packages/pbr/hooks/metadata.py b/venv/Lib/site-packages/pbr/hooks/metadata.py new file mode 100644 index 00000000..3f65b6d7 --- /dev/null +++ b/venv/Lib/site-packages/pbr/hooks/metadata.py @@ -0,0 +1,32 @@ +# Copyright 2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
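# ---------------------------------------------------------------------
# An illustrative sketch (not part of pbr) of two behaviours in
# pbr/hooks/files.py above. unquote_path() strips shell-style quoting
# from each path component, and FilesConfig.expand_globs() rewrites a
# trailing-'*' data_files line into explicit per-file entries. The
# 'etc/' layout referenced in the comments below is hypothetical.
from pbr.hooks.files import unquote_path

assert unquote_path("'i like spaces'/'another dir'") == \
    "i like spaces/another dir"  # POSIX; on Windows separators become '\\'

# Given files etc/a.conf and etc/sub/b.conf, a setup.cfg line such as
#
#     data_files = etc/pbr = etc/*
#
# is expanded by expand_globs() into roughly:
#
#     'etc/pbr/' =
#      'etc/a.conf'
#     'etc/pbr/sub' =
#      'etc/sub/b.conf'
# ---------------------------------------------------------------------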
+ +from pbr.hooks import base +from pbr import packaging + + +class MetadataConfig(base.BaseConfig): + + section = 'metadata' + + def hook(self): + self.config['version'] = packaging.get_version( + self.config['name'], self.config.get('version', None)) + packaging.append_text_list( + self.config, 'requires_dist', + packaging.parse_requirements()) + + def get_name(self): + return self.config['name'] diff --git a/venv/Lib/site-packages/pbr/options.py b/venv/Lib/site-packages/pbr/options.py new file mode 100644 index 00000000..2313cc4a --- /dev/null +++ b/venv/Lib/site-packages/pbr/options.py @@ -0,0 +1,53 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Copyright (C) 2013 Association of Universities for Research in Astronomy +# (AURA) +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# 3. The name of AURA and its representatives may not be used to +# endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED +# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS +# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR +# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +# DAMAGE. + +import os + + +TRUE_VALUES = ('true', '1', 'yes') + + +def get_boolean_option(option_dict, option_name, env_name): + return ((option_name in option_dict and + option_dict[option_name][1].lower() in TRUE_VALUES) or + str(os.getenv(env_name)).lower() in TRUE_VALUES) diff --git a/venv/Lib/site-packages/pbr/packaging.py b/venv/Lib/site-packages/pbr/packaging.py new file mode 100644 index 00000000..a6979928 --- /dev/null +++ b/venv/Lib/site-packages/pbr/packaging.py @@ -0,0 +1,883 @@ +# Copyright 2011 OpenStack Foundation +# Copyright 2012-2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" +Utilities with minimum-depends for use in setup.py +""" + +from __future__ import unicode_literals + +from distutils.command import install as du_install +from distutils import log + +# (hberaud) do not use six here to import urlparse +# to keep this module free from external dependencies +# to avoid cross dependencies errors on minimal system +# free from dependencies. +try: + from urllib.parse import urlparse +except ImportError: + from urlparse import urlparse + +import email +import email.errors +import os +import re +import sys +import warnings + +import pkg_resources +import setuptools +from setuptools.command import develop +from setuptools.command import easy_install +from setuptools.command import egg_info +from setuptools.command import install +from setuptools.command import install_scripts +from setuptools.command import sdist + +from pbr import extra_files +from pbr import git +from pbr import options +import pbr.pbr_json +from pbr import testr_command +from pbr import version + +REQUIREMENTS_FILES = ('requirements.txt', 'tools/pip-requires') +PY_REQUIREMENTS_FILES = [x % sys.version_info[0] for x in ( + 'requirements-py%d.txt', 'tools/pip-requires-py%d')] +TEST_REQUIREMENTS_FILES = ('test-requirements.txt', 'tools/test-requires') + + +def get_requirements_files(): + files = os.environ.get("PBR_REQUIREMENTS_FILES") + if files: + return tuple(f.strip() for f in files.split(',')) + # Returns a list composed of: + # - REQUIREMENTS_FILES with -py2 or -py3 in the name + # (e.g. requirements-py3.txt) + # - REQUIREMENTS_FILES + + return PY_REQUIREMENTS_FILES + list(REQUIREMENTS_FILES) + + +def append_text_list(config, key, text_list): + """Append a \n separated list to possibly existing value.""" + new_value = [] + current_value = config.get(key, "") + if current_value: + new_value.append(current_value) + new_value.extend(text_list) + config[key] = '\n'.join(new_value) + + +def _any_existing(file_list): + return [f for f in file_list if os.path.exists(f)] + + +# Get requirements from the first file that exists +def get_reqs_from_files(requirements_files): + existing = _any_existing(requirements_files) + + # TODO(stephenfin): Remove this in pbr 6.0+ + deprecated = [f for f in existing if f in PY_REQUIREMENTS_FILES] + if deprecated: + warnings.warn('Support for \'-pyN\'-suffixed requirements files is ' + 'removed in pbr 5.0 and these files are now ignored. ' + 'Use environment markers instead. Conflicting files: ' + '%r' % deprecated, + DeprecationWarning) + + existing = [f for f in existing if f not in PY_REQUIREMENTS_FILES] + for requirements_file in existing: + with open(requirements_file, 'r') as fil: + return fil.read().split('\n') + + return [] + + +def egg_fragment(match): + return re.sub(r'(?P[\w.-]+)-' + r'(?P' + r'(?P' + r'(?P0|[1-9][0-9]*)\.' + r'(?P0|[1-9][0-9]*)\.' 
+ r'(?P0|[1-9][0-9]*)){1}' + r'(?P(?:\-' + r'(?P(?:(?=[0]{1}[0-9A-Za-z-]{0})(?:[0]{1})|' + r'(?=[1-9]{1}[0-9]*[A-Za-z]{0})(?:[0-9]+)|' + r'(?=[0-9]*[A-Za-z-]+[0-9A-Za-z-]*)(?:[0-9A-Za-z-]+)){1}' + r'(?:\.(?=[0]{1}[0-9A-Za-z-]{0})(?:[0]{1})|' + r'\.(?=[1-9]{1}[0-9]*[A-Za-z]{0})(?:[0-9]+)|' + r'\.(?=[0-9]*[A-Za-z-]+[0-9A-Za-z-]*)' + r'(?:[0-9A-Za-z-]+))*){1}){0,1}(?:\+' + r'(?P(?:[0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*))){0,1}))', + r'\g>=\g', + match.groups()[-1]) + + +def parse_requirements(requirements_files=None, strip_markers=False): + + if requirements_files is None: + requirements_files = get_requirements_files() + + requirements = [] + for line in get_reqs_from_files(requirements_files): + # Ignore comments + if (not line.strip()) or line.startswith('#'): + continue + + # Ignore index URL lines + if re.match(r'^\s*(-i|--index-url|--extra-index-url|--find-links).*', + line): + continue + + # Handle nested requirements files such as: + # -r other-requirements.txt + if line.startswith('-r'): + req_file = line.partition(' ')[2] + requirements += parse_requirements( + [req_file], strip_markers=strip_markers) + continue + + try: + project_name = pkg_resources.Requirement.parse(line).project_name + except ValueError: + project_name = None + + # For the requirements list, we need to inject only the portion + # after egg= so that distutils knows the package it's looking for + # such as: + # -e git://github.com/openstack/nova/master#egg=nova + # -e git://github.com/openstack/nova/master#egg=nova-1.2.3 + # -e git+https://foo.com/zipball#egg=bar&subdirectory=baz + # http://github.com/openstack/nova/zipball/master#egg=nova + # http://github.com/openstack/nova/zipball/master#egg=nova-1.2.3 + # git+https://foo.com/zipball#egg=bar&subdirectory=baz + # git+[ssh]://github.com/openstack/nova/zipball/master#egg=nova-1.2.3 + # hg+[ssh]://github.com/openstack/nova/zipball/master#egg=nova-1.2.3 + # svn+[proto]://github.com/openstack/nova/zipball/master#egg=nova-1.2.3 + # -f lines are for index locations, and don't get used here + if re.match(r'\s*-e\s+', line): + extract = re.match(r'\s*-e\s+(.*)$', line) + line = extract.group(1) + egg = urlparse(line) + if egg.scheme: + line = re.sub(r'egg=([^&]+).*$', egg_fragment, egg.fragment) + elif re.match(r'\s*-f\s+', line): + line = None + reason = 'Index Location' + + if line is not None: + line = re.sub('#.*$', '', line) + if strip_markers: + semi_pos = line.find(';') + if semi_pos < 0: + semi_pos = None + line = line[:semi_pos] + requirements.append(line) + else: + log.info( + '[pbr] Excluding %s: %s' % (project_name, reason)) + + return requirements + + +def parse_dependency_links(requirements_files=None): + if requirements_files is None: + requirements_files = get_requirements_files() + dependency_links = [] + # dependency_links inject alternate locations to find packages listed + # in requirements + for line in get_reqs_from_files(requirements_files): + # skip comments and blank lines + if re.match(r'(\s*#)|(\s*$)', line): + continue + # lines with -e or -f need the whole line, minus the flag + if re.match(r'\s*-[ef]\s+', line): + dependency_links.append(re.sub(r'\s*-[ef]\s+', '', line)) + # lines that are only urls can go in unmolested + elif re.match(r'^\s*(https?|git(\+(https|ssh))?|svn|hg)\S*:', line): + dependency_links.append(line) + return dependency_links + + +class InstallWithGit(install.install): + """Extracts ChangeLog and AUTHORS from git then installs. + + This is useful for e.g. readthedocs where the package is + installed and then docs built. 
+    """
+
+    command_name = 'install'
+
+    def run(self):
+        _from_git(self.distribution)
+        return install.install.run(self)
+
+
+class LocalInstall(install.install):
+    """Runs python setup.py install in a sensible manner.
+
+    Force a non-egg install, in the manner of
+    single-version-externally-managed, which allows us to install manpages
+    and config files.
+    """
+
+    command_name = 'install'
+
+    def run(self):
+        _from_git(self.distribution)
+        return du_install.install.run(self)
+
+
+class TestrTest(testr_command.Testr):
+    """Make setup.py test do the right thing."""
+
+    command_name = 'test'
+    description = 'DEPRECATED: Run unit tests using testr'
+
+    def run(self):
+        warnings.warn('testr integration is deprecated in pbr 4.2 and will '
+                      'be removed in a future release. Please call your test '
+                      'runner directly',
+                      DeprecationWarning)
+
+        # Can't use super - base class old-style class
+        testr_command.Testr.run(self)
+
+
+class LocalRPMVersion(setuptools.Command):
+    __doc__ = """Output the rpm *compatible* version string of this package"""
+    description = __doc__
+
+    user_options = []
+    command_name = "rpm_version"
+
+    def run(self):
+        log.info("[pbr] Extracting rpm version")
+        name = self.distribution.get_name()
+        print(version.VersionInfo(name).semantic_version().rpm_string())
+
+    def initialize_options(self):
+        pass
+
+    def finalize_options(self):
+        pass
+
+
+class LocalDebVersion(setuptools.Command):
+    __doc__ = """Output the deb *compatible* version string of this package"""
+    description = __doc__
+
+    user_options = []
+    command_name = "deb_version"
+
+    def run(self):
+        log.info("[pbr] Extracting deb version")
+        name = self.distribution.get_name()
+        print(version.VersionInfo(name).semantic_version().debian_string())
+
+    def initialize_options(self):
+        pass
+
+    def finalize_options(self):
+        pass
+
+
+def have_testr():
+    return testr_command.have_testr
+
+
+try:
+    from nose import commands
+
+    class NoseTest(commands.nosetests):
+        """Fallback test runner if testr is a no-go."""
+
+        command_name = 'test'
+        description = 'DEPRECATED: Run unit tests using nose'
+
+        def run(self):
+            warnings.warn('nose integration in pbr is deprecated. Please use '
+                          'the native nose setuptools configuration or call '
+                          'nose directly',
+                          DeprecationWarning)
+
+            # Can't use super - base class old-style class
+            commands.nosetests.run(self)
+
+    _have_nose = True
+
+except ImportError:
+    _have_nose = False
+
+
+def have_nose():
+    return _have_nose
+
+
+_wsgi_text = """#PBR Generated from %(group)r
+
+import threading
+
+from %(module_name)s import %(import_target)s
+
+if __name__ == "__main__":
+    import argparse
+    import socket
+    import sys
+    import wsgiref.simple_server as wss
+
+    parser = argparse.ArgumentParser(
+        description=%(import_target)s.__doc__,
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+        usage='%%(prog)s [-h] [--port PORT] [--host IP] -- [passed options]')
+    parser.add_argument('--port', '-p', type=int, default=8000,
+                        help='TCP port to listen on')
+    parser.add_argument('--host', '-b', default='',
+                        help='IP to bind the server to')
+    parser.add_argument('args',
+                        nargs=argparse.REMAINDER,
+                        metavar='-- [passed options]',
+                        help="'--' is the separator of the arguments used "
+                        "to start the WSGI server and the arguments passed "
+                        "to the WSGI application.")
+    args = parser.parse_args()
+    if args.args:
+        if args.args[0] == '--':
+            args.args.pop(0)
+        else:
+            parser.error("unrecognized arguments: %%s" %% ' '.join(args.args))
+    sys.argv[1:] = args.args
+    server = wss.make_server(args.host, args.port, %(invoke_target)s())
+
+    print("*" * 80)
+    print("STARTING test server %(module_name)s.%(invoke_target)s")
+    url = "http://%%s:%%d/" %% (server.server_name, server.server_port)
+    print("Available at %%s" %% url)
+    print("DANGER! For testing only, do not use in production")
+    print("*" * 80)
+    sys.stdout.flush()
+
+    server.serve_forever()
+else:
+    application = None
+    app_lock = threading.Lock()
+
+    with app_lock:
+        if application is None:
+            application = %(invoke_target)s()
+
+"""
+
+_script_text = """# PBR Generated from %(group)r
+
+import sys
+
+from %(module_name)s import %(import_target)s
+
+
+if __name__ == "__main__":
+    sys.exit(%(invoke_target)s())
+"""
+
+
+# the following allows us to specify different templates per entry
+# point group when generating pbr scripts.
+ENTRY_POINTS_MAP = {
+    'console_scripts': _script_text,
+    'gui_scripts': _script_text,
+    'wsgi_scripts': _wsgi_text
+}
+
+
+def generate_script(group, entry_point, header, template):
+    """Generate the script based on the template.
+
+    :param str group:
+        The entry-point group name, e.g., "console_scripts".
+    :param entry_point:
+        The entry point to generate the script for.
+    :param str header:
+        The first line of the script, e.g., "#!/usr/bin/env python".
+    :param str template:
+        The script template.
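+
+    For illustration, a hypothetical console_scripts entry point
+    ``foo = foo.cli:main`` run through ``_script_text`` above renders
+    roughly as (header line omitted)::
+
+        # PBR Generated from 'console_scripts'
+
+        import sys
+
+        from foo.cli import main
+
+
+        if __name__ == "__main__":
+            sys.exit(main())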
+    :returns:
+        The templated script content
+    :rtype:
+        str
+    """
+    if not entry_point.attrs or len(entry_point.attrs) > 2:
+        raise ValueError("Script targets must be of the form "
+                         "'func' or 'Class.class_method'.")
+    script_text = template % dict(
+        group=group,
+        module_name=entry_point.module_name,
+        import_target=entry_point.attrs[0],
+        invoke_target='.'.join(entry_point.attrs),
+    )
+    return header + script_text
+
+
+def override_get_script_args(
+        dist, executable=os.path.normpath(sys.executable), is_wininst=False):
+    """Override entry point console_script generation."""
+    header = easy_install.get_script_header("", executable, is_wininst)
+    for group, template in ENTRY_POINTS_MAP.items():
+        for name, ep in dist.get_entry_map(group).items():
+            yield (name, generate_script(group, ep, header, template))
+
+
+class LocalDevelop(develop.develop):
+
+    command_name = 'develop'
+
+    def install_wrapper_scripts(self, dist):
+        if sys.platform == 'win32':
+            return develop.develop.install_wrapper_scripts(self, dist)
+        if not self.exclude_scripts:
+            for args in override_get_script_args(dist):
+                self.write_script(*args)
+
+
+class LocalInstallScripts(install_scripts.install_scripts):
+    """Intercepts console scripts entry_points."""
+    command_name = 'install_scripts'
+
+    def _make_wsgi_scripts_only(self, dist, executable, is_wininst):
+        header = easy_install.get_script_header("", executable, is_wininst)
+        wsgi_script_template = ENTRY_POINTS_MAP['wsgi_scripts']
+        for name, ep in dist.get_entry_map('wsgi_scripts').items():
+            content = generate_script(
+                'wsgi_scripts', ep, header, wsgi_script_template)
+            self.write_script(name, content)
+
+    def run(self):
+        import distutils.command.install_scripts
+
+        self.run_command("egg_info")
+        if self.distribution.scripts:
+            # run first to set up self.outfiles
+            distutils.command.install_scripts.install_scripts.run(self)
+        else:
+            self.outfiles = []
+
+        ei_cmd = self.get_finalized_command("egg_info")
+        dist = pkg_resources.Distribution(
+            ei_cmd.egg_base,
+            pkg_resources.PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
+            ei_cmd.egg_name, ei_cmd.egg_version,
+        )
+        bs_cmd = self.get_finalized_command('build_scripts')
+        executable = getattr(
+            bs_cmd, 'executable', easy_install.sys_executable)
+        is_wininst = getattr(
+            self.get_finalized_command("bdist_wininst"), '_is_running', False
+        )
+
+        if 'bdist_wheel' in self.distribution.have_run:
+            # We're building a wheel which has no way of generating mod_wsgi
+            # scripts for us. Let's build them.
+            # NOTE(sigmavirus24): This needs to happen here because, as the
+            # comment below indicates, no_ep is True when building a wheel.
+            self._make_wsgi_scripts_only(dist, executable, is_wininst)
+
+        if self.no_ep:
+            # no_ep is True if we're installing into an .egg file or building
+            # a .whl file. In those cases, we do not want to build all of the
+            # entry-points listed for this package.
+            return
+
+        if os.name != 'nt':
+            get_script_args = override_get_script_args
+        else:
+            get_script_args = easy_install.get_script_args
+            executable = '"%s"' % executable
+
+        for args in get_script_args(dist, executable, is_wininst):
+            self.write_script(*args)
+
+
+class LocalManifestMaker(egg_info.manifest_maker):
+    """Add any files that are in git and some standard sensible files."""
+
+    def _add_pbr_defaults(self):
+        for template_line in [
+            'include AUTHORS',
+            'include ChangeLog',
+            'exclude .gitignore',
+            'exclude .gitreview',
+            'global-exclude *.pyc'
+        ]:
+            self.filelist.process_template_line(template_line)
+
+    def add_defaults(self):
+        """Add all the default files to self.filelist:
+
+        Extends the functionality provided by distutils to also include
+        additional sane defaults, such as the ``AUTHORS`` and ``ChangeLog``
+        files generated by *pbr*.
+
+        Warns if (``README`` or ``README.txt``) or ``setup.py`` are missing;
+        everything else is optional.
+        """
+        option_dict = self.distribution.get_option_dict('pbr')
+
+        sdist.sdist.add_defaults(self)
+        self.filelist.append(self.template)
+        self.filelist.append(self.manifest)
+        self.filelist.extend(extra_files.get_extra_files())
+        should_skip = options.get_boolean_option(option_dict, 'skip_git_sdist',
+                                                 'SKIP_GIT_SDIST')
+        if not should_skip:
+            rcfiles = git._find_git_files()
+            if rcfiles:
+                self.filelist.extend(rcfiles)
+        elif os.path.exists(self.manifest):
+            self.read_manifest()
+        ei_cmd = self.get_finalized_command('egg_info')
+        self._add_pbr_defaults()
+        self.filelist.include_pattern("*", prefix=ei_cmd.egg_info)
+
+
+class LocalEggInfo(egg_info.egg_info):
+    """Override the egg_info command to regenerate SOURCES.txt sensibly."""
+
+    command_name = 'egg_info'
+
+    def find_sources(self):
+        """Generate SOURCES.txt only if there isn't one already.
+
+        If we are in an sdist command, then we always want to update
+        SOURCES.txt. If we are not in an sdist command, then it doesn't
+        matter one flip, and is actually destructive.
+        However, if we're in a git context, it's always the right thing to do
+        to recreate SOURCES.txt
+        """
+        manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
+        if (not os.path.exists(manifest_filename) or
+                os.path.exists('.git') or
+                'sdist' in sys.argv):
+            log.info("[pbr] Processing SOURCES.txt")
+            mm = LocalManifestMaker(self.distribution)
+            mm.manifest = manifest_filename
+            mm.run()
+            self.filelist = mm.filelist
+        else:
+            log.info("[pbr] Reusing existing SOURCES.txt")
+            self.filelist = egg_info.FileList()
+            for entry in open(manifest_filename, 'r').read().split('\n'):
+                self.filelist.append(entry)
+
+
+def _from_git(distribution):
+    option_dict = distribution.get_option_dict('pbr')
+    changelog = git._iter_log_oneline()
+    if changelog:
+        changelog = git._iter_changelog(changelog)
+    git.write_git_changelog(option_dict=option_dict, changelog=changelog)
+    git.generate_authors(option_dict=option_dict)
+
+
+class LocalSDist(sdist.sdist):
+    """Builds the ChangeLog and Authors files from VC first."""
+
+    command_name = 'sdist'
+
+    def checking_reno(self):
+        """Ensure reno is installed and configured.
+
+        We can't run reno-based commands if reno isn't installed/available, and
+        don't want to if the user isn't using it.
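+
+        Judging from the option check below, reno generation can also be
+        skipped explicitly (sketch): set ``SKIP_GENERATE_RENO`` in the
+        environment, or ``skip_reno = True`` under ``[pbr]`` in setup.cfg.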
+        """
+        if hasattr(self, '_has_reno'):
+            return self._has_reno
+
+        option_dict = self.distribution.get_option_dict('pbr')
+        should_skip = options.get_boolean_option(option_dict, 'skip_reno',
+                                                 'SKIP_GENERATE_RENO')
+        if should_skip:
+            self._has_reno = False
+            return False
+
+        try:
+            # versions of reno without this module will not have the required
+            # feature, hence the import
+            from reno import setup_command  # noqa
+        except ImportError:
+            log.info('[pbr] reno was not found or is too old. Skipping '
+                     'release notes')
+            self._has_reno = False
+            return False
+
+        conf, output_file, cache_file = setup_command.load_config(
+            self.distribution)
+
+        if not os.path.exists(os.path.join(conf.reporoot, conf.notespath)):
+            log.info('[pbr] reno does not appear to be configured. Skipping '
+                     'release notes')
+            self._has_reno = False
+            return False
+
+        self._files = [output_file, cache_file]
+
+        log.info('[pbr] Generating release notes')
+        self._has_reno = True
+
+        return True
+
+    sub_commands = [('build_reno', checking_reno)] + sdist.sdist.sub_commands
+
+    def run(self):
+        _from_git(self.distribution)
+        # sdist.sdist is an old style class, can't use super()
+        sdist.sdist.run(self)
+
+    def make_distribution(self):
+        # This is included in make_distribution because setuptools doesn't use
+        # 'get_file_list'. As such, this is the only hook point that runs after
+        # the commands in 'sub_commands'
+        if self.checking_reno():
+            self.filelist.extend(self._files)
+        self.filelist.sort()
+        sdist.sdist.make_distribution(self)
+
+
+try:
+    from pbr import builddoc
+    _have_sphinx = True
+    # Import the symbols from their new home so the package API stays
+    # compatible.
+    LocalBuildDoc = builddoc.LocalBuildDoc
+except ImportError:
+    _have_sphinx = False
+    LocalBuildDoc = None
+
+
+def have_sphinx():
+    return _have_sphinx
+
+
+def _get_increment_kwargs(git_dir, tag):
+    """Calculate the sort of semver increment needed from git history.
+
+    Every commit from HEAD to tag is considered for Sem-Ver metadata lines.
+    See the pbr docs for their syntax.
+
+    :return: a dict of kwargs for passing into SemanticVersion.increment.
+    """
+    result = {}
+    if tag:
+        version_spec = tag + "..HEAD"
+    else:
+        version_spec = "HEAD"
+    # Get the raw body of the commit messages so that we don't have to
+    # parse out any formatting whitespace and to avoid user settings on
+    # git log output affecting our ability to have working sem ver headers.
+    changelog = git._run_git_command(['log', '--pretty=%B', version_spec],
+                                     git_dir)
+    header_len = len('sem-ver:')
+    commands = [line[header_len:].strip() for line in changelog.split('\n')
+                if line.lower().startswith('sem-ver:')]
+    symbols = set()
+    for command in commands:
+        symbols.update([symbol.strip() for symbol in command.split(',')])
+
+    def _handle_symbol(symbol, symbols, impact):
+        if symbol in symbols:
+            result[impact] = True
+            symbols.discard(symbol)
+    _handle_symbol('bugfix', symbols, 'patch')
+    _handle_symbol('feature', symbols, 'minor')
+    _handle_symbol('deprecation', symbols, 'minor')
+    _handle_symbol('api-break', symbols, 'major')
+    for symbol in symbols:
+        log.info('[pbr] Unknown Sem-Ver symbol %r' % symbol)
+    # We don't want patch in the kwargs since it is not a keyword argument -
+    # it's the default minimum increment.
+    result.pop('patch', None)
+    return result
+
+
+def _get_revno_and_last_tag(git_dir):
+    """Return the commit data about the most recent tag.
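+    The result is a ``(tag, distance)`` pair, e.g. ``('1.2.3', 4)``
+    (illustrative values).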
+ + We use git-describe to find this out, but if there are no + tags then we fall back to counting commits since the beginning + of time. + """ + changelog = git._iter_log_oneline(git_dir=git_dir) + row_count = 0 + for row_count, (ignored, tag_set, ignored) in enumerate(changelog): + version_tags = set() + semver_to_tag = dict() + for tag in list(tag_set): + try: + semver = version.SemanticVersion.from_pip_string(tag) + semver_to_tag[semver] = tag + version_tags.add(semver) + except Exception: + pass + if version_tags: + return semver_to_tag[max(version_tags)], row_count + return "", row_count + + +def _get_version_from_git_target(git_dir, target_version): + """Calculate a version from a target version in git_dir. + + This is used for untagged versions only. A new version is calculated as + necessary based on git metadata - distance to tags, current hash, contents + of commit messages. + + :param git_dir: The git directory we're working from. + :param target_version: If None, the last tagged version (or 0 if there are + no tags yet) is incremented as needed to produce an appropriate target + version following semver rules. Otherwise target_version is used as a + constraint - if semver rules would result in a newer version then an + exception is raised. + :return: A semver version object. + """ + tag, distance = _get_revno_and_last_tag(git_dir) + last_semver = version.SemanticVersion.from_pip_string(tag or '0') + if distance == 0: + new_version = last_semver + else: + new_version = last_semver.increment( + **_get_increment_kwargs(git_dir, tag)) + if target_version is not None and new_version > target_version: + raise ValueError( + "git history requires a target version of %(new)s, but target " + "version is %(target)s" % + dict(new=new_version, target=target_version)) + if distance == 0: + return last_semver + new_dev = new_version.to_dev(distance) + if target_version is not None: + target_dev = target_version.to_dev(distance) + if target_dev > new_dev: + return target_dev + return new_dev + + +def _get_version_from_git(pre_version=None): + """Calculate a version string from git. + + If the revision is tagged, return that. Otherwise calculate a semantic + version description of the tree. + + The number of revisions since the last tag is included in the dev counter + in the version for untagged versions. + + :param pre_version: If supplied use this as the target version rather than + inferring one from the last tag + commit messages. + """ + git_dir = git._run_git_functions() + if git_dir: + try: + tagged = git._run_git_command( + ['describe', '--exact-match'], git_dir, + throw_on_error=True).replace('-', '.') + target_version = version.SemanticVersion.from_pip_string(tagged) + except Exception: + if pre_version: + # not released yet - use pre_version as the target + target_version = version.SemanticVersion.from_pip_string( + pre_version) + else: + # not released yet - just calculate from git history + target_version = None + result = _get_version_from_git_target(git_dir, target_version) + return result.release_string() + # If we don't know the version, return an empty string so at least + # the downstream users of the value always have the same type of + # object to work with. + try: + return unicode() + except NameError: + return '' + + +def _get_version_from_pkg_metadata(package_name): + """Get the version from package metadata if present. + + This looks for PKG-INFO if present (for sdists), and if not looks + for METADATA (for wheels) and failing that will return None. 
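+
+    A sketch of the intended lookup, with hypothetical metadata: a PKG-INFO
+    containing ``Name: widget`` and ``Version: 1.2.3`` makes
+    ``_get_version_from_pkg_metadata('widget')`` return ``'1.2.3'``; any
+    other package name returns None.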
+ """ + pkg_metadata_filenames = ['PKG-INFO', 'METADATA'] + pkg_metadata = {} + for filename in pkg_metadata_filenames: + try: + pkg_metadata_file = open(filename, 'r') + except (IOError, OSError): + continue + try: + pkg_metadata = email.message_from_file(pkg_metadata_file) + except email.errors.MessageError: + continue + + # Check to make sure we're in our own dir + if pkg_metadata.get('Name', None) != package_name: + return None + return pkg_metadata.get('Version', None) + + +def get_version(package_name, pre_version=None): + """Get the version of the project. + + First, try getting it from PKG-INFO or METADATA, if it exists. If it does, + that means we're in a distribution tarball or that install has happened. + Otherwise, if there is no PKG-INFO or METADATA file, pull the version + from git. + + We do not support setup.py version sanity in git archive tarballs, nor do + we support packagers directly sucking our git repo into theirs. We expect + that a source tarball be made from our git repo - or that if someone wants + to make a source tarball from a fork of our repo with additional tags in it + that they understand and desire the results of doing that. + + :param pre_version: The version field from setup.cfg - if set then this + version will be the next release. + """ + version = os.environ.get( + "PBR_VERSION", + os.environ.get("OSLO_PACKAGE_VERSION", None)) + if version: + return version + version = _get_version_from_pkg_metadata(package_name) + if version: + return version + version = _get_version_from_git(pre_version) + # Handle http://bugs.python.org/issue11638 + # version will either be an empty unicode string or a valid + # unicode version string, but either way it's unicode and needs to + # be encoded. + if sys.version_info[0] == 2: + version = version.encode('utf-8') + if version: + return version + raise Exception("Versioning for this project requires either an sdist" + " tarball, or access to an upstream git repository." + " It's also possible that there is a mismatch between" + " the package name in setup.cfg and the argument given" + " to pbr.version.VersionInfo. Project name {name} was" + " given, but was not able to be found.".format( + name=package_name)) + + +# This is added because pbr uses pbr to install itself. That means that +# any changes to the egg info writer entrypoints must be forward and +# backward compatible. This maintains the pbr.packaging.write_pbr_json +# path. +write_pbr_json = pbr.pbr_json.write_pbr_json diff --git a/venv/Lib/site-packages/pbr/pbr_json.py b/venv/Lib/site-packages/pbr/pbr_json.py new file mode 100644 index 00000000..08c3da22 --- /dev/null +++ b/venv/Lib/site-packages/pbr/pbr_json.py @@ -0,0 +1,34 @@ +# Copyright 2011 OpenStack Foundation +# Copyright 2012-2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
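+
+# This hook writes a small JSON file, pbr.json, into a package's egg-info
+# directory, recording the short git SHA of the build and whether it was a
+# release. A sketch of the payload (the SHA is illustrative):
+#
+#     {"git_version": "3f8c1e2", "is_release": false}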
+
+import json
+
+from pbr import git
+
+
+def write_pbr_json(cmd, basename, filename):
+    if not hasattr(cmd.distribution, 'pbr') or not cmd.distribution.pbr:
+        return
+    git_dir = git._run_git_functions()
+    if not git_dir:
+        return
+    values = dict()
+    git_version = git.get_git_short_sha(git_dir)
+    is_release = git.get_is_release(git_dir)
+    if git_version is not None:
+        values['git_version'] = git_version
+        values['is_release'] = is_release
+    cmd.write_file('pbr', filename, json.dumps(values, sort_keys=True))
diff --git a/venv/Lib/site-packages/pbr/sphinxext.py b/venv/Lib/site-packages/pbr/sphinxext.py
new file mode 100644
index 00000000..ef613052
--- /dev/null
+++ b/venv/Lib/site-packages/pbr/sphinxext.py
@@ -0,0 +1,99 @@
+# Copyright 2018 Red Hat, Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os.path
+
+from six.moves import configparser
+from sphinx.util import logging
+
+import pbr.version
+
+_project = None
+logger = logging.getLogger(__name__)
+
+
+def _find_setup_cfg(srcdir):
+    """Find the 'setup.cfg' file, if it exists.
+
+    This assumes we're using 'doc/source' for documentation, but also allows
+    for single level 'doc' paths.
+    """
+    # TODO(stephenfin): Are we sure that this will always exist, e.g. for
+    # an sdist or wheel? Perhaps we should check for 'PKG-INFO' or
+    # 'METADATA' files, a la 'pbr.packaging._get_version_from_pkg_metadata'
+    for path in [
+            os.path.join(srcdir, os.pardir, 'setup.cfg'),
+            os.path.join(srcdir, os.pardir, os.pardir, 'setup.cfg')]:
+        if os.path.exists(path):
+            return path
+
+    return None
+
+
+def _get_project_name(srcdir):
+    """Return the project name as a string, or None.
+
+    This extracts metadata from 'setup.cfg'. We don't rely on
+    distutils/setuptools as we don't want to actually install the package
+    simply to build docs.
+    """
+    global _project
+
+    if _project is None:
+        parser = configparser.ConfigParser()
+
+        path = _find_setup_cfg(srcdir)
+        if not path or not parser.read(path):
+            logger.info('Could not find a setup.cfg to extract project name '
+                        'from')
+            return None
+
+        try:
+            # for the project name we use the name in setup.cfg, but if the
+            # length is longer than 32 we use the summary. Otherwise the
+            # menu rendering looks broken
+            project = parser.get('metadata', 'name')
+            if len(project.split()) == 1 and len(project) > 32:
+                project = parser.get('metadata', 'summary')
+        except configparser.Error:
+            logger.info('Could not extract project metadata from setup.cfg')
+            return None
+
+        _project = project
+
+    return _project
+
+
+def _builder_inited(app):
+    # TODO(stephenfin): Once Sphinx 1.8 is released, we should move the below
+    # to a 'config-inited' handler
+
+    project_name = _get_project_name(app.srcdir)
+    try:
+        version_info = pbr.version.VersionInfo(project_name)
+    except Exception:
+        version_info = None
+
+    if version_info and not app.config.version and not app.config.release:
+        app.config.version = version_info.canonical_version_string()
+        app.config.release = version_info.version_string_with_vcs()
+
+
+def setup(app):
+    app.connect('builder-inited', _builder_inited)
+    return {
+        'parallel_read_safe': True,
+        'parallel_write_safe': True,
+    }
diff --git a/venv/Lib/site-packages/pbr/testr_command.py b/venv/Lib/site-packages/pbr/testr_command.py
new file mode 100644
index 00000000..d143565f
--- /dev/null
+++ b/venv/Lib/site-packages/pbr/testr_command.py
@@ -0,0 +1,167 @@
+# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Copyright (c) 2013 Testrepository Contributors
+#
+# Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
+# license at the users choice. A copy of both licenses are available in the
+# project source as Apache-2.0 and BSD. You may not use this file except in
+# compliance with one of these two licences.
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# license you chose for the specific language governing permissions and
+# limitations under that license.
+
+"""setuptools/distutils command to run testr via setup.py
+
+PBR will hook in the Testr class to provide "setup.py test" when
+.testr.conf is present in the repository (see pbr/hooks/commands.py).
+
+If we are activated but testrepository is not installed, we provide a
+sensible error.
+
+You can pass --coverage which will also export PYTHON='coverage run
+--source <your package>' and automatically combine the coverage from
+each testr backend test runner after the run completes.
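+
+A sketch of typical invocations, using the flags TestrReal defines below
+(the test filter is hypothetical)::
+
+    python setup.py test --coverage
+    python setup.py test --testr-args="test_foo"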
+ +""" + +from distutils import cmd +import distutils.errors +import logging +import os +import sys +import warnings + +logger = logging.getLogger(__name__) + + +class TestrReal(cmd.Command): + + description = "DEPRECATED: Run unit tests using testr" + + user_options = [ + ('coverage', None, "Replace PYTHON with coverage and merge coverage " + "from each testr worker."), + ('testr-args=', 't', "Run 'testr' with these args"), + ('omit=', 'o', "Files to omit from coverage calculations"), + ('coverage-package-name=', None, "Use this name to select packages " + "for coverage (one or more, " + "comma-separated)"), + ('slowest', None, "Show slowest test times after tests complete."), + ('no-parallel', None, "Run testr serially"), + ('log-level=', 'l', "Log level (default: info)"), + ] + + boolean_options = ['coverage', 'slowest', 'no_parallel'] + + def _run_testr(self, *args): + logger.debug("_run_testr called with args = %r", args) + return commands.run_argv([sys.argv[0]] + list(args), + sys.stdin, sys.stdout, sys.stderr) + + def initialize_options(self): + self.testr_args = None + self.coverage = None + self.omit = "" + self.slowest = None + self.coverage_package_name = None + self.no_parallel = None + self.log_level = 'info' + + def finalize_options(self): + self.log_level = getattr( + logging, + self.log_level.upper(), + logging.INFO) + logging.basicConfig(level=self.log_level) + logger.debug("finalize_options called") + if self.testr_args is None: + self.testr_args = [] + else: + self.testr_args = self.testr_args.split() + if self.omit: + self.omit = "--omit=%s" % self.omit + logger.debug("finalize_options: self.__dict__ = %r", self.__dict__) + + def run(self): + """Set up testr repo, then run testr.""" + logger.debug("run called") + + warnings.warn('testr integration in pbr is deprecated. 
Please use ' + 'the \'testr\' setup command or call testr directly', + DeprecationWarning) + + if not os.path.isdir(".testrepository"): + self._run_testr("init") + + if self.coverage: + self._coverage_before() + if not self.no_parallel: + testr_ret = self._run_testr("run", "--parallel", *self.testr_args) + else: + testr_ret = self._run_testr("run", *self.testr_args) + if testr_ret: + raise distutils.errors.DistutilsError( + "testr failed (%d)" % testr_ret) + if self.slowest: + print("Slowest Tests") + self._run_testr("slowest") + if self.coverage: + self._coverage_after() + + def _coverage_before(self): + logger.debug("_coverage_before called") + package = self.distribution.get_name() + if package.startswith('python-'): + package = package[7:] + + # Use this as coverage package name + if self.coverage_package_name: + package = self.coverage_package_name + options = "--source %s --parallel-mode" % package + os.environ['PYTHON'] = ("coverage run %s" % options) + logger.debug("os.environ['PYTHON'] = %r", os.environ['PYTHON']) + + def _coverage_after(self): + logger.debug("_coverage_after called") + os.system("coverage combine") + os.system("coverage html -d ./cover %s" % self.omit) + os.system("coverage xml -o ./cover/coverage.xml %s" % self.omit) + + +class TestrFake(cmd.Command): + description = "Run unit tests using testr" + user_options = [] + + def initialize_options(self): + pass + + def finalize_options(self): + pass + + def run(self): + print("Install testrepository to run 'testr' command properly.") + + +try: + from testrepository import commands + have_testr = True + Testr = TestrReal +except ImportError: + have_testr = False + Testr = TestrFake diff --git a/venv/Lib/site-packages/pbr/tests/__init__.py b/venv/Lib/site-packages/pbr/tests/__init__.py new file mode 100644 index 00000000..583e0c6b --- /dev/null +++ b/venv/Lib/site-packages/pbr/tests/__init__.py @@ -0,0 +1,26 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import testscenarios + + +def load_tests(loader, standard_tests, pattern): + # top level directory cached on loader instance + this_dir = os.path.dirname(__file__) + package_tests = loader.discover(start_dir=this_dir, pattern=pattern) + result = loader.suiteClass() + result.addTests(testscenarios.generate_scenarios(standard_tests)) + result.addTests(testscenarios.generate_scenarios(package_tests)) + return result diff --git a/venv/Lib/site-packages/pbr/tests/base.py b/venv/Lib/site-packages/pbr/tests/base.py new file mode 100644 index 00000000..ea4a458a --- /dev/null +++ b/venv/Lib/site-packages/pbr/tests/base.py @@ -0,0 +1,223 @@ +# Copyright 2010-2011 OpenStack Foundation +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+# Copyright (C) 2013 Association of Universities for Research in Astronomy
+# (AURA)
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright
+#    notice, this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above
+#    copyright notice, this list of conditions and the following
+#    disclaimer in the documentation and/or other materials provided
+#    with the distribution.
+#
+# 3. The name of AURA and its representatives may not be used to
+#    endorse or promote products derived from this software without
+#    specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
+# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+
+"""Common utilities used in testing"""
+
+import os
+import shutil
+import subprocess
+import sys
+
+import fixtures
+import testresources
+import testtools
+from testtools import content
+
+from pbr import options
+
+
+class DiveDir(fixtures.Fixture):
+    """Dive into given directory and return back on cleanup.
+
+    :ivar path: The target directory.
+    """
+
+    def __init__(self, path):
+        self.path = path
+
+    def setUp(self):
+        super(DiveDir, self).setUp()
+        self.addCleanup(os.chdir, os.getcwd())
+        os.chdir(self.path)
+
+
+class BaseTestCase(testtools.TestCase, testresources.ResourcedTestCase):
+
+    def setUp(self):
+        super(BaseTestCase, self).setUp()
+        test_timeout = os.environ.get('OS_TEST_TIMEOUT', 30)
+        try:
+            test_timeout = int(test_timeout)
+        except ValueError:
+            # If the timeout value is invalid, fall back to no timeout
+            # rather than guessing at one.
+            print("OS_TEST_TIMEOUT set to invalid value,"
+                  " defaulting to no timeout")
+            test_timeout = 0
+        if test_timeout > 0:
+            self.useFixture(fixtures.Timeout(test_timeout, gentle=True))
+
+        if os.environ.get('OS_STDOUT_CAPTURE') in options.TRUE_VALUES:
+            stdout = self.useFixture(fixtures.StringStream('stdout')).stream
+            self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
+        if os.environ.get('OS_STDERR_CAPTURE') in options.TRUE_VALUES:
+            stderr = self.useFixture(fixtures.StringStream('stderr')).stream
+            self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
+        self.log_fixture = self.useFixture(
+            fixtures.FakeLogger('pbr'))
+
+        # Older git does not have config --local, so create a temporary home
+        # directory to permit using git config --global without stepping on
+        # developer configuration.
+        self.useFixture(fixtures.TempHomeDir())
+        self.useFixture(fixtures.NestedTempfile())
+        self.useFixture(fixtures.FakeLogger())
+        # TODO(lifeless) we should remove PBR_VERSION from the environment
+        # rather than setting it, because that's not representative - we need
+        # to test non-preversioned codepaths too!
+        self.useFixture(fixtures.EnvironmentVariable('PBR_VERSION', '0.0'))
+
+        self.temp_dir = self.useFixture(fixtures.TempDir()).path
+        self.package_dir = os.path.join(self.temp_dir, 'testpackage')
+        shutil.copytree(os.path.join(os.path.dirname(__file__), 'testpackage'),
+                        self.package_dir)
+        self.addCleanup(os.chdir, os.getcwd())
+        os.chdir(self.package_dir)
+        self.addCleanup(self._discard_testpackage)
+        # Tests can opt into non-PBR_VERSION by setting preversioned=False as
+        # an attribute.
+        if not getattr(self, 'preversioned', True):
+            self.useFixture(fixtures.EnvironmentVariable('PBR_VERSION'))
+            setup_cfg_path = os.path.join(self.package_dir, 'setup.cfg')
+            with open(setup_cfg_path, 'rt') as cfg:
+                content = cfg.read()
+            content = content.replace(u'version = 0.1.dev', u'')
+            with open(setup_cfg_path, 'wt') as cfg:
+                cfg.write(content)
+
+    def _discard_testpackage(self):
+        # Remove pbr.testpackage from sys.modules so that it can be freshly
+        # re-imported by the next test
+        for k in list(sys.modules):
+            if (k == 'pbr_testpackage' or
+                    k.startswith('pbr_testpackage.')):
+                del sys.modules[k]
+
+    def run_pbr(self, *args, **kwargs):
+        return self._run_cmd('pbr', args, **kwargs)
+
+    def run_setup(self, *args, **kwargs):
+        return self._run_cmd(sys.executable, ('setup.py',) + args, **kwargs)
+
+    def _run_cmd(self, cmd, args=[], allow_fail=True, cwd=None):
+        """Run a command in the root of the test working copy.
+
+        Runs a command, with the given argument list, in the root of the test
+        working copy--returns the stdout and stderr streams and the exit code
+        from the subprocess.
+
+        :param cwd: If falsy run within the test package dir, otherwise run
+            within the named path.
+        """
+        cwd = cwd or self.package_dir
+        result = _run_cmd([cmd] + list(args), cwd=cwd)
+        if result[2] and not allow_fail:
+            raise Exception("Command failed retcode=%s" % result[2])
+        return result
+
+
+class CapturedSubprocess(fixtures.Fixture):
+    """Run a process and capture its output.
+
+    :attr stdout: The output (a string).
+    :attr stderr: The standard error (a string).
+    :attr returncode: The return code of the process.
+
+    Note that stdout and stderr are decoded from the bytestrings the
+    subprocess returns using errors='replace'.
+    """
+
+    def __init__(self, label, *args, **kwargs):
+        """Create a CapturedSubprocess.
+
+        :param label: A label for the subprocess in the test log. E.g. 'foo'.
+        :param *args: The *args to pass to Popen.
+        :param **kwargs: The **kwargs to pass to Popen.
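+
+        A usage sketch, mirroring how the test suite drives it::
+
+            self.useFixture(base.CapturedSubprocess(
+                'sdist', [python, 'setup.py', 'sdist'], cwd=path))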
+        """
+        super(CapturedSubprocess, self).__init__()
+        self.label = label
+        self.args = args
+        self.kwargs = kwargs
+        self.kwargs['stderr'] = subprocess.PIPE
+        self.kwargs['stdin'] = subprocess.PIPE
+        self.kwargs['stdout'] = subprocess.PIPE
+
+    def setUp(self):
+        super(CapturedSubprocess, self).setUp()
+        proc = subprocess.Popen(*self.args, **self.kwargs)
+        out, err = proc.communicate()
+        self.out = out.decode('utf-8', 'replace')
+        self.err = err.decode('utf-8', 'replace')
+        self.addDetail(self.label + '-stdout', content.text_content(self.out))
+        self.addDetail(self.label + '-stderr', content.text_content(self.err))
+        self.returncode = proc.returncode
+        if proc.returncode:
+            raise AssertionError(
+                'Failed process args=%r, kwargs=%r, returncode=%s' % (
+                    self.args, self.kwargs, proc.returncode))
+        self.addCleanup(delattr, self, 'out')
+        self.addCleanup(delattr, self, 'err')
+        self.addCleanup(delattr, self, 'returncode')
+
+
+def _run_cmd(args, cwd):
+    """Run the command args in cwd.
+
+    :param args: The command to run, e.g. ['git', 'status']
+    :param cwd: The directory to run the command in.
+    :return: (stdout, stderr, returncode)
+    """
+    p = subprocess.Popen(
+        args, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE, cwd=cwd)
+    streams = tuple(s.decode('latin1').strip() for s in p.communicate())
+    for stream_content in streams:
+        print(stream_content)
+    return (streams) + (p.returncode,)
+
+
+def _config_git():
+    _run_cmd(
+        ['git', 'config', '--global', 'user.email', 'example@example.com'],
+        None)
+    _run_cmd(
+        ['git', 'config', '--global', 'user.name', 'OpenStack Developer'],
+        None)
+    _run_cmd(
+        ['git', 'config', '--global', 'user.signingkey',
+         'example@example.com'], None)
diff --git a/venv/Lib/site-packages/pbr/tests/test_commands.py b/venv/Lib/site-packages/pbr/tests/test_commands.py
new file mode 100644
index 00000000..51e27116
--- /dev/null
+++ b/venv/Lib/site-packages/pbr/tests/test_commands.py
@@ -0,0 +1,84 @@
+# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Copyright (C) 2013 Association of Universities for Research in Astronomy
+# (AURA)
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright
+#    notice, this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above
+#    copyright notice, this list of conditions and the following
+#    disclaimer in the documentation and/or other materials provided
+#    with the distribution.
+#
+# 3. The name of AURA and its representatives may not be used to
+#    endorse or promote products derived from this software without
+#    specific prior written permission.
+# +# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED +# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS + +from testtools import content + +from pbr.tests import base + + +class TestCommands(base.BaseTestCase): + def test_custom_build_py_command(self): + """Test custom build_py command. + + Test that a custom subclass of the build_py command runs when listed in + the commands [global] option, rather than the normal build command. + """ + + stdout, stderr, return_code = self.run_setup('build_py') + self.addDetail('stdout', content.text_content(stdout)) + self.addDetail('stderr', content.text_content(stderr)) + self.assertIn('Running custom build_py command.', stdout) + self.assertEqual(0, return_code) + + def test_custom_deb_version_py_command(self): + """Test custom deb_version command.""" + stdout, stderr, return_code = self.run_setup('deb_version') + self.addDetail('stdout', content.text_content(stdout)) + self.addDetail('stderr', content.text_content(stderr)) + self.assertIn('Extracting deb version', stdout) + self.assertEqual(0, return_code) + + def test_custom_rpm_version_py_command(self): + """Test custom rpm_version command.""" + stdout, stderr, return_code = self.run_setup('rpm_version') + self.addDetail('stdout', content.text_content(stdout)) + self.addDetail('stderr', content.text_content(stderr)) + self.assertIn('Extracting rpm version', stdout) + self.assertEqual(0, return_code) + + def test_freeze_command(self): + """Test that freeze output is sorted in a case-insensitive manner.""" + stdout, stderr, return_code = self.run_pbr('freeze') + self.assertEqual(0, return_code) + pkgs = [] + for l in stdout.split('\n'): + pkgs.append(l.split('==')[0].lower()) + pkgs_sort = sorted(pkgs[:]) + self.assertEqual(pkgs_sort, pkgs) diff --git a/venv/Lib/site-packages/pbr/tests/test_core.py b/venv/Lib/site-packages/pbr/tests/test_core.py new file mode 100644 index 00000000..ccd14aba --- /dev/null +++ b/venv/Lib/site-packages/pbr/tests/test_core.py @@ -0,0 +1,151 @@ +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Copyright (C) 2013 Association of Universities for Research in Astronomy +# (AURA) +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# 3. 
The name of AURA and its representatives may not be used to +# endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED +# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS + +import glob +import os +import tarfile + +import fixtures + +from pbr.tests import base + + +class TestCore(base.BaseTestCase): + + cmd_names = ('pbr_test_cmd', 'pbr_test_cmd_with_class') + + def check_script_install(self, install_stdout): + for cmd_name in self.cmd_names: + install_txt = 'Installing %s script to %s' % (cmd_name, + self.temp_dir) + self.assertIn(install_txt, install_stdout) + + cmd_filename = os.path.join(self.temp_dir, cmd_name) + + script_txt = open(cmd_filename, 'r').read() + self.assertNotIn('pkg_resources', script_txt) + + stdout, _, return_code = self._run_cmd(cmd_filename) + self.assertIn("PBR", stdout) + + def test_setup_py_keywords(self): + """setup.py --keywords. + + Test that the `./setup.py --keywords` command returns the correct + value without balking. + """ + + self.run_setup('egg_info') + stdout, _, _ = self.run_setup('--keywords') + assert stdout == 'packaging, distutils, setuptools' + + def test_setup_py_build_sphinx(self): + stdout, _, return_code = self.run_setup('build_sphinx') + self.assertEqual(0, return_code) + + def test_sdist_extra_files(self): + """Test that the extra files are correctly added.""" + + stdout, _, return_code = self.run_setup('sdist', '--formats=gztar') + + # There can be only one + try: + tf_path = glob.glob(os.path.join('dist', '*.tar.gz'))[0] + except IndexError: + assert False, 'source dist not found' + + tf = tarfile.open(tf_path) + names = ['/'.join(p.split('/')[1:]) for p in tf.getnames()] + + self.assertIn('extra-file.txt', names) + + def test_console_script_install(self): + """Test that we install a non-pkg-resources console script.""" + + if os.name == 'nt': + self.skipTest('Windows support is passthrough') + + stdout, _, return_code = self.run_setup( + 'install_scripts', '--install-dir=%s' % self.temp_dir) + + self.useFixture( + fixtures.EnvironmentVariable('PYTHONPATH', '.')) + + self.check_script_install(stdout) + + def test_console_script_develop(self): + """Test that we develop a non-pkg-resources console script.""" + + if os.name == 'nt': + self.skipTest('Windows support is passthrough') + + self.useFixture( + fixtures.EnvironmentVariable( + 'PYTHONPATH', ".:%s" % self.temp_dir)) + + stdout, _, return_code = self.run_setup( + 'develop', '--install-dir=%s' % self.temp_dir) + + self.check_script_install(stdout) + + +class TestGitSDist(base.BaseTestCase): + + def setUp(self): + super(TestGitSDist, self).setUp() + + stdout, _, return_code = self._run_cmd('git', ('init',)) + if return_code: + self.skipTest("git not installed") + + stdout, _, return_code = self._run_cmd('git', ('add', '.')) + stdout, _, return_code = self._run_cmd( + 'git', ('commit', '-m', 'Turn this into a git repo')) + + stdout, _, return_code = self.run_setup('sdist', '--formats=gztar') + + def test_sdist_git_extra_files(self): + """Test that extra files found in git are correctly added.""" + # There can be only one + tf_path = glob.glob(os.path.join('dist', 
'*.tar.gz'))[0] + tf = tarfile.open(tf_path) + names = ['/'.join(p.split('/')[1:]) for p in tf.getnames()] + + self.assertIn('git-extra-file.txt', names) diff --git a/venv/Lib/site-packages/pbr/tests/test_files.py b/venv/Lib/site-packages/pbr/tests/test_files.py new file mode 100644 index 00000000..94a2d9ad --- /dev/null +++ b/venv/Lib/site-packages/pbr/tests/test_files.py @@ -0,0 +1,148 @@ +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from __future__ import print_function + +import os + +import fixtures + +from pbr.hooks import files +from pbr.tests import base + + +class FilesConfigTest(base.BaseTestCase): + + def setUp(self): + super(FilesConfigTest, self).setUp() + + pkg_fixture = fixtures.PythonPackage( + "fake_package", [ + ("fake_module.py", b""), + ("other_fake_module.py", b""), + ]) + self.useFixture(pkg_fixture) + pkg_etc = os.path.join(pkg_fixture.base, 'etc') + pkg_ansible = os.path.join(pkg_fixture.base, 'ansible', + 'kolla-ansible', 'test') + dir_spcs = os.path.join(pkg_fixture.base, 'dir with space') + dir_subdir_spc = os.path.join(pkg_fixture.base, 'multi space', + 'more spaces') + pkg_sub = os.path.join(pkg_etc, 'sub') + subpackage = os.path.join( + pkg_fixture.base, 'fake_package', 'subpackage') + os.makedirs(pkg_sub) + os.makedirs(subpackage) + os.makedirs(pkg_ansible) + os.makedirs(dir_spcs) + os.makedirs(dir_subdir_spc) + with open(os.path.join(pkg_etc, "foo"), 'w') as foo_file: + foo_file.write("Foo Data") + with open(os.path.join(pkg_sub, "bar"), 'w') as foo_file: + foo_file.write("Bar Data") + with open(os.path.join(pkg_ansible, "baz"), 'w') as baz_file: + baz_file.write("Baz Data") + with open(os.path.join(subpackage, "__init__.py"), 'w') as foo_file: + foo_file.write("# empty") + with open(os.path.join(dir_spcs, "file with spc"), 'w') as spc_file: + spc_file.write("# empty") + with open(os.path.join(dir_subdir_spc, "file with spc"), 'w') as file_: + file_.write("# empty") + + self.useFixture(base.DiveDir(pkg_fixture.base)) + + def test_implicit_auto_package(self): + config = dict( + files=dict( + ) + ) + files.FilesConfig(config, 'fake_package').run() + self.assertIn('subpackage', config['files']['packages']) + + def test_auto_package(self): + config = dict( + files=dict( + packages='fake_package', + ) + ) + files.FilesConfig(config, 'fake_package').run() + self.assertIn('subpackage', config['files']['packages']) + + def test_data_files_globbing(self): + config = dict( + files=dict( + data_files="\n etc/pbr = etc/*" + ) + ) + files.FilesConfig(config, 'fake_package').run() + self.assertIn( + "\n'etc/pbr/' = \n 'etc/foo'\n'etc/pbr/sub' = \n 'etc/sub/bar'", + config['files']['data_files']) + + def test_data_files_with_spaces(self): + config = dict( + files=dict( + data_files="\n 'i like spaces' = 'dir with space'/*" + ) + ) + files.FilesConfig(config, 'fake_package').run() + self.assertIn( + "\n'i like spaces/' = \n 'dir with space/file with spc'", + 
config['files']['data_files']) + + def test_data_files_with_spaces_subdirectories(self): + # test that we can handle whitespace in subdirectories + data_files = "\n 'one space/two space' = 'multi space/more spaces'/*" + expected = ( + "\n'one space/two space/' = " + "\n 'multi space/more spaces/file with spc'") + config = dict( + files=dict( + data_files=data_files + ) + ) + files.FilesConfig(config, 'fake_package').run() + self.assertIn(expected, config['files']['data_files']) + + def test_data_files_with_spaces_quoted_components(self): + # test that we can quote individual path components + data_files = ( + "\n'one space'/'two space' = 'multi space'/'more spaces'/*" + ) + expected = ("\n'one space/two space/' = " + "\n 'multi space/more spaces/file with spc'") + config = dict( + files=dict( + data_files=data_files + ) + ) + files.FilesConfig(config, 'fake_package').run() + self.assertIn(expected, config['files']['data_files']) + + def test_data_files_globbing_source_prefix_in_directory_name(self): + # We want to test that the string, "docs", is not replaced in a + # subdirectory name, "sub-docs" + config = dict( + files=dict( + data_files="\n share/ansible = ansible/*" + ) + ) + files.FilesConfig(config, 'fake_package').run() + self.assertIn( + "\n'share/ansible/' = " + "\n'share/ansible/kolla-ansible' = " + "\n'share/ansible/kolla-ansible/test' = " + "\n 'ansible/kolla-ansible/test/baz'", + config['files']['data_files']) diff --git a/venv/Lib/site-packages/pbr/tests/test_hooks.py b/venv/Lib/site-packages/pbr/tests/test_hooks.py new file mode 100644 index 00000000..3f747904 --- /dev/null +++ b/venv/Lib/site-packages/pbr/tests/test_hooks.py @@ -0,0 +1,75 @@ +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Copyright (C) 2013 Association of Universities for Research in Astronomy +# (AURA) +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# 3. The name of AURA and its representatives may not be used to +# endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED +# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS + +import os + +from testtools import matchers +from testtools import skipUnless + +from pbr import testr_command +from pbr.tests import base +from pbr.tests import util + + +class TestHooks(base.BaseTestCase): + def setUp(self): + super(TestHooks, self).setUp() + with util.open_config( + os.path.join(self.package_dir, 'setup.cfg')) as cfg: + cfg.set('global', 'setup-hooks', + 'pbr_testpackage._setup_hooks.test_hook_1\n' + 'pbr_testpackage._setup_hooks.test_hook_2') + + def test_global_setup_hooks(self): + """Test setup_hooks. + + Test that setup_hooks listed in the [global] section of setup.cfg are + executed in order. + """ + + stdout, _, return_code = self.run_setup('egg_info') + assert 'test_hook_1\ntest_hook_2' in stdout + assert return_code == 0 + + @skipUnless(testr_command.have_testr, "testrepository not available") + def test_custom_commands_known(self): + stdout, _, return_code = self.run_setup('--help-commands') + self.assertFalse(return_code) + self.assertThat(stdout, matchers.Contains(" testr ")) diff --git a/venv/Lib/site-packages/pbr/tests/test_integration.py b/venv/Lib/site-packages/pbr/tests/test_integration.py new file mode 100644 index 00000000..8e96f21f --- /dev/null +++ b/venv/Lib/site-packages/pbr/tests/test_integration.py @@ -0,0 +1,269 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os.path +import shlex +import sys + +import fixtures +import testtools +import textwrap + +from pbr.tests import base +from pbr.tests import test_packaging + +PIPFLAGS = shlex.split(os.environ.get('PIPFLAGS', '')) +PIPVERSION = os.environ.get('PIPVERSION', 'pip') +PBRVERSION = os.environ.get('PBRVERSION', 'pbr') +REPODIR = os.environ.get('REPODIR', '') +WHEELHOUSE = os.environ.get('WHEELHOUSE', '') +PIP_CMD = ['-m', 'pip'] + PIPFLAGS + ['install', '-f', WHEELHOUSE] +PROJECTS = shlex.split(os.environ.get('PROJECTS', '')) +PBR_ROOT = os.path.abspath(os.path.join(__file__, '..', '..', '..')) + + +def all_projects(): + if not REPODIR: + return + # Future: make this path parameterisable. + excludes = set(['tempest', 'requirements']) + for name in PROJECTS: + name = name.strip() + short_name = name.split('/')[-1] + try: + with open(os.path.join( + REPODIR, short_name, 'setup.py'), 'rt') as f: + if 'pbr' not in f.read(): + continue + except IOError: + continue + if short_name in excludes: + continue + yield (short_name, dict(name=name, short_name=short_name)) + + +class TestIntegration(base.BaseTestCase): + + scenarios = list(all_projects()) + + def setUp(self): + # Integration tests need a higher default - big repos can be slow to + # clone, particularly under guest load. 
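+        # (The fixture below only supplies a default; an OS_TEST_TIMEOUT
+        # already present in the environment, e.g. OS_TEST_TIMEOUT=1200,
+        # still takes precedence.)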
+ env = fixtures.EnvironmentVariable( + 'OS_TEST_TIMEOUT', os.environ.get('OS_TEST_TIMEOUT', '600')) + with env: + super(TestIntegration, self).setUp() + base._config_git() + + @testtools.skipUnless( + os.environ.get('PBR_INTEGRATION', None) == '1', + 'integration tests not enabled') + def test_integration(self): + # Test that we can: + # - run sdist from the repo in a venv + # - install the resulting tarball in a new venv + # - pip install the repo + # - pip install -e the repo + # We don't break these into separate tests because we'd need separate + # source dirs to isolate from side effects of running pip, and the + # overheads of setup would start to beat the benefits of parallelism. + self.useFixture(base.CapturedSubprocess( + 'sync-req', + ['python', 'update.py', os.path.join(REPODIR, self.short_name)], + cwd=os.path.join(REPODIR, 'requirements'))) + self.useFixture(base.CapturedSubprocess( + 'commit-requirements', + 'git diff --quiet || git commit -amrequirements', + cwd=os.path.join(REPODIR, self.short_name), shell=True)) + path = os.path.join( + self.useFixture(fixtures.TempDir()).path, 'project') + self.useFixture(base.CapturedSubprocess( + 'clone', + ['git', 'clone', os.path.join(REPODIR, self.short_name), path])) + venv = self.useFixture( + test_packaging.Venv('sdist', + modules=['pip', 'wheel', PBRVERSION], + pip_cmd=PIP_CMD)) + python = venv.python + self.useFixture(base.CapturedSubprocess( + 'sdist', [python, 'setup.py', 'sdist'], cwd=path)) + venv = self.useFixture( + test_packaging.Venv('tarball', + modules=['pip', 'wheel', PBRVERSION], + pip_cmd=PIP_CMD)) + python = venv.python + filename = os.path.join( + path, 'dist', os.listdir(os.path.join(path, 'dist'))[0]) + self.useFixture(base.CapturedSubprocess( + 'tarball', [python] + PIP_CMD + [filename])) + venv = self.useFixture( + test_packaging.Venv('install-git', + modules=['pip', 'wheel', PBRVERSION], + pip_cmd=PIP_CMD)) + root = venv.path + python = venv.python + self.useFixture(base.CapturedSubprocess( + 'install-git', [python] + PIP_CMD + ['git+file://' + path])) + if self.short_name == 'nova': + found = False + for _, _, filenames in os.walk(root): + if 'migrate.cfg' in filenames: + found = True + self.assertTrue(found) + venv = self.useFixture( + test_packaging.Venv('install-e', + modules=['pip', 'wheel', PBRVERSION], + pip_cmd=PIP_CMD)) + root = venv.path + python = venv.python + self.useFixture(base.CapturedSubprocess( + 'install-e', [python] + PIP_CMD + ['-e', path])) + + +class TestInstallWithoutPbr(base.BaseTestCase): + + @testtools.skipUnless( + os.environ.get('PBR_INTEGRATION', None) == '1', + 'integration tests not enabled') + def test_install_without_pbr(self): + # Test easy-install of a thing that depends on a thing using pbr + tempdir = self.useFixture(fixtures.TempDir()).path + # A directory containing sdists of the things we're going to depend on + # in using-package. 
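+        # pbr itself is built into this directory first, so the test packages
+        # below can resolve it via the find_links index configured in
+        # setup.cfg rather than from PyPI.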
+ dist_dir = os.path.join(tempdir, 'distdir') + os.mkdir(dist_dir) + self._run_cmd(sys.executable, ('setup.py', 'sdist', '-d', dist_dir), + allow_fail=False, cwd=PBR_ROOT) + # testpkg - this requires a pbr-using package + test_pkg_dir = os.path.join(tempdir, 'testpkg') + os.mkdir(test_pkg_dir) + pkgs = { + 'pkgTest': { + 'setup.py': textwrap.dedent("""\ + #!/usr/bin/env python + import setuptools + setuptools.setup( + name = 'pkgTest', + tests_require = ['pkgReq'], + test_suite='pkgReq' + ) + """), + 'setup.cfg': textwrap.dedent("""\ + [easy_install] + find_links = %s + """ % dist_dir)}, + 'pkgReq': { + 'requirements.txt': textwrap.dedent("""\ + pbr + """), + 'pkgReq/__init__.py': textwrap.dedent("""\ + print("FakeTest loaded and ran") + """)}, + } + pkg_dirs = self.useFixture( + test_packaging.CreatePackages(pkgs)).package_dirs + test_pkg_dir = pkg_dirs['pkgTest'] + req_pkg_dir = pkg_dirs['pkgReq'] + + self._run_cmd(sys.executable, ('setup.py', 'sdist', '-d', dist_dir), + allow_fail=False, cwd=req_pkg_dir) + # A venv to test within + venv = self.useFixture(test_packaging.Venv('nopbr', ['pip', 'wheel'])) + python = venv.python + # Run the depending script + self.useFixture(base.CapturedSubprocess( + 'nopbr', [python] + ['setup.py', 'test'], cwd=test_pkg_dir)) + + +class TestMarkersPip(base.BaseTestCase): + + scenarios = [ + ('pip-1.5', {'modules': ['pip>=1.5,<1.6']}), + ('pip-6.0', {'modules': ['pip>=6.0,<6.1']}), + ('pip-latest', {'modules': ['pip']}), + ('setuptools-EL7', {'modules': ['pip==1.4.1', 'setuptools==0.9.8']}), + ('setuptools-Trusty', {'modules': ['pip==1.5', 'setuptools==2.2']}), + ('setuptools-minimum', {'modules': ['pip==1.5', 'setuptools==0.7.2']}), + ] + + @testtools.skipUnless( + os.environ.get('PBR_INTEGRATION', None) == '1', + 'integration tests not enabled') + def test_pip_versions(self): + pkgs = { + 'test_markers': + {'requirements.txt': textwrap.dedent("""\ + pkg_a; python_version=='1.2' + pkg_b; python_version!='1.2' + """)}, + 'pkg_a': {}, + 'pkg_b': {}, + } + pkg_dirs = self.useFixture( + test_packaging.CreatePackages(pkgs)).package_dirs + temp_dir = self.useFixture(fixtures.TempDir()).path + repo_dir = os.path.join(temp_dir, 'repo') + venv = self.useFixture(test_packaging.Venv('markers')) + bin_python = venv.python + os.mkdir(repo_dir) + for module in self.modules: + self._run_cmd( + bin_python, + ['-m', 'pip', 'install', '--upgrade', module], + cwd=venv.path, allow_fail=False) + for pkg in pkg_dirs: + self._run_cmd( + bin_python, ['setup.py', 'sdist', '-d', repo_dir], + cwd=pkg_dirs[pkg], allow_fail=False) + self._run_cmd( + bin_python, + ['-m', 'pip', 'install', '--no-index', '-f', repo_dir, + 'test_markers'], + cwd=venv.path, allow_fail=False) + self.assertIn('pkg-b', self._run_cmd( + bin_python, ['-m', 'pip', 'freeze'], cwd=venv.path, + allow_fail=False)[0]) + + +class TestLTSSupport(base.BaseTestCase): + + # These versions come from the versions installed from the 'virtualenv' + # command from the 'python-virtualenv' package. + scenarios = [ + ('EL7', {'modules': ['pip==1.4.1', 'setuptools==0.9.8'], + 'py3support': True}), # And EPEL6 + ('Trusty', {'modules': ['pip==1.5', 'setuptools==2.2'], + 'py3support': True}), + ('Jessie', {'modules': ['pip==1.5.6', 'setuptools==5.5.1'], + 'py3support': True}), + # Wheezy has pip1.1, which cannot be called with '-m pip' + # So we'll use a different version of pip here. 
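+        # (pip 1.4.1 stands in for Wheezy's pip 1.1 below.)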
+ ('WheezyPrecise', {'modules': ['pip==1.4.1', 'setuptools==0.6c11'], + 'py3support': False}) + ] + + @testtools.skipUnless( + os.environ.get('PBR_INTEGRATION', None) == '1', + 'integration tests not enabled') + def test_lts_venv_default_versions(self): + if (sys.version_info[0] == 3 and not self.py3support): + self.skipTest('This combination will not install with py3, ' + 'skipping test') + venv = self.useFixture( + test_packaging.Venv('setuptools', modules=self.modules)) + bin_python = venv.python + pbr = 'file://%s#egg=pbr' % PBR_ROOT + # Installing PBR is a reasonable indication that we are not broken on + # this particular combination of setuptools and pip. + self._run_cmd(bin_python, ['-m', 'pip', 'install', pbr], + cwd=venv.path, allow_fail=False) diff --git a/venv/Lib/site-packages/pbr/tests/test_packaging.py b/venv/Lib/site-packages/pbr/tests/test_packaging.py new file mode 100644 index 00000000..07be5477 --- /dev/null +++ b/venv/Lib/site-packages/pbr/tests/test_packaging.py @@ -0,0 +1,1113 @@ +# Copyright (c) 2013 New Dream Network, LLC (DreamHost) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Copyright (C) 2013 Association of Universities for Research in Astronomy +# (AURA) +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# 3. The name of AURA and its representatives may not be used to +# endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED +# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS + +import email +import email.errors +import imp +import os +import re +import sysconfig +import tempfile +import textwrap + +import fixtures +import mock +import pkg_resources +import six +import testscenarios +import testtools +from testtools import matchers +import virtualenv +from wheel import wheelfile + +from pbr import git +from pbr import packaging +from pbr.tests import base + + +PBR_ROOT = os.path.abspath(os.path.join(__file__, '..', '..', '..')) + + +class TestRepo(fixtures.Fixture): + """A git repo for testing with. 
+ + Use of TempHomeDir with this fixture is strongly recommended as due to the + lack of config --local in older gits, it will write to the users global + configuration without TempHomeDir. + """ + + def __init__(self, basedir): + super(TestRepo, self).__init__() + self._basedir = basedir + + def setUp(self): + super(TestRepo, self).setUp() + base._run_cmd(['git', 'init', '.'], self._basedir) + base._config_git() + base._run_cmd(['git', 'add', '.'], self._basedir) + + def commit(self, message_content='test commit'): + files = len(os.listdir(self._basedir)) + path = self._basedir + '/%d' % files + open(path, 'wt').close() + base._run_cmd(['git', 'add', path], self._basedir) + base._run_cmd(['git', 'commit', '-m', message_content], self._basedir) + + def uncommit(self): + base._run_cmd(['git', 'reset', '--hard', 'HEAD^'], self._basedir) + + def tag(self, version): + base._run_cmd( + ['git', 'tag', '-sm', 'test tag', version], self._basedir) + + +class GPGKeyFixture(fixtures.Fixture): + """Creates a GPG key for testing. + + It's recommended that this be used in concert with a unique home + directory. + """ + + def setUp(self): + super(GPGKeyFixture, self).setUp() + tempdir = self.useFixture(fixtures.TempDir()) + gnupg_version_re = re.compile(r'^gpg\s.*\s([\d+])\.([\d+])\.([\d+])') + gnupg_version = base._run_cmd(['gpg', '--version'], tempdir.path) + for line in gnupg_version[0].split('\n'): + gnupg_version = gnupg_version_re.match(line) + if gnupg_version: + gnupg_version = (int(gnupg_version.group(1)), + int(gnupg_version.group(2)), + int(gnupg_version.group(3))) + break + else: + if gnupg_version is None: + gnupg_version = (0, 0, 0) + + config_file = os.path.join(tempdir.path, 'key-config') + with open(config_file, 'wt') as f: + if gnupg_version[0] == 2 and gnupg_version[1] >= 1: + f.write(""" + %no-protection + %transient-key + """) + f.write(""" + %no-ask-passphrase + Key-Type: RSA + Name-Real: Example Key + Name-Comment: N/A + Name-Email: example@example.com + Expire-Date: 2d + Preferences: (setpref) + %commit + """) + + # Note that --quick-random (--debug-quick-random in GnuPG 2.x) + # does not have a corresponding preferences file setting and + # must be passed explicitly on the command line instead + if gnupg_version[0] == 1: + gnupg_random = '--quick-random' + elif gnupg_version[0] >= 2: + gnupg_random = '--debug-quick-random' + else: + gnupg_random = '' + + base._run_cmd( + ['gpg', '--gen-key', '--batch', gnupg_random, config_file], + tempdir.path) + + +class Venv(fixtures.Fixture): + """Create a virtual environment for testing with. + + :attr path: The path to the environment root. + :attr python: The path to the python binary in the environment. + """ + + def __init__(self, reason, modules=(), pip_cmd=None): + """Create a Venv fixture. + + :param reason: A human readable string to bake into the venv + file path to aid diagnostics in the case of failures. + :param modules: A list of modules to install, defaults to latest + pip, wheel, and the working copy of PBR. + :attr pip_cmd: A list to override the default pip_cmd passed to + python for installing base packages. 
+ """ + self._reason = reason + if modules == (): + pbr = 'file://%s#egg=pbr' % PBR_ROOT + modules = ['pip', 'wheel', pbr] + self.modules = modules + if pip_cmd is None: + self.pip_cmd = ['-m', 'pip', 'install'] + else: + self.pip_cmd = pip_cmd + + def _setUp(self): + path = self.useFixture(fixtures.TempDir()).path + virtualenv.create_environment(path, clear=True) + python = os.path.join(path, 'bin', 'python') + command = [python] + self.pip_cmd + ['-U'] + if self.modules and len(self.modules) > 0: + command.extend(self.modules) + self.useFixture(base.CapturedSubprocess( + 'mkvenv-' + self._reason, command)) + self.addCleanup(delattr, self, 'path') + self.addCleanup(delattr, self, 'python') + self.path = path + self.python = python + return path, python + + +class CreatePackages(fixtures.Fixture): + """Creates packages from dict with defaults + + :param package_dirs: A dict of package name to directory strings + {'pkg_a': '/tmp/path/to/tmp/pkg_a', 'pkg_b': '/tmp/path/to/tmp/pkg_b'} + """ + + defaults = { + 'setup.py': textwrap.dedent(six.u("""\ + #!/usr/bin/env python + import setuptools + setuptools.setup( + setup_requires=['pbr'], + pbr=True, + ) + """)), + 'setup.cfg': textwrap.dedent(six.u("""\ + [metadata] + name = {pkg_name} + """)) + } + + def __init__(self, packages): + """Creates packages from dict with defaults + + :param packages: a dict where the keys are the package name and a + value that is a second dict that may be empty, containing keys of + filenames and a string value of the contents. + {'package-a': {'requirements.txt': 'string', 'setup.cfg': 'string'} + """ + self.packages = packages + + def _writeFile(self, directory, file_name, contents): + path = os.path.abspath(os.path.join(directory, file_name)) + path_dir = os.path.dirname(path) + if not os.path.exists(path_dir): + if path_dir.startswith(directory): + os.makedirs(path_dir) + else: + raise ValueError + with open(path, 'wt') as f: + f.write(contents) + + def _setUp(self): + tmpdir = self.useFixture(fixtures.TempDir()).path + package_dirs = {} + for pkg_name in self.packages: + pkg_path = os.path.join(tmpdir, pkg_name) + package_dirs[pkg_name] = pkg_path + os.mkdir(pkg_path) + for cf in ['setup.py', 'setup.cfg']: + if cf in self.packages[pkg_name]: + contents = self.packages[pkg_name].pop(cf) + else: + contents = self.defaults[cf].format(pkg_name=pkg_name) + self._writeFile(pkg_path, cf, contents) + + for cf in self.packages[pkg_name]: + self._writeFile(pkg_path, cf, self.packages[pkg_name][cf]) + self.useFixture(TestRepo(pkg_path)).commit() + self.addCleanup(delattr, self, 'package_dirs') + self.package_dirs = package_dirs + return package_dirs + + +class TestPackagingInGitRepoWithCommit(base.BaseTestCase): + + scenarios = [ + ('preversioned', dict(preversioned=True)), + ('postversioned', dict(preversioned=False)), + ] + + def setUp(self): + super(TestPackagingInGitRepoWithCommit, self).setUp() + self.repo = self.useFixture(TestRepo(self.package_dir)) + self.repo.commit() + + def test_authors(self): + self.run_setup('sdist', allow_fail=False) + # One commit, something should be in the authors list + with open(os.path.join(self.package_dir, 'AUTHORS'), 'r') as f: + body = f.read() + self.assertNotEqual(body, '') + + def test_changelog(self): + self.run_setup('sdist', allow_fail=False) + with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f: + body = f.read() + # One commit, something should be in the ChangeLog list + self.assertNotEqual(body, '') + + def test_changelog_handles_astrisk(self): + 
self.repo.commit(message_content="Allow *.openstack.org to work") + self.run_setup('sdist', allow_fail=False) + with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f: + body = f.read() + self.assertIn(r'\*', body) + + def test_changelog_handles_dead_links_in_commit(self): + self.repo.commit(message_content="See os_ for to_do about qemu_.") + self.run_setup('sdist', allow_fail=False) + with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f: + body = f.read() + self.assertIn(r'os\_', body) + self.assertIn(r'to\_do', body) + self.assertIn(r'qemu\_', body) + + def test_changelog_handles_backticks(self): + self.repo.commit(message_content="Allow `openstack.org` to `work") + self.run_setup('sdist', allow_fail=False) + with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f: + body = f.read() + self.assertIn(r'\`', body) + + def test_manifest_exclude_honoured(self): + self.run_setup('sdist', allow_fail=False) + with open(os.path.join( + self.package_dir, + 'pbr_testpackage.egg-info/SOURCES.txt'), 'r') as f: + body = f.read() + self.assertThat( + body, matchers.Not(matchers.Contains('pbr_testpackage/extra.py'))) + self.assertThat(body, matchers.Contains('pbr_testpackage/__init__.py')) + + def test_install_writes_changelog(self): + stdout, _, _ = self.run_setup( + 'install', '--root', self.temp_dir + 'installed', + allow_fail=False) + self.expectThat(stdout, matchers.Contains('Generating ChangeLog')) + + +class TestExtrafileInstallation(base.BaseTestCase): + def test_install_glob(self): + stdout, _, _ = self.run_setup( + 'install', '--root', self.temp_dir + 'installed', + allow_fail=False) + self.expectThat( + stdout, matchers.Contains('copying data_files/a.txt')) + self.expectThat( + stdout, matchers.Contains('copying data_files/b.txt')) + + +class TestPackagingInGitRepoWithoutCommit(base.BaseTestCase): + + def setUp(self): + super(TestPackagingInGitRepoWithoutCommit, self).setUp() + self.useFixture(TestRepo(self.package_dir)) + self.run_setup('sdist', allow_fail=False) + + def test_authors(self): + # No commits, no authors in list + with open(os.path.join(self.package_dir, 'AUTHORS'), 'r') as f: + body = f.read() + self.assertEqual('\n', body) + + def test_changelog(self): + # No commits, nothing should be in the ChangeLog list + with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f: + body = f.read() + self.assertEqual('CHANGES\n=======\n\n', body) + + +class TestPackagingWheels(base.BaseTestCase): + + def setUp(self): + super(TestPackagingWheels, self).setUp() + self.useFixture(TestRepo(self.package_dir)) + # Build the wheel + self.run_setup('bdist_wheel', allow_fail=False) + # Slowly construct the path to the generated whl + dist_dir = os.path.join(self.package_dir, 'dist') + relative_wheel_filename = os.listdir(dist_dir)[0] + absolute_wheel_filename = os.path.join( + dist_dir, relative_wheel_filename) + wheel_file = wheelfile.WheelFile(absolute_wheel_filename) + wheel_name = wheel_file.parsed_filename.group('namever') + # Create a directory path to unpack the wheel to + self.extracted_wheel_dir = os.path.join(dist_dir, wheel_name) + # Extract the wheel contents to the directory we just created + wheel_file.extractall(self.extracted_wheel_dir) + wheel_file.close() + + def test_data_directory_has_wsgi_scripts(self): + # Build the path to the scripts directory + scripts_dir = os.path.join( + self.extracted_wheel_dir, 'pbr_testpackage-0.0.data/scripts') + self.assertTrue(os.path.exists(scripts_dir)) + scripts = os.listdir(scripts_dir) + + 
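+        # Only the wsgi entry points should be materialised as real scripts
+        # in the wheel's data directory.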
self.assertIn('pbr_test_wsgi', scripts) + self.assertIn('pbr_test_wsgi_with_class', scripts) + self.assertNotIn('pbr_test_cmd', scripts) + self.assertNotIn('pbr_test_cmd_with_class', scripts) + + def test_generates_c_extensions(self): + built_package_dir = os.path.join( + self.extracted_wheel_dir, 'pbr_testpackage') + static_object_filename = 'testext.so' + soabi = get_soabi() + if soabi: + static_object_filename = 'testext.{0}.so'.format(soabi) + static_object_path = os.path.join( + built_package_dir, static_object_filename) + + self.assertTrue(os.path.exists(built_package_dir)) + self.assertTrue(os.path.exists(static_object_path)) + + +class TestPackagingHelpers(testtools.TestCase): + + def test_generate_script(self): + group = 'console_scripts' + entry_point = pkg_resources.EntryPoint( + name='test-ep', + module_name='pbr.packaging', + attrs=('LocalInstallScripts',)) + header = '#!/usr/bin/env fake-header\n' + template = ('%(group)s %(module_name)s %(import_target)s ' + '%(invoke_target)s') + + generated_script = packaging.generate_script( + group, entry_point, header, template) + + expected_script = ( + '#!/usr/bin/env fake-header\nconsole_scripts pbr.packaging ' + 'LocalInstallScripts LocalInstallScripts' + ) + self.assertEqual(expected_script, generated_script) + + def test_generate_script_validates_expectations(self): + group = 'console_scripts' + entry_point = pkg_resources.EntryPoint( + name='test-ep', + module_name='pbr.packaging') + header = '#!/usr/bin/env fake-header\n' + template = ('%(group)s %(module_name)s %(import_target)s ' + '%(invoke_target)s') + self.assertRaises( + ValueError, packaging.generate_script, group, entry_point, header, + template) + + entry_point = pkg_resources.EntryPoint( + name='test-ep', + module_name='pbr.packaging', + attrs=('attr1', 'attr2', 'attr3')) + self.assertRaises( + ValueError, packaging.generate_script, group, entry_point, header, + template) + + +class TestPackagingInPlainDirectory(base.BaseTestCase): + + def setUp(self): + super(TestPackagingInPlainDirectory, self).setUp() + + def test_authors(self): + self.run_setup('sdist', allow_fail=False) + # Not a git repo, no AUTHORS file created + filename = os.path.join(self.package_dir, 'AUTHORS') + self.assertFalse(os.path.exists(filename)) + + def test_changelog(self): + self.run_setup('sdist', allow_fail=False) + # Not a git repo, no ChangeLog created + filename = os.path.join(self.package_dir, 'ChangeLog') + self.assertFalse(os.path.exists(filename)) + + def test_install_no_ChangeLog(self): + stdout, _, _ = self.run_setup( + 'install', '--root', self.temp_dir + 'installed', + allow_fail=False) + self.expectThat( + stdout, matchers.Not(matchers.Contains('Generating ChangeLog'))) + + +class TestPresenceOfGit(base.BaseTestCase): + + def testGitIsInstalled(self): + with mock.patch.object(git, + '_run_shell_command') as _command: + _command.return_value = 'git version 1.8.4.1' + self.assertEqual(True, git._git_is_installed()) + + def testGitIsNotInstalled(self): + with mock.patch.object(git, + '_run_shell_command') as _command: + _command.side_effect = OSError + self.assertEqual(False, git._git_is_installed()) + + +class ParseRequirementsTest(base.BaseTestCase): + + def test_empty_requirements(self): + actual = packaging.parse_requirements([]) + self.assertEqual([], actual) + + def test_default_requirements(self): + """Ensure default files used if no files provided.""" + tempdir = tempfile.mkdtemp() + requirements = os.path.join(tempdir, 'requirements.txt') + with open(requirements, 'w') as f: + 
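+            # A single requirement is enough to prove the default file was
+            # actually read.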
f.write('pbr') + # the defaults are relative to where pbr is called from so we need to + # override them. This is OK, however, as we want to validate that + # defaults are used - not what those defaults are + with mock.patch.object(packaging, 'REQUIREMENTS_FILES', ( + requirements,)): + result = packaging.parse_requirements() + self.assertEqual(['pbr'], result) + + def test_override_with_env(self): + """Ensure environment variable used if no files provided.""" + _, tmp_file = tempfile.mkstemp(prefix='openstack', suffix='.setup') + with open(tmp_file, 'w') as fh: + fh.write("foo\nbar") + self.useFixture( + fixtures.EnvironmentVariable('PBR_REQUIREMENTS_FILES', tmp_file)) + self.assertEqual(['foo', 'bar'], + packaging.parse_requirements()) + + def test_override_with_env_multiple_files(self): + _, tmp_file = tempfile.mkstemp(prefix='openstack', suffix='.setup') + with open(tmp_file, 'w') as fh: + fh.write("foo\nbar") + self.useFixture( + fixtures.EnvironmentVariable('PBR_REQUIREMENTS_FILES', + "no-such-file," + tmp_file)) + self.assertEqual(['foo', 'bar'], + packaging.parse_requirements()) + + def test_index_present(self): + tempdir = tempfile.mkdtemp() + requirements = os.path.join(tempdir, 'requirements.txt') + with open(requirements, 'w') as f: + f.write('-i https://myindex.local\n') + f.write(' --index-url https://myindex.local\n') + f.write(' --extra-index-url https://myindex.local\n') + f.write('--find-links https://myindex.local\n') + f.write('arequirement>=1.0\n') + result = packaging.parse_requirements([requirements]) + self.assertEqual(['arequirement>=1.0'], result) + + def test_nested_requirements(self): + tempdir = tempfile.mkdtemp() + requirements = os.path.join(tempdir, 'requirements.txt') + nested = os.path.join(tempdir, 'nested.txt') + with open(requirements, 'w') as f: + f.write('-r ' + nested) + with open(nested, 'w') as f: + f.write('pbr') + result = packaging.parse_requirements([requirements]) + self.assertEqual(['pbr'], result) + + +class ParseRequirementsTestScenarios(base.BaseTestCase): + + versioned_scenarios = [ + ('non-versioned', {'versioned': False, 'expected': ['bar']}), + ('versioned', {'versioned': True, 'expected': ['bar>=1.2.3']}) + ] + + subdirectory_scenarios = [ + ('non-subdirectory', {'has_subdirectory': False}), + ('has-subdirectory', {'has_subdirectory': True}) + ] + + scenarios = [ + ('normal', {'url': "foo\nbar", 'expected': ['foo', 'bar']}), + ('normal_with_comments', { + 'url': "# this is a comment\nfoo\n# and another one\nbar", + 'expected': ['foo', 'bar']}), + ('removes_index_lines', {'url': '-f foobar', 'expected': []}), + ] + + scenarios = scenarios + testscenarios.multiply_scenarios([ + ('ssh_egg_url', {'url': 'git+ssh://foo.com/zipball#egg=bar'}), + ('git_https_egg_url', {'url': 'git+https://foo.com/zipball#egg=bar'}), + ('http_egg_url', {'url': 'https://foo.com/zipball#egg=bar'}), + ], versioned_scenarios, subdirectory_scenarios) + + scenarios = scenarios + testscenarios.multiply_scenarios( + [ + ('git_egg_url', + {'url': 'git://foo.com/zipball#egg=bar', 'name': 'bar'}) + ], [ + ('non-editable', {'editable': False}), + ('editable', {'editable': True}), + ], + versioned_scenarios, subdirectory_scenarios) + + def test_parse_requirements(self): + tmp_file = tempfile.NamedTemporaryFile() + req_string = self.url + if hasattr(self, 'editable') and self.editable: + req_string = ("-e %s" % req_string) + if hasattr(self, 'versioned') and self.versioned: + req_string = ("%s-1.2.3" % req_string) + if hasattr(self, 'has_subdirectory') and 
self.has_subdirectory: + req_string = ("%s&subdirectory=baz" % req_string) + with open(tmp_file.name, 'w') as fh: + fh.write(req_string) + self.assertEqual(self.expected, + packaging.parse_requirements([tmp_file.name])) + + +class ParseDependencyLinksTest(base.BaseTestCase): + + def setUp(self): + super(ParseDependencyLinksTest, self).setUp() + _, self.tmp_file = tempfile.mkstemp(prefix="openstack", + suffix=".setup") + + def test_parse_dependency_normal(self): + with open(self.tmp_file, "w") as fh: + fh.write("http://test.com\n") + self.assertEqual( + ["http://test.com"], + packaging.parse_dependency_links([self.tmp_file])) + + def test_parse_dependency_with_git_egg_url(self): + with open(self.tmp_file, "w") as fh: + fh.write("-e git://foo.com/zipball#egg=bar") + self.assertEqual( + ["git://foo.com/zipball#egg=bar"], + packaging.parse_dependency_links([self.tmp_file])) + + +class TestVersions(base.BaseTestCase): + + scenarios = [ + ('preversioned', dict(preversioned=True)), + ('postversioned', dict(preversioned=False)), + ] + + def setUp(self): + super(TestVersions, self).setUp() + self.repo = self.useFixture(TestRepo(self.package_dir)) + self.useFixture(GPGKeyFixture()) + self.useFixture(base.DiveDir(self.package_dir)) + + def test_email_parsing_errors_are_handled(self): + mocked_open = mock.mock_open() + with mock.patch('pbr.packaging.open', mocked_open): + with mock.patch('email.message_from_file') as message_from_file: + message_from_file.side_effect = [ + email.errors.MessageError('Test'), + {'Name': 'pbr_testpackage'}] + version = packaging._get_version_from_pkg_metadata( + 'pbr_testpackage') + + self.assertTrue(message_from_file.called) + self.assertIsNone(version) + + def test_capitalized_headers(self): + self.repo.commit() + self.repo.tag('1.2.3') + self.repo.commit('Sem-Ver: api-break') + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('2.0.0.dev1')) + + def test_capitalized_headers_partial(self): + self.repo.commit() + self.repo.tag('1.2.3') + self.repo.commit('Sem-ver: api-break') + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('2.0.0.dev1')) + + def test_tagged_version_has_tag_version(self): + self.repo.commit() + self.repo.tag('1.2.3') + version = packaging._get_version_from_git('1.2.3') + self.assertEqual('1.2.3', version) + + def test_tagged_version_with_semver_compliant_prerelease(self): + self.repo.commit() + self.repo.tag('1.2.3-rc2') + version = packaging._get_version_from_git() + self.assertEqual('1.2.3.0rc2', version) + + def test_non_canonical_tagged_version_bump(self): + self.repo.commit() + self.repo.tag('1.4') + self.repo.commit('Sem-Ver: api-break') + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('2.0.0.dev1')) + + def test_untagged_version_has_dev_version_postversion(self): + self.repo.commit() + self.repo.tag('1.2.3') + self.repo.commit() + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('1.2.4.dev1')) + + def test_untagged_pre_release_has_pre_dev_version_postversion(self): + self.repo.commit() + self.repo.tag('1.2.3.0a1') + self.repo.commit() + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('1.2.3.0a2.dev1')) + + def test_untagged_version_minor_bump(self): + self.repo.commit() + self.repo.tag('1.2.3') + self.repo.commit('sem-ver: deprecation') + version = packaging._get_version_from_git() + self.assertThat(version, 
matchers.StartsWith('1.3.0.dev1')) + + def test_untagged_version_major_bump(self): + self.repo.commit() + self.repo.tag('1.2.3') + self.repo.commit('sem-ver: api-break') + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('2.0.0.dev1')) + + def test_untagged_version_has_dev_version_preversion(self): + self.repo.commit() + self.repo.tag('1.2.3') + self.repo.commit() + version = packaging._get_version_from_git('1.2.5') + self.assertThat(version, matchers.StartsWith('1.2.5.dev1')) + + def test_untagged_version_after_pre_has_dev_version_preversion(self): + self.repo.commit() + self.repo.tag('1.2.3.0a1') + self.repo.commit() + version = packaging._get_version_from_git('1.2.5') + self.assertThat(version, matchers.StartsWith('1.2.5.dev1')) + + def test_untagged_version_after_rc_has_dev_version_preversion(self): + self.repo.commit() + self.repo.tag('1.2.3.0a1') + self.repo.commit() + version = packaging._get_version_from_git('1.2.3') + self.assertThat(version, matchers.StartsWith('1.2.3.0a2.dev1')) + + def test_untagged_version_after_semver_compliant_prerelease_tag(self): + self.repo.commit() + self.repo.tag('1.2.3-rc2') + self.repo.commit() + version = packaging._get_version_from_git() + self.assertEqual('1.2.3.0rc3.dev1', version) + + def test_preversion_too_low_simple(self): + # That is, the target version is either already released or not high + # enough for the semver requirements given api breaks etc. + self.repo.commit() + self.repo.tag('1.2.3') + self.repo.commit() + # Note that we can't target 1.2.3 anymore - with 1.2.3 released we + # need to be working on 1.2.4. + err = self.assertRaises( + ValueError, packaging._get_version_from_git, '1.2.3') + self.assertThat(err.args[0], matchers.StartsWith('git history')) + + def test_preversion_too_low_semver_headers(self): + # That is, the target version is either already released or not high + # enough for the semver requirements given api breaks etc. + self.repo.commit() + self.repo.tag('1.2.3') + self.repo.commit('sem-ver: feature') + # Note that we can't target 1.2.4, the feature header means we need + # to be working on 1.3.0 or above. + err = self.assertRaises( + ValueError, packaging._get_version_from_git, '1.2.4') + self.assertThat(err.args[0], matchers.StartsWith('git history')) + + def test_get_kwargs_corner_cases(self): + # No tags: + + def get_kwargs(tag): + git_dir = self.repo._basedir + '/.git' + return packaging._get_increment_kwargs(git_dir, tag) + + def _check_combinations(tag): + self.repo.commit() + self.assertEqual(dict(), get_kwargs(tag)) + self.repo.commit('sem-ver: bugfix') + self.assertEqual(dict(), get_kwargs(tag)) + self.repo.commit('sem-ver: feature') + self.assertEqual(dict(minor=True), get_kwargs(tag)) + self.repo.uncommit() + self.repo.commit('sem-ver: deprecation') + self.assertEqual(dict(minor=True), get_kwargs(tag)) + self.repo.uncommit() + self.repo.commit('sem-ver: api-break') + self.assertEqual(dict(major=True), get_kwargs(tag)) + self.repo.commit('sem-ver: deprecation') + self.assertEqual(dict(major=True, minor=True), get_kwargs(tag)) + _check_combinations('') + self.repo.tag('1.2.3') + _check_combinations('1.2.3') + + def test_invalid_tag_ignored(self): + # Fix for bug 1356784 - we treated any tag as a version, not just those + # that are valid versions. 
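+        # e.g. the 'badver' tags below must be skipped, with the version
+        # derived from the last valid tag instead.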
+ self.repo.commit() + self.repo.tag('1') + self.repo.commit() + # when the tree is tagged and its wrong: + self.repo.tag('badver') + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('1.0.1.dev1')) + # When the tree isn't tagged, we also fall through. + self.repo.commit() + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('1.0.1.dev2')) + # We don't fall through x.y versions + self.repo.commit() + self.repo.tag('1.2') + self.repo.commit() + self.repo.tag('badver2') + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('1.2.1.dev1')) + # Or x.y.z versions + self.repo.commit() + self.repo.tag('1.2.3') + self.repo.commit() + self.repo.tag('badver3') + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('1.2.4.dev1')) + # Or alpha/beta/pre versions + self.repo.commit() + self.repo.tag('1.2.4.0a1') + self.repo.commit() + self.repo.tag('badver4') + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('1.2.4.0a2.dev1')) + # Non-release related tags are ignored. + self.repo.commit() + self.repo.tag('2') + self.repo.commit() + self.repo.tag('non-release-tag/2014.12.16-1') + version = packaging._get_version_from_git() + self.assertThat(version, matchers.StartsWith('2.0.1.dev1')) + + def test_valid_tag_honoured(self): + # Fix for bug 1370608 - we converted any target into a 'dev version' + # even if there was a distance of 0 - indicating that we were on the + # tag itself. + self.repo.commit() + self.repo.tag('1.3.0.0a1') + version = packaging._get_version_from_git() + self.assertEqual('1.3.0.0a1', version) + + def test_skip_write_git_changelog(self): + # Fix for bug 1467440 + self.repo.commit() + self.repo.tag('1.2.3') + os.environ['SKIP_WRITE_GIT_CHANGELOG'] = '1' + version = packaging._get_version_from_git('1.2.3') + self.assertEqual('1.2.3', version) + + def tearDown(self): + super(TestVersions, self).tearDown() + os.environ.pop('SKIP_WRITE_GIT_CHANGELOG', None) + + +class TestRequirementParsing(base.BaseTestCase): + + def test_requirement_parsing(self): + pkgs = { + 'test_reqparse': + { + 'requirements.txt': textwrap.dedent("""\ + bar + quux<1.0; python_version=='2.6' + requests-aws>=0.1.4 # BSD License (3 clause) + Routes>=1.12.3,!=2.0,!=2.1;python_version=='2.7' + requests-kerberos>=0.6;python_version=='2.7' # MIT + """), + 'setup.cfg': textwrap.dedent("""\ + [metadata] + name = test_reqparse + + [extras] + test = + foo + baz>3.2 :python_version=='2.7' # MIT + bar>3.3 :python_version=='2.7' # MIT # Apache + """)}, + } + pkg_dirs = self.useFixture(CreatePackages(pkgs)).package_dirs + pkg_dir = pkg_dirs['test_reqparse'] + # pkg_resources.split_sections uses None as the title of an + # anonymous section instead of the empty string. Weird. 
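+        # Hence the None key below for the unconditional requirements.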
+ expected_requirements = { + None: ['bar', 'requests-aws>=0.1.4'], + ":(python_version=='2.6')": ['quux<1.0'], + ":(python_version=='2.7')": ['Routes!=2.0,!=2.1,>=1.12.3', + 'requests-kerberos>=0.6'], + 'test': ['foo'], + "test:(python_version=='2.7')": ['baz>3.2', 'bar>3.3'] + } + venv = self.useFixture(Venv('reqParse')) + bin_python = venv.python + # Two things are tested by this + # 1) pbr properly parses markers from requiremnts.txt and setup.cfg + # 2) bdist_wheel causes pbr to not evaluate markers + self._run_cmd(bin_python, ('setup.py', 'bdist_wheel'), + allow_fail=False, cwd=pkg_dir) + egg_info = os.path.join(pkg_dir, 'test_reqparse.egg-info') + + requires_txt = os.path.join(egg_info, 'requires.txt') + with open(requires_txt, 'rt') as requires: + generated_requirements = dict( + pkg_resources.split_sections(requires)) + + # NOTE(dhellmann): We have to spell out the comparison because + # the rendering for version specifiers in a range is not + # consistent across versions of setuptools. + + for section, expected in expected_requirements.items(): + exp_parsed = [ + pkg_resources.Requirement.parse(s) + for s in expected + ] + gen_parsed = [ + pkg_resources.Requirement.parse(s) + for s in generated_requirements[section] + ] + self.assertEqual(exp_parsed, gen_parsed) + + +class TestRepositoryURLDependencies(base.BaseTestCase): + + def setUp(self): + super(TestRepositoryURLDependencies, self).setUp() + self.requirements = os.path.join(tempfile.mkdtemp(), + 'requirements.txt') + with open(self.requirements, 'w') as f: + f.write('\n'.join([ + '-e git+git://git.pro-ject.org/oslo.messaging#egg=oslo.messaging-1.0.0-rc', # noqa + '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize', # noqa + '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize-beta', # noqa + '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta', # noqa + '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-4.0.1', # noqa + '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-1.0.0-alpha.beta.1', # noqa + '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay', # noqa + '-e git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-2.0.0-rc.1+build.123', # noqa + '-e git+git://git.project.org/Proj#egg=Proj1', + 'git+https://git.project.org/Proj#egg=Proj2-0.0.1', + '-e git+ssh://git.project.org/Proj#egg=Proj3', + 'svn+svn://svn.project.org/svn/Proj#egg=Proj4-0.0.2', + '-e svn+http://svn.project.org/svn/Proj/trunk@2019#egg=Proj5', + 'hg+http://hg.project.org/Proj@da39a3ee5e6b#egg=Proj-0.0.3', + '-e hg+http://hg.project.org/Proj@2019#egg=Proj', + 'hg+http://hg.project.org/Proj@v1.0#egg=Proj-0.0.4', + '-e hg+http://hg.project.org/Proj@special_feature#egg=Proj', + 'git://foo.com/zipball#egg=foo-bar-1.2.4', + 'pypi-proj1', 'pypi-proj2'])) + + def test_egg_fragment(self): + expected = [ + 'django-thumborize', + 'django-thumborize-beta', + 'django-thumborize2-beta', + 'django-thumborize2-beta>=4.0.1', + 'django-thumborize2-beta>=1.0.0-alpha.beta.1', + 'django-thumborize2-beta>=1.0.0-alpha-a.b-c-long+build.1-aef.1-its-okay', # noqa + 'django-thumborize2-beta>=2.0.0-rc.1+build.123', + 'django-thumborize-beta>=0.0.4', + 'django-thumborize-beta>=1.2.3', + 'django-thumborize-beta>=10.20.30', + 'django-thumborize-beta>=1.1.2-prerelease+meta', + 'django-thumborize-beta>=1.1.2+meta', + 'django-thumborize-beta>=1.1.2+meta-valid', + 
'django-thumborize-beta>=1.0.0-alpha', + 'django-thumborize-beta>=1.0.0-beta', + 'django-thumborize-beta>=1.0.0-alpha.beta', + 'django-thumborize-beta>=1.0.0-alpha.beta.1', + 'django-thumborize-beta>=1.0.0-alpha.1', + 'django-thumborize-beta>=1.0.0-alpha0.valid', + 'django-thumborize-beta>=1.0.0-alpha.0valid', + 'django-thumborize-beta>=1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay', # noqa + 'django-thumborize-beta>=1.0.0-rc.1+build.1', + 'django-thumborize-beta>=2.0.0-rc.1+build.123', + 'django-thumborize-beta>=1.2.3-beta', + 'django-thumborize-beta>=10.2.3-DEV-SNAPSHOT', + 'django-thumborize-beta>=1.2.3-SNAPSHOT-123', + 'django-thumborize-beta>=1.0.0', + 'django-thumborize-beta>=2.0.0', + 'django-thumborize-beta>=1.1.7', + 'django-thumborize-beta>=2.0.0+build.1848', + 'django-thumborize-beta>=2.0.1-alpha.1227', + 'django-thumborize-beta>=1.0.0-alpha+beta', + 'django-thumborize-beta>=1.2.3----RC-SNAPSHOT.12.9.1--.12+788', + 'django-thumborize-beta>=1.2.3----R-S.12.9.1--.12+meta', + 'django-thumborize-beta>=1.2.3----RC-SNAPSHOT.12.9.1--.12', + 'django-thumborize-beta>=1.0.0+0.build.1-rc.10000aaa-kk-0.1', + 'django-thumborize-beta>=999999999999999999.99999999999999.9999999999999', # noqa + 'Proj1', + 'Proj2>=0.0.1', + 'Proj3', + 'Proj4>=0.0.2', + 'Proj5', + 'Proj>=0.0.3', + 'Proj', + 'Proj>=0.0.4', + 'Proj', + 'foo-bar>=1.2.4', + ] + tests = [ + 'egg=django-thumborize', + 'egg=django-thumborize-beta', + 'egg=django-thumborize2-beta', + 'egg=django-thumborize2-beta-4.0.1', + 'egg=django-thumborize2-beta-1.0.0-alpha.beta.1', + 'egg=django-thumborize2-beta-1.0.0-alpha-a.b-c-long+build.1-aef.1-its-okay', # noqa + 'egg=django-thumborize2-beta-2.0.0-rc.1+build.123', + 'egg=django-thumborize-beta-0.0.4', + 'egg=django-thumborize-beta-1.2.3', + 'egg=django-thumborize-beta-10.20.30', + 'egg=django-thumborize-beta-1.1.2-prerelease+meta', + 'egg=django-thumborize-beta-1.1.2+meta', + 'egg=django-thumborize-beta-1.1.2+meta-valid', + 'egg=django-thumborize-beta-1.0.0-alpha', + 'egg=django-thumborize-beta-1.0.0-beta', + 'egg=django-thumborize-beta-1.0.0-alpha.beta', + 'egg=django-thumborize-beta-1.0.0-alpha.beta.1', + 'egg=django-thumborize-beta-1.0.0-alpha.1', + 'egg=django-thumborize-beta-1.0.0-alpha0.valid', + 'egg=django-thumborize-beta-1.0.0-alpha.0valid', + 'egg=django-thumborize-beta-1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay', # noqa + 'egg=django-thumborize-beta-1.0.0-rc.1+build.1', + 'egg=django-thumborize-beta-2.0.0-rc.1+build.123', + 'egg=django-thumborize-beta-1.2.3-beta', + 'egg=django-thumborize-beta-10.2.3-DEV-SNAPSHOT', + 'egg=django-thumborize-beta-1.2.3-SNAPSHOT-123', + 'egg=django-thumborize-beta-1.0.0', + 'egg=django-thumborize-beta-2.0.0', + 'egg=django-thumborize-beta-1.1.7', + 'egg=django-thumborize-beta-2.0.0+build.1848', + 'egg=django-thumborize-beta-2.0.1-alpha.1227', + 'egg=django-thumborize-beta-1.0.0-alpha+beta', + 'egg=django-thumborize-beta-1.2.3----RC-SNAPSHOT.12.9.1--.12+788', # noqa + 'egg=django-thumborize-beta-1.2.3----R-S.12.9.1--.12+meta', + 'egg=django-thumborize-beta-1.2.3----RC-SNAPSHOT.12.9.1--.12', + 'egg=django-thumborize-beta-1.0.0+0.build.1-rc.10000aaa-kk-0.1', # noqa + 'egg=django-thumborize-beta-999999999999999999.99999999999999.9999999999999', # noqa + 'egg=Proj1', + 'egg=Proj2-0.0.1', + 'egg=Proj3', + 'egg=Proj4-0.0.2', + 'egg=Proj5', + 'egg=Proj-0.0.3', + 'egg=Proj', + 'egg=Proj-0.0.4', + 'egg=Proj', + 'egg=foo-bar-1.2.4', + ] + for index, test in enumerate(tests): + self.assertEqual(expected[index], + re.sub(r'egg=([^&]+).*$', + 
packaging.egg_fragment, + test)) + + def test_parse_repo_url_requirements(self): + result = packaging.parse_requirements([self.requirements]) + self.assertEqual(['oslo.messaging>=1.0.0-rc', + 'django-thumborize', + 'django-thumborize-beta', + 'django-thumborize2-beta', + 'django-thumborize2-beta>=4.0.1', + 'django-thumborize2-beta>=1.0.0-alpha.beta.1', + 'django-thumborize2-beta>=1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay', # noqa + 'django-thumborize2-beta>=2.0.0-rc.1+build.123', + 'Proj1', 'Proj2>=0.0.1', 'Proj3', + 'Proj4>=0.0.2', 'Proj5', 'Proj>=0.0.3', + 'Proj', 'Proj>=0.0.4', 'Proj', + 'foo-bar>=1.2.4', 'pypi-proj1', + 'pypi-proj2'], result) + + def test_parse_repo_url_dependency_links(self): + result = packaging.parse_dependency_links([self.requirements]) + self.assertEqual( + [ + 'git+git://git.pro-ject.org/oslo.messaging#egg=oslo.messaging-1.0.0-rc', # noqa + 'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize', # noqa + 'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize-beta', # noqa + 'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta', # noqa + 'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-4.0.1', # noqa + 'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-1.0.0-alpha.beta.1', # noqa + 'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay', # noqa + 'git+git://git.pro-ject.org/django-thumborize#egg=django-thumborize2-beta-2.0.0-rc.1+build.123', # noqa + 'git+git://git.project.org/Proj#egg=Proj1', + 'git+https://git.project.org/Proj#egg=Proj2-0.0.1', + 'git+ssh://git.project.org/Proj#egg=Proj3', + 'svn+svn://svn.project.org/svn/Proj#egg=Proj4-0.0.2', + 'svn+http://svn.project.org/svn/Proj/trunk@2019#egg=Proj5', + 'hg+http://hg.project.org/Proj@da39a3ee5e6b#egg=Proj-0.0.3', + 'hg+http://hg.project.org/Proj@2019#egg=Proj', + 'hg+http://hg.project.org/Proj@v1.0#egg=Proj-0.0.4', + 'hg+http://hg.project.org/Proj@special_feature#egg=Proj', + 'git://foo.com/zipball#egg=foo-bar-1.2.4'], result) + + +def get_soabi(): + soabi = None + try: + soabi = sysconfig.get_config_var('SOABI') + arch = sysconfig.get_config_var('MULTIARCH') + except IOError: + pass + if soabi and arch and 'pypy' in sysconfig.get_scheme_names(): + soabi = '%s-%s' % (soabi, arch) + if soabi is None and 'pypy' in sysconfig.get_scheme_names(): + # NOTE(sigmavirus24): PyPy only added support for the SOABI config var + # to sysconfig in 2015. That was well after 2.2.1 was published in the + # Ubuntu 14.04 archive. + for suffix, _, _ in imp.get_suffixes(): + if suffix.startswith('.pypy') and suffix.endswith('.so'): + soabi = suffix.split('.')[1] + break + return soabi diff --git a/venv/Lib/site-packages/pbr/tests/test_pbr_json.py b/venv/Lib/site-packages/pbr/tests/test_pbr_json.py new file mode 100644 index 00000000..f0669713 --- /dev/null +++ b/venv/Lib/site-packages/pbr/tests/test_pbr_json.py @@ -0,0 +1,30 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. + +import mock + +from pbr import pbr_json +from pbr.tests import base + + +class TestJsonContent(base.BaseTestCase): + @mock.patch('pbr.git._run_git_functions', return_value=True) + @mock.patch('pbr.git.get_git_short_sha', return_value="123456") + @mock.patch('pbr.git.get_is_release', return_value=True) + def test_content(self, mock_get_is, mock_get_git, mock_run): + cmd = mock.Mock() + pbr_json.write_pbr_json(cmd, "basename", "pbr.json") + cmd.write_file.assert_called_once_with( + 'pbr', + 'pbr.json', + '{"git_version": "123456", "is_release": true}' + ) diff --git a/venv/Lib/site-packages/pbr/tests/test_setup.py b/venv/Lib/site-packages/pbr/tests/test_setup.py new file mode 100644 index 00000000..d171d7c0 --- /dev/null +++ b/venv/Lib/site-packages/pbr/tests/test_setup.py @@ -0,0 +1,447 @@ +# Copyright (c) 2011 OpenStack Foundation +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from __future__ import print_function + +import os + +try: + import cStringIO as io + BytesIO = io.StringIO +except ImportError: + import io + BytesIO = io.BytesIO + +import fixtures + +from pbr import git +from pbr import options +from pbr import packaging +from pbr.tests import base + + +class SkipFileWrites(base.BaseTestCase): + + scenarios = [ + ('changelog_option_true', + dict(option_key='skip_changelog', option_value='True', + env_key='SKIP_WRITE_GIT_CHANGELOG', env_value=None, + pkg_func=git.write_git_changelog, filename='ChangeLog')), + ('changelog_option_false', + dict(option_key='skip_changelog', option_value='False', + env_key='SKIP_WRITE_GIT_CHANGELOG', env_value=None, + pkg_func=git.write_git_changelog, filename='ChangeLog')), + ('changelog_env_true', + dict(option_key='skip_changelog', option_value='False', + env_key='SKIP_WRITE_GIT_CHANGELOG', env_value='True', + pkg_func=git.write_git_changelog, filename='ChangeLog')), + ('changelog_both_true', + dict(option_key='skip_changelog', option_value='True', + env_key='SKIP_WRITE_GIT_CHANGELOG', env_value='True', + pkg_func=git.write_git_changelog, filename='ChangeLog')), + ('authors_option_true', + dict(option_key='skip_authors', option_value='True', + env_key='SKIP_GENERATE_AUTHORS', env_value=None, + pkg_func=git.generate_authors, filename='AUTHORS')), + ('authors_option_false', + dict(option_key='skip_authors', option_value='False', + env_key='SKIP_GENERATE_AUTHORS', env_value=None, + pkg_func=git.generate_authors, filename='AUTHORS')), + ('authors_env_true', + dict(option_key='skip_authors', option_value='False', + env_key='SKIP_GENERATE_AUTHORS', env_value='True', + pkg_func=git.generate_authors, filename='AUTHORS')), + ('authors_both_true', + dict(option_key='skip_authors', option_value='True', + env_key='SKIP_GENERATE_AUTHORS', env_value='True', + pkg_func=git.generate_authors, filename='AUTHORS')), + ] + + def setUp(self): + super(SkipFileWrites, 
self).setUp() + self.temp_path = self.useFixture(fixtures.TempDir()).path + self.root_dir = os.path.abspath(os.path.curdir) + self.git_dir = os.path.join(self.root_dir, ".git") + if not os.path.exists(self.git_dir): + self.skipTest("%s is missing; skipping git-related checks" + % self.git_dir) + return + self.filename = os.path.join(self.temp_path, self.filename) + self.option_dict = dict() + if self.option_key is not None: + self.option_dict[self.option_key] = ('setup.cfg', + self.option_value) + self.useFixture( + fixtures.EnvironmentVariable(self.env_key, self.env_value)) + + def test_skip(self): + self.pkg_func(git_dir=self.git_dir, + dest_dir=self.temp_path, + option_dict=self.option_dict) + self.assertEqual( + not os.path.exists(self.filename), + (self.option_value.lower() in options.TRUE_VALUES or + self.env_value is not None)) + + +_changelog_content = """7780758\x00Break parser\x00 (tag: refs/tags/1_foo.1) +04316fe\x00Make python\x00 (refs/heads/review/monty_taylor/27519) +378261a\x00Add an integration test script.\x00 +3c373ac\x00Merge "Lib\x00 (HEAD, tag: refs/tags/2013.2.rc2, tag: refs/tags/2013.2, refs/heads/mile-proposed) +182feb3\x00Fix pip invocation for old versions of pip.\x00 (tag: refs/tags/0.5.17) +fa4f46e\x00Remove explicit depend on distribute.\x00 (tag: refs/tags/0.5.16) +d1c53dd\x00Use pip instead of easy_install for installation.\x00 +a793ea1\x00Merge "Skip git-checkout related tests when .git is missing"\x00 +6c27ce7\x00Skip git-checkout related tests when .git is missing\x00 +451e513\x00Bug fix: create_stack() fails when waiting\x00 +4c8cfe4\x00Improve test coverage: network delete API\x00 (tag: refs/tags/(evil)) +d7e6167\x00Bug fix: Fix pass thru filtering in list_networks\x00 (tag: refs/tags/ev()il) +c47ec15\x00Consider 'in-use' a non-pending volume for caching\x00 (tag: refs/tags/ev)il) +8696fbd\x00Improve test coverage: private extension API\x00 (tag: refs/tags/ev(il) +f0440f8\x00Improve test coverage: hypervisor list\x00 (tag: refs/tags/e(vi)l) +04984a5\x00Refactor hooks file.\x00 (HEAD, tag: 0.6.7,b, tag: refs/tags/(12), refs/heads/master) +a65e8ee\x00Remove jinja pin.\x00 (tag: refs/tags/0.5.14, tag: refs/tags/0.5.13) +""" # noqa + + +def _make_old_git_changelog_format(line): + """Convert post-1.8.1 git log format to pre-1.8.1 git log format""" + + if not line.strip(): + return line + sha, msg, refname = line.split('\x00') + refname = refname.replace('tag: ', '') + return '\x00'.join((sha, msg, refname)) + + +_old_git_changelog_content = '\n'.join( + _make_old_git_changelog_format(line) + for line in _changelog_content.split('\n')) + + +class GitLogsTest(base.BaseTestCase): + + scenarios = [ + ('pre1.8.3', {'changelog': _old_git_changelog_content}), + ('post1.8.3', {'changelog': _changelog_content}), + ] + + def setUp(self): + super(GitLogsTest, self).setUp() + self.temp_path = self.useFixture(fixtures.TempDir()).path + self.root_dir = os.path.abspath(os.path.curdir) + self.git_dir = os.path.join(self.root_dir, ".git") + self.useFixture( + fixtures.EnvironmentVariable('SKIP_GENERATE_AUTHORS')) + self.useFixture( + fixtures.EnvironmentVariable('SKIP_WRITE_GIT_CHANGELOG')) + + def test_write_git_changelog(self): + self.useFixture(fixtures.FakePopen(lambda _: { + "stdout": BytesIO(self.changelog.encode('utf-8')) + })) + + git.write_git_changelog(git_dir=self.git_dir, + dest_dir=self.temp_path) + + with open(os.path.join(self.temp_path, "ChangeLog"), "r") as ch_fh: + changelog_contents = ch_fh.read() + self.assertIn("2013.2", changelog_contents) + 
self.assertIn("0.5.17", changelog_contents) + self.assertIn("------", changelog_contents) + self.assertIn("Refactor hooks file", changelog_contents) + self.assertIn( + r"Bug fix: create\_stack() fails when waiting", + changelog_contents) + self.assertNotIn("Refactor hooks file.", changelog_contents) + self.assertNotIn("182feb3", changelog_contents) + self.assertNotIn("review/monty_taylor/27519", changelog_contents) + self.assertNotIn("0.5.13", changelog_contents) + self.assertNotIn("0.6.7", changelog_contents) + self.assertNotIn("12", changelog_contents) + self.assertNotIn("(evil)", changelog_contents) + self.assertNotIn("ev()il", changelog_contents) + self.assertNotIn("ev(il", changelog_contents) + self.assertNotIn("ev)il", changelog_contents) + self.assertNotIn("e(vi)l", changelog_contents) + self.assertNotIn('Merge "', changelog_contents) + self.assertNotIn(r'1\_foo.1', changelog_contents) + + def test_generate_authors(self): + author_old = u"Foo Foo " + author_new = u"Bar Bar " + co_author = u"Foo Bar " + co_author_by = u"Co-authored-by: " + co_author + + git_log_cmd = ( + "git --git-dir=%s log --format=%%aN <%%aE>" + % self.git_dir) + git_co_log_cmd = ("git --git-dir=%s log" % self.git_dir) + git_top_level = "git rev-parse --show-toplevel" + cmd_map = { + git_log_cmd: author_new, + git_co_log_cmd: co_author_by, + git_top_level: self.root_dir, + } + + exist_files = [self.git_dir, + os.path.join(self.temp_path, "AUTHORS.in")] + self.useFixture(fixtures.MonkeyPatch( + "os.path.exists", + lambda path: os.path.abspath(path) in exist_files)) + + def _fake_run_shell_command(cmd, **kwargs): + return cmd_map[" ".join(cmd)] + + self.useFixture(fixtures.MonkeyPatch( + "pbr.git._run_shell_command", + _fake_run_shell_command)) + + with open(os.path.join(self.temp_path, "AUTHORS.in"), "w") as auth_fh: + auth_fh.write("%s\n" % author_old) + + git.generate_authors(git_dir=self.git_dir, + dest_dir=self.temp_path) + + with open(os.path.join(self.temp_path, "AUTHORS"), "r") as auth_fh: + authors = auth_fh.read() + self.assertTrue(author_old in authors) + self.assertTrue(author_new in authors) + self.assertTrue(co_author in authors) + + +class _SphinxConfig(object): + man_pages = ['foo'] + + +class BaseSphinxTest(base.BaseTestCase): + + def setUp(self): + super(BaseSphinxTest, self).setUp() + + # setup_command requires the Sphinx instance to have some + # attributes that aren't set normally with the way we use the + # class (because we replace the constructor). Add default + # values directly to the class definition. 
+ import sphinx.application + sphinx.application.Sphinx.messagelog = [] + sphinx.application.Sphinx.statuscode = 0 + + self.useFixture(fixtures.MonkeyPatch( + "sphinx.application.Sphinx.__init__", lambda *a, **kw: None)) + self.useFixture(fixtures.MonkeyPatch( + "sphinx.application.Sphinx.build", lambda *a, **kw: None)) + self.useFixture(fixtures.MonkeyPatch( + "sphinx.application.Sphinx.config", _SphinxConfig)) + self.useFixture(fixtures.MonkeyPatch( + "sphinx.config.Config.init_values", lambda *a: None)) + self.useFixture(fixtures.MonkeyPatch( + "sphinx.config.Config.__init__", lambda *a: None)) + from distutils import dist + self.distr = dist.Distribution() + self.distr.packages = ("fake_package",) + self.distr.command_options["build_sphinx"] = { + "source_dir": ["a", "."]} + pkg_fixture = fixtures.PythonPackage( + "fake_package", [("fake_module.py", b""), + ("another_fake_module_for_testing.py", b""), + ("fake_private_module.py", b"")]) + self.useFixture(pkg_fixture) + self.useFixture(base.DiveDir(pkg_fixture.base)) + self.distr.command_options["pbr"] = {} + if hasattr(self, "excludes"): + self.distr.command_options["pbr"]["autodoc_exclude_modules"] = ( + 'setup.cfg', + "fake_package.fake_private_module\n" + "fake_package.another_fake_*\n" + "fake_package.unknown_module") + if hasattr(self, 'has_opt') and self.has_opt: + options = self.distr.command_options["pbr"] + options["autodoc_index_modules"] = ('setup.cfg', self.autodoc) + + +class BuildSphinxTest(BaseSphinxTest): + + scenarios = [ + ('true_autodoc_caps', + dict(has_opt=True, autodoc='True', has_autodoc=True)), + ('true_autodoc_caps_with_excludes', + dict(has_opt=True, autodoc='True', has_autodoc=True, + excludes="fake_package.fake_private_module\n" + "fake_package.another_fake_*\n" + "fake_package.unknown_module")), + ('true_autodoc_lower', + dict(has_opt=True, autodoc='true', has_autodoc=True)), + ('false_autodoc', + dict(has_opt=True, autodoc='False', has_autodoc=False)), + ('no_autodoc', + dict(has_opt=False, autodoc='False', has_autodoc=False)), + ] + + def test_build_doc(self): + build_doc = packaging.LocalBuildDoc(self.distr) + build_doc.run() + + self.assertTrue( + os.path.exists("api/autoindex.rst") == self.has_autodoc) + self.assertTrue( + os.path.exists( + "api/fake_package.fake_module.rst") == self.has_autodoc) + if not self.has_autodoc or hasattr(self, "excludes"): + assertion = self.assertFalse + else: + assertion = self.assertTrue + assertion( + os.path.exists( + "api/fake_package.fake_private_module.rst")) + assertion( + os.path.exists( + "api/fake_package.another_fake_module_for_testing.rst")) + + def test_builders_config(self): + build_doc = packaging.LocalBuildDoc(self.distr) + build_doc.finalize_options() + + self.assertEqual(1, len(build_doc.builders)) + self.assertIn('html', build_doc.builders) + + build_doc = packaging.LocalBuildDoc(self.distr) + build_doc.builders = '' + build_doc.finalize_options() + + self.assertEqual('', build_doc.builders) + + build_doc = packaging.LocalBuildDoc(self.distr) + build_doc.builders = 'man' + build_doc.finalize_options() + + self.assertEqual(1, len(build_doc.builders)) + self.assertIn('man', build_doc.builders) + + build_doc = packaging.LocalBuildDoc(self.distr) + build_doc.builders = 'html,man,doctest' + build_doc.finalize_options() + + self.assertIn('html', build_doc.builders) + self.assertIn('man', build_doc.builders) + self.assertIn('doctest', build_doc.builders) + + def test_cmd_builder_override(self): + + if self.has_opt: + self.distr.command_options["pbr"] = { + 
"autodoc_index_modules": ('setup.cfg', self.autodoc) + } + + self.distr.command_options["build_sphinx"]["builder"] = ( + "command line", "non-existing-builder") + + build_doc = packaging.LocalBuildDoc(self.distr) + self.assertNotIn('non-existing-builder', build_doc.builders) + self.assertIn('html', build_doc.builders) + + # process command line options which should override config + build_doc.finalize_options() + + self.assertIn('non-existing-builder', build_doc.builders) + self.assertNotIn('html', build_doc.builders) + + def test_cmd_builder_override_multiple_builders(self): + + if self.has_opt: + self.distr.command_options["pbr"] = { + "autodoc_index_modules": ('setup.cfg', self.autodoc) + } + + self.distr.command_options["build_sphinx"]["builder"] = ( + "command line", "builder1,builder2") + + build_doc = packaging.LocalBuildDoc(self.distr) + build_doc.finalize_options() + + self.assertEqual(["builder1", "builder2"], build_doc.builders) + + +class APIAutoDocTest(base.BaseTestCase): + + def setUp(self): + super(APIAutoDocTest, self).setUp() + + # setup_command requires the Sphinx instance to have some + # attributes that aren't set normally with the way we use the + # class (because we replace the constructor). Add default + # values directly to the class definition. + import sphinx.application + sphinx.application.Sphinx.messagelog = [] + sphinx.application.Sphinx.statuscode = 0 + + self.useFixture(fixtures.MonkeyPatch( + "sphinx.application.Sphinx.__init__", lambda *a, **kw: None)) + self.useFixture(fixtures.MonkeyPatch( + "sphinx.application.Sphinx.build", lambda *a, **kw: None)) + self.useFixture(fixtures.MonkeyPatch( + "sphinx.application.Sphinx.config", _SphinxConfig)) + self.useFixture(fixtures.MonkeyPatch( + "sphinx.config.Config.init_values", lambda *a: None)) + self.useFixture(fixtures.MonkeyPatch( + "sphinx.config.Config.__init__", lambda *a: None)) + from distutils import dist + self.distr = dist.Distribution() + self.distr.packages = ("fake_package",) + self.distr.command_options["build_sphinx"] = { + "source_dir": ["a", "."]} + self.sphinx_options = self.distr.command_options["build_sphinx"] + pkg_fixture = fixtures.PythonPackage( + "fake_package", [("fake_module.py", b""), + ("another_fake_module_for_testing.py", b""), + ("fake_private_module.py", b"")]) + self.useFixture(pkg_fixture) + self.useFixture(base.DiveDir(pkg_fixture.base)) + self.pbr_options = self.distr.command_options.setdefault('pbr', {}) + self.pbr_options["autodoc_index_modules"] = ('setup.cfg', 'True') + + def test_default_api_build_dir(self): + build_doc = packaging.LocalBuildDoc(self.distr) + build_doc.run() + + print('PBR OPTIONS:', self.pbr_options) + print('DISTR OPTIONS:', self.distr.command_options) + + self.assertTrue(os.path.exists("api/autoindex.rst")) + self.assertTrue(os.path.exists("api/fake_package.fake_module.rst")) + self.assertTrue( + os.path.exists( + "api/fake_package.fake_private_module.rst")) + self.assertTrue( + os.path.exists( + "api/fake_package.another_fake_module_for_testing.rst")) + + def test_different_api_build_dir(self): + # Options have to come out of the settings dict as a tuple + # showing the source and the value. 
+ self.pbr_options['api_doc_dir'] = (None, 'contributor/api') + build_doc = packaging.LocalBuildDoc(self.distr) + build_doc.run() + + print('PBR OPTIONS:', self.pbr_options) + print('DISTR OPTIONS:', self.distr.command_options) + + self.assertTrue(os.path.exists("contributor/api/autoindex.rst")) + self.assertTrue( + os.path.exists("contributor/api/fake_package.fake_module.rst")) + self.assertTrue( + os.path.exists( + "contributor/api/fake_package.fake_private_module.rst")) diff --git a/venv/Lib/site-packages/pbr/tests/test_util.py b/venv/Lib/site-packages/pbr/tests/test_util.py new file mode 100644 index 00000000..1cbb2d2a --- /dev/null +++ b/venv/Lib/site-packages/pbr/tests/test_util.py @@ -0,0 +1,219 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2015 Hewlett-Packard Development Company, L.P. (HP) +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import io +import tempfile +import textwrap + +import six +from six.moves import configparser +import sys + +from pbr.tests import base +from pbr import util + + +def config_from_ini(ini): + config = {} + if sys.version_info >= (3, 2): + parser = configparser.ConfigParser() + else: + parser = configparser.SafeConfigParser() + ini = textwrap.dedent(six.u(ini)) + parser.readfp(io.StringIO(ini)) + for section in parser.sections(): + config[section] = dict(parser.items(section)) + return config + + +class TestExtrasRequireParsingScenarios(base.BaseTestCase): + + scenarios = [ + ('simple_extras', { + 'config_text': """ + [extras] + first = + foo + bar==1.0 + second = + baz>=3.2 + foo + """, + 'expected_extra_requires': { + 'first': ['foo', 'bar==1.0'], + 'second': ['baz>=3.2', 'foo'], + 'test': ['requests-mock'], + "test:(python_version=='2.6')": ['ordereddict'], + } + }), + ('with_markers', { + 'config_text': """ + [extras] + test = + foo:python_version=='2.6' + bar + baz<1.6 :python_version=='2.6' + zaz :python_version>'1.0' + """, + 'expected_extra_requires': { + "test:(python_version=='2.6')": ['foo', 'baz<1.6'], + "test": ['bar', 'zaz']}}), + ('no_extras', { + 'config_text': """ + [metadata] + long_description = foo + """, + 'expected_extra_requires': + {} + })] + + def test_extras_parsing(self): + config = config_from_ini(self.config_text) + kwargs = util.setup_cfg_to_setup_kwargs(config) + + self.assertEqual(self.expected_extra_requires, + kwargs['extras_require']) + + +class TestInvalidMarkers(base.BaseTestCase): + + def test_invalid_marker_raises_error(self): + config = {'extras': {'test': "foo :bad_marker>'1.0'"}} + self.assertRaises(SyntaxError, util.setup_cfg_to_setup_kwargs, config) + + +class TestMapFieldsParsingScenarios(base.BaseTestCase): + + scenarios = [ + ('simple_project_urls', { + 'config_text': """ + [metadata] + project_urls = + Bug Tracker = https://bugs.launchpad.net/pbr/ + Documentation = https://docs.openstack.org/pbr/ + Source Code = https://git.openstack.org/cgit/openstack-dev/pbr/ + """, # noqa: E501 + 'expected_project_urls': { + 'Bug Tracker': 'https://bugs.launchpad.net/pbr/', + 'Documentation': 
'https://docs.openstack.org/pbr/', + 'Source Code': 'https://git.openstack.org/cgit/openstack-dev/pbr/', # noqa: E501 + }, + }), + ('query_parameters', { + 'config_text': """ + [metadata] + project_urls = + Bug Tracker = https://bugs.launchpad.net/pbr/?query=true + Documentation = https://docs.openstack.org/pbr/?foo=bar + Source Code = https://git.openstack.org/cgit/openstack-dev/pbr/commit/?id=hash + """, # noqa: E501 + 'expected_project_urls': { + 'Bug Tracker': 'https://bugs.launchpad.net/pbr/?query=true', + 'Documentation': 'https://docs.openstack.org/pbr/?foo=bar', + 'Source Code': 'https://git.openstack.org/cgit/openstack-dev/pbr/commit/?id=hash', # noqa: E501 + }, + }), + ] + + def test_project_url_parsing(self): + config = config_from_ini(self.config_text) + kwargs = util.setup_cfg_to_setup_kwargs(config) + + self.assertEqual(self.expected_project_urls, kwargs['project_urls']) + + +class TestKeywordsParsingScenarios(base.BaseTestCase): + + scenarios = [ + ('keywords_list', { + 'config_text': """ + [metadata] + keywords = + one + two + three + """, # noqa: E501 + 'expected_keywords': ['one', 'two', 'three'], + }, + ), + ('inline_keywords', { + 'config_text': """ + [metadata] + keywords = one, two, three + """, # noqa: E501 + 'expected_keywords': ['one, two, three'], + }), + ] + + def test_keywords_parsing(self): + config = config_from_ini(self.config_text) + kwargs = util.setup_cfg_to_setup_kwargs(config) + + self.assertEqual(self.expected_keywords, kwargs['keywords']) + + +class TestProvidesExtras(base.BaseTestCase): + def test_provides_extras(self): + ini = """ + [metadata] + provides_extras = foo + bar + """ + config = config_from_ini(ini) + kwargs = util.setup_cfg_to_setup_kwargs(config) + self.assertEqual(['foo', 'bar'], kwargs['provides_extras']) + + +class TestDataFilesParsing(base.BaseTestCase): + + scenarios = [ + ('data_files', { + 'config_text': """ + [files] + data_files = + 'i like spaces/' = + 'dir with space/file with spc 2' + 'dir with space/file with spc 1' + """, + 'data_files': [ + ('i like spaces/', ['dir with space/file with spc 2', + 'dir with space/file with spc 1']) + ] + })] + + def test_handling_of_whitespace_in_data_files(self): + config = config_from_ini(self.config_text) + kwargs = util.setup_cfg_to_setup_kwargs(config) + + self.assertEqual(self.data_files, + list(kwargs['data_files'])) + + +class TestUTF8DescriptionFile(base.BaseTestCase): + def test_utf8_description_file(self): + _, path = tempfile.mkstemp() + ini_template = """ + [metadata] + description_file = %s + """ + # Two \n's because pbr strips the file content and adds \n\n + # This way we can use it directly as the assert comparison + unicode_description = u'UTF8 description: é"…-ʃŋ\'\n\n' + ini = ini_template % path + with io.open(path, 'w', encoding='utf8') as f: + f.write(unicode_description) + config = config_from_ini(ini) + kwargs = util.setup_cfg_to_setup_kwargs(config) + self.assertEqual(unicode_description, kwargs['long_description']) diff --git a/venv/Lib/site-packages/pbr/tests/test_version.py b/venv/Lib/site-packages/pbr/tests/test_version.py new file mode 100644 index 00000000..d861d572 --- /dev/null +++ b/venv/Lib/site-packages/pbr/tests/test_version.py @@ -0,0 +1,311 @@ +# Copyright 2012 Red Hat, Inc. +# Copyright 2012-2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import itertools + +from testtools import matchers + +from pbr.tests import base +from pbr import version + + +from_pip_string = version.SemanticVersion.from_pip_string + + +class TestSemanticVersion(base.BaseTestCase): + + def test_ordering(self): + ordered_versions = [ + "1.2.3.dev6", + "1.2.3.dev7", + "1.2.3.a4.dev12", + "1.2.3.a4.dev13", + "1.2.3.a4", + "1.2.3.a5.dev1", + "1.2.3.a5", + "1.2.3.b3.dev1", + "1.2.3.b3", + "1.2.3.rc2.dev1", + "1.2.3.rc2", + "1.2.3.rc3.dev1", + "1.2.3", + "1.2.4", + "1.3.3", + "2.2.3", + ] + for v in ordered_versions: + sv = version.SemanticVersion.from_pip_string(v) + self.expectThat(sv, matchers.Equals(sv)) + for left, right in itertools.combinations(ordered_versions, 2): + l_pos = ordered_versions.index(left) + r_pos = ordered_versions.index(right) + if l_pos < r_pos: + m1 = matchers.LessThan + m2 = matchers.GreaterThan + else: + m1 = matchers.GreaterThan + m2 = matchers.LessThan + left_sv = version.SemanticVersion.from_pip_string(left) + right_sv = version.SemanticVersion.from_pip_string(right) + self.expectThat(left_sv, m1(right_sv)) + self.expectThat(right_sv, m2(left_sv)) + + def test_from_pip_string_legacy_alpha(self): + expected = version.SemanticVersion( + 1, 2, 0, prerelease_type='rc', prerelease=1) + parsed = from_pip_string('1.2.0rc1') + self.assertEqual(expected, parsed) + + def test_from_pip_string_legacy_postN(self): + # When pbr trunk was incompatible with PEP-440, a stable release was + # made that used postN versions to represent developer builds. As + # we expect only to be parsing versions of our own, we map those + # into dev builds of the next version. + expected = version.SemanticVersion(1, 2, 4, dev_count=5) + parsed = from_pip_string('1.2.3.post5') + self.expectThat(expected, matchers.Equals(parsed)) + expected = version.SemanticVersion(1, 2, 3, 'a', 5, dev_count=6) + parsed = from_pip_string('1.2.3.0a4.post6') + self.expectThat(expected, matchers.Equals(parsed)) + # We can't define a mapping for .postN.devM, so it should raise. 
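+        # (postN already maps onto a dev build of the next patch release, so
+        # a further .devM suffix would be ambiguous)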
+ self.expectThat( + lambda: from_pip_string('1.2.3.post5.dev6'), + matchers.raises(ValueError)) + + def test_from_pip_string_v_version(self): + parsed = from_pip_string('v1.2.3') + expected = version.SemanticVersion(1, 2, 3) + self.expectThat(expected, matchers.Equals(parsed)) + + expected = version.SemanticVersion(1, 2, 3, 'a', 5, dev_count=6) + parsed = from_pip_string('V1.2.3.0a4.post6') + self.expectThat(expected, matchers.Equals(parsed)) + + self.expectThat( + lambda: from_pip_string('x1.2.3'), + matchers.raises(ValueError)) + + def test_from_pip_string_legacy_nonzero_lead_in(self): + # reported in bug 1361251 + expected = version.SemanticVersion( + 0, 0, 1, prerelease_type='a', prerelease=2) + parsed = from_pip_string('0.0.1a2') + self.assertEqual(expected, parsed) + + def test_from_pip_string_legacy_short_nonzero_lead_in(self): + expected = version.SemanticVersion( + 0, 1, 0, prerelease_type='a', prerelease=2) + parsed = from_pip_string('0.1a2') + self.assertEqual(expected, parsed) + + def test_from_pip_string_legacy_no_0_prerelease(self): + expected = version.SemanticVersion( + 2, 1, 0, prerelease_type='rc', prerelease=1) + parsed = from_pip_string('2.1.0.rc1') + self.assertEqual(expected, parsed) + + def test_from_pip_string_legacy_no_0_prerelease_2(self): + expected = version.SemanticVersion( + 2, 0, 0, prerelease_type='rc', prerelease=1) + parsed = from_pip_string('2.0.0.rc1') + self.assertEqual(expected, parsed) + + def test_from_pip_string_legacy_non_440_beta(self): + expected = version.SemanticVersion( + 2014, 2, prerelease_type='b', prerelease=2) + parsed = from_pip_string('2014.2.b2') + self.assertEqual(expected, parsed) + + def test_from_pip_string_pure_git_hash(self): + self.assertRaises(ValueError, from_pip_string, '6eed5ae') + + def test_from_pip_string_non_digit_start(self): + self.assertRaises(ValueError, from_pip_string, + 'non-release-tag/2014.12.16-1') + + def test_final_version(self): + semver = version.SemanticVersion(1, 2, 3) + self.assertEqual((1, 2, 3, 'final', 0), semver.version_tuple()) + self.assertEqual("1.2.3", semver.brief_string()) + self.assertEqual("1.2.3", semver.debian_string()) + self.assertEqual("1.2.3", semver.release_string()) + self.assertEqual("1.2.3", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.2.3")) + + def test_parsing_short_forms(self): + semver = version.SemanticVersion(1, 0, 0) + self.assertEqual(semver, from_pip_string("1")) + self.assertEqual(semver, from_pip_string("1.0")) + self.assertEqual(semver, from_pip_string("1.0.0")) + + def test_dev_version(self): + semver = version.SemanticVersion(1, 2, 4, dev_count=5) + self.assertEqual((1, 2, 4, 'dev', 4), semver.version_tuple()) + self.assertEqual("1.2.4", semver.brief_string()) + self.assertEqual("1.2.4~dev5", semver.debian_string()) + self.assertEqual("1.2.4.dev5", semver.release_string()) + self.assertEqual("1.2.3.dev5", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.2.4.dev5")) + + def test_dev_no_git_version(self): + semver = version.SemanticVersion(1, 2, 4, dev_count=5) + self.assertEqual((1, 2, 4, 'dev', 4), semver.version_tuple()) + self.assertEqual("1.2.4", semver.brief_string()) + self.assertEqual("1.2.4~dev5", semver.debian_string()) + self.assertEqual("1.2.4.dev5", semver.release_string()) + self.assertEqual("1.2.3.dev5", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.2.4.dev5")) + + def test_dev_zero_version(self): + semver = version.SemanticVersion(1, 2, 0, dev_count=5) + self.assertEqual((1, 2, 0, 'dev', 4), 
semver.version_tuple()) + self.assertEqual("1.2.0", semver.brief_string()) + self.assertEqual("1.2.0~dev5", semver.debian_string()) + self.assertEqual("1.2.0.dev5", semver.release_string()) + self.assertEqual("1.1.9999.dev5", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.2.0.dev5")) + + def test_alpha_dev_version(self): + semver = version.SemanticVersion(1, 2, 4, 'a', 1, 12) + self.assertEqual((1, 2, 4, 'alphadev', 12), semver.version_tuple()) + self.assertEqual("1.2.4", semver.brief_string()) + self.assertEqual("1.2.4~a1.dev12", semver.debian_string()) + self.assertEqual("1.2.4.0a1.dev12", semver.release_string()) + self.assertEqual("1.2.3.a1.dev12", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.2.4.0a1.dev12")) + + def test_alpha_version(self): + semver = version.SemanticVersion(1, 2, 4, 'a', 1) + self.assertEqual((1, 2, 4, 'alpha', 1), semver.version_tuple()) + self.assertEqual("1.2.4", semver.brief_string()) + self.assertEqual("1.2.4~a1", semver.debian_string()) + self.assertEqual("1.2.4.0a1", semver.release_string()) + self.assertEqual("1.2.3.a1", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.2.4.0a1")) + + def test_alpha_zero_version(self): + semver = version.SemanticVersion(1, 2, 0, 'a', 1) + self.assertEqual((1, 2, 0, 'alpha', 1), semver.version_tuple()) + self.assertEqual("1.2.0", semver.brief_string()) + self.assertEqual("1.2.0~a1", semver.debian_string()) + self.assertEqual("1.2.0.0a1", semver.release_string()) + self.assertEqual("1.1.9999.a1", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.2.0.0a1")) + + def test_alpha_major_zero_version(self): + semver = version.SemanticVersion(1, 0, 0, 'a', 1) + self.assertEqual((1, 0, 0, 'alpha', 1), semver.version_tuple()) + self.assertEqual("1.0.0", semver.brief_string()) + self.assertEqual("1.0.0~a1", semver.debian_string()) + self.assertEqual("1.0.0.0a1", semver.release_string()) + self.assertEqual("0.9999.9999.a1", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.0.0.0a1")) + + def test_alpha_default_version(self): + semver = version.SemanticVersion(1, 2, 4, 'a') + self.assertEqual((1, 2, 4, 'alpha', 0), semver.version_tuple()) + self.assertEqual("1.2.4", semver.brief_string()) + self.assertEqual("1.2.4~a0", semver.debian_string()) + self.assertEqual("1.2.4.0a0", semver.release_string()) + self.assertEqual("1.2.3.a0", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.2.4.0a0")) + + def test_beta_dev_version(self): + semver = version.SemanticVersion(1, 2, 4, 'b', 1, 12) + self.assertEqual((1, 2, 4, 'betadev', 12), semver.version_tuple()) + self.assertEqual("1.2.4", semver.brief_string()) + self.assertEqual("1.2.4~b1.dev12", semver.debian_string()) + self.assertEqual("1.2.4.0b1.dev12", semver.release_string()) + self.assertEqual("1.2.3.b1.dev12", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.2.4.0b1.dev12")) + + def test_beta_version(self): + semver = version.SemanticVersion(1, 2, 4, 'b', 1) + self.assertEqual((1, 2, 4, 'beta', 1), semver.version_tuple()) + self.assertEqual("1.2.4", semver.brief_string()) + self.assertEqual("1.2.4~b1", semver.debian_string()) + self.assertEqual("1.2.4.0b1", semver.release_string()) + self.assertEqual("1.2.3.b1", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.2.4.0b1")) + + def test_decrement_nonrelease(self): + # The prior version of any non-release is a release + semver = version.SemanticVersion(1, 2, 4, 'b', 1) + self.assertEqual( + 
version.SemanticVersion(1, 2, 3), semver.decrement()) + + def test_decrement_nonrelease_zero(self): + # We set an arbitrary max version of 9999 when decrementing versions + # - this is part of handling rpm support. + semver = version.SemanticVersion(1, 0, 0) + self.assertEqual( + version.SemanticVersion(0, 9999, 9999), semver.decrement()) + + def test_decrement_release(self): + # The next patch version of a release version requires a change to the + # patch level. + semver = version.SemanticVersion(2, 2, 5) + self.assertEqual( + version.SemanticVersion(2, 2, 4), semver.decrement()) + + def test_increment_nonrelease(self): + # The next patch version of a non-release version is another + # non-release version as the next release doesn't need to be + # incremented. + semver = version.SemanticVersion(1, 2, 4, 'b', 1) + self.assertEqual( + version.SemanticVersion(1, 2, 4, 'b', 2), semver.increment()) + # Major and minor increments however need to bump things. + self.assertEqual( + version.SemanticVersion(1, 3, 0), semver.increment(minor=True)) + self.assertEqual( + version.SemanticVersion(2, 0, 0), semver.increment(major=True)) + + def test_increment_release(self): + # The next patch version of a release version requires a change to the + # patch level. + semver = version.SemanticVersion(1, 2, 5) + self.assertEqual( + version.SemanticVersion(1, 2, 6), semver.increment()) + self.assertEqual( + version.SemanticVersion(1, 3, 0), semver.increment(minor=True)) + self.assertEqual( + version.SemanticVersion(2, 0, 0), semver.increment(major=True)) + + def test_rc_dev_version(self): + semver = version.SemanticVersion(1, 2, 4, 'rc', 1, 12) + self.assertEqual((1, 2, 4, 'candidatedev', 12), semver.version_tuple()) + self.assertEqual("1.2.4", semver.brief_string()) + self.assertEqual("1.2.4~rc1.dev12", semver.debian_string()) + self.assertEqual("1.2.4.0rc1.dev12", semver.release_string()) + self.assertEqual("1.2.3.rc1.dev12", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.2.4.0rc1.dev12")) + + def test_rc_version(self): + semver = version.SemanticVersion(1, 2, 4, 'rc', 1) + self.assertEqual((1, 2, 4, 'candidate', 1), semver.version_tuple()) + self.assertEqual("1.2.4", semver.brief_string()) + self.assertEqual("1.2.4~rc1", semver.debian_string()) + self.assertEqual("1.2.4.0rc1", semver.release_string()) + self.assertEqual("1.2.3.rc1", semver.rpm_string()) + self.assertEqual(semver, from_pip_string("1.2.4.0rc1")) + + def test_to_dev(self): + self.assertEqual( + version.SemanticVersion(1, 2, 3, dev_count=1), + version.SemanticVersion(1, 2, 3).to_dev(1)) + self.assertEqual( + version.SemanticVersion(1, 2, 3, 'rc', 1, dev_count=1), + version.SemanticVersion(1, 2, 3, 'rc', 1).to_dev(1)) diff --git a/venv/Lib/site-packages/pbr/tests/test_wsgi.py b/venv/Lib/site-packages/pbr/tests/test_wsgi.py new file mode 100644 index 00000000..a42fe785 --- /dev/null +++ b/venv/Lib/site-packages/pbr/tests/test_wsgi.py @@ -0,0 +1,163 @@ +# Copyright (c) 2015 Hewlett-Packard Development Company, L.P. (HP) +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. + +import os +import re +import subprocess +import sys +try: + # python 2 + from urllib2 import urlopen +except ImportError: + # python 3 + from urllib.request import urlopen + +from pbr.tests import base + + +class TestWsgiScripts(base.BaseTestCase): + + cmd_names = ('pbr_test_wsgi', 'pbr_test_wsgi_with_class') + + def _get_path(self): + if os.path.isdir("%s/lib64" % self.temp_dir): + path = "%s/lib64" % self.temp_dir + elif os.path.isdir("%s/lib" % self.temp_dir): + path = "%s/lib" % self.temp_dir + elif os.path.isdir("%s/site-packages" % self.temp_dir): + return ".:%s/site-packages" % self.temp_dir + else: + raise Exception("Could not determine path for test") + return ".:%s/python%s.%s/site-packages" % ( + path, + sys.version_info[0], + sys.version_info[1]) + + def test_wsgi_script_install(self): + """Test that we install a non-pkg-resources wsgi script.""" + if os.name == 'nt': + self.skipTest('Windows support is passthrough') + + stdout, _, return_code = self.run_setup( + 'install', '--prefix=%s' % self.temp_dir) + + self._check_wsgi_install_content(stdout) + + def test_wsgi_script_run(self): + """Test that we install a runnable wsgi script. + + This test actually attempts to start and interact with the + wsgi script in question to demonstrate that it's a working + wsgi script using simple server. + + """ + if os.name == 'nt': + self.skipTest('Windows support is passthrough') + + stdout, _, return_code = self.run_setup( + 'install', '--prefix=%s' % self.temp_dir) + + self._check_wsgi_install_content(stdout) + + # Live test run the scripts and see that they respond to wsgi + # requests. + for cmd_name in self.cmd_names: + self._test_wsgi(cmd_name, b'Hello World') + + def _test_wsgi(self, cmd_name, output, extra_args=None): + cmd = os.path.join(self.temp_dir, 'bin', cmd_name) + print("Running %s -p 0 -b 127.0.0.1" % cmd) + popen_cmd = [cmd, '-p', '0', '-b', '127.0.0.1'] + if extra_args: + popen_cmd.extend(extra_args) + + env = {'PYTHONPATH': self._get_path()} + + p = subprocess.Popen(popen_cmd, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, cwd=self.temp_dir, + env=env) + self.addCleanup(p.kill) + + stdoutdata = p.stdout.readline() # ****... + + stdoutdata = p.stdout.readline() # STARTING test server... + self.assertIn( + b"STARTING test server pbr_testpackage.wsgi", + stdoutdata) + + stdoutdata = p.stdout.readline() # Available at ... + print(stdoutdata) + m = re.search(br'(http://[^:]+:\d+)/', stdoutdata) + self.assertIsNotNone(m, "Regex failed to match on %s" % stdoutdata) + + stdoutdata = p.stdout.readline() # DANGER! ... + self.assertIn( + b"DANGER! For testing only, do not use in production", + stdoutdata) + + stdoutdata = p.stdout.readline() # ***... + + f = urlopen(m.group(1).decode('utf-8')) + self.assertEqual(output, f.read()) + + # Request again so that the application can force stderr.flush(), + # otherwise the log is buffered and the next readline() will hang. 
+        urlopen(m.group(1).decode('utf-8'))
+
+        stdoutdata = p.stderr.readline()
+        # we should have logged an HTTP request, return code 200, that
+        # returned the right amount of bytes
+        status = '"GET / HTTP/1.1" 200 %d' % len(output)
+        self.assertIn(status.encode('utf-8'), stdoutdata)
+
+    def _check_wsgi_install_content(self, install_stdout):
+        for cmd_name in self.cmd_names:
+            install_txt = 'Installing %s script to %s' % (cmd_name,
+                                                          self.temp_dir)
+            self.assertIn(install_txt, install_stdout)
+
+            cmd_filename = os.path.join(self.temp_dir, 'bin', cmd_name)
+
+            script_txt = open(cmd_filename, 'r').read()
+            self.assertNotIn('pkg_resources', script_txt)
+
+            main_block = """if __name__ == "__main__":
+    import argparse
+    import socket
+    import sys
+    import wsgiref.simple_server as wss"""
+
+            if cmd_name == 'pbr_test_wsgi':
+                app_name = "main"
+            else:
+                app_name = "WSGI.app"
+
+            starting_block = ("STARTING test server pbr_testpackage.wsgi."
+                              "%s" % app_name)
+
+            else_block = """else:
+    application = None"""
+
+            self.assertIn(main_block, script_txt)
+            self.assertIn(starting_block, script_txt)
+            self.assertIn(else_block, script_txt)
+
+    def test_with_argument(self):
+        if os.name == 'nt':
+            self.skipTest('Windows support is passthrough')
+
+        stdout, _, return_code = self.run_setup(
+            'install', '--prefix=%s' % self.temp_dir)
+
+        self._test_wsgi('pbr_test_wsgi', b'Foo Bar', ["--", "-c", "Foo Bar"])
diff --git a/venv/Lib/site-packages/pbr/tests/testpackage/CHANGES.txt b/venv/Lib/site-packages/pbr/tests/testpackage/CHANGES.txt
new file mode 100644
index 00000000..709b9d4c
--- /dev/null
+++ b/venv/Lib/site-packages/pbr/tests/testpackage/CHANGES.txt
@@ -0,0 +1,86 @@
+Changelog
+===========
+
+0.3 (unreleased)
+------------------
+
+- The ``glob_data_files`` hook became a pre-command hook for the install_data
+  command instead of being a setup-hook.  This is to support the additional
+  functionality of requiring data_files with relative destination paths to be
+  installed relative to the package's install path (i.e. site-packages).
+
+- Dropped support for and deprecated the easier_install custom command.
+  Although it should still work, it probably won't be used anymore for
+  stsci_python packages.
+
+- Added support for the ``build_optional_ext`` command, which replaces/extends
+  the default ``build_ext`` command.  See the README for more details.
+
+- Added the ``tag_svn_revision`` setup_hook as a replacement for the
+  setuptools-specific tag_svn_revision option to the egg_info command.  This
+  new hook is easier to use than the old tag_svn_revision option: It's
+  automatically enabled by the presence of ``.dev`` in the version string, and
+  disabled otherwise.
+
+- The ``svn_info_pre_hook`` and ``svn_info_post_hook`` have been replaced with
+  ``version_pre_command_hook`` and ``version_post_command_hook`` respectively.
+  However, a new ``version_setup_hook``, which has the same purpose, has been
+  added.  It is generally easier to use and will give more consistent results
+  in that it will run every time setup.py is run, regardless of which command
+  is used.  ``stsci.distutils`` itself uses this hook--see the `setup.cfg` file
+  and `stsci/distutils/__init__.py` for example usage.
+
+- Instead of creating an `svninfo.py` module, the new ``version_`` hooks create
+  a file called `version.py`.  In addition to the SVN info that was included
+  in `svninfo.py`, it includes a ``__version__`` variable to be used by the
+  package's `__init__.py`.  This allows there to be a hard-coded
+  ``__version__`` variable included in the source code, rather than using
+  pkg_resources to get the version.
+
+- In `version.py`, the variables previously named ``__svn_version__`` and
+  ``__full_svn_info__`` are now named ``__svn_revision__`` and
+  ``__svn_full_info__``.
+
+- Fixed a bug when using stsci.distutils in the installation of other packages
+  in the ``stsci.*`` namespace package.  If stsci.distutils was not already
+  installed, and was downloaded automatically by distribute through the
+  setup_requires option, then ``stsci.distutils`` would fail to import.  This
+  is because of the way the namespace package (nspkg) mechanism currently
+  works: all packages belonging to the nspkg *must* be on the import path at
+  initial import time.
+
+  So when installing stsci.tools, for example, if ``stsci.tools`` is imported
+  from within the source code at install time, but before ``stsci.distutils``
+  is downloaded and added to the path, the ``stsci`` package is already
+  imported and can't be extended to include the path of ``stsci.distutils``
+  after the fact.  The easiest way of dealing with this, it seems, is to
+  delete ``stsci`` from ``sys.modules``, which forces it to be reimported,
+  this time with its ``__path__`` extended to include ``stsci.distutils``'s
+  path.
+
+
+0.2.2 (2011-11-09)
+------------------
+
+- Fixed the check for the issue205 bug on actual setuptools installs; before
+  it only worked on distribute.  setuptools has the issue205 bug prior to
+  version 0.6c10.
+
+- Improved the fix for the issue205 bug, especially on setuptools.
+  setuptools, prior to 0.6c10, did not back up sys.modules either before
+  sandboxing, which causes serious problems.  In fact, it's so bad that it's
+  not enough to add a sys.modules backup to the current sandbox: it's
+  necessary to monkeypatch setuptools.sandbox.run_setup so that any subsequent
+  calls to it also back up sys.modules.
+
+
+0.2.1 (2011-09-02)
+------------------
+
+- Fixed the dependencies so that setuptools is the requirement, not
+  'distribute' specifically.  Previously installation could fail if users had
+  plain setuptools installed and not distribute.
+
+0.2 (2011-08-23)
+------------------
+
+- Initial public release
diff --git a/venv/Lib/site-packages/pbr/tests/testpackage/LICENSE.txt b/venv/Lib/site-packages/pbr/tests/testpackage/LICENSE.txt
new file mode 100644
index 00000000..7e8019a8
--- /dev/null
+++ b/venv/Lib/site-packages/pbr/tests/testpackage/LICENSE.txt
@@ -0,0 +1,29 @@
+Copyright (C) 2005 Association of Universities for Research in Astronomy (AURA)
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+    1. Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+
+    2. Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+
+    3. The name of AURA and its representatives may not be used to
+      endorse or promote products derived from this software without
+      specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
+INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
+TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGE.
+
diff --git a/venv/Lib/site-packages/pbr/tests/testpackage/MANIFEST.in b/venv/Lib/site-packages/pbr/tests/testpackage/MANIFEST.in
new file mode 100644
index 00000000..2e35f3ed
--- /dev/null
+++ b/venv/Lib/site-packages/pbr/tests/testpackage/MANIFEST.in
@@ -0,0 +1,2 @@
+include data_files/*
+exclude pbr_testpackage/extra.py
diff --git a/venv/Lib/site-packages/pbr/tests/testpackage/README.txt b/venv/Lib/site-packages/pbr/tests/testpackage/README.txt
new file mode 100644
index 00000000..b6d84a7b
--- /dev/null
+++ b/venv/Lib/site-packages/pbr/tests/testpackage/README.txt
@@ -0,0 +1,148 @@
+Introduction
+============
+This package contains utilities used to package some of STScI's Python
+projects; specifically those projects that comprise stsci_python_ and
+Astrolib_.
+
+It currently consists mostly of some setup_hook scripts meant for use with
+`distutils2/packaging`_ and/or d2to1_, and a customized easy_install command
+meant for use with distribute_.
+
+This package is not meant for general consumption, though it might be worth
+looking at for examples of how to do certain things with your own packages.
+YMMV.
+
+Features
+========
+
+Hook Scripts
+------------
+Currently the main features of this package are a couple of setup_hook scripts.
+In distutils2, a setup_hook is a script that runs at the beginning of any
+pysetup command, and can modify the package configuration read from setup.cfg.
+There are also pre- and post-command hooks that only run before/after a
+specific setup command (e.g. build_ext, install) is run.
+
+stsci.distutils.hooks.use_packages_root
+'''''''''''''''''''''''''''''''''''''''
+If using the ``packages_root`` option under the ``[files]`` section of
+setup.cfg, this hook will add that path to ``sys.path`` so that modules in your
+package can be imported and used in setup.  This can be used even if
+``packages_root`` is not specified--in this case it adds ``''`` to
+``sys.path``.
+
+stsci.distutils.hooks.version_setup_hook
+''''''''''''''''''''''''''''''''''''''''
+Creates a Python module called version.py which currently contains four
+variables:
+
+* ``__version__`` (the release version)
+* ``__svn_revision__`` (the SVN revision info as returned by the ``svnversion``
+  command)
+* ``__svn_full_info__`` (as returned by the ``svn info`` command)
+* ``__setup_datetime__`` (the date and time that setup.py was last run).
+
+These variables can be imported in the package's `__init__.py` for debugging
+purposes.  The version.py module will *only* be created in a package that
+imports from the version module in its `__init__.py`.  It should be noted that
+this is generally preferable to writing these variables directly into
+`__init__.py`, since this provides more control and is less likely to
+unexpectedly break things in `__init__.py`.
+
+stsci.distutils.hooks.version_pre_command_hook
+''''''''''''''''''''''''''''''''''''''''''''''
+Identical to version_setup_hook, but designed to be used as a pre-command
+hook.
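+
+For example, a minimal sketch that follows the same ``pre-hook.<name>``
+convention shown for the numpy extension hook below (the label
+``version-hook`` is just an illustrative name)::
+
+    [build]
+    pre-hook.version-hook = stsci.distutils.hooks.version_pre_command_hook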
+
+stsci.distutils.hooks.version_post_command_hook
+'''''''''''''''''''''''''''''''''''''''''''''''
+The complement to version_pre_command_hook.  This will delete any version.py
+files created during a build in order to prevent them from cluttering an SVN
+working copy (note, however, that version.py is *not* deleted from the build/
+directory, so a copy of it is still preserved).  It will also not be deleted
+if the current directory is not an SVN working copy.  For example, if the
+source code was extracted from a source tarball, it will be preserved.
+
+stsci.distutils.hooks.tag_svn_revision
+''''''''''''''''''''''''''''''''''''''
+A setup_hook to add the SVN revision of the current working copy path to the
+package version string, but only if the version ends in .dev.
+
+For example, ``mypackage-1.0.dev`` becomes ``mypackage-1.0.dev1234``.  This is
+in accordance with the version string format standardized by PEP 386.
+
+This should be used as a replacement for the ``tag_svn_revision`` option to
+the egg_info command.  This hook is more compatible with packaging/distutils2,
+which does not include any VCS support.  This hook is also more flexible in
+that it turns the revision number on/off depending on the presence of ``.dev``
+in the version string, so that it's not automatically added to the version in
+final releases.
+
+This hook does require the ``svnversion`` command to be available in order to
+work.  It does not examine the working copy metadata directly.
+
+stsci.distutils.hooks.numpy_extension_hook
+''''''''''''''''''''''''''''''''''''''''''
+This is a pre-command hook for the build_ext command.  To use it, add a
+``[build_ext]`` section to your setup.cfg, and add to it::
+
+    pre-hook.numpy-extension-hook = stsci.distutils.hooks.numpy_extension_hook
+
+This hook must be used to build extension modules that use Numpy.  The primary
+side-effect of this hook is to add the correct numpy include directories to
+`include_dirs`.  To use it, add 'numpy' to the 'include-dirs' option of each
+extension module that requires numpy to build.  The value 'numpy' will be
+replaced with the actual path to the numpy includes.
+
+stsci.distutils.hooks.is_display_option
+'''''''''''''''''''''''''''''''''''''''
+This is not actually a hook, but is a useful utility function that can be used
+in writing other hooks.  Basically, it returns ``True`` if setup.py was run
+with a "display option" such as --version or --help.  This can be used to
+prevent your hook from running in such cases.
+
+stsci.distutils.hooks.glob_data_files
+'''''''''''''''''''''''''''''''''''''
+A pre-command hook for the install_data command.  Allows filename wildcards as
+understood by ``glob.glob()`` to be used in the data_files option.  This hook
+must be used in order to have this functionality since it does not normally
+exist in distutils.
+
+This hook also ensures that data files are installed relative to the package
+path.  data_files shouldn't normally be installed this way, but the
+functionality is required for a few special cases.
+
+
+Commands
+--------
+build_optional_ext
+''''''''''''''''''
+This serves as an optional replacement for the default build_ext command,
+which compiles C extension modules.  Its purpose is to allow extension modules
+to be *optional*, so that if their build fails the rest of the package is
+still allowed to be built and installed.  This can be used when an extension
+module is not definitely required to use the package.
+ +To use this custom command, add:: + + commands = stsci.distutils.command.build_optional_ext.build_optional_ext + +under the ``[global]`` section of your package's setup.cfg. Then, to mark +an individual extension module as optional, under the setup.cfg section for +that extension add:: + + optional = True + +Optionally, you may also add a custom failure message by adding:: + + fail_message = The foobar extension module failed to compile. + This could be because you lack such and such headers. + This package will still work, but such and such features + will be disabled. + + +.. _stsci_python: http://www.stsci.edu/resources/software_hardware/pyraf/stsci_python +.. _Astrolib: http://www.scipy.org/AstroLib/ +.. _distutils2/packaging: http://distutils2.notmyidea.org/ +.. _d2to1: http://pypi.python.org/pypi/d2to1 +.. _distribute: http://pypi.python.org/pypi/distribute diff --git a/venv/Lib/site-packages/pbr/tests/testpackage/data_files/a.txt b/venv/Lib/site-packages/pbr/tests/testpackage/data_files/a.txt new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/pbr/tests/testpackage/data_files/b.txt b/venv/Lib/site-packages/pbr/tests/testpackage/data_files/b.txt new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/pbr/tests/testpackage/data_files/c.rst b/venv/Lib/site-packages/pbr/tests/testpackage/data_files/c.rst new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/pbr/tests/testpackage/doc/source/conf.py b/venv/Lib/site-packages/pbr/tests/testpackage/doc/source/conf.py new file mode 100644 index 00000000..6edbe8e3 --- /dev/null +++ b/venv/Lib/site-packages/pbr/tests/testpackage/doc/source/conf.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# -- General configuration ---------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +extensions = [ + 'sphinx.ext.autodoc', +] + +# autodoc generation is a bit aggressive and a nuisance when doing heavy +# text edit cycles. +# execute "export SPHINX_DEBUG=1" in your terminal to disable + +# The suffix of source filenames. +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'testpackage' +copyright = u'2013, OpenStack Foundation' + +# If true, '()' will be appended to :func: etc. cross-reference text. +add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +add_module_names = True + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + + +# -- Options for HTML output -------------------------------------------------- + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, author, documentclass +# [howto/manual]). 
+latex_documents = [
+    ('index',
+     '%s.tex' % project,
+     u'%s Documentation' % project,
+     u'OpenStack Foundation', 'manual'),
+]
diff --git a/venv/Lib/site-packages/pbr/tests/testpackage/doc/source/index.rst b/venv/Lib/site-packages/pbr/tests/testpackage/doc/source/index.rst
new file mode 100644
index 00000000..9ce317fd
--- /dev/null
+++ b/venv/Lib/site-packages/pbr/tests/testpackage/doc/source/index.rst
@@ -0,0 +1,23 @@
+.. testpackage documentation master file, created by
+   sphinx-quickstart on Tue Jul 9 22:26:36 2013.
+   You can adapt this file completely to your liking, but it should at least
+   contain the root `toctree` directive.
+
+Welcome to testpackage's documentation!
+========================================================
+
+Contents:
+
+.. toctree::
+   :maxdepth: 2
+
+   installation
+   usage
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
diff --git a/venv/Lib/site-packages/pbr/tests/testpackage/doc/source/installation.rst b/venv/Lib/site-packages/pbr/tests/testpackage/doc/source/installation.rst
new file mode 100644
index 00000000..9bc2bdea
--- /dev/null
+++ b/venv/Lib/site-packages/pbr/tests/testpackage/doc/source/installation.rst
@@ -0,0 +1,12 @@
+============
+Installation
+============
+
+At the command line::
+
+    $ pip install testpackage
+
+Or, if you have virtualenvwrapper installed::
+
+    $ mkvirtualenv testpackage
+    $ pip install testpackage
diff --git a/venv/Lib/site-packages/pbr/tests/testpackage/doc/source/usage.rst b/venv/Lib/site-packages/pbr/tests/testpackage/doc/source/usage.rst
new file mode 100644
index 00000000..8f08a91b
--- /dev/null
+++ b/venv/Lib/site-packages/pbr/tests/testpackage/doc/source/usage.rst
@@ -0,0 +1,7 @@
+========
+Usage
+========
+
+To use testpackage in a project::
+
+    import testpackage
diff --git a/venv/Lib/site-packages/pbr/tests/testpackage/extra-file.txt b/venv/Lib/site-packages/pbr/tests/testpackage/extra-file.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/Lib/site-packages/pbr/tests/testpackage/git-extra-file.txt b/venv/Lib/site-packages/pbr/tests/testpackage/git-extra-file.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/Lib/site-packages/pbr/tests/testpackage/pbr_testpackage/__init__.py b/venv/Lib/site-packages/pbr/tests/testpackage/pbr_testpackage/__init__.py
new file mode 100644
index 00000000..aa56dc6f
--- /dev/null
+++ b/venv/Lib/site-packages/pbr/tests/testpackage/pbr_testpackage/__init__.py
@@ -0,0 +1,3 @@
+import pbr.version
+
+__version__ = pbr.version.VersionInfo('pbr_testpackage').version_string()
diff --git a/venv/Lib/site-packages/pbr/tests/testpackage/pbr_testpackage/_setup_hooks.py b/venv/Lib/site-packages/pbr/tests/testpackage/pbr_testpackage/_setup_hooks.py
new file mode 100644
index 00000000..f8b30876
--- /dev/null
+++ b/venv/Lib/site-packages/pbr/tests/testpackage/pbr_testpackage/_setup_hooks.py
@@ -0,0 +1,65 @@
+# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +# Copyright (C) 2013 Association of Universities for Research in Astronomy +# (AURA) +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# 3. The name of AURA and its representatives may not be used to +# endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED +# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS + +from distutils.command import build_py + + +def test_hook_1(config): + print('test_hook_1') + + +def test_hook_2(config): + print('test_hook_2') + + +class test_command(build_py.build_py): + command_name = 'build_py' + + def run(self): + print('Running custom build_py command.') + return build_py.build_py.run(self) + + +def test_pre_hook(cmdobj): + print('build_ext pre-hook') + + +def test_post_hook(cmdobj): + print('build_ext post-hook') diff --git a/venv/Lib/site-packages/pbr/tests/testpackage/pbr_testpackage/cmd.py b/venv/Lib/site-packages/pbr/tests/testpackage/pbr_testpackage/cmd.py new file mode 100644 index 00000000..4cc4522f --- /dev/null +++ b/venv/Lib/site-packages/pbr/tests/testpackage/pbr_testpackage/cmd.py @@ -0,0 +1,26 @@ +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
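+#
+# cmd.py provides the targets for the console_scripts entry points declared
+# in this package's setup.cfg (pbr_test_cmd and pbr_test_cmd_with_class).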
+from __future__ import print_function
+
+
+def main():
+    print("PBR Test Command")
+
+
+class Foo(object):
+
+    @classmethod
+    def bar(cls):
+        print("PBR Test Command - with class!")
diff --git a/venv/Lib/site-packages/pbr/tests/testpackage/pbr_testpackage/extra.py b/venv/Lib/site-packages/pbr/tests/testpackage/pbr_testpackage/extra.py
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/Lib/site-packages/pbr/tests/testpackage/pbr_testpackage/package_data/1.txt b/venv/Lib/site-packages/pbr/tests/testpackage/pbr_testpackage/package_data/1.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/Lib/site-packages/pbr/tests/testpackage/pbr_testpackage/package_data/2.txt b/venv/Lib/site-packages/pbr/tests/testpackage/pbr_testpackage/package_data/2.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/Lib/site-packages/pbr/tests/testpackage/pbr_testpackage/wsgi.py b/venv/Lib/site-packages/pbr/tests/testpackage/pbr_testpackage/wsgi.py
new file mode 100644
index 00000000..1edd54d3
--- /dev/null
+++ b/venv/Lib/site-packages/pbr/tests/testpackage/pbr_testpackage/wsgi.py
@@ -0,0 +1,40 @@
+# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import print_function
+
+import argparse
+import functools
+import sys
+
+
+def application(env, start_response, data):
+    sys.stderr.flush()  # Force the previous request log to be written.
+    start_response('200 OK', [('Content-Type', 'text/html')])
+    return [data.encode('utf-8')]
+
+
+def main():
+    parser = argparse.ArgumentParser(description='Return a string.')
+    parser.add_argument('--content', '-c', help='String returned',
+                        default='Hello World')
+    args = parser.parse_args()
+    return functools.partial(application, data=args.content)
+
+
+class WSGI(object):
+
+    @classmethod
+    def app(cls):
+        return functools.partial(application, data='Hello World')
diff --git a/venv/Lib/site-packages/pbr/tests/testpackage/setup.cfg b/venv/Lib/site-packages/pbr/tests/testpackage/setup.cfg
new file mode 100644
index 00000000..3929f0db
--- /dev/null
+++ b/venv/Lib/site-packages/pbr/tests/testpackage/setup.cfg
@@ -0,0 +1,58 @@
+[metadata]
+name = pbr_testpackage
+# TODO(lifeless) we should inject this as needed otherwise we're not truly
+# testing postversioned codepaths.
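+# (if the version below were omitted entirely, pbr would derive one from the
+# git tags and commit history instead of this hard-coded pre-release string)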
+version = 0.1.dev +author = OpenStack +author-email = openstack-discuss@lists.openstack.org +home-page = http://pypi.python.org/pypi/pbr +project_urls = + Bug Tracker = https://bugs.launchpad.net/pbr/ + Documentation = https://docs.openstack.org/pbr/ + Source Code = https://git.openstack.org/cgit/openstack-dev/pbr/ +summary = Test package for testing pbr +description-file = + README.txt + CHANGES.txt +description-content-type = text/plain; charset=UTF-8 +requires-python = >=2.5 + +requires-dist = + setuptools + +classifier = + Development Status :: 3 - Alpha + Intended Audience :: Developers + License :: OSI Approved :: BSD License + Programming Language :: Python + Topic :: Scientific/Engineering + Topic :: Software Development :: Build Tools + Topic :: Software Development :: Libraries :: Python Modules + Topic :: System :: Archiving :: Packaging + +keywords = packaging, distutils, setuptools + +[files] +packages = pbr_testpackage +package-data = testpackage = package_data/*.txt +data-files = testpackage/data_files = data_files/* +extra-files = extra-file.txt + +[entry_points] +console_scripts = + pbr_test_cmd = pbr_testpackage.cmd:main + pbr_test_cmd_with_class = pbr_testpackage.cmd:Foo.bar + +wsgi_scripts = + pbr_test_wsgi = pbr_testpackage.wsgi:main + pbr_test_wsgi_with_class = pbr_testpackage.wsgi:WSGI.app + +[extension=pbr_testpackage.testext] +sources = src/testext.c +optional = True + +[global] +#setup-hooks = +# pbr_testpackage._setup_hooks.test_hook_1 +# pbr_testpackage._setup_hooks.test_hook_2 +commands = pbr_testpackage._setup_hooks.test_command diff --git a/venv/Lib/site-packages/pbr/tests/testpackage/setup.py b/venv/Lib/site-packages/pbr/tests/testpackage/setup.py new file mode 100644 index 00000000..2d9f685b --- /dev/null +++ b/venv/Lib/site-packages/pbr/tests/testpackage/setup.py @@ -0,0 +1,21 @@ +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
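+
+# A pbr-style setup.py is deliberately a stub: with pbr=True below, setuptools
+# delegates to pbr, which reads the real package metadata from setup.cfg.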
+
+import setuptools
+
+setuptools.setup(
+    setup_requires=['pbr'],
+    pbr=True,
+)
diff --git a/venv/Lib/site-packages/pbr/tests/testpackage/src/testext.c b/venv/Lib/site-packages/pbr/tests/testpackage/src/testext.c
new file mode 100644
index 00000000..1b366e9b
--- /dev/null
+++ b/venv/Lib/site-packages/pbr/tests/testpackage/src/testext.c
@@ -0,0 +1,29 @@
+#include <Python.h>
+
+
+static PyMethodDef TestextMethods[] = {
+    {NULL, NULL, 0, NULL}
+};
+
+
+#if PY_MAJOR_VERSION >= 3
+static struct PyModuleDef testextmodule = {
+    PyModuleDef_HEAD_INIT, /* This should correspond to a PyModuleDef_Base type */
+    "testext",             /* This is the module name */
+    "Test extension module", /* This is the module docstring */
+    -1,                    /* This defines the size of the module and says everything is global */
+    TestextMethods         /* This is the method definition */
+};
+
+PyObject*
+PyInit_testext(void)
+{
+    return PyModule_Create(&testextmodule);
+}
+#else
+PyMODINIT_FUNC
+inittestext(void)
+{
+    Py_InitModule("testext", TestextMethods);
+}
+#endif
diff --git a/venv/Lib/site-packages/pbr/tests/testpackage/test-requirements.txt b/venv/Lib/site-packages/pbr/tests/testpackage/test-requirements.txt
new file mode 100644
index 00000000..8755eb4c
--- /dev/null
+++ b/venv/Lib/site-packages/pbr/tests/testpackage/test-requirements.txt
@@ -0,0 +1,2 @@
+ordereddict;python_version=='2.6'
+requests-mock
diff --git a/venv/Lib/site-packages/pbr/tests/util.py b/venv/Lib/site-packages/pbr/tests/util.py
new file mode 100644
index 00000000..8a00c840
--- /dev/null
+++ b/venv/Lib/site-packages/pbr/tests/util.py
@@ -0,0 +1,78 @@
+# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Copyright (C) 2013 Association of Universities for Research in Astronomy
+# (AURA)
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+#     1. Redistributions of source code must retain the above copyright
+#        notice, this list of conditions and the following disclaimer.
+#
+#     2. Redistributions in binary form must reproduce the above
+#        copyright notice, this list of conditions and the following
+#        disclaimer in the documentation and/or other materials provided
+#        with the distribution.
+#
+#     3. The name of AURA and its representatives may not be used to
+#        endorse or promote products derived from this software without
+#        specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
+# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED.
IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS + +import contextlib +import os +import shutil +import stat +import sys + +try: + import ConfigParser as configparser +except ImportError: + import configparser + + +@contextlib.contextmanager +def open_config(filename): + if sys.version_info >= (3, 2): + cfg = configparser.ConfigParser() + else: + cfg = configparser.SafeConfigParser() + cfg.read(filename) + yield cfg + with open(filename, 'w') as fp: + cfg.write(fp) + + +def rmtree(path): + """shutil.rmtree() with error handler. + + Handle 'access denied' from trying to delete read-only files. + """ + + def onerror(func, path, exc_info): + if not os.access(path, os.W_OK): + os.chmod(path, stat.S_IWUSR) + func(path) + else: + raise + + return shutil.rmtree(path, onerror=onerror) diff --git a/venv/Lib/site-packages/pbr/util.py b/venv/Lib/site-packages/pbr/util.py new file mode 100644 index 00000000..1e9fdc49 --- /dev/null +++ b/venv/Lib/site-packages/pbr/util.py @@ -0,0 +1,627 @@ +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Copyright (C) 2013 Association of Universities for Research in Astronomy +# (AURA) +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# 3. The name of AURA and its representatives may not be used to +# endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED +# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS +# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR +# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +# DAMAGE. + +"""The code in this module is mostly copy/pasted out of the distutils2 source +code, as recommended by Tarek Ziade. As such, it may be subject to some change +as distutils2 development continues, and will have to be kept up to date. 
+ +I didn't want to use it directly from distutils2 itself, since I do not want it +to be an installation dependency for our packages yet--it is still too unstable +(the latest version on PyPI doesn't even install). +""" + +# These first two imports are not used, but are needed to get around an +# irritating Python bug that can crop up when using ./setup.py test. +# See: http://www.eby-sarna.com/pipermail/peak/2010-May/003355.html +try: + import multiprocessing # noqa +except ImportError: + pass +import logging # noqa + +from collections import defaultdict +import io +import os +import re +import shlex +import sys +import traceback + +import distutils.ccompiler +from distutils import errors +from distutils import log +import pkg_resources +from setuptools import dist as st_dist +from setuptools import extension + +try: + import ConfigParser as configparser +except ImportError: + import configparser + +from pbr import extra_files +import pbr.hooks + +# A simplified RE for this; just checks that the line ends with version +# predicates in () +_VERSION_SPEC_RE = re.compile(r'\s*(.*?)\s*\((.*)\)\s*$') + + +# Mappings from setup() keyword arguments to setup.cfg options; +# The values are (section, option) tuples, or simply (section,) tuples if +# the option has the same name as the setup() argument +D1_D2_SETUP_ARGS = { + "name": ("metadata",), + "version": ("metadata",), + "author": ("metadata",), + "author_email": ("metadata",), + "maintainer": ("metadata",), + "maintainer_email": ("metadata",), + "url": ("metadata", "home_page"), + "project_urls": ("metadata",), + "description": ("metadata", "summary"), + "keywords": ("metadata",), + "long_description": ("metadata", "description"), + "long_description_content_type": ("metadata", "description_content_type"), + "download_url": ("metadata",), + "classifiers": ("metadata", "classifier"), + "platforms": ("metadata", "platform"), # ** + "license": ("metadata",), + # Use setuptools install_requires, not + # broken distutils requires + "install_requires": ("metadata", "requires_dist"), + "setup_requires": ("metadata", "setup_requires_dist"), + "python_requires": ("metadata",), + "provides": ("metadata", "provides_dist"), # ** + "provides_extras": ("metadata",), + "obsoletes": ("metadata", "obsoletes_dist"), # ** + "package_dir": ("files", 'packages_root'), + "packages": ("files",), + "package_data": ("files",), + "namespace_packages": ("files",), + "data_files": ("files",), + "scripts": ("files",), + "py_modules": ("files", "modules"), # ** + "cmdclass": ("global", "commands"), + # Not supported in distutils2, but provided for + # backwards compatibility with setuptools + "use_2to3": ("backwards_compat", "use_2to3"), + "zip_safe": ("backwards_compat", "zip_safe"), + "tests_require": ("backwards_compat", "tests_require"), + "dependency_links": ("backwards_compat",), + "include_package_data": ("backwards_compat",), +} + +# setup() arguments that can have multiple values in setup.cfg +MULTI_FIELDS = ("classifiers", + "platforms", + "install_requires", + "provides", + "obsoletes", + "namespace_packages", + "packages", + "package_data", + "data_files", + "scripts", + "py_modules", + "dependency_links", + "setup_requires", + "tests_require", + "keywords", + "cmdclass", + "provides_extras") + +# setup() arguments that can have mapping values in setup.cfg +MAP_FIELDS = ("project_urls",) + +# setup() arguments that contain boolean values +BOOL_FIELDS = ("use_2to3", "zip_safe", "include_package_data") + +CSV_FIELDS = () + + +def shlex_split(path): + if 
os.name == 'nt':
+        # shlex cannot handle paths that contain backslashes, treating those
+        # as escape characters.
+        path = path.replace("\\", "/")
+        return [x.replace("/", "\\") for x in shlex.split(path)]
+
+    return shlex.split(path)
+
+
+def resolve_name(name):
+    """Resolve a name like ``module.object`` to an object and return it.
+
+    Raise ImportError if the module or name is not found.
+    """
+
+    parts = name.split('.')
+    cursor = len(parts) - 1
+    module_name = parts[:cursor]
+    attr_name = parts[-1]
+
+    while cursor > 0:
+        try:
+            ret = __import__('.'.join(module_name), fromlist=[attr_name])
+            break
+        except ImportError:
+            if cursor == 0:
+                raise
+            cursor -= 1
+            module_name = parts[:cursor]
+            attr_name = parts[cursor]
+            ret = ''
+
+    for part in parts[cursor:]:
+        try:
+            ret = getattr(ret, part)
+        except AttributeError:
+            raise ImportError(name)
+
+    return ret
+
+
+def cfg_to_args(path='setup.cfg', script_args=()):
+    """Distutils2 to distutils1 compatibility util.
+
+    This method uses an existing setup.cfg to generate a dictionary of
+    keywords that can be used by distutils.core.setup(**kwargs).
+
+    :param path:
+        The setup.cfg path.
+    :param script_args:
+        List of commands setup.py was called with.
+    :raises DistutilsFileError:
+        When the setup.cfg file is not found.
+    """
+
+    # The method source code really starts here.
+    if sys.version_info >= (3, 0):
+        parser = configparser.ConfigParser()
+    else:
+        parser = configparser.SafeConfigParser()
+
+    if not os.path.exists(path):
+        raise errors.DistutilsFileError("file '%s' does not exist" %
+                                        os.path.abspath(path))
+    try:
+        parser.read(path, encoding='utf-8')
+    except TypeError:
+        # Python 2 doesn't accept the encoding kwarg
+        parser.read(path)
+    config = {}
+    for section in parser.sections():
+        config[section] = dict()
+        for k, value in parser.items(section):
+            config[section][k.replace('-', '_')] = value
+
+    # Run setup_hooks, if configured
+    setup_hooks = has_get_option(config, 'global', 'setup_hooks')
+    package_dir = has_get_option(config, 'files', 'packages_root')
+
+    # Add the source package directory to sys.path in case it contains
+    # additional hooks, and to make sure it's on the path before any existing
+    # installations of the package
+    if package_dir:
+        package_dir = os.path.abspath(package_dir)
+        sys.path.insert(0, package_dir)
+
+    try:
+        if setup_hooks:
+            setup_hooks = [
+                hook for hook in split_multiline(setup_hooks)
+                if hook != 'pbr.hooks.setup_hook']
+            for hook in setup_hooks:
+                hook_fn = resolve_name(hook)
+                try:
+                    hook_fn(config)
+                except SystemExit:
+                    log.error('setup hook %s terminated the installation'
+                              % hook)
+                except Exception:
+                    e = sys.exc_info()[1]
+                    log.error('setup hook %s raised exception: %s\n' %
+                              (hook, e))
+                    log.error(traceback.format_exc())
+                    sys.exit(1)
+
+        # Run the pbr hook
+        pbr.hooks.setup_hook(config)
+
+        kwargs = setup_cfg_to_setup_kwargs(config, script_args)
+
+        # Set default config overrides
+        kwargs['include_package_data'] = True
+        kwargs['zip_safe'] = False
+
+        register_custom_compilers(config)
+
+        ext_modules = get_extension_modules(config)
+        if ext_modules:
+            kwargs['ext_modules'] = ext_modules
+
+        entry_points = get_entry_points(config)
+        if entry_points:
+            kwargs['entry_points'] = entry_points
+
+        # Handle the [files]/extra_files option
+        files_extra_files = has_get_option(config, 'files', 'extra_files')
+        if files_extra_files:
+            extra_files.set_extra_files(split_multiline(files_extra_files))
+
+    finally:
+        # Perform cleanup if any paths were added to sys.path
+        if package_dir:
+            sys.path.pop(0)
+
+    # kwargs now holds plain setuptools/distutils-style arguments;
+    # everything setup.cfg specific has been normalised away.
+    return kwargs
+
+
+def setup_cfg_to_setup_kwargs(config, script_args=()):
+    """Convert config options to kwargs.
+
+    Processes the setup.cfg options and converts them to arguments accepted
+    by setuptools' setup() function.
+    """
+
+    kwargs = {}
+
+    # Temporarily holds install_requires and extra_requires while we
+    # parse env_markers.
+    all_requirements = {}
+
+    for arg in D1_D2_SETUP_ARGS:
+        if len(D1_D2_SETUP_ARGS[arg]) == 2:
+            # The distutils field name is different than distutils2's.
+            section, option = D1_D2_SETUP_ARGS[arg]
+
+        elif len(D1_D2_SETUP_ARGS[arg]) == 1:
+            # The distutils field name is the same as distutils2's.
+            section = D1_D2_SETUP_ARGS[arg][0]
+            option = arg
+
+        in_cfg_value = has_get_option(config, section, option)
+        if not in_cfg_value:
+            # There is no such option in the setup.cfg
+            if arg == "long_description":
+                in_cfg_value = has_get_option(config, section,
+                                              "description_file")
+                if in_cfg_value:
+                    in_cfg_value = split_multiline(in_cfg_value)
+                    value = ''
+                    for filename in in_cfg_value:
+                        description_file = io.open(filename, encoding='utf-8')
+                        try:
+                            value += description_file.read().strip() + '\n\n'
+                        finally:
+                            description_file.close()
+                    in_cfg_value = value
+            else:
+                continue
+
+        if arg in CSV_FIELDS:
+            in_cfg_value = split_csv(in_cfg_value)
+        if arg in MULTI_FIELDS:
+            in_cfg_value = split_multiline(in_cfg_value)
+        elif arg in MAP_FIELDS:
+            in_cfg_map = {}
+            for i in split_multiline(in_cfg_value):
+                k, v = i.split('=', 1)
+                in_cfg_map[k.strip()] = v.strip()
+            in_cfg_value = in_cfg_map
+        elif arg in BOOL_FIELDS:
+            # Provide some flexibility here...
+            if in_cfg_value.lower() in ('true', 't', '1', 'yes', 'y'):
+                in_cfg_value = True
+            else:
+                in_cfg_value = False
+
+        if in_cfg_value:
+            if arg in ('install_requires', 'tests_require'):
+                # Replaces PEP345-style version specs with the sort expected by
+                # setuptools
+                in_cfg_value = [_VERSION_SPEC_RE.sub(r'\1\2', pred)
+                                for pred in in_cfg_value]
+            if arg == 'install_requires':
+                # Split install_requires into package,env_marker tuples
+                # These will be re-assembled later
+                install_requires = []
+                requirement_pattern = (
+                    r'(?P<package>[^;]*);?(?P<env_marker>[^#]*?)(?:\s*#.*)?$')
+                for requirement in in_cfg_value:
+                    m = re.match(requirement_pattern, requirement)
+                    requirement_package = m.group('package').strip()
+                    env_marker = m.group('env_marker').strip()
+                    install_requires.append((requirement_package, env_marker))
+                all_requirements[''] = install_requires
+            elif arg == 'package_dir':
+                in_cfg_value = {'': in_cfg_value}
+            elif arg in ('package_data', 'data_files'):
+                data_files = {}
+                firstline = True
+                prev = None
+                for line in in_cfg_value:
+                    if '=' in line:
+                        key, value = line.split('=', 1)
+                        key_unquoted = shlex_split(key.strip())[0]
+                        key, value = (key_unquoted, value.strip())
+                        if key in data_files:
+                            # Multiple duplicates of the same package name;
+                            # this is for backwards compatibility of the old
+                            # format prior to d2to1 0.2.6.
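+                            # e.g. the two lines 'testpackage = a.txt' and
+                            # 'testpackage = b.txt' (names illustrative)
+                            # merge into {'testpackage': ['a.txt', 'b.txt']}.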
+                            prev = data_files[key]
+                            prev.extend(shlex_split(value))
+                        else:
+                            prev = data_files[key.strip()] = shlex_split(value)
+                    elif firstline:
+                        raise errors.DistutilsOptionError(
+                            'malformed package_data first line %r (misses '
+                            '"=")' % line)
+                    else:
+                        prev.extend(shlex_split(line.strip()))
+                    firstline = False
+                if arg == 'data_files':
+                    # the data_files value is a pointlessly different structure
+                    # from the package_data value
+                    data_files = list(data_files.items())
+                in_cfg_value = data_files
+            elif arg == 'cmdclass':
+                cmdclass = {}
+                dist = st_dist.Distribution()
+                for cls_name in in_cfg_value:
+                    cls = resolve_name(cls_name)
+                    cmd = cls(dist)
+                    cmdclass[cmd.get_command_name()] = cls
+                in_cfg_value = cmdclass
+
+        kwargs[arg] = in_cfg_value
+
+    # Transform requirements with embedded environment markers to
+    # setuptools' supported marker-per-requirement format.
+    #
+    # install_requires are treated as a special case of extras, before
+    # being put back in the expected place
+    #
+    # fred =
+    #     foo:marker
+    #     bar
+    # -> {'fred': ['bar'], 'fred:marker':['foo']}
+
+    if 'extras' in config:
+        requirement_pattern = (
+            r'(?P<package>[^:]*):?(?P<env_marker>[^#]*?)(?:\s*#.*)?$')
+        extras = config['extras']
+        # Add contents of test-requirements, if any, into an extra named
+        # 'test' if one does not already exist.
+        if 'test' not in extras:
+            from pbr import packaging
+            extras['test'] = "\n".join(packaging.parse_requirements(
+                packaging.TEST_REQUIREMENTS_FILES)).replace(';', ':')
+
+        for extra in extras:
+            extra_requirements = []
+            requirements = split_multiline(extras[extra])
+            for requirement in requirements:
+                m = re.match(requirement_pattern, requirement)
+                extras_value = m.group('package').strip()
+                env_marker = m.group('env_marker')
+                extra_requirements.append((extras_value, env_marker))
+            all_requirements[extra] = extra_requirements
+
+    # Transform the full list of requirements into:
+    # - install_requires, for those that have no extra and no
+    #   env_marker
+    # - named extras, for those with an extra name (which may include
+    #   an env_marker)
+    # - and as a special case, install_requires with an env_marker are
+    #   treated as named extras where the name is the empty string
+
+    extras_require = {}
+    for req_group in all_requirements:
+        for requirement, env_marker in all_requirements[req_group]:
+            if env_marker:
+                extras_key = '%s:(%s)' % (req_group, env_marker)
+                # We do not want to poison wheel creation with locally
+                # evaluated markers. sdists always re-create the egg_info
+                # and as such do not need guarding, and pip will never call
+                # multiple setup.py commands at once.
+                if 'bdist_wheel' not in script_args:
+                    try:
+                        if pkg_resources.evaluate_marker('(%s)' % env_marker):
+                            extras_key = req_group
+                    except SyntaxError:
+                        log.error(
+                            "Marker evaluation failed, see the following "
+                            "error. For more information see: "
+                            "http://docs.openstack.org/"
+                            "pbr/latest/user/using.html#environment-markers"
+                        )
+                        raise
+            else:
+                extras_key = req_group
+            extras_require.setdefault(extras_key, []).append(requirement)
+
+    kwargs['install_requires'] = extras_require.pop('', [])
+    kwargs['extras_require'] = extras_require
+
+    return kwargs
+
+
+def register_custom_compilers(config):
+    """Handle custom compilers.
+
+    This has no real equivalent in distutils, where additional compilers could
+    only be added programmatically, so we have to hack it in somehow.
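+
+    A hypothetical setup.cfg entry (option value invented for illustration)::
+
+        [global]
+        compilers =
+            mypackage.compilers.MyCompiler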
+ """ + + compilers = has_get_option(config, 'global', 'compilers') + if compilers: + compilers = split_multiline(compilers) + for compiler in compilers: + compiler = resolve_name(compiler) + + # In distutils2 compilers these class attributes exist; for + # distutils1 we just have to make something up + if hasattr(compiler, 'name'): + name = compiler.name + else: + name = compiler.__name__ + if hasattr(compiler, 'description'): + desc = compiler.description + else: + desc = 'custom compiler %s' % name + + module_name = compiler.__module__ + # Note; this *will* override built in compilers with the same name + # TODO(embray): Maybe display a warning about this? + cc = distutils.ccompiler.compiler_class + cc[name] = (module_name, compiler.__name__, desc) + + # HACK!!!! Distutils assumes all compiler modules are in the + # distutils package + sys.modules['distutils.' + module_name] = sys.modules[module_name] + + +def get_extension_modules(config): + """Handle extension modules""" + + EXTENSION_FIELDS = ("sources", + "include_dirs", + "define_macros", + "undef_macros", + "library_dirs", + "libraries", + "runtime_library_dirs", + "extra_objects", + "extra_compile_args", + "extra_link_args", + "export_symbols", + "swig_opts", + "depends") + + ext_modules = [] + for section in config: + if ':' in section: + labels = section.split(':', 1) + else: + # Backwards compatibility for old syntax; don't use this though + labels = section.split('=', 1) + labels = [l.strip() for l in labels] + if (len(labels) == 2) and (labels[0] == 'extension'): + ext_args = {} + for field in EXTENSION_FIELDS: + value = has_get_option(config, section, field) + # All extension module options besides name can have multiple + # values + if not value: + continue + value = split_multiline(value) + if field == 'define_macros': + macros = [] + for macro in value: + macro = macro.split('=', 1) + if len(macro) == 1: + macro = (macro[0].strip(), None) + else: + macro = (macro[0].strip(), macro[1].strip()) + macros.append(macro) + value = macros + ext_args[field] = value + if ext_args: + if 'name' not in ext_args: + ext_args['name'] = labels[1] + ext_modules.append(extension.Extension(ext_args.pop('name'), + **ext_args)) + return ext_modules + + +def get_entry_points(config): + """Process the [entry_points] section of setup.cfg. + + Processes setup.cfg to handle setuptools entry points. This is, of course, + not a standard feature of distutils2/packaging, but as there is not + currently a standard alternative in packaging, we provide support for them. 
+ """ + + if 'entry_points' not in config: + return {} + + return dict((option, split_multiline(value)) + for option, value in config['entry_points'].items()) + + +def has_get_option(config, section, option): + if section in config and option in config[section]: + return config[section][option] + else: + return False + + +def split_multiline(value): + """Special behaviour when we have a multi line options""" + + value = [element for element in + (line.strip() for line in value.split('\n')) + if element and not element.startswith('#')] + return value + + +def split_csv(value): + """Special behaviour when we have a comma separated options""" + + value = [element for element in + (chunk.strip() for chunk in value.split(',')) + if element] + return value + + +# The following classes are used to hack Distribution.command_options a bit +class DefaultGetDict(defaultdict): + """Like defaultdict, but get() also sets and returns the default value.""" + + def get(self, key, default=None): + if default is None: + default = self.default_factory() + return super(DefaultGetDict, self).setdefault(key, default) diff --git a/venv/Lib/site-packages/pbr/version.py b/venv/Lib/site-packages/pbr/version.py new file mode 100644 index 00000000..46c60207 --- /dev/null +++ b/venv/Lib/site-packages/pbr/version.py @@ -0,0 +1,483 @@ + +# Copyright 2012 OpenStack Foundation +# Copyright 2012-2013 Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" +Utilities for consuming the version from pkg_resources. +""" + +import itertools +import operator +import sys + + +def _is_int(string): + try: + int(string) + return True + except ValueError: + return False + + +class SemanticVersion(object): + """A pure semantic version independent of serialisation. + + See the pbr doc 'semver' for details on the semantics. + """ + + def __init__( + self, major, minor=0, patch=0, prerelease_type=None, + prerelease=None, dev_count=None): + """Create a SemanticVersion. + + :param major: Major component of the version. + :param minor: Minor component of the version. Defaults to 0. + :param patch: Patch level component. Defaults to 0. + :param prerelease_type: What sort of prerelease version this is - + one of a(alpha), b(beta) or rc(release candidate). + :param prerelease: For prerelease versions, what number prerelease. + Defaults to 0. + :param dev_count: How many commits since the last release. + """ + self._major = major + self._minor = minor + self._patch = patch + self._prerelease_type = prerelease_type + self._prerelease = prerelease + if self._prerelease_type and not self._prerelease: + self._prerelease = 0 + self._dev_count = dev_count or 0 # Normalise 0 to None. 
+ + def __eq__(self, other): + if not isinstance(other, SemanticVersion): + return False + return self.__dict__ == other.__dict__ + + def __hash__(self): + return sum(map(hash, self.__dict__.values())) + + def _sort_key(self): + """Return a key for sorting SemanticVersion's on.""" + # key things: + # - final is after rc's, so we make that a/b/rc/z + # - dev==None is after all other devs, so we use sys.maxsize there. + # - unqualified dev releases come before any pre-releases. + # So we do: + # (major, minor, patch) - gets the major grouping. + # (0|1) unqualified dev flag + # (a/b/rc/z) - release segment grouping + # pre-release level + # dev count, maxsize for releases. + rc_lookup = {'a': 'a', 'b': 'b', 'rc': 'rc', None: 'z'} + if self._dev_count and not self._prerelease_type: + uq_dev = 0 + else: + uq_dev = 1 + return ( + self._major, self._minor, self._patch, + uq_dev, + rc_lookup[self._prerelease_type], self._prerelease, + self._dev_count or sys.maxsize) + + def __lt__(self, other): + """Compare self and other, another Semantic Version.""" + # NB(lifeless) this could perhaps be rewritten as + # lt (tuple_of_one, tuple_of_other) with a single check for + # the typeerror corner cases - that would likely be faster + # if this ever becomes performance sensitive. + if not isinstance(other, SemanticVersion): + raise TypeError("ordering to non-SemanticVersion is undefined") + return self._sort_key() < other._sort_key() + + def __le__(self, other): + return self == other or self < other + + def __ge__(self, other): + return not self < other + + def __gt__(self, other): + return not self <= other + + def __ne__(self, other): + return not self == other + + def __repr__(self): + return "pbr.version.SemanticVersion(%s)" % self.release_string() + + @classmethod + def from_pip_string(klass, version_string): + """Create a SemanticVersion from a pip version string. + + This method will parse a version like 1.3.0 into a SemanticVersion. + + This method is responsible for accepting any version string that any + older version of pbr ever created. + + Therefore: versions like 1.3.0a1 versions are handled, parsed into a + canonical form and then output - resulting in 1.3.0.0a1. + Pre pbr-semver dev versions like 0.10.1.3.g83bef74 will be parsed but + output as 0.10.1.dev3.g83bef74. + + :raises ValueError: Never tagged versions sdisted by old pbr result in + just the git hash, e.g. '1234567' which poses a substantial problem + since they collide with the semver versions when all the digits are + numerals. Such versions will result in a ValueError being thrown if + any non-numeric digits are present. They are an exception to the + general case of accepting anything we ever output, since they were + never intended and would permanently mess up versions on PyPI if + ever released - we're treating that as a critical bug that we ever + made them and have stopped doing that. 
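+
+        A doctest-style sketch (illustrative)::
+
+            >>> SemanticVersion.from_pip_string('1.3.0a1')
+            pbr.version.SemanticVersion(1.3.0.0a1)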
+ """ + + try: + return klass._from_pip_string_unsafe(version_string) + except IndexError: + raise ValueError("Invalid version %r" % version_string) + + @classmethod + def _from_pip_string_unsafe(klass, version_string): + # Versions need to start numerically, ignore if not + version_string = version_string.lstrip('vV') + if not version_string[:1].isdigit(): + raise ValueError("Invalid version %r" % version_string) + input_components = version_string.split('.') + # decimals first (keep pre-release and dev/hashes to the right) + components = [c for c in input_components if c.isdigit()] + digit_len = len(components) + if digit_len == 0: + raise ValueError("Invalid version %r" % version_string) + elif digit_len < 3: + if (digit_len < len(input_components) and + input_components[digit_len][0].isdigit()): + # Handle X.YaZ - Y is a digit not a leadin to pre-release. + mixed_component = input_components[digit_len] + last_component = ''.join(itertools.takewhile( + lambda x: x.isdigit(), mixed_component)) + components.append(last_component) + input_components[digit_len:digit_len + 1] = [ + last_component, mixed_component[len(last_component):]] + digit_len += 1 + components.extend([0] * (3 - digit_len)) + components.extend(input_components[digit_len:]) + major = int(components[0]) + minor = int(components[1]) + dev_count = None + post_count = None + prerelease_type = None + prerelease = None + + def _parse_type(segment): + # Discard leading digits (the 0 in 0a1) + isdigit = operator.methodcaller('isdigit') + segment = ''.join(itertools.dropwhile(isdigit, segment)) + isalpha = operator.methodcaller('isalpha') + prerelease_type = ''.join(itertools.takewhile(isalpha, segment)) + prerelease = segment[len(prerelease_type)::] + return prerelease_type, int(prerelease) + if _is_int(components[2]): + patch = int(components[2]) + else: + # legacy version e.g. 1.2.0a1 (canonical is 1.2.0.0a1) + # or 1.2.dev4.g1234 or 1.2.b4 + patch = 0 + components[2:2] = [0] + remainder = components[3:] + remainder_starts_with_int = False + try: + if remainder and int(remainder[0]): + remainder_starts_with_int = True + except ValueError: + pass + if remainder_starts_with_int: + # old dev format - 0.1.2.3.g1234 + dev_count = int(remainder[0]) + else: + if remainder and (remainder[0][0] == '0' or + remainder[0][0] in ('a', 'b', 'r')): + # Current RC/beta layout + prerelease_type, prerelease = _parse_type(remainder[0]) + remainder = remainder[1:] + while remainder: + component = remainder[0] + if component.startswith('dev'): + dev_count = int(component[3:]) + elif component.startswith('post'): + dev_count = None + post_count = int(component[4:]) + else: + raise ValueError( + 'Unknown remainder %r in %r' + % (remainder, version_string)) + remainder = remainder[1:] + result = SemanticVersion( + major, minor, patch, prerelease_type=prerelease_type, + prerelease=prerelease, dev_count=dev_count) + if post_count: + if dev_count: + raise ValueError( + 'Cannot combine postN and devN - no mapping in %r' + % (version_string,)) + result = result.increment().to_dev(post_count) + return result + + def brief_string(self): + """Return the short version minus any alpha/beta tags.""" + return "%s.%s.%s" % (self._major, self._minor, self._patch) + + def debian_string(self): + """Return the version number to use when building a debian package. + + This translates the PEP440/semver precedence rules into Debian version + sorting operators. + """ + return self._long_version("~") + + def decrement(self): + """Return a decremented SemanticVersion. 
+ + Decrementing versions doesn't make a lot of sense - this method only + exists to support rendering of pre-release versions strings into + serialisations (such as rpm) with no sort-before operator. + + The 9999 magic version component is from the spec on this - pbr-semver. + + :return: A new SemanticVersion object. + """ + if self._patch: + new_patch = self._patch - 1 + new_minor = self._minor + new_major = self._major + else: + new_patch = 9999 + if self._minor: + new_minor = self._minor - 1 + new_major = self._major + else: + new_minor = 9999 + if self._major: + new_major = self._major - 1 + else: + new_major = 0 + return SemanticVersion( + new_major, new_minor, new_patch) + + def increment(self, minor=False, major=False): + """Return an incremented SemanticVersion. + + The default behaviour is to perform a patch level increment. When + incrementing a prerelease version, the patch level is not changed + - the prerelease serial is changed (e.g. beta 0 -> beta 1). + + Incrementing non-pre-release versions will not introduce pre-release + versions - except when doing a patch incremental to a pre-release + version the new version will only consist of major/minor/patch. + + :param minor: Increment the minor version. + :param major: Increment the major version. + :return: A new SemanticVersion object. + """ + if self._prerelease_type: + new_prerelease_type = self._prerelease_type + new_prerelease = self._prerelease + 1 + new_patch = self._patch + else: + new_prerelease_type = None + new_prerelease = None + new_patch = self._patch + 1 + if minor: + new_minor = self._minor + 1 + new_patch = 0 + new_prerelease_type = None + new_prerelease = None + else: + new_minor = self._minor + if major: + new_major = self._major + 1 + new_minor = 0 + new_patch = 0 + new_prerelease_type = None + new_prerelease = None + else: + new_major = self._major + return SemanticVersion( + new_major, new_minor, new_patch, + new_prerelease_type, new_prerelease) + + def _long_version(self, pre_separator, rc_marker=""): + """Construct a long string version of this semver. + + :param pre_separator: What separator to use between components + that sort before rather than after. If None, use . and lower the + version number of the component to preserve sorting. (Used for + rpm support) + """ + if ((self._prerelease_type or self._dev_count) and + pre_separator is None): + segments = [self.decrement().brief_string()] + pre_separator = "." + else: + segments = [self.brief_string()] + if self._prerelease_type: + segments.append( + "%s%s%s%s" % (pre_separator, rc_marker, self._prerelease_type, + self._prerelease)) + if self._dev_count: + if not self._prerelease_type: + segments.append(pre_separator) + else: + segments.append('.') + segments.append('dev') + segments.append(self._dev_count) + return "".join(str(s) for s in segments) + + def release_string(self): + """Return the full version of the package. + + This including suffixes indicating VCS status. + """ + return self._long_version(".", "0") + + def rpm_string(self): + """Return the version number to use when building an RPM package. + + This translates the PEP440/semver precedence rules into RPM version + sorting operators. Because RPM has no sort-before operator (such as the + ~ operator in dpkg), we show all prerelease versions as being versions + of the release before. + """ + return self._long_version(None) + + def to_dev(self, dev_count): + """Return a development version of this semver. + + :param dev_count: The number of commits since the last release. 
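+
+        For example (illustrative)::
+
+            SemanticVersion(1, 2, 3).to_dev(4).release_string()
+            # -> '1.2.3.dev4'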
+ """ + return SemanticVersion( + self._major, self._minor, self._patch, self._prerelease_type, + self._prerelease, dev_count=dev_count) + + def version_tuple(self): + """Present the version as a version_info tuple. + + For documentation on version_info tuples see the Python + documentation for sys.version_info. + + Since semver and PEP-440 represent overlapping but not subsets of + versions, we have to have some heuristic / mapping rules, and have + extended the releaselevel field to have alphadev, betadev and + candidatedev values. When they are present the dev count is used + to provide the serial. + - a/b/rc take precedence. + - if there is no pre-release version the dev version is used. + - serial is taken from the dev/a/b/c component. + - final non-dev versions never get serials. + """ + segments = [self._major, self._minor, self._patch] + if self._prerelease_type: + type_map = {('a', False): 'alpha', + ('b', False): 'beta', + ('rc', False): 'candidate', + ('a', True): 'alphadev', + ('b', True): 'betadev', + ('rc', True): 'candidatedev', + } + segments.append( + type_map[(self._prerelease_type, bool(self._dev_count))]) + segments.append(self._dev_count or self._prerelease) + elif self._dev_count: + segments.append('dev') + segments.append(self._dev_count - 1) + else: + segments.append('final') + segments.append(0) + return tuple(segments) + + +class VersionInfo(object): + + def __init__(self, package): + """Object that understands versioning for a package + + :param package: name of the python package, such as glance, or + python-glanceclient + """ + self.package = package + self.version = None + self._cached_version = None + self._semantic = None + + def __str__(self): + """Make the VersionInfo object behave like a string.""" + return self.version_string() + + def __repr__(self): + """Include the name.""" + return "pbr.version.VersionInfo(%s:%s)" % ( + self.package, self.version_string()) + + def _get_version_from_pkg_resources(self): + """Obtain a version from pkg_resources or setup-time logic if missing. + + This will try to get the version of the package from the pkg_resources + record associated with the package, and if there is no such record + falls back to the logic sdist would use. + """ + # Lazy import because pkg_resources is costly to import so defer until + # we absolutely need it. + import pkg_resources + try: + requirement = pkg_resources.Requirement.parse(self.package) + provider = pkg_resources.get_provider(requirement) + result_string = provider.version + except pkg_resources.DistributionNotFound: + # The most likely cause for this is running tests in a tree + # produced from a tarball where the package itself has not been + # installed into anything. Revert to setup-time logic. + from pbr import packaging + result_string = packaging.get_version(self.package) + return SemanticVersion.from_pip_string(result_string) + + def release_string(self): + """Return the full version of the package. + + This including suffixes indicating VCS status. 
+ """ + return self.semantic_version().release_string() + + def semantic_version(self): + """Return the SemanticVersion object for this version.""" + if self._semantic is None: + self._semantic = self._get_version_from_pkg_resources() + return self._semantic + + def version_string(self): + """Return the short version minus any alpha/beta tags.""" + return self.semantic_version().brief_string() + + # Compatibility functions + canonical_version_string = version_string + version_string_with_vcs = release_string + + def cached_version_string(self, prefix=""): + """Return a cached version string. + + This will return a cached version string if one is already cached, + irrespective of prefix. If none is cached, one will be created with + prefix and then cached and returned. + """ + if not self._cached_version: + self._cached_version = "%s%s" % (prefix, + self.version_string()) + return self._cached_version diff --git a/venv/Lib/site-packages/stevedore-3.2.0.dist-info/DESCRIPTION.rst b/venv/Lib/site-packages/stevedore-3.2.0.dist-info/DESCRIPTION.rst new file mode 100644 index 00000000..126f066f --- /dev/null +++ b/venv/Lib/site-packages/stevedore-3.2.0.dist-info/DESCRIPTION.rst @@ -0,0 +1,30 @@ +=========================================================== +stevedore -- Manage dynamic plugins for Python applications +=========================================================== + +.. image:: https://img.shields.io/pypi/v/stevedore.svg + :target: https://pypi.org/project/stevedore/ + :alt: Latest Version + +.. image:: https://governance.openstack.org/tc/badges/stevedore.svg + :target: https://governance.openstack.org/tc/reference/tags/index.html + +Python makes loading code dynamically easy, allowing you to configure +and extend your application by discovering and loading extensions +("*plugins*") at runtime. Many applications implement their own +library for doing this, using ``__import__`` or ``importlib``. +stevedore avoids creating yet another extension +mechanism by building on top of `setuptools entry points`_. The code +for managing entry points tends to be repetitive, though, so stevedore +provides manager classes for implementing common patterns for using +dynamically loaded extensions. + +.. 
_setuptools entry points: http://setuptools.readthedocs.io/en/latest/pkg_resources.html?#entry-points + +* Free software: Apache license +* Documentation: https://docs.openstack.org/stevedore/latest +* Source: https://opendev.org/openstack/stevedore +* Bugs: https://bugs.launchpad.net/python-stevedore + + + diff --git a/venv/Lib/site-packages/stevedore-3.2.0.dist-info/INSTALLER b/venv/Lib/site-packages/stevedore-3.2.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/venv/Lib/site-packages/stevedore-3.2.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/Lib/site-packages/stevedore-3.2.0.dist-info/METADATA b/venv/Lib/site-packages/stevedore-3.2.0.dist-info/METADATA new file mode 100644 index 00000000..56d6ddaf --- /dev/null +++ b/venv/Lib/site-packages/stevedore-3.2.0.dist-info/METADATA @@ -0,0 +1,53 @@ +Metadata-Version: 2.0 +Name: stevedore +Version: 3.2.0 +Summary: Manage dynamic plugins for Python applications +Home-page: https://docs.openstack.org/stevedore/latest/ +Author: OpenStack +Author-email: openstack-discuss@lists.openstack.org +License: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Intended Audience :: Developers +Classifier: Environment :: Console +Requires-Python: >=3.6 +Requires-Dist: pbr (!=2.1.0,>=2.0.0) +Requires-Dist: importlib-metadata (>=1.7.0); (python_version<'3.8') + +=========================================================== +stevedore -- Manage dynamic plugins for Python applications +=========================================================== + +.. image:: https://img.shields.io/pypi/v/stevedore.svg + :target: https://pypi.org/project/stevedore/ + :alt: Latest Version + +.. image:: https://governance.openstack.org/tc/badges/stevedore.svg + :target: https://governance.openstack.org/tc/reference/tags/index.html + +Python makes loading code dynamically easy, allowing you to configure +and extend your application by discovering and loading extensions +("*plugins*") at runtime. Many applications implement their own +library for doing this, using ``__import__`` or ``importlib``. +stevedore avoids creating yet another extension +mechanism by building on top of `setuptools entry points`_. The code +for managing entry points tends to be repetitive, though, so stevedore +provides manager classes for implementing common patterns for using +dynamically loaded extensions. + +.. 
_setuptools entry points: http://setuptools.readthedocs.io/en/latest/pkg_resources.html?#entry-points + +* Free software: Apache license +* Documentation: https://docs.openstack.org/stevedore/latest +* Source: https://opendev.org/openstack/stevedore +* Bugs: https://bugs.launchpad.net/python-stevedore + + + diff --git a/venv/Lib/site-packages/stevedore-3.2.0.dist-info/RECORD b/venv/Lib/site-packages/stevedore-3.2.0.dist-info/RECORD new file mode 100644 index 00000000..201f49cf --- /dev/null +++ b/venv/Lib/site-packages/stevedore-3.2.0.dist-info/RECORD @@ -0,0 +1,77 @@ +stevedore-3.2.0.dist-info/DESCRIPTION.rst,sha256=nBjUeLr8UCyVj8lqPWllqt3PAUbsb8QNcob6g4DiFvw,1310 +stevedore-3.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +stevedore-3.2.0.dist-info/METADATA,sha256=wnY0Dr5S1wtZowsI4rwV12kWiQwXNH35q0z7ALNNrO8,2226 +stevedore-3.2.0.dist-info/RECORD,, +stevedore-3.2.0.dist-info/WHEEL,sha256=8Lm45v9gcYRm70DrgFGVe4WsUtUMi1_0Tso1hqPGMjA,92 +stevedore-3.2.0.dist-info/entry_points.txt,sha256=6cL05UCGjPy45yEg4flcYVvlbiSiaHTLnYvmvbNmnnM,388 +stevedore-3.2.0.dist-info/metadata.json,sha256=gmFp_eQFN9dWOZIwfhz8_93fmGO6ZjY17LlCwLrvwVQ,1618 +stevedore-3.2.0.dist-info/pbr.json,sha256=JBFv4tR5fKuecQTleWHumyAy5D06-p35attK26fQ5lU,46 +stevedore-3.2.0.dist-info/top_level.txt,sha256=rtOULIhauZOXFiAgHRCDBdnqb0wKxA-NqLlvo_b_SOM,10 +stevedore/__init__.py,sha256=lwsEP3iDFwk2lPJjgW3IbeQkhN6TeLM76tCl9V5BWYM,544 +stevedore/__pycache__/__init__.cpython-36.pyc,, +stevedore/__pycache__/_cache.cpython-36.pyc,, +stevedore/__pycache__/dispatch.cpython-36.pyc,, +stevedore/__pycache__/driver.cpython-36.pyc,, +stevedore/__pycache__/enabled.cpython-36.pyc,, +stevedore/__pycache__/exception.cpython-36.pyc,, +stevedore/__pycache__/extension.cpython-36.pyc,, +stevedore/__pycache__/hook.cpython-36.pyc,, +stevedore/__pycache__/named.cpython-36.pyc,, +stevedore/__pycache__/sphinxext.cpython-36.pyc,, +stevedore/_cache.py,sha256=XEa4GRzXL85ScuJln6_qJ3iXaowVzf64VrxEPx7XTrU,6005 +stevedore/dispatch.py,sha256=I5wiZfF9As7gNe5rZgfHV7dAZ9K7tePzoao121WzOmM,9560 +stevedore/driver.py,sha256=nkUv2CbvvVadthppsWZZArb0GyqUjoAMWjqgvJVyb2g,6210 +stevedore/enabled.py,sha256=vtWydytqC77rOUoPrjCjJyMtqAO-iGMfvJ5oKUi4Kto,3569 +stevedore/example/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +stevedore/example/__pycache__/__init__.cpython-36.pyc,, +stevedore/example/__pycache__/base.cpython-36.pyc,, +stevedore/example/__pycache__/load_as_driver.cpython-36.pyc,, +stevedore/example/__pycache__/load_as_extension.cpython-36.pyc,, +stevedore/example/__pycache__/setup.cpython-36.pyc,, +stevedore/example/__pycache__/simple.cpython-36.pyc,, +stevedore/example/base.py,sha256=-_pdDBZUoI-KY7tAbZKyeRN_hdpFsfkeBZorrYgdOIk,505 +stevedore/example/load_as_driver.py,sha256=bQNxK4cJ5wVcE2ro2Pgt60Phlmp2KrD7DX83MPpUOys,763 +stevedore/example/load_as_extension.py,sha256=Qo3YYbTulSfLBR3eKMrZMtoqtWc6VLPr4S88fW_tcpk,822 +stevedore/example/setup.py,sha256=0ee44YR11wieurWOdzUM3booDmCgpocXEwM20hb7ie0,1168 +stevedore/example/simple.py,sha256=DgSwoMOBc3YnANLuUbVg_xunRo3z7eKwJK2pNz_YLLs,526 +stevedore/example2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +stevedore/example2/__pycache__/__init__.cpython-36.pyc,, +stevedore/example2/__pycache__/fields.cpython-36.pyc,, +stevedore/example2/__pycache__/setup.cpython-36.pyc,, +stevedore/example2/fields.py,sha256=2fm-vPcuC_I9WYFBz08IhLI0m-0wBfbvAa9Sg05YS9o,939 +stevedore/example2/setup.py,sha256=0F0C3AHwfFYvSM3jEZwBWcJwoLjvU23UuY1sJRN3rIU,1118 
+stevedore/exception.py,sha256=D0oRCv7A_tLG3AKIOGGoKuj1dAqEkCwNa99qLcCxzBs,864 +stevedore/extension.py,sha256=OAafQ1SXojhVHFRLdmIWWZcKYl2T0J7lM_4aETTN8Sw,14202 +stevedore/hook.py,sha256=Ggi8wlBBVB0mQ_Le-BReFnpPdJ6mhhGfrvYyM6-AwhU,3972 +stevedore/named.py,sha256=Jr8ij43NUldHoJ9NTR9X9YM5GuWcGcIttSyviBeIZAk,7232 +stevedore/sphinxext.py,sha256=0-JaJEkUZaXRw_-zQlMg3fW02g2KpcUHvxO4LtXUgz8,3806 +stevedore/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +stevedore/tests/__pycache__/__init__.cpython-36.pyc,, +stevedore/tests/__pycache__/extension_unimportable.cpython-36.pyc,, +stevedore/tests/__pycache__/manager.cpython-36.pyc,, +stevedore/tests/__pycache__/test_callback.cpython-36.pyc,, +stevedore/tests/__pycache__/test_dispatch.cpython-36.pyc,, +stevedore/tests/__pycache__/test_driver.cpython-36.pyc,, +stevedore/tests/__pycache__/test_enabled.cpython-36.pyc,, +stevedore/tests/__pycache__/test_example_fields.cpython-36.pyc,, +stevedore/tests/__pycache__/test_example_simple.cpython-36.pyc,, +stevedore/tests/__pycache__/test_extension.cpython-36.pyc,, +stevedore/tests/__pycache__/test_hook.cpython-36.pyc,, +stevedore/tests/__pycache__/test_named.cpython-36.pyc,, +stevedore/tests/__pycache__/test_sphinxext.cpython-36.pyc,, +stevedore/tests/__pycache__/test_test_manager.cpython-36.pyc,, +stevedore/tests/__pycache__/utils.cpython-36.pyc,, +stevedore/tests/extension_unimportable.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +stevedore/tests/manager.py,sha256=sTcvrhZrhOKfOxmj7TEOX9jwl_ME_lG4F2GCdL8TPn4,2538 +stevedore/tests/test_callback.py,sha256=IX7rKVXev83v72XG6ys78UjHv5EKiHfY3BkecJnsvxw,2152 +stevedore/tests/test_dispatch.py,sha256=IhPb9TEvdyLuLnDBRBPfvLcjMrwPN2-iXhkb9EoYf0s,4145 +stevedore/tests/test_driver.py,sha256=t94x8NZV843tbZHcQgo6k949zS50opB73aKpGUMJkPs,3439 +stevedore/tests/test_enabled.py,sha256=2IAeQ_uI9d7TRs8q6V7OaZvk3cl7DlNXQUNFA4lCWRc,1504 +stevedore/tests/test_example_fields.py,sha256=-GSF2-mANuKcQDhuyDOUJYrTcWJkzCeY-tJ1KPULPfQ,1351 +stevedore/tests/test_example_simple.py,sha256=NDZA75boEd8jlzRQUgOb6x-ZkvjMrLy2uDrjBt0F0YM,972 +stevedore/tests/test_extension.py,sha256=OMpwjJ7ESWB2r3RF0p_xH8jghtNeQVR2kSnnwGriPEo,10578 +stevedore/tests/test_hook.py,sha256=4CMD7Bq_TCWEnTBFCwC6k2VV_5r34juh5epWpnkkoz4,1713 +stevedore/tests/test_named.py,sha256=GE3s0nx3hxMzGSkEWai1k9u4lfRJAENJoqLJxYZi9WE,3356 +stevedore/tests/test_sphinxext.py,sha256=C3WkQ2OkxfeRJxZNfOee7Um_GXWh6iN9GVOtkdKdA-0,4108 +stevedore/tests/test_test_manager.py,sha256=Wgwnw2QQ4wXfTMjXnMB4fBUCf5nr1lpYv9JjfEtjumg,9480 +stevedore/tests/utils.py,sha256=ClMCj0b9u1ZYVf2cc6Y4Gq_Sm0m7PRU0ynjZ4EAYifs,617 diff --git a/venv/Lib/site-packages/stevedore-3.2.0.dist-info/WHEEL b/venv/Lib/site-packages/stevedore-3.2.0.dist-info/WHEEL new file mode 100644 index 00000000..6261a26e --- /dev/null +++ b/venv/Lib/site-packages/stevedore-3.2.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.30.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/Lib/site-packages/stevedore-3.2.0.dist-info/entry_points.txt b/venv/Lib/site-packages/stevedore-3.2.0.dist-info/entry_points.txt new file mode 100644 index 00000000..20bd7fd1 --- /dev/null +++ b/venv/Lib/site-packages/stevedore-3.2.0.dist-info/entry_points.txt @@ -0,0 +1,11 @@ +[stevedore.example.formatter] +field = stevedore.example2.fields:FieldList +plain = stevedore.example.simple:Simple +simple = stevedore.example.simple:Simple + +[stevedore.test.extension] +e1 = stevedore.tests.test_extension:BrokenExtension +e2 = 
stevedore.tests.notfound:UnimportableExtension +t1 = stevedore.tests.test_extension:FauxExtension +t2 = stevedore.tests.test_extension:FauxExtension + diff --git a/venv/Lib/site-packages/stevedore-3.2.0.dist-info/metadata.json b/venv/Lib/site-packages/stevedore-3.2.0.dist-info/metadata.json new file mode 100644 index 00000000..39e9bd47 --- /dev/null +++ b/venv/Lib/site-packages/stevedore-3.2.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: Implementation :: CPython", "Intended Audience :: Developers", "Environment :: Console"], "extensions": {"python.details": {"contacts": [{"email": "openstack-discuss@lists.openstack.org", "name": "OpenStack", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://docs.openstack.org/stevedore/latest/"}}, "python.exports": {"stevedore.example.formatter": {"field": "stevedore.example2.fields:FieldList", "plain": "stevedore.example.simple:Simple", "simple": "stevedore.example.simple:Simple"}, "stevedore.test.extension": {"e1": "stevedore.tests.test_extension:BrokenExtension", "e2": "stevedore.tests.notfound:UnimportableExtension", "t1": "stevedore.tests.test_extension:FauxExtension", "t2": "stevedore.tests.test_extension:FauxExtension"}}}, "extras": [], "generator": "bdist_wheel (0.30.0)", "metadata_version": "2.0", "name": "stevedore", "requires_python": ">=3.6", "run_requires": [{"requires": ["pbr (!=2.1.0,>=2.0.0)"]}, {"environment": "(python_version<'3.8')", "requires": ["importlib-metadata (>=1.7.0)"]}], "summary": "Manage dynamic plugins for Python applications", "test_requires": [{"requires": ["coverage (!=4.4,>=4.0)", "sphinx (!=2.1.0,>=2.0.0)", "stestr (>=2.0.0)"]}], "version": "3.2.0"} \ No newline at end of file diff --git a/venv/Lib/site-packages/stevedore-3.2.0.dist-info/pbr.json b/venv/Lib/site-packages/stevedore-3.2.0.dist-info/pbr.json new file mode 100644 index 00000000..a7d3559d --- /dev/null +++ b/venv/Lib/site-packages/stevedore-3.2.0.dist-info/pbr.json @@ -0,0 +1 @@ +{"git_version": "e9204ee", "is_release": true} \ No newline at end of file diff --git a/venv/Lib/site-packages/stevedore-3.2.0.dist-info/top_level.txt b/venv/Lib/site-packages/stevedore-3.2.0.dist-info/top_level.txt new file mode 100644 index 00000000..19c82760 --- /dev/null +++ b/venv/Lib/site-packages/stevedore-3.2.0.dist-info/top_level.txt @@ -0,0 +1 @@ +stevedore diff --git a/venv/Lib/site-packages/stevedore/__init__.py b/venv/Lib/site-packages/stevedore/__init__.py new file mode 100644 index 00000000..fdf37a99 --- /dev/null +++ b/venv/Lib/site-packages/stevedore/__init__.py @@ -0,0 +1,23 @@ +# flake8: noqa + +__all__ = [ + 'ExtensionManager', + 'EnabledExtensionManager', + 'NamedExtensionManager', + 'HookManager', + 'DriverManager', +] + +from .extension import ExtensionManager +from .enabled import EnabledExtensionManager +from .named import NamedExtensionManager +from .hook import HookManager +from .driver import DriverManager + +import logging + +# Configure a NullHandler for our log messages in case +# the app we're used from does not set up logging. 
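+# Applications that do want stevedore's output configure logging as usual,
+# for example:
+#
+#     import logging
+#     logging.basicConfig(level=logging.DEBUG)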
+LOG = logging.getLogger('stevedore') + +LOG.addHandler(logging.NullHandler()) diff --git a/venv/Lib/site-packages/stevedore/_cache.py b/venv/Lib/site-packages/stevedore/_cache.py new file mode 100644 index 00000000..28a45faa --- /dev/null +++ b/venv/Lib/site-packages/stevedore/_cache.py @@ -0,0 +1,195 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Use a cache layer in front of entry point scanning.""" + +import errno +import glob +import hashlib +import itertools +import json +import logging +import os +import os.path +import struct +import sys + +try: + # For python 3.8 and later + import importlib.metadata as importlib_metadata +except ImportError: + # For everyone else + import importlib_metadata + + +log = logging.getLogger('stevedore._cache') + + +def _get_cache_dir(): + """Locate a platform-appropriate cache directory to use. + + Does not ensure that the cache directory exists. + """ + # Linux, Unix, AIX, etc. + if os.name == 'posix' and sys.platform != 'darwin': + # use ~/.cache if empty OR not set + base_path = os.environ.get("XDG_CACHE_HOME", None) \ + or os.path.expanduser('~/.cache') + return os.path.join(base_path, 'python-entrypoints') + + # Mac OS + elif sys.platform == 'darwin': + return os.path.expanduser('~/Library/Caches/Python Entry Points') + + # Windows (hopefully) + else: + base_path = os.environ.get('LOCALAPPDATA', None) \ + or os.path.expanduser('~\\AppData\\Local') + return os.path.join(base_path, 'Python Entry Points') + + +def _get_mtime(name): + try: + s = os.stat(name) + return s.st_mtime + except OSError as err: + if err.errno != errno.ENOENT: + raise + return -1.0 + + +def _ftobytes(f): + return struct.Struct('f').pack(f) + + +def _hash_settings_for_path(path): + """Return a hash and the path settings that created it. + """ + paths = [] + h = hashlib.sha256() + + # Tie the cache to the python interpreter, in case it is part of a + # virtualenv. + h.update(sys.executable.encode('utf-8')) + h.update(sys.prefix.encode('utf-8')) + + for entry in path: + mtime = _get_mtime(entry) + h.update(entry.encode('utf-8')) + h.update(_ftobytes(mtime)) + paths.append((entry, mtime)) + + for ep_file in itertools.chain( + glob.iglob(os.path.join(entry, + '*.dist-info', + 'entry_points.txt')), + glob.iglob(os.path.join(entry, + '*.egg-info', + 'entry_points.txt')) + ): + mtime = _get_mtime(ep_file) + h.update(ep_file.encode('utf-8')) + h.update(_ftobytes(mtime)) + paths.append((ep_file, mtime)) + + return (h.hexdigest(), paths) + + +def _build_cacheable_data(path): + real_groups = importlib_metadata.entry_points() + # Convert the namedtuple values to regular tuples + groups = {} + for name, group_data in real_groups.items(): + existing = set() + members = [] + groups[name] = members + for ep in group_data: + # Filter out duplicates that can occur when testing a + # package that provides entry points using tox, where the + # package is installed in the virtualenv that tox builds + # and is present in the path as '.'. 
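+            # (ep is a named tuple of (name, value, group), so the
+            # ep[:] slice below yields a plain, hashable tuple.)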
+            item = ep[:]  # convert namedtuple to tuple
+            if item in existing:
+                continue
+            existing.add(item)
+            members.append(item)
+    return {
+        'groups': groups,
+        'sys.executable': sys.executable,
+        'sys.prefix': sys.prefix,
+    }
+
+
+class Cache:
+
+    def __init__(self, cache_dir=None):
+        if cache_dir is None:
+            cache_dir = _get_cache_dir()
+        self._dir = cache_dir
+        self._internal = {}
+
+    def _get_data_for_path(self, path):
+        if path is None:
+            path = sys.path
+
+        internal_key = tuple(path)
+        if internal_key in self._internal:
+            return self._internal[internal_key]
+
+        digest, path_values = _hash_settings_for_path(path)
+        filename = os.path.join(self._dir, digest)
+        try:
+            log.debug('reading %s', filename)
+            with open(filename, 'r') as f:
+                data = json.load(f)
+        except (IOError, json.JSONDecodeError):
+            data = _build_cacheable_data(path)
+            data['path_values'] = path_values
+            try:
+                log.debug('writing to %s', filename)
+                os.makedirs(self._dir, exist_ok=True)
+                with open(filename, 'w') as f:
+                    json.dump(data, f)
+            except (IOError, OSError):
+                # Could not create cache dir or write file.
+                pass
+
+        self._internal[internal_key] = data
+        return data
+
+    def get_group_all(self, group, path=None):
+        result = []
+        data = self._get_data_for_path(path)
+        group_data = data.get('groups', {}).get(group, [])
+        for vals in group_data:
+            result.append(importlib_metadata.EntryPoint(*vals))
+        return result
+
+    def get_group_named(self, group, path=None):
+        result = {}
+        for ep in self.get_group_all(group, path=path):
+            if ep.name not in result:
+                result[ep.name] = ep
+        return result
+
+    def get_single(self, group, name, path=None):
+        ep = self.get_group_named(group, path=path).get(name)
+        if ep is not None:
+            return ep
+        raise ValueError('No entrypoint {!r} in group {!r}'.format(
+            name, group))
+
+
+_c = Cache()
+get_group_all = _c.get_group_all
+get_group_named = _c.get_group_named
+get_single = _c.get_single
diff --git a/venv/Lib/site-packages/stevedore/dispatch.py b/venv/Lib/site-packages/stevedore/dispatch.py
new file mode 100644
index 00000000..a1589673
--- /dev/null
+++ b/venv/Lib/site-packages/stevedore/dispatch.py
@@ -0,0 +1,229 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import logging
+
+from .enabled import EnabledExtensionManager
+from .exception import NoMatches
+
+LOG = logging.getLogger(__name__)
+
+
+class DispatchExtensionManager(EnabledExtensionManager):
+    """Loads all plugins and filters on execution.
+
+    This is useful for long-running processes that need to pass
+    different inputs to different extensions.
+
+    :param namespace: The namespace for the entry points.
+    :type namespace: str
+    :param check_func: Function to determine which extensions to load.
+    :type check_func: callable
+    :param invoke_on_load: Boolean controlling whether to invoke the
+        object returned by the entry point after the driver is loaded.
+    :type invoke_on_load: bool
+    :param invoke_args: Positional arguments to pass when invoking
+        the object returned by the entry point. Only used if invoke_on_load
+        is True.
+    :type invoke_args: tuple
+    :param invoke_kwds: Named arguments to pass when invoking
+        the object returned by the entry point. Only used if invoke_on_load
+        is True.
+    :type invoke_kwds: dict
+    :param propagate_map_exceptions: Boolean controlling whether exceptions
+        are propagated up through the map call or whether they are logged and
+        then ignored
+    :type propagate_map_exceptions: bool
+    """
+
+    def map(self, filter_func, func, *args, **kwds):
+        """Iterate over the extensions, invoking func() for those where
+        filter_func() returns True.
+
+        The signature of filter_func() should be::
+
+            def filter_func(ext, *args, **kwds):
+                pass
+
+        The first argument to filter_func(), 'ext', is the
+        :class:`~stevedore.extension.Extension`
+        instance. filter_func() should return True if the extension
+        should be invoked for the input arguments.
+
+        The signature for func() should be::
+
+            def func(ext, *args, **kwds):
+                pass
+
+        The first argument to func(), 'ext', is the
+        :class:`~stevedore.extension.Extension` instance.
+
+        Exceptions raised from within func() are propagated up and
+        processing stopped if self.propagate_map_exceptions is True,
+        otherwise they are logged and ignored.
+
+        :param filter_func: Callable to test each extension.
+        :param func: Callable to invoke for each extension.
+        :param args: Variable arguments to pass to func()
+        :param kwds: Keyword arguments to pass to func()
+        :returns: List of values returned from func()
+        """
+        if not self.extensions:
+            # FIXME: Use a more specific exception class here.
+            raise NoMatches('No %s extensions found' % self.namespace)
+        response = []
+        for e in self.extensions:
+            if filter_func(e, *args, **kwds):
+                self._invoke_one_plugin(response.append, func, e, args, kwds)
+        return response
+
+    def map_method(self, filter_func, method_name, *args, **kwds):
+        """Iterate over the extensions, invoking each one's object method
+        called `method_name` for those where filter_func() returns True.
+
+        This is equivalent to using :meth:`map` with func set to
+        `lambda x: x.obj.method_name()`
+        while being more convenient.
+
+        Exceptions raised from within the called method are propagated up
+        and processing stopped if self.propagate_map_exceptions is True,
+        otherwise they are logged and ignored.
+
+        .. versionadded:: 0.12
+
+        :param filter_func: Callable to test each extension.
+        :param method_name: The extension method name to call
+            for each extension.
+        :param args: Variable arguments to pass to method
+        :param kwds: Keyword arguments to pass to method
+        :returns: List of values returned from methods
+        """
+        return self.map(filter_func, self._call_extension_method,
+                        method_name, *args, **kwds)
+
+
+class NameDispatchExtensionManager(DispatchExtensionManager):
+    """Loads all plugins and filters on execution.
+
+    This is useful for long-running processes that need to pass
+    different inputs to different extensions and can predict the name
+    of the extensions before calling them.
+
+    The check_func argument should return a boolean, with ``True``
+    indicating that the extension should be loaded and made available
+    and ``False`` indicating that the extension should be ignored.
+
+    :param namespace: The namespace for the entry points.
+    :type namespace: str
+    :param check_func: Function to determine which extensions to load.
+    :type check_func: callable
+    :param invoke_on_load: Boolean controlling whether to invoke the
+        object returned by the entry point after the driver is loaded.
+    :type invoke_on_load: bool
+    :param invoke_args: Positional arguments to pass when invoking
+        the object returned by the entry point. Only used if invoke_on_load
+        is True.
+    :type invoke_args: tuple
+    :param invoke_kwds: Named arguments to pass when invoking
+        the object returned by the entry point. Only used if invoke_on_load
+        is True.
+    :type invoke_kwds: dict
+    :param propagate_map_exceptions: Boolean controlling whether exceptions
+        are propagated up through the map call or whether they are logged and
+        then ignored
+    :type propagate_map_exceptions: bool
+    :param on_load_failure_callback: Callback function that will be called
+        when an entrypoint cannot be loaded. The arguments that will be
+        provided when this is called (when an entrypoint fails to load) are
+        (manager, entrypoint, exception)
+    :type on_load_failure_callback: function
+    :param verify_requirements: Use setuptools to enforce the
+        dependencies of the plugin(s) being loaded. Defaults to False.
+    :type verify_requirements: bool
+
+    """
+
+    def __init__(self, namespace, check_func, invoke_on_load=False,
+                 invoke_args=(), invoke_kwds={},
+                 propagate_map_exceptions=False,
+                 on_load_failure_callback=None,
+                 verify_requirements=False):
+        super(NameDispatchExtensionManager, self).__init__(
+            namespace=namespace,
+            check_func=check_func,
+            invoke_on_load=invoke_on_load,
+            invoke_args=invoke_args,
+            invoke_kwds=invoke_kwds,
+            propagate_map_exceptions=propagate_map_exceptions,
+            on_load_failure_callback=on_load_failure_callback,
+            verify_requirements=verify_requirements,
+        )
+
+    def _init_plugins(self, extensions):
+        super(NameDispatchExtensionManager, self)._init_plugins(extensions)
+        self.by_name = dict((e.name, e) for e in self.extensions)
+
+    def map(self, names, func, *args, **kwds):
+        """Iterate over the extensions, invoking func() for those whose
+        name is in the given list of names.
+
+        The signature for func() should be::
+
+            def func(ext, *args, **kwds):
+                pass
+
+        The first argument to func(), 'ext', is the
+        :class:`~stevedore.extension.Extension` instance.
+
+        Exceptions raised from within func() are propagated up and
+        processing stopped if self.propagate_map_exceptions is True,
+        otherwise they are logged and ignored.
+
+        :param names: List or set of name(s) of extension(s) to invoke.
+        :param func: Callable to invoke for each extension.
+        :param args: Variable arguments to pass to func()
+        :param kwds: Keyword arguments to pass to func()
+        :returns: List of values returned from func()
+        """
+        response = []
+        for name in names:
+            try:
+                e = self.by_name[name]
+            except KeyError:
+                LOG.debug('Missing extension %r being ignored', name)
+            else:
+                self._invoke_one_plugin(response.append, func, e, args, kwds)
+        return response
+
+    def map_method(self, names, method_name, *args, **kwds):
+        """Iterate over the extensions, invoking each one's object method
+        called `method_name` for those whose name is in the given list.
+
+        This is equivalent to using :meth:`map` with func set to
+        `lambda x: x.obj.method_name()`
+        while being more convenient.
+
+        Exceptions raised from within the called method are propagated up
+        and processing stopped if self.propagate_map_exceptions is True,
+        otherwise they are logged and ignored.
+
+        .. versionadded:: 0.12
+
+        :param names: List or set of name(s) of extension(s) to invoke.
+        :param method_name: The extension method name
+            to call for each extension.
+ :param args: Variable arguments to pass to method + :param kwds: Keyword arguments to pass to method + :returns: List of values returned from methods + """ + return self.map(names, self._call_extension_method, + method_name, *args, **kwds) diff --git a/venv/Lib/site-packages/stevedore/driver.py b/venv/Lib/site-packages/stevedore/driver.py new file mode 100644 index 00000000..167dc671 --- /dev/null +++ b/venv/Lib/site-packages/stevedore/driver.py @@ -0,0 +1,148 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from .exception import NoMatches, MultipleMatches +from .named import NamedExtensionManager + + +class DriverManager(NamedExtensionManager): + """Load a single plugin with a given name from the namespace. + + :param namespace: The namespace for the entry points. + :type namespace: str + :param name: The name of the driver to load. + :type name: str + :param invoke_on_load: Boolean controlling whether to invoke the + object returned by the entry point after the driver is loaded. + :type invoke_on_load: bool + :param invoke_args: Positional arguments to pass when invoking + the object returned by the entry point. Only used if invoke_on_load + is True. + :type invoke_args: tuple + :param invoke_kwds: Named arguments to pass when invoking + the object returned by the entry point. Only used if invoke_on_load + is True. + :type invoke_kwds: dict + :param on_load_failure_callback: Callback function that will be called when + a entrypoint can not be loaded. The arguments that will be provided + when this is called (when an entrypoint fails to load) are + (manager, entrypoint, exception) + :type on_load_failure_callback: function + :param verify_requirements: Use setuptools to enforce the + dependencies of the plugin(s) being loaded. Defaults to False. + :type verify_requirements: bool + :type warn_on_missing_entrypoint: bool + """ + + def __init__(self, namespace, name, + invoke_on_load=False, invoke_args=(), invoke_kwds={}, + on_load_failure_callback=None, + verify_requirements=False, + warn_on_missing_entrypoint=True): + on_load_failure_callback = on_load_failure_callback \ + or self._default_on_load_failure + super(DriverManager, self).__init__( + namespace=namespace, + names=[name], + invoke_on_load=invoke_on_load, + invoke_args=invoke_args, + invoke_kwds=invoke_kwds, + on_load_failure_callback=on_load_failure_callback, + verify_requirements=verify_requirements, + warn_on_missing_entrypoint=warn_on_missing_entrypoint + ) + + @staticmethod + def _default_on_load_failure(drivermanager, ep, err): + raise + + @classmethod + def make_test_instance(cls, extension, namespace='TESTING', + propagate_map_exceptions=False, + on_load_failure_callback=None, + verify_requirements=False): + """Construct a test DriverManager + + Test instances are passed a list of extensions to work from rather + than loading them from entry points. 
+ + :param extension: Pre-configured Extension instance + :type extension: :class:`~stevedore.extension.Extension` + :param namespace: The namespace for the manager; used only for + identification since the extensions are passed in. + :type namespace: str + :param propagate_map_exceptions: Boolean controlling whether exceptions + are propagated up through the map call or whether they are logged + and then ignored + :type propagate_map_exceptions: bool + :param on_load_failure_callback: Callback function that will + be called when a entrypoint can not be loaded. The + arguments that will be provided when this is called (when + an entrypoint fails to load) are (manager, entrypoint, + exception) + :type on_load_failure_callback: function + :param verify_requirements: Use setuptools to enforce the + dependencies of the plugin(s) being loaded. Defaults to False. + :type verify_requirements: bool + :return: The manager instance, initialized for testing + + """ + + o = super(DriverManager, cls).make_test_instance( + [extension], namespace=namespace, + propagate_map_exceptions=propagate_map_exceptions, + on_load_failure_callback=on_load_failure_callback, + verify_requirements=verify_requirements) + return o + + def _init_plugins(self, extensions): + super(DriverManager, self)._init_plugins(extensions) + + if not self.extensions: + name = self._names[0] + raise NoMatches('No %r driver found, looking for %r' % + (self.namespace, name)) + if len(self.extensions) > 1: + discovered_drivers = ','.join(e.entry_point_target + for e in self.extensions) + + raise MultipleMatches('Multiple %r drivers found: %s' % + (self.namespace, discovered_drivers)) + + def __call__(self, func, *args, **kwds): + """Invokes func() for the single loaded extension. + + The signature for func() should be:: + + def func(ext, *args, **kwds): + pass + + The first argument to func(), 'ext', is the + :class:`~stevedore.extension.Extension` instance. + + Exceptions raised from within func() are logged and ignored. + + :param func: Callable to invoke for each extension. + :param args: Variable arguments to pass to func() + :param kwds: Keyword arguments to pass to func() + :returns: List of values returned from func() + """ + results = self.map(func, *args, **kwds) + if results: + return results[0] + + @property + def driver(self): + """Returns the driver being used by this manager. + """ + ext = self.extensions[0] + return ext.obj if ext.obj else ext.plugin diff --git a/venv/Lib/site-packages/stevedore/enabled.py b/venv/Lib/site-packages/stevedore/enabled.py new file mode 100644 index 00000000..c2e0c03d --- /dev/null +++ b/venv/Lib/site-packages/stevedore/enabled.py @@ -0,0 +1,84 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import logging + +from .extension import ExtensionManager + + +LOG = logging.getLogger(__name__) + + +class EnabledExtensionManager(ExtensionManager): + """Loads only plugins that pass a check function. 
+ + The check_func argument should return a boolean, with ``True`` + indicating that the extension should be loaded and made available + and ``False`` indicating that the extension should be ignored. + + :param namespace: The namespace for the entry points. + :type namespace: str + :param check_func: Function to determine which extensions to load. + :type check_func: callable, taking an :class:`Extension` + instance as argument + :param invoke_on_load: Boolean controlling whether to invoke the + object returned by the entry point after the driver is loaded. + :type invoke_on_load: bool + :param invoke_args: Positional arguments to pass when invoking + the object returned by the entry point. Only used if invoke_on_load + is True. + :type invoke_args: tuple + :param invoke_kwds: Named arguments to pass when invoking + the object returned by the entry point. Only used if invoke_on_load + is True. + :type invoke_kwds: dict + :param propagate_map_exceptions: Boolean controlling whether exceptions + are propagated up through the map call or whether they are logged and + then ignored + :type propagate_map_exceptions: bool + :param on_load_failure_callback: Callback function that will be called when + a entrypoint can not be loaded. The arguments that will be provided + when this is called (when an entrypoint fails to load) are + (manager, entrypoint, exception) + :type on_load_failure_callback: function + :param verify_requirements: Use setuptools to enforce the + dependencies of the plugin(s) being loaded. Defaults to False. + :type verify_requirements: bool + + """ + + def __init__(self, namespace, check_func, invoke_on_load=False, + invoke_args=(), invoke_kwds={}, + propagate_map_exceptions=False, + on_load_failure_callback=None, + verify_requirements=False,): + self.check_func = check_func + super(EnabledExtensionManager, self).__init__( + namespace, + invoke_on_load=invoke_on_load, + invoke_args=invoke_args, + invoke_kwds=invoke_kwds, + propagate_map_exceptions=propagate_map_exceptions, + on_load_failure_callback=on_load_failure_callback, + verify_requirements=verify_requirements, + ) + + def _load_one_plugin(self, ep, invoke_on_load, invoke_args, invoke_kwds, + verify_requirements): + ext = super(EnabledExtensionManager, self)._load_one_plugin( + ep, invoke_on_load, invoke_args, invoke_kwds, + verify_requirements, + ) + if ext and not self.check_func(ext): + LOG.debug('ignoring extension %r', ep.name) + return None + return ext diff --git a/venv/Lib/site-packages/stevedore/example/__init__.py b/venv/Lib/site-packages/stevedore/example/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/stevedore/example/base.py b/venv/Lib/site-packages/stevedore/example/base.py new file mode 100644 index 00000000..08f8a5c6 --- /dev/null +++ b/venv/Lib/site-packages/stevedore/example/base.py @@ -0,0 +1,19 @@ +import abc + + +class FormatterBase(metaclass=abc.ABCMeta): + """Base class for example plugin used in the tutorial. + """ + + def __init__(self, max_width=60): + self.max_width = max_width + + @abc.abstractmethod + def format(self, data): + """Format the data and return unicode text. + + :param data: A dictionary with string keys and simple types as + values. + :type data: dict(str:?) + :returns: Iterable producing the formatted text. 
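+
+        Illustrative example (an addition, not in the original
+        docstring)::
+
+            for chunk in formatter.format({'a': 'A'}):
+                print(chunk, end='')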
+ """ diff --git a/venv/Lib/site-packages/stevedore/example/load_as_driver.py b/venv/Lib/site-packages/stevedore/example/load_as_driver.py new file mode 100644 index 00000000..6838c9da --- /dev/null +++ b/venv/Lib/site-packages/stevedore/example/load_as_driver.py @@ -0,0 +1,35 @@ +import argparse + +from stevedore import driver + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument( + 'format', + nargs='?', + default='simple', + help='the output format', + ) + parser.add_argument( + '--width', + default=60, + type=int, + help='maximum output width for text', + ) + parsed_args = parser.parse_args() + + data = { + 'a': 'A', + 'b': 'B', + 'long': 'word ' * 80, + } + + mgr = driver.DriverManager( + namespace='stevedore.example.formatter', + name=parsed_args.format, + invoke_on_load=True, + invoke_args=(parsed_args.width,), + ) + for chunk in mgr.driver.format(data): + print(chunk, end='') diff --git a/venv/Lib/site-packages/stevedore/example/load_as_extension.py b/venv/Lib/site-packages/stevedore/example/load_as_extension.py new file mode 100644 index 00000000..f7518529 --- /dev/null +++ b/venv/Lib/site-packages/stevedore/example/load_as_extension.py @@ -0,0 +1,37 @@ +import argparse + +from stevedore import extension + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument( + '--width', + default=60, + type=int, + help='maximum output width for text', + ) + parsed_args = parser.parse_args() + + data = { + 'a': 'A', + 'b': 'B', + 'long': 'word ' * 80, + } + + mgr = extension.ExtensionManager( + namespace='stevedore.example.formatter', + invoke_on_load=True, + invoke_args=(parsed_args.width,), + ) + + def format_data(ext, data): + return (ext.name, ext.obj.format(data)) + + results = mgr.map(format_data, data) + + for name, result in results: + print('Formatter: {0}'.format(name)) + for chunk in result: + print(chunk, end='') + print('') diff --git a/venv/Lib/site-packages/stevedore/example/setup.py b/venv/Lib/site-packages/stevedore/example/setup.py new file mode 100644 index 00000000..33e2aeec --- /dev/null +++ b/venv/Lib/site-packages/stevedore/example/setup.py @@ -0,0 +1,43 @@ +from setuptools import setup, find_packages + +setup( + name='stevedore-examples', + version='1.0', + + description='Demonstration package for stevedore', + + author='Doug Hellmann', + author_email='doug@doughellmann.com', + + url='http://opendev.org/openstack/stevedore', + + classifiers=['Development Status :: 3 - Alpha', + 'License :: OSI Approved :: Apache Software License', + 'Programming Language :: Python', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.5', + 'Intended Audience :: Developers', + 'Environment :: Console', + ], + + platforms=['Any'], + + scripts=[], + + provides=['stevedore.examples', + ], + + packages=find_packages(), + include_package_data=True, + + entry_points={ + 'stevedore.example.formatter': [ + 'simple = stevedore.example.simple:Simple', + 'plain = stevedore.example.simple:Simple', + ], + }, + + zip_safe=False, +) diff --git a/venv/Lib/site-packages/stevedore/example/simple.py b/venv/Lib/site-packages/stevedore/example/simple.py new file mode 100644 index 00000000..1cad96af --- /dev/null +++ b/venv/Lib/site-packages/stevedore/example/simple.py @@ -0,0 +1,20 @@ +from stevedore.example import base + + +class Simple(base.FormatterBase): + """A very basic formatter. 
+ """ + + def format(self, data): + """Format the data and return unicode text. + + :param data: A dictionary with string keys and simple types as + values. + :type data: dict(str:?) + """ + for name, value in sorted(data.items()): + line = '{name} = {value}\n'.format( + name=name, + value=value, + ) + yield line diff --git a/venv/Lib/site-packages/stevedore/example2/__init__.py b/venv/Lib/site-packages/stevedore/example2/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/stevedore/example2/fields.py b/venv/Lib/site-packages/stevedore/example2/fields.py new file mode 100644 index 00000000..f5c8e194 --- /dev/null +++ b/venv/Lib/site-packages/stevedore/example2/fields.py @@ -0,0 +1,36 @@ +import textwrap + +from stevedore.example import base + + +class FieldList(base.FormatterBase): + """Format values as a reStructuredText field list. + + For example:: + + : name1 : value + : name2 : value + : name3 : a long value + will be wrapped with + a hanging indent + """ + + def format(self, data): + """Format the data and return unicode text. + + :param data: A dictionary with string keys and simple types as + values. + :type data: dict(str:?) + """ + for name, value in sorted(data.items()): + full_text = ': {name} : {value}'.format( + name=name, + value=value, + ) + wrapped_text = textwrap.fill( + full_text, + initial_indent='', + subsequent_indent=' ', + width=self.max_width, + ) + yield wrapped_text + '\n' diff --git a/venv/Lib/site-packages/stevedore/example2/setup.py b/venv/Lib/site-packages/stevedore/example2/setup.py new file mode 100644 index 00000000..31d71454 --- /dev/null +++ b/venv/Lib/site-packages/stevedore/example2/setup.py @@ -0,0 +1,42 @@ +from setuptools import setup, find_packages + +setup( + name='stevedore-examples2', + version='1.0', + + description='Demonstration package for stevedore', + + author='Doug Hellmann', + author_email='doug@doughellmann.com', + + url='http://opendev.org/openstack/stevedore', + + classifiers=['Development Status :: 3 - Alpha', + 'License :: OSI Approved :: Apache Software License', + 'Programming Language :: Python', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.5', + 'Intended Audience :: Developers', + 'Environment :: Console', + ], + + platforms=['Any'], + + scripts=[], + + provides=['stevedore.examples2', + ], + + packages=find_packages(), + include_package_data=True, + + entry_points={ + 'stevedore.example.formatter': [ + 'field = stevedore.example2.fields:FieldList', + ], + }, + + zip_safe=False, +) diff --git a/venv/Lib/site-packages/stevedore/exception.py b/venv/Lib/site-packages/stevedore/exception.py new file mode 100644 index 00000000..aa7f1451 --- /dev/null +++ b/venv/Lib/site-packages/stevedore/exception.py @@ -0,0 +1,23 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ + +class NoUniqueMatch(RuntimeError): + """There was more than one extension, or none, that matched the query.""" + + +class NoMatches(NoUniqueMatch): + """There were no extensions with the driver name found.""" + + +class MultipleMatches(NoUniqueMatch): + """There were multiple matches for the given name.""" diff --git a/venv/Lib/site-packages/stevedore/extension.py b/venv/Lib/site-packages/stevedore/extension.py new file mode 100644 index 00000000..2ccdc279 --- /dev/null +++ b/venv/Lib/site-packages/stevedore/extension.py @@ -0,0 +1,361 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""ExtensionManager +""" + +import operator +import logging + +from . import _cache +from .exception import NoMatches + +LOG = logging.getLogger(__name__) + + +class Extension(object): + """Book-keeping object for tracking extensions. + + The arguments passed to the constructor are saved as attributes of + the instance using the same names, and can be accessed by the + callables passed to :meth:`map` or when iterating over an + :class:`ExtensionManager` directly. + + :param name: The entry point name. + :type name: str + :param entry_point: The EntryPoint instance returned by + :mod:`entrypoints`. + :type entry_point: EntryPoint + :param plugin: The value returned by entry_point.load() + :param obj: The object returned by ``plugin(*args, **kwds)`` if the + manager invoked the extension on load. + + """ + + def __init__(self, name, entry_point, plugin, obj): + self.name = name + self.entry_point = entry_point + self.plugin = plugin + self.obj = obj + + @property + def module_name(self): + """The name of the module from which the entry point is loaded. + + :return: A string in 'dotted.module' format. + """ + # NOTE: importlib_metadata from PyPI includes this but the + # Python 3.8 standard library does not. + match = self.entry_point.pattern.match(self.entry_point.value) + return match.group('module') + + @property + def extras(self): + """The 'extras' settings for the plugin.""" + # NOTE: The underlying package returns re.Match objects for + # some reason. Translate those to the matched strings, which + # seem more useful. + return [ + # Python 3.6 returns _sre.SRE_Match objects. Later + # versions of python return re.Match objects. Both types + # have a 'string' attribute containing the text that + # matched the pattern. + getattr(e, 'string', e) + for e in self.entry_point.extras + ] + + @property + def attr(self): + """The attribute of the module to be loaded.""" + match = self.entry_point.pattern.match(self.entry_point.value) + return match.group('attr') + + @property + def entry_point_target(self): + """The module and attribute referenced by this extension's entry_point. + + :return: A string representation of the target of the entry point in + 'dotted.module:object' format. + """ + return self.entry_point.value + + +class ExtensionManager(object): + """Base class for all of the other managers. + + :param namespace: The namespace for the entry points. 
+ :type namespace: str + :param invoke_on_load: Boolean controlling whether to invoke the + object returned by the entry point after the driver is loaded. + :type invoke_on_load: bool + :param invoke_args: Positional arguments to pass when invoking + the object returned by the entry point. Only used if invoke_on_load + is True. + :type invoke_args: tuple + :param invoke_kwds: Named arguments to pass when invoking + the object returned by the entry point. Only used if invoke_on_load + is True. + :type invoke_kwds: dict + :param propagate_map_exceptions: Boolean controlling whether exceptions + are propagated up through the map call or whether they are logged and + then ignored + :type propagate_map_exceptions: bool + :param on_load_failure_callback: Callback function that will be called when + a entrypoint can not be loaded. The arguments that will be provided + when this is called (when an entrypoint fails to load) are + (manager, entrypoint, exception) + :type on_load_failure_callback: function + :param verify_requirements: Use setuptools to enforce the + dependencies of the plugin(s) being loaded. Defaults to False. + :type verify_requirements: bool + """ + + def __init__(self, namespace, + invoke_on_load=False, + invoke_args=(), + invoke_kwds={}, + propagate_map_exceptions=False, + on_load_failure_callback=None, + verify_requirements=False): + self._init_attributes( + namespace, + propagate_map_exceptions=propagate_map_exceptions, + on_load_failure_callback=on_load_failure_callback) + extensions = self._load_plugins(invoke_on_load, + invoke_args, + invoke_kwds, + verify_requirements) + self._init_plugins(extensions) + + @classmethod + def make_test_instance(cls, extensions, namespace='TESTING', + propagate_map_exceptions=False, + on_load_failure_callback=None, + verify_requirements=False): + """Construct a test ExtensionManager + + Test instances are passed a list of extensions to work from rather + than loading them from entry points. + + :param extensions: Pre-configured Extension instances to use + :type extensions: list of :class:`~stevedore.extension.Extension` + :param namespace: The namespace for the manager; used only for + identification since the extensions are passed in. + :type namespace: str + :param propagate_map_exceptions: When calling map, controls whether + exceptions are propagated up through the map call or whether they + are logged and then ignored + :type propagate_map_exceptions: bool + :param on_load_failure_callback: Callback function that will + be called when a entrypoint can not be loaded. The + arguments that will be provided when this is called (when + an entrypoint fails to load) are (manager, entrypoint, + exception) + :type on_load_failure_callback: function + :param verify_requirements: Use setuptools to enforce the + dependencies of the plugin(s) being loaded. Defaults to False. 
+ :type verify_requirements: bool + :return: The manager instance, initialized for testing + + """ + + o = cls.__new__(cls) + o._init_attributes(namespace, + propagate_map_exceptions=propagate_map_exceptions, + on_load_failure_callback=on_load_failure_callback) + o._init_plugins(extensions) + return o + + def _init_attributes(self, namespace, propagate_map_exceptions=False, + on_load_failure_callback=None): + self.namespace = namespace + self.propagate_map_exceptions = propagate_map_exceptions + self._on_load_failure_callback = on_load_failure_callback + + def _init_plugins(self, extensions): + self.extensions = extensions + self._extensions_by_name_cache = None + + @property + def _extensions_by_name(self): + if self._extensions_by_name_cache is None: + d = {} + for e in self.extensions: + d[e.name] = e + self._extensions_by_name_cache = d + return self._extensions_by_name_cache + + ENTRY_POINT_CACHE = {} + + def list_entry_points(self): + """Return the list of entry points for this namespace. + + The entry points are not actually loaded, their list is just read and + returned. + + """ + if self.namespace not in self.ENTRY_POINT_CACHE: + eps = list(_cache.get_group_all(self.namespace)) + self.ENTRY_POINT_CACHE[self.namespace] = eps + return self.ENTRY_POINT_CACHE[self.namespace] + + def entry_points_names(self): + """Return the list of entry points names for this namespace.""" + return list(map(operator.attrgetter("name"), self.list_entry_points())) + + def _load_plugins(self, invoke_on_load, invoke_args, invoke_kwds, + verify_requirements): + extensions = [] + for ep in self.list_entry_points(): + LOG.debug('found extension %r', ep) + try: + ext = self._load_one_plugin(ep, + invoke_on_load, + invoke_args, + invoke_kwds, + verify_requirements, + ) + if ext: + extensions.append(ext) + except (KeyboardInterrupt, AssertionError): + raise + except Exception as err: + if self._on_load_failure_callback is not None: + self._on_load_failure_callback(self, ep, err) + else: + # Log the reason we couldn't import the module, + # usually without a traceback. The most common + # reason is an ImportError due to a missing + # dependency, and the error message should be + # enough to debug that. If debug logging is + # enabled for our logger, provide the full + # traceback. + LOG.error('Could not load %r: %s', ep.name, err, + exc_info=LOG.isEnabledFor(logging.DEBUG)) + return extensions + + def _load_one_plugin(self, ep, invoke_on_load, invoke_args, invoke_kwds, + verify_requirements): + # NOTE(dhellmann): Using require=False is deprecated in + # setuptools 11.3. + if hasattr(ep, 'resolve') and hasattr(ep, 'require'): + if verify_requirements: + ep.require() + plugin = ep.resolve() + else: + plugin = ep.load() + if invoke_on_load: + obj = plugin(*invoke_args, **invoke_kwds) + else: + obj = None + return Extension(ep.name, ep, plugin, obj) + + def names(self): + "Returns the names of the discovered extensions" + # We want to return the names of the extensions in the order + # they would be used by map(), since some subclasses change + # that order. + return [e.name for e in self.extensions] + + def map(self, func, *args, **kwds): + """Iterate over the extensions invoking func() for each. + + The signature for func() should be:: + + def func(ext, *args, **kwds): + pass + + The first argument to func(), 'ext', is the + :class:`~stevedore.extension.Extension` instance. 
+ + Exceptions raised from within func() are propagated up and + processing stopped if self.propagate_map_exceptions is True, + otherwise they are logged and ignored. + + :param func: Callable to invoke for each extension. + :param args: Variable arguments to pass to func() + :param kwds: Keyword arguments to pass to func() + :returns: List of values returned from func() + """ + if not self.extensions: + # FIXME: Use a more specific exception class here. + raise NoMatches('No %s extensions found' % self.namespace) + response = [] + for e in self.extensions: + self._invoke_one_plugin(response.append, func, e, args, kwds) + return response + + @staticmethod + def _call_extension_method(extension, method_name, *args, **kwds): + return getattr(extension.obj, method_name)(*args, **kwds) + + def map_method(self, method_name, *args, **kwds): + """Iterate over the extensions invoking a method by name. + + This is equivalent of using :meth:`map` with func set to + `lambda x: x.obj.method_name()` + while being more convenient. + + Exceptions raised from within the called method are propagated up + and processing stopped if self.propagate_map_exceptions is True, + otherwise they are logged and ignored. + + .. versionadded:: 0.12 + + :param method_name: The extension method name + to call for each extension. + :param args: Variable arguments to pass to method + :param kwds: Keyword arguments to pass to method + :returns: List of values returned from methods + """ + return self.map(self._call_extension_method, + method_name, *args, **kwds) + + def _invoke_one_plugin(self, response_callback, func, e, args, kwds): + try: + response_callback(func(e, *args, **kwds)) + except Exception as err: + if self.propagate_map_exceptions: + raise + else: + LOG.error('error calling %r: %s', e.name, err) + LOG.exception(err) + + def items(self): + """ + Return an iterator of tuples of the form (name, extension). + + This is analogous to the Mapping.items() method. + """ + return self._extensions_by_name.items() + + def __iter__(self): + """Produce iterator for the manager. + + Iterating over an ExtensionManager produces the :class:`Extension` + instances in the order they would be invoked. + """ + return iter(self.extensions) + + def __getitem__(self, name): + """Return the named extension. + + Accessing an ExtensionManager as a dictionary (``em['name']``) + produces the :class:`Extension` instance with the + specified name. + """ + return self._extensions_by_name[name] + + def __contains__(self, name): + """Return true if name is in list of enabled extensions. + """ + return any(extension.name == name for extension in self.extensions) diff --git a/venv/Lib/site-packages/stevedore/hook.py b/venv/Lib/site-packages/stevedore/hook.py new file mode 100644 index 00000000..4df2b0f7 --- /dev/null +++ b/venv/Lib/site-packages/stevedore/hook.py @@ -0,0 +1,89 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
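+
+# Illustrative sketch of typical usage (hypothetical namespace and hook
+# name, not part of the original module):
+#
+#     from stevedore import hook
+#     mgr = hook.HookManager('my.app.hooks', 'on_startup',
+#                            invoke_on_load=True)
+#     results = mgr.map_method('run')  # call run() on each hook object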
+
+from .named import NamedExtensionManager
+
+
+class HookManager(NamedExtensionManager):
+    """Coordinate execution of multiple extensions using a common name.
+
+    :param namespace: The namespace for the entry points.
+    :type namespace: str
+    :param name: The name of the hooks to load.
+    :type name: str
+    :param invoke_on_load: Boolean controlling whether to invoke the
+        object returned by the entry point after the driver is loaded.
+    :type invoke_on_load: bool
+    :param invoke_args: Positional arguments to pass when invoking
+        the object returned by the entry point. Only used if invoke_on_load
+        is True.
+    :type invoke_args: tuple
+    :param invoke_kwds: Named arguments to pass when invoking
+        the object returned by the entry point. Only used if invoke_on_load
+        is True.
+    :type invoke_kwds: dict
+    :param on_load_failure_callback: Callback function that will be called
+        when an entrypoint cannot be loaded. The arguments that will be
+        provided when this is called (when an entrypoint fails to load) are
+        (manager, entrypoint, exception)
+    :type on_load_failure_callback: function
+    :param verify_requirements: Use setuptools to enforce the
+        dependencies of the plugin(s) being loaded. Defaults to False.
+    :type verify_requirements: bool
+    :param on_missing_entrypoints_callback: Callback function that will be
+        called when one or more names cannot be found. The provided
+        argument will be a subset of the 'names' parameter.
+    :type on_missing_entrypoints_callback: function
+    :param warn_on_missing_entrypoint: Flag to control whether failing
+        to load a plugin is reported via a log message. Only applies if
+        on_missing_entrypoints_callback is None.
+    :type warn_on_missing_entrypoint: bool
+
+    """
+
+    def __init__(self, namespace, name,
+                 invoke_on_load=False, invoke_args=(), invoke_kwds={},
+                 on_load_failure_callback=None,
+                 verify_requirements=False,
+                 on_missing_entrypoints_callback=None,
+                 # NOTE(dhellmann): This default is different from the
+                 # base class because for hooks it is less likely to
+                 # be an error to have no entry points present.
+                 warn_on_missing_entrypoint=False):
+        super(HookManager, self).__init__(
+            namespace,
+            [name],
+            invoke_on_load=invoke_on_load,
+            invoke_args=invoke_args,
+            invoke_kwds=invoke_kwds,
+            on_load_failure_callback=on_load_failure_callback,
+            on_missing_entrypoints_callback=on_missing_entrypoints_callback,
+            verify_requirements=verify_requirements,
+            warn_on_missing_entrypoint=warn_on_missing_entrypoint,
+        )
+
+    def _init_attributes(self, namespace, names, name_order=False,
+                         propagate_map_exceptions=False,
+                         on_load_failure_callback=None):
+        super(HookManager, self)._init_attributes(
+            namespace, names,
+            propagate_map_exceptions=propagate_map_exceptions,
+            on_load_failure_callback=on_load_failure_callback)
+        self._name = names[0]
+
+    def __getitem__(self, name):
+        """Return the named extensions.
+
+        Accessing a HookManager as a dictionary (``em['name']``)
+        produces a list of the :class:`Extension` instance(s) with the
+        specified name, in the order they would be invoked by map().
+        """
+        if name != self._name:
+            raise KeyError(name)
+        return self.extensions
diff --git a/venv/Lib/site-packages/stevedore/named.py b/venv/Lib/site-packages/stevedore/named.py
new file mode 100644
index 00000000..3b47dfd3
--- /dev/null
+++ b/venv/Lib/site-packages/stevedore/named.py
@@ -0,0 +1,159 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import logging
+
+from .extension import ExtensionManager
+
+LOG = logging.getLogger(__name__)
+
+
+class NamedExtensionManager(ExtensionManager):
+    """Loads only the named extensions.
+
+    This is useful for explicitly enabling extensions in a
+    configuration file, for example.
+
+    :param namespace: The namespace for the entry points.
+    :type namespace: str
+    :param names: The names of the extensions to load.
+    :type names: list(str)
+    :param invoke_on_load: Boolean controlling whether to invoke the
+        object returned by the entry point after the driver is loaded.
+    :type invoke_on_load: bool
+    :param invoke_args: Positional arguments to pass when invoking
+        the object returned by the entry point. Only used if invoke_on_load
+        is True.
+    :type invoke_args: tuple
+    :param invoke_kwds: Named arguments to pass when invoking
+        the object returned by the entry point. Only used if invoke_on_load
+        is True.
+    :type invoke_kwds: dict
+    :param name_order: If true, sort the loaded extensions to match the
+        order used in ``names``.
+    :type name_order: bool
+    :param propagate_map_exceptions: Boolean controlling whether exceptions
+        are propagated up through the map call or whether they are logged and
+        then ignored
+    :type propagate_map_exceptions: bool
+    :param on_load_failure_callback: Callback function that will be called
+        when an entrypoint cannot be loaded. The arguments that will be
+        provided when this is called (when an entrypoint fails to load) are
+        (manager, entrypoint, exception)
+    :type on_load_failure_callback: function
+    :param on_missing_entrypoints_callback: Callback function that will be
+        called when one or more names cannot be found. The provided argument
+        will be a subset of the 'names' parameter.
+    :type on_missing_entrypoints_callback: function
+    :param verify_requirements: Use setuptools to enforce the
+        dependencies of the plugin(s) being loaded. Defaults to False.
+    :type verify_requirements: bool
+    :param warn_on_missing_entrypoint: Flag to control whether failing
+        to load a plugin is reported via a log message. Only applies if
+        on_missing_entrypoints_callback is None.
+ :type warn_on_missing_entrypoint: bool + + """ + + def __init__(self, namespace, names, + invoke_on_load=False, invoke_args=(), invoke_kwds={}, + name_order=False, propagate_map_exceptions=False, + on_load_failure_callback=None, + on_missing_entrypoints_callback=None, + verify_requirements=False, + warn_on_missing_entrypoint=True): + self._init_attributes( + namespace, names, name_order=name_order, + propagate_map_exceptions=propagate_map_exceptions, + on_load_failure_callback=on_load_failure_callback) + extensions = self._load_plugins(invoke_on_load, + invoke_args, + invoke_kwds, + verify_requirements) + self._missing_names = set(names) - set([e.name for e in extensions]) + if self._missing_names: + if on_missing_entrypoints_callback: + on_missing_entrypoints_callback(self._missing_names) + elif warn_on_missing_entrypoint: + LOG.warning('Could not load %s' % + ', '.join(self._missing_names)) + self._init_plugins(extensions) + + @classmethod + def make_test_instance(cls, extensions, namespace='TESTING', + propagate_map_exceptions=False, + on_load_failure_callback=None, + verify_requirements=False): + """Construct a test NamedExtensionManager + + Test instances are passed a list of extensions to use rather than + loading them from entry points. + + :param extensions: Pre-configured Extension instances + :type extensions: list of :class:`~stevedore.extension.Extension` + :param namespace: The namespace for the manager; used only for + identification since the extensions are passed in. + :type namespace: str + :param propagate_map_exceptions: Boolean controlling whether exceptions + are propagated up through the map call or whether they are logged + and then ignored + :type propagate_map_exceptions: bool + :param on_load_failure_callback: Callback function that will + be called when a entrypoint can not be loaded. The + arguments that will be provided when this is called (when + an entrypoint fails to load) are (manager, entrypoint, + exception) + :type on_load_failure_callback: function + :param verify_requirements: Use setuptools to enforce the + dependencies of the plugin(s) being loaded. Defaults to False. + :type verify_requirements: bool + :return: The manager instance, initialized for testing + + """ + + o = cls.__new__(cls) + names = [e.name for e in extensions] + o._init_attributes(namespace, names, + propagate_map_exceptions=propagate_map_exceptions, + on_load_failure_callback=on_load_failure_callback) + o._init_plugins(extensions) + return o + + def _init_attributes(self, namespace, names, name_order=False, + propagate_map_exceptions=False, + on_load_failure_callback=None): + super(NamedExtensionManager, self)._init_attributes( + namespace, propagate_map_exceptions=propagate_map_exceptions, + on_load_failure_callback=on_load_failure_callback) + + self._names = names + self._missing_names = set() + self._name_order = name_order + + def _init_plugins(self, extensions): + super(NamedExtensionManager, self)._init_plugins(extensions) + + if self._name_order: + self.extensions = [self[n] for n in self._names + if n not in self._missing_names] + + def _load_one_plugin(self, ep, invoke_on_load, invoke_args, invoke_kwds, + verify_requirements): + # Check the name before going any further to prevent + # undesirable code from being loaded at all if we are not + # going to use it. 
+ if ep.name not in self._names: + return None + return super(NamedExtensionManager, self)._load_one_plugin( + ep, invoke_on_load, invoke_args, invoke_kwds, + verify_requirements, + ) diff --git a/venv/Lib/site-packages/stevedore/sphinxext.py b/venv/Lib/site-packages/stevedore/sphinxext.py new file mode 100644 index 00000000..250122ea --- /dev/null +++ b/venv/Lib/site-packages/stevedore/sphinxext.py @@ -0,0 +1,120 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import inspect + +from docutils import nodes +from docutils.parsers import rst +from docutils.parsers.rst import directives +from docutils.statemachine import ViewList +from sphinx.util import logging +from sphinx.util.nodes import nested_parse_with_titles + +from stevedore import extension + +LOG = logging.getLogger(__name__) + + +def _get_docstring(plugin): + return inspect.getdoc(plugin) or '' + + +def _simple_list(mgr): + for name in sorted(mgr.names()): + ext = mgr[name] + doc = _get_docstring(ext.plugin) or '\n' + summary = doc.splitlines()[0].strip() + yield('* %s -- %s' % (ext.name, summary), + ext.module_name) + + +def _detailed_list(mgr, over='', under='-', titlecase=False): + for name in sorted(mgr.names()): + ext = mgr[name] + if over: + yield (over * len(ext.name), ext.module_name) + if titlecase: + yield (ext.name.title(), ext.module_name) + else: + yield (ext.name, ext.module_name) + if under: + yield (under * len(ext.name), ext.module_name) + yield ('\n', ext.module_name) + doc = _get_docstring(ext.plugin) + if doc: + yield (doc, ext.module_name) + else: + yield ( + '.. warning:: No documentation found for {} in {}'.format( + ext.name, ext.entry_point_target, + ), + ext.module_name, + ) + yield ('\n', ext.module_name) + + +class ListPluginsDirective(rst.Directive): + """Present a simple list of the plugins in a namespace.""" + + option_spec = { + 'class': directives.class_option, + 'detailed': directives.flag, + 'titlecase': directives.flag, + 'overline-style': directives.single_char_or_unicode, + 'underline-style': directives.single_char_or_unicode, + } + + has_content = True + + def run(self): + namespace = ' '.join(self.content).strip() + LOG.info('documenting plugins from %r' % namespace) + overline_style = self.options.get('overline-style', '') + underline_style = self.options.get('underline-style', '=') + + def report_load_failure(mgr, ep, err): + LOG.warning(u'Failed to load %s: %s' % (ep.module, err)) + + mgr = extension.ExtensionManager( + namespace, + on_load_failure_callback=report_load_failure, + ) + + result = ViewList() + + titlecase = 'titlecase' in self.options + + if 'detailed' in self.options: + data = _detailed_list( + mgr, over=overline_style, under=underline_style, + titlecase=titlecase) + else: + data = _simple_list(mgr) + for text, source in data: + for line in text.splitlines(): + result.append(line, source) + + # Parse what we have into a new section. 
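+        # Illustrative note (not in the original source): in a Sphinx
+        # document this directive takes the namespace as its content,
+        # e.g.
+        #
+        #     .. list-plugins::
+        #        :detailed:
+        #
+        #        my.namespace
+        #
+        # where 'my.namespace' is a hypothetical entry point group.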
+        node = nodes.section()
+        node.document = self.state.document
+        nested_parse_with_titles(self.state, result, node)
+
+        return node.children
+
+
+def setup(app):
+    LOG.info('loading stevedore.sphinxext')
+    app.add_directive('list-plugins', ListPluginsDirective)
+    return {
+        'parallel_read_safe': True,
+        'parallel_write_safe': True,
+    }
diff --git a/venv/Lib/site-packages/stevedore/tests/__init__.py b/venv/Lib/site-packages/stevedore/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/Lib/site-packages/stevedore/tests/extension_unimportable.py b/venv/Lib/site-packages/stevedore/tests/extension_unimportable.py
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/Lib/site-packages/stevedore/tests/manager.py b/venv/Lib/site-packages/stevedore/tests/manager.py
new file mode 100644
index 00000000..8c97a680
--- /dev/null
+++ b/venv/Lib/site-packages/stevedore/tests/manager.py
@@ -0,0 +1,67 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""TestExtensionManager
+
+Extension manager used only for testing.
+"""
+
+import warnings
+
+from stevedore import extension
+
+
+class TestExtensionManager(extension.ExtensionManager):
+    """ExtensionManager that is explicitly initialized for tests.
+
+    .. deprecated:: 0.13
+
+       Use the :func:`make_test_instance` class method of the class
+       being replaced by the test instance instead of using this class
+       directly.
+
+    :param extensions: Pre-configured Extension instances to use
+        instead of loading them from entry points.
+    :type extensions: list of :class:`~stevedore.extension.Extension`
+    :param namespace: The namespace for the entry points.
+    :type namespace: str
+    :param invoke_on_load: Boolean controlling whether to invoke the
+        object returned by the entry point after the driver is loaded.
+    :type invoke_on_load: bool
+    :param invoke_args: Positional arguments to pass when invoking
+        the object returned by the entry point. Only used if invoke_on_load
+        is True.
+    :type invoke_args: tuple
+    :param invoke_kwds: Named arguments to pass when invoking
+        the object returned by the entry point. Only used if invoke_on_load
+        is True.
+    :type invoke_kwds: dict
+
+    """
+
+    def __init__(self, extensions,
+                 namespace='test',
+                 invoke_on_load=False,
+                 invoke_args=(),
+                 invoke_kwds={}):
+        super(TestExtensionManager, self).__init__(namespace,
+                                                   invoke_on_load,
+                                                   invoke_args,
+                                                   invoke_kwds,
+                                                   )
+        self.extensions = extensions
+        warnings.warn(
+            'TestExtensionManager has been replaced by make_test_instance()',
+            DeprecationWarning)
+
+    def _load_plugins(self, *args, **kwds):
+        return []
diff --git a/venv/Lib/site-packages/stevedore/tests/test_callback.py b/venv/Lib/site-packages/stevedore/tests/test_callback.py
new file mode 100644
index 00000000..75026f75
--- /dev/null
+++ b/venv/Lib/site-packages/stevedore/tests/test_callback.py
@@ -0,0 +1,56 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License.
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Tests for failure loading callback +""" +from unittest import mock + +from testtools.matchers import GreaterThan + +from stevedore import extension +from stevedore import named +from stevedore.tests import utils + + +class TestCallback(utils.TestCase): + def test_extension_failure_custom_callback(self): + errors = [] + + def failure_callback(manager, entrypoint, error): + errors.append((manager, entrypoint, error)) + + em = extension.ExtensionManager('stevedore.test.extension', + invoke_on_load=True, + on_load_failure_callback= + failure_callback) + extensions = list(em.extensions) + self.assertTrue(len(extensions), GreaterThan(0)) + self.assertEqual(len(errors), 2) + for manager, entrypoint, error in errors: + self.assertIs(manager, em) + self.assertIsInstance(error, (IOError, ImportError)) + + @mock.patch('stevedore.named.NamedExtensionManager._load_plugins') + def test_missing_entrypoints_callback(self, load_fn): + errors = set() + + def callback(names): + errors.update(names) + + load_fn.return_value = [ + extension.Extension('foo', None, None, None) + ] + named.NamedExtensionManager('stevedore.test.extension', + names=['foo', 'bar'], + invoke_on_load=True, + on_missing_entrypoints_callback=callback) + self.assertEqual(errors, {'bar'}) diff --git a/venv/Lib/site-packages/stevedore/tests/test_dispatch.py b/venv/Lib/site-packages/stevedore/tests/test_dispatch.py new file mode 100644 index 00000000..f1c305ab --- /dev/null +++ b/venv/Lib/site-packages/stevedore/tests/test_dispatch.py @@ -0,0 +1,103 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
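+
+# Illustrative note (not in the original source): the 't1' and 't2'
+# names exercised below are the FauxExtension entry points stevedore
+# registers for itself under the 'stevedore.test.extension' namespace.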
+
+from stevedore.tests import utils
+from stevedore import dispatch
+
+
+def check_dispatch(ep, *args, **kwds):
+    return ep.name == 't2'
+
+
+class TestDispatch(utils.TestCase):
+    def test_dispatch(self):
+
+        def invoke(ep, *args, **kwds):
+            return (ep.name, args, kwds)
+
+        em = dispatch.DispatchExtensionManager('stevedore.test.extension',
+                                               lambda *args, **kwds: True,
+                                               invoke_on_load=True,
+                                               invoke_args=('a',),
+                                               invoke_kwds={'b': 'B'},
+                                               )
+        self.assertEqual(len(em.extensions), 2)
+        self.assertEqual(set(em.names()), set(['t1', 't2']))
+
+        results = em.map(check_dispatch,
+                         invoke,
+                         'first',
+                         named='named value',
+                         )
+        expected = [('t2', ('first',), {'named': 'named value'})]
+        self.assertEqual(results, expected)
+
+    def test_dispatch_map_method(self):
+        em = dispatch.DispatchExtensionManager('stevedore.test.extension',
+                                               lambda *args, **kwds: True,
+                                               invoke_on_load=True,
+                                               invoke_args=('a',),
+                                               invoke_kwds={'b': 'B'},
+                                               )
+
+        results = em.map_method(check_dispatch, 'get_args_and_data', 'first')
+        self.assertEqual(results, [(('a',), {'b': 'B'}, 'first')])
+
+    def test_name_dispatch(self):
+
+        def invoke(ep, *args, **kwds):
+            return (ep.name, args, kwds)
+
+        em = dispatch.NameDispatchExtensionManager('stevedore.test.extension',
+                                                   lambda *args, **kwds: True,
+                                                   invoke_on_load=True,
+                                                   invoke_args=('a',),
+                                                   invoke_kwds={'b': 'B'},
+                                                   )
+        self.assertEqual(len(em.extensions), 2)
+        self.assertEqual(set(em.names()), set(['t1', 't2']))
+
+        results = em.map(['t2'], invoke, 'first', named='named value',)
+        expected = [('t2', ('first',), {'named': 'named value'})]
+        self.assertEqual(results, expected)
+
+    def test_name_dispatch_ignore_missing(self):
+
+        def invoke(ep, *args, **kwds):
+            return (ep.name, args, kwds)
+
+        em = dispatch.NameDispatchExtensionManager(
+            'stevedore.test.extension',
+            lambda *args, **kwds: True,
+            invoke_on_load=True,
+            invoke_args=('a',),
+            invoke_kwds={'b': 'B'},
+        )
+
+        results = em.map(['t3', 't1'], invoke, 'first', named='named value',)
+        expected = [('t1', ('first',), {'named': 'named value'})]
+        self.assertEqual(results, expected)
+
+    def test_name_dispatch_map_method(self):
+        em = dispatch.NameDispatchExtensionManager(
+            'stevedore.test.extension',
+            lambda *args, **kwds: True,
+            invoke_on_load=True,
+            invoke_args=('a',),
+            invoke_kwds={'b': 'B'},
+        )
+
+        results = em.map_method(['t3', 't1'], 'get_args_and_data', 'first')
+        self.assertEqual(results, [(('a',), {'b': 'B'}, 'first')])
diff --git a/venv/Lib/site-packages/stevedore/tests/test_driver.py b/venv/Lib/site-packages/stevedore/tests/test_driver.py
new file mode 100644
index 00000000..92308359
--- /dev/null
+++ b/venv/Lib/site-packages/stevedore/tests/test_driver.py
@@ -0,0 +1,104 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
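+
+# DriverManager loads exactly one extension for a namespace/name pair
+# and exposes it via the ``driver`` property; calling the manager maps
+# a function over that single driver. Illustrative use, mirroring the
+# tests below:
+#
+#     em = driver.DriverManager('stevedore.test.extension', 't1')
+#     em(lambda ext, *args, **kwds: ext.name)  # -> 't1'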
+
+"""Tests for stevedore.driver
+"""
+
+try:
+    # For python 3.8 and later
+    import importlib.metadata as importlib_metadata
+except ImportError:
+    # For everyone else
+    import importlib_metadata
+
+from stevedore import driver
+from stevedore import exception
+from stevedore import extension
+from stevedore.tests import test_extension
+from stevedore.tests import utils
+
+
+class TestDriver(utils.TestCase):
+    def test_detect_plugins(self):
+        em = driver.DriverManager('stevedore.test.extension', 't1')
+        names = sorted(em.names())
+        self.assertEqual(names, ['t1'])
+
+    def test_call(self):
+        def invoke(ext, *args, **kwds):
+            return (ext.name, args, kwds)
+        em = driver.DriverManager('stevedore.test.extension', 't1')
+        result = em(invoke, 'a', b='C')
+        self.assertEqual(result, ('t1', ('a',), {'b': 'C'}))
+
+    def test_driver_property_not_invoked_on_load(self):
+        em = driver.DriverManager('stevedore.test.extension', 't1',
+                                  invoke_on_load=False)
+        d = em.driver
+        self.assertIs(d, test_extension.FauxExtension)
+
+    def test_driver_property_invoked_on_load(self):
+        em = driver.DriverManager('stevedore.test.extension', 't1',
+                                  invoke_on_load=True)
+        d = em.driver
+        self.assertIsInstance(d, test_extension.FauxExtension)
+
+    def test_no_drivers(self):
+        try:
+            driver.DriverManager('stevedore.test.extension.none', 't1')
+        except exception.NoMatches as err:
+            self.assertIn("No 'stevedore.test.extension.none' driver found",
+                          str(err))
+
+    def test_bad_driver(self):
+        try:
+            driver.DriverManager('stevedore.test.extension', 'e2')
+        except ImportError:
+            pass
+        else:
+            self.fail('No error raised')
+
+    def test_multiple_drivers(self):
+        # The idea for this test was contributed by clayg:
+        # https://gist.github.com/clayg/6311348
+        extensions = [
+            extension.Extension(
+                'backend',
+                importlib_metadata.EntryPoint(
+                    'backend', 'pkg1:driver', 'backend'),
+                'pkg backend',
+                None,
+            ),
+            extension.Extension(
+                'backend',
+                importlib_metadata.EntryPoint(
+                    'backend', 'pkg2:driver', 'backend'),
+                'pkg backend',
+                None,
+            ),
+        ]
+        try:
+            dm = driver.DriverManager.make_test_instance(extensions[0])
+            # Call the initialization code that verifies the extension
+            dm._init_plugins(extensions)
+        except exception.MultipleMatches as err:
+            self.assertIn("Multiple", str(err))
+        else:
+            self.fail('Should have had an error')
diff --git a/venv/Lib/site-packages/stevedore/tests/test_enabled.py b/venv/Lib/site-packages/stevedore/tests/test_enabled.py
new file mode 100644
index 00000000..32cd1992
--- /dev/null
+++ b/venv/Lib/site-packages/stevedore/tests/test_enabled.py
@@ -0,0 +1,51 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
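+
+"""Tests for stevedore.enabled
+
+EnabledExtensionManager loads everything in the namespace, then keeps
+only the extensions for which the supplied check function returns
+True, e.g.::
+
+    em = enabled.EnabledExtensionManager(namespace, check_enabled)
+"""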
+ +from stevedore import enabled +from stevedore.tests import utils + + +class TestEnabled(utils.TestCase): + def test_enabled(self): + def check_enabled(ep): + return ep.name == 't2' + em = enabled.EnabledExtensionManager( + 'stevedore.test.extension', + check_enabled, + invoke_on_load=True, + invoke_args=('a',), + invoke_kwds={'b': 'B'}, + ) + self.assertEqual(len(em.extensions), 1) + self.assertEqual(em.names(), ['t2']) + + def test_enabled_after_load(self): + def check_enabled(ext): + return ext.obj and ext.name == 't2' + em = enabled.EnabledExtensionManager( + 'stevedore.test.extension', + check_enabled, + invoke_on_load=True, + invoke_args=('a',), + invoke_kwds={'b': 'B'}, + ) + self.assertEqual(len(em.extensions), 1) + self.assertEqual(em.names(), ['t2']) diff --git a/venv/Lib/site-packages/stevedore/tests/test_example_fields.py b/venv/Lib/site-packages/stevedore/tests/test_example_fields.py new file mode 100644 index 00000000..757917c9 --- /dev/null +++ b/venv/Lib/site-packages/stevedore/tests/test_example_fields.py @@ -0,0 +1,41 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Tests for stevedore.example2.fields +""" + +from stevedore.example2 import fields +from stevedore.tests import utils + + +class TestExampleFields(utils.TestCase): + def test_simple_items(self): + f = fields.FieldList(100) + text = ''.join(f.format({'a': 'A', 'b': 'B'})) + expected = '\n'.join([ + ': a : A', + ': b : B', + '', + ]) + self.assertEqual(text, expected) + + def test_long_item(self): + f = fields.FieldList(25) + text = ''.join(f.format({'name': + 'a value longer than the allowed width'})) + expected = '\n'.join([ + ': name : a value longer', + ' than the allowed', + ' width', + '', + ]) + self.assertEqual(text, expected) diff --git a/venv/Lib/site-packages/stevedore/tests/test_example_simple.py b/venv/Lib/site-packages/stevedore/tests/test_example_simple.py new file mode 100644 index 00000000..382ed899 --- /dev/null +++ b/venv/Lib/site-packages/stevedore/tests/test_example_simple.py @@ -0,0 +1,29 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +"""Tests for stevedore.example.simple +""" + +from stevedore.example import simple +from stevedore.tests import utils + + +class TestExampleSimple(utils.TestCase): + def test_simple_items(self): + f = simple.Simple(100) + text = ''.join(f.format({'a': 'A', 'b': 'B'})) + expected = '\n'.join([ + 'a = A', + 'b = B', + '', + ]) + self.assertEqual(text, expected) diff --git a/venv/Lib/site-packages/stevedore/tests/test_extension.py b/venv/Lib/site-packages/stevedore/tests/test_extension.py new file mode 100644 index 00000000..405fb88b --- /dev/null +++ b/venv/Lib/site-packages/stevedore/tests/test_extension.py @@ -0,0 +1,289 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Tests for stevedore.extension +""" + +import operator +from unittest import mock + +try: + # For python 3.8 and later + import importlib.metadata as importlib_metadata +except ImportError: + # For everyone else + import importlib_metadata + +from stevedore import exception +from stevedore import extension +from stevedore.tests import utils + + +ALL_NAMES = ['e1', 't1', 't2'] +WORKING_NAMES = ['t1', 't2'] + + +class FauxExtension(object): + def __init__(self, *args, **kwds): + self.args = args + self.kwds = kwds + + def get_args_and_data(self, data): + return self.args, self.kwds, data + + +class BrokenExtension(object): + def __init__(self, *args, **kwds): + raise IOError("Did not create") + + +class TestCallback(utils.TestCase): + def test_detect_plugins(self): + em = extension.ExtensionManager('stevedore.test.extension') + names = sorted(em.names()) + self.assertEqual(names, ALL_NAMES) + + def test_get_by_name(self): + em = extension.ExtensionManager('stevedore.test.extension') + e = em['t1'] + self.assertEqual(e.name, 't1') + + def test_list_entry_points(self): + em = extension.ExtensionManager('stevedore.test.extension') + n = em.list_entry_points() + self.assertEqual(set(['e1', 'e2', 't1', 't2']), + set(map(operator.attrgetter("name"), n))) + self.assertEqual(4, len(n)) + + def test_list_entry_points_names(self): + em = extension.ExtensionManager('stevedore.test.extension') + names = em.entry_points_names() + self.assertEqual(set(['e1', 'e2', 't1', 't2']), set(names)) + self.assertEqual(4, len(names)) + + def test_contains_by_name(self): + em = extension.ExtensionManager('stevedore.test.extension') + self.assertEqual('t1' in em, True) + + def test_get_by_name_missing(self): + em = extension.ExtensionManager('stevedore.test.extension') + try: + em['t3'] + except KeyError: + pass + else: + assert False, 'Failed to raise KeyError' + + def test_load_multiple_times_entry_points(self): + # We expect to get the same EntryPoint object because we save them + # in the cache. 
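+        # (The cache is ExtensionManager.ENTRY_POINT_CACHE, keyed by
+        # namespace; test_use_cache below pokes it directly.)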
+ em1 = extension.ExtensionManager('stevedore.test.extension') + eps1 = [ext.entry_point for ext in em1] + em2 = extension.ExtensionManager('stevedore.test.extension') + eps2 = [ext.entry_point for ext in em2] + self.assertIs(eps1[0], eps2[0]) + + def test_load_multiple_times_plugins(self): + # We expect to get the same plugin object (module or class) + # because the underlying import machinery will cache the values. + em1 = extension.ExtensionManager('stevedore.test.extension') + plugins1 = [ext.plugin for ext in em1] + em2 = extension.ExtensionManager('stevedore.test.extension') + plugins2 = [ext.plugin for ext in em2] + self.assertIs(plugins1[0], plugins2[0]) + + def test_use_cache(self): + # If we insert something into the cache of entry points, + # the manager should not have to call into entrypoints + # to find the plugins. + cache = extension.ExtensionManager.ENTRY_POINT_CACHE + cache['stevedore.test.faux'] = [] + with mock.patch('stevedore._cache.get_group_all', + side_effect= + AssertionError('called get_group_all')): + em = extension.ExtensionManager('stevedore.test.faux') + names = em.names() + self.assertEqual(names, []) + + def test_iterable(self): + em = extension.ExtensionManager('stevedore.test.extension') + names = sorted(e.name for e in em) + self.assertEqual(names, ALL_NAMES) + + def test_invoke_on_load(self): + em = extension.ExtensionManager('stevedore.test.extension', + invoke_on_load=True, + invoke_args=('a',), + invoke_kwds={'b': 'B'}, + ) + self.assertEqual(len(em.extensions), 2) + for e in em.extensions: + self.assertEqual(e.obj.args, ('a',)) + self.assertEqual(e.obj.kwds, {'b': 'B'}) + + def test_map_return_values(self): + def mapped(ext, *args, **kwds): + return ext.name + + em = extension.ExtensionManager('stevedore.test.extension', + invoke_on_load=True, + ) + results = em.map(mapped) + self.assertEqual(sorted(results), WORKING_NAMES) + + def test_map_arguments(self): + objs = [] + + def mapped(ext, *args, **kwds): + objs.append((ext, args, kwds)) + + em = extension.ExtensionManager('stevedore.test.extension', + invoke_on_load=True, + ) + em.map(mapped, 1, 2, a='A', b='B') + self.assertEqual(len(objs), 2) + names = sorted([o[0].name for o in objs]) + self.assertEqual(names, WORKING_NAMES) + for o in objs: + self.assertEqual(o[1], (1, 2)) + self.assertEqual(o[2], {'a': 'A', 'b': 'B'}) + + def test_map_eats_errors(self): + def mapped(ext, *args, **kwds): + raise RuntimeError('hard coded error') + + em = extension.ExtensionManager('stevedore.test.extension', + invoke_on_load=True, + ) + results = em.map(mapped, 1, 2, a='A', b='B') + self.assertEqual(results, []) + + def test_map_propagate_exceptions(self): + def mapped(ext, *args, **kwds): + raise RuntimeError('hard coded error') + + em = extension.ExtensionManager('stevedore.test.extension', + invoke_on_load=True, + propagate_map_exceptions=True + ) + + try: + em.map(mapped, 1, 2, a='A', b='B') + assert False + except RuntimeError: + pass + + def test_map_errors_when_no_plugins(self): + expected_str = 'No stevedore.test.extension.none extensions found' + + def mapped(ext, *args, **kwds): + pass + + em = extension.ExtensionManager('stevedore.test.extension.none', + invoke_on_load=True, + ) + try: + em.map(mapped, 1, 2, a='A', b='B') + except exception.NoMatches as err: + self.assertEqual(expected_str, str(err)) + + def test_map_method(self): + em = extension.ExtensionManager('stevedore.test.extension', + invoke_on_load=True, + ) + + result = em.map_method('get_args_and_data', 42) + self.assertEqual(set(r[2] for r 
in result), set([42])) + + def test_items(self): + em = extension.ExtensionManager('stevedore.test.extension') + expected_output = set([(name, em[name]) for name in ALL_NAMES]) + self.assertEqual(expected_output, set(em.items())) + + +class TestLoadRequirementsNewSetuptools(utils.TestCase): + # setuptools 11.3 and later + + def setUp(self): + super(TestLoadRequirementsNewSetuptools, self).setUp() + self.mock_ep = mock.Mock(spec=['require', 'resolve', 'load', 'name']) + self.em = extension.ExtensionManager.make_test_instance([]) + + def test_verify_requirements(self): + self.em._load_one_plugin(self.mock_ep, False, (), {}, + verify_requirements=True) + self.mock_ep.require.assert_called_once_with() + self.mock_ep.resolve.assert_called_once_with() + + def test_no_verify_requirements(self): + self.em._load_one_plugin(self.mock_ep, False, (), {}, + verify_requirements=False) + self.assertEqual(0, self.mock_ep.require.call_count) + self.mock_ep.resolve.assert_called_once_with() + + +class TestLoadRequirementsOldSetuptools(utils.TestCase): + # Before setuptools 11.3 + + def setUp(self): + super(TestLoadRequirementsOldSetuptools, self).setUp() + self.mock_ep = mock.Mock(spec=['load', 'name']) + self.em = extension.ExtensionManager.make_test_instance([]) + + def test_verify_requirements(self): + self.em._load_one_plugin(self.mock_ep, False, (), {}, + verify_requirements=True) + self.mock_ep.load.assert_called_once_with() + + def test_no_verify_requirements(self): + self.em._load_one_plugin(self.mock_ep, False, (), {}, + verify_requirements=False) + self.mock_ep.load.assert_called_once_with() + + +class TestExtensionProperties(utils.TestCase): + + def setUp(self): + self.ext1 = extension.Extension( + 'name', + importlib_metadata.EntryPoint( + 'name', 'module.name:attribute.name [extra]', 'group_name', + ), + mock.Mock(), + None, + ) + self.ext2 = extension.Extension( + 'name', + importlib_metadata.EntryPoint( + 'name', 'module:attribute', 'group_name', + ), + mock.Mock(), + None, + ) + + def test_module_name(self): + self.assertEqual('module.name', self.ext1.module_name) + self.assertEqual('module', self.ext2.module_name) + + def test_extras(self): + self.assertEqual(['[extra]'], self.ext1.extras) + self.assertEqual([], self.ext2.extras) + + def test_attr(self): + self.assertEqual('attribute.name', self.ext1.attr) + self.assertEqual('attribute', self.ext2.attr) + + def test_entry_point_target(self): + self.assertEqual('module.name:attribute.name [extra]', + self.ext1.entry_point_target) + self.assertEqual('module:attribute', + self.ext2.entry_point_target) diff --git a/venv/Lib/site-packages/stevedore/tests/test_hook.py b/venv/Lib/site-packages/stevedore/tests/test_hook.py new file mode 100644 index 00000000..5741bb9f --- /dev/null +++ b/venv/Lib/site-packages/stevedore/tests/test_hook.py @@ -0,0 +1,55 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
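+
+"""Tests for stevedore.hook
+
+HookManager loads every extension registered under one name in a
+namespace; indexing the manager by that name returns a list, e.g.::
+
+    em = hook.HookManager('stevedore.test.extension', 't1')
+    em['t1']  # -> list of Extension objects named 't1'
+"""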
+ +from stevedore import hook +from stevedore.tests import utils + + +class TestHook(utils.TestCase): + def test_hook(self): + em = hook.HookManager( + 'stevedore.test.extension', + 't1', + invoke_on_load=True, + invoke_args=('a',), + invoke_kwds={'b': 'B'}, + ) + self.assertEqual(len(em.extensions), 1) + self.assertEqual(em.names(), ['t1']) + + def test_get_by_name(self): + em = hook.HookManager( + 'stevedore.test.extension', + 't1', + invoke_on_load=True, + invoke_args=('a',), + invoke_kwds={'b': 'B'}, + ) + e_list = em['t1'] + self.assertEqual(len(e_list), 1) + e = e_list[0] + self.assertEqual(e.name, 't1') + + def test_get_by_name_missing(self): + em = hook.HookManager( + 'stevedore.test.extension', + 't1', + invoke_on_load=True, + invoke_args=('a',), + invoke_kwds={'b': 'B'}, + ) + try: + em['t2'] + except KeyError: + pass + else: + assert False, 'Failed to raise KeyError' diff --git a/venv/Lib/site-packages/stevedore/tests/test_named.py b/venv/Lib/site-packages/stevedore/tests/test_named.py new file mode 100644 index 00000000..d41dc2e5 --- /dev/null +++ b/venv/Lib/site-packages/stevedore/tests/test_named.py @@ -0,0 +1,93 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from unittest import mock + +from stevedore import named +from stevedore.tests import utils + + +class TestNamed(utils.TestCase): + def test_named(self): + em = named.NamedExtensionManager( + 'stevedore.test.extension', + names=['t1'], + invoke_on_load=True, + invoke_args=('a',), + invoke_kwds={'b': 'B'}, + ) + actual = em.names() + self.assertEqual(actual, ['t1']) + + def test_enabled_before_load(self): + # Set up the constructor for the FauxExtension to cause an + # AssertionError so the test fails if the class is instantiated, + # which should only happen if it is loaded before the name of the + # extension is compared against the names that should be loaded by + # the manager. + init_name = 'stevedore.tests.test_extension.FauxExtension.__init__' + with mock.patch(init_name) as m: + m.side_effect = AssertionError + em = named.NamedExtensionManager( + 'stevedore.test.extension', + # Look for an extension that does not exist so the + # __init__ we mocked should never be invoked. 
+ names=['no-such-extension'], + invoke_on_load=True, + invoke_args=('a',), + invoke_kwds={'b': 'B'}, + ) + actual = em.names() + self.assertEqual(actual, []) + + def test_extensions_listed_in_name_order(self): + # Since we don't know the "natural" order of the extensions, run + # the test both ways: if the sorting is broken, one of them will + # fail + em = named.NamedExtensionManager( + 'stevedore.test.extension', + names=['t1', 't2'], + name_order=True + ) + actual = em.names() + self.assertEqual(actual, ['t1', 't2']) + + em = named.NamedExtensionManager( + 'stevedore.test.extension', + names=['t2', 't1'], + name_order=True + ) + actual = em.names() + self.assertEqual(actual, ['t2', 't1']) + + def test_load_fail_ignored_when_sorted(self): + em = named.NamedExtensionManager( + 'stevedore.test.extension', + names=['e1', 't2', 'e2', 't1'], + name_order=True, + invoke_on_load=True, + invoke_args=('a',), + invoke_kwds={'b': 'B'}, + ) + actual = em.names() + self.assertEqual(['t2', 't1'], actual) + + em = named.NamedExtensionManager( + 'stevedore.test.extension', + names=['e1', 't1'], + name_order=False, + invoke_on_load=True, + invoke_args=('a',), + invoke_kwds={'b': 'B'}, + ) + actual = em.names() + self.assertEqual(['t1'], actual) diff --git a/venv/Lib/site-packages/stevedore/tests/test_sphinxext.py b/venv/Lib/site-packages/stevedore/tests/test_sphinxext.py new file mode 100644 index 00000000..e90bd679 --- /dev/null +++ b/venv/Lib/site-packages/stevedore/tests/test_sphinxext.py @@ -0,0 +1,122 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
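+
+# _simple_list() and _detailed_list() yield (text, source-name) pairs
+# which the list-plugins directive parses as reST; _make_ext() below
+# fabricates an Extension with a chosen docstring so the emitted lines
+# can be checked without loading real entry points.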
+"""Tests for the sphinx extension +""" + +try: + # For python 3.8 and later + import importlib.metadata as importlib_metadata +except ImportError: + # For everyone else + import importlib_metadata + +from stevedore import extension +from stevedore import sphinxext +from stevedore.tests import utils + + +def _make_ext(name, docstring): + def inner(): + pass + + inner.__doc__ = docstring + m1 = importlib_metadata.EntryPoint( + name, '{}_module:{}'.format(name, name), 'group', + ) + return extension.Extension(name, m1, inner, None) + + +class TestSphinxExt(utils.TestCase): + + def setUp(self): + super(TestSphinxExt, self).setUp() + self.exts = [ + _make_ext('test1', 'One-line docstring'), + _make_ext('test2', 'Multi-line docstring\n\nAnother para'), + ] + self.em = extension.ExtensionManager.make_test_instance(self.exts) + + def test_simple_list(self): + results = list(sphinxext._simple_list(self.em)) + self.assertEqual( + [ + ('* test1 -- One-line docstring', 'test1_module'), + ('* test2 -- Multi-line docstring', 'test2_module'), + ], + results, + ) + + def test_simple_list_no_docstring(self): + ext = [_make_ext('nodoc', None)] + em = extension.ExtensionManager.make_test_instance(ext) + results = list(sphinxext._simple_list(em)) + self.assertEqual( + [ + ('* nodoc -- ', 'nodoc_module'), + ], + results, + ) + + def test_detailed_list(self): + results = list(sphinxext._detailed_list(self.em)) + self.assertEqual( + [ + ('test1', 'test1_module'), + ('-----', 'test1_module'), + ('\n', 'test1_module'), + ('One-line docstring', 'test1_module'), + ('\n', 'test1_module'), + ('test2', 'test2_module'), + ('-----', 'test2_module'), + ('\n', 'test2_module'), + ('Multi-line docstring\n\nAnother para', 'test2_module'), + ('\n', 'test2_module'), + ], + results, + ) + + def test_detailed_list_format(self): + results = list(sphinxext._detailed_list(self.em, over='+', under='+')) + self.assertEqual( + [ + ('+++++', 'test1_module'), + ('test1', 'test1_module'), + ('+++++', 'test1_module'), + ('\n', 'test1_module'), + ('One-line docstring', 'test1_module'), + ('\n', 'test1_module'), + ('+++++', 'test2_module'), + ('test2', 'test2_module'), + ('+++++', 'test2_module'), + ('\n', 'test2_module'), + ('Multi-line docstring\n\nAnother para', 'test2_module'), + ('\n', 'test2_module'), + ], + results, + ) + + def test_detailed_list_no_docstring(self): + ext = [_make_ext('nodoc', None)] + em = extension.ExtensionManager.make_test_instance(ext) + results = list(sphinxext._detailed_list(em)) + self.assertEqual( + [ + ('nodoc', 'nodoc_module'), + ('-----', 'nodoc_module'), + ('\n', 'nodoc_module'), + (('.. warning:: No documentation found for ' + 'nodoc in nodoc_module:nodoc'), + 'nodoc_module'), + ('\n', 'nodoc_module'), + ], + results, + ) diff --git a/venv/Lib/site-packages/stevedore/tests/test_test_manager.py b/venv/Lib/site-packages/stevedore/tests/test_test_manager.py new file mode 100644 index 00000000..54bb454f --- /dev/null +++ b/venv/Lib/site-packages/stevedore/tests/test_test_manager.py @@ -0,0 +1,217 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. + +from unittest.mock import Mock, sentinel + +from stevedore import (ExtensionManager, NamedExtensionManager, HookManager, + DriverManager, EnabledExtensionManager) +from stevedore.dispatch import (DispatchExtensionManager, + NameDispatchExtensionManager) +from stevedore.extension import Extension +from stevedore.tests import utils + + +test_extension = Extension('test_extension', None, None, None) +test_extension2 = Extension('another_one', None, None, None) + +mock_entry_point = Mock(module_name='test.extension', attrs=['obj']) +a_driver = Extension('test_driver', mock_entry_point, sentinel.driver_plugin, + sentinel.driver_obj) + + +# base ExtensionManager +class TestTestManager(utils.TestCase): + def test_instance_should_use_supplied_extensions(self): + extensions = [test_extension, test_extension2] + em = ExtensionManager.make_test_instance(extensions) + self.assertEqual(extensions, em.extensions) + + def test_instance_should_have_default_namespace(self): + em = ExtensionManager.make_test_instance([]) + self.assertEqual(em.namespace, 'TESTING') + + def test_instance_should_use_supplied_namespace(self): + namespace = 'testing.1.2.3' + em = ExtensionManager.make_test_instance([], namespace=namespace) + self.assertEqual(namespace, em.namespace) + + def test_extension_name_should_be_listed(self): + em = ExtensionManager.make_test_instance([test_extension]) + self.assertIn(test_extension.name, em.names()) + + def test_iterator_should_yield_extension(self): + em = ExtensionManager.make_test_instance([test_extension]) + self.assertEqual(test_extension, next(iter(em))) + + def test_manager_should_allow_name_access(self): + em = ExtensionManager.make_test_instance([test_extension]) + self.assertEqual(test_extension, em[test_extension.name]) + + def test_manager_should_call(self): + em = ExtensionManager.make_test_instance([test_extension]) + func = Mock() + em.map(func) + func.assert_called_once_with(test_extension) + + def test_manager_should_call_all(self): + em = ExtensionManager.make_test_instance([test_extension2, + test_extension]) + func = Mock() + em.map(func) + func.assert_any_call(test_extension2) + func.assert_any_call(test_extension) + + def test_manager_return_values(self): + def mapped(ext, *args, **kwds): + return ext.name + + em = ExtensionManager.make_test_instance([test_extension2, + test_extension]) + results = em.map(mapped) + self.assertEqual(sorted(results), ['another_one', 'test_extension']) + + def test_manager_should_eat_exceptions(self): + em = ExtensionManager.make_test_instance([test_extension]) + + func = Mock(side_effect=RuntimeError('hard coded error')) + + results = em.map(func, 1, 2, a='A', b='B') + self.assertEqual(results, []) + + def test_manager_should_propagate_exceptions(self): + em = ExtensionManager.make_test_instance([test_extension], + propagate_map_exceptions=True) + self.skipTest('Skipping temporarily') + func = Mock(side_effect=RuntimeError('hard coded error')) + em.map(func, 1, 2, a='A', b='B') + + # NamedExtensionManager + def test_named_manager_should_use_supplied_extensions(self): + extensions = [test_extension, test_extension2] + em = NamedExtensionManager.make_test_instance(extensions) + self.assertEqual(extensions, em.extensions) + + def test_named_manager_should_have_default_namespace(self): + em = NamedExtensionManager.make_test_instance([]) + self.assertEqual(em.namespace, 'TESTING') + + def 
test_named_manager_should_use_supplied_namespace(self): + namespace = 'testing.1.2.3' + em = NamedExtensionManager.make_test_instance([], namespace=namespace) + self.assertEqual(namespace, em.namespace) + + def test_named_manager_should_populate_names(self): + extensions = [test_extension, test_extension2] + em = NamedExtensionManager.make_test_instance(extensions) + self.assertEqual(em.names(), ['test_extension', 'another_one']) + + # HookManager + def test_hook_manager_should_use_supplied_extensions(self): + extensions = [test_extension, test_extension2] + em = HookManager.make_test_instance(extensions) + self.assertEqual(extensions, em.extensions) + + def test_hook_manager_should_be_first_extension_name(self): + extensions = [test_extension, test_extension2] + em = HookManager.make_test_instance(extensions) + # This will raise KeyError if the names don't match + assert(em[test_extension.name]) + + def test_hook_manager_should_have_default_namespace(self): + em = HookManager.make_test_instance([test_extension]) + self.assertEqual(em.namespace, 'TESTING') + + def test_hook_manager_should_use_supplied_namespace(self): + namespace = 'testing.1.2.3' + em = HookManager.make_test_instance([test_extension], + namespace=namespace) + self.assertEqual(namespace, em.namespace) + + def test_hook_manager_should_return_named_extensions(self): + hook1 = Extension('captain', None, None, None) + hook2 = Extension('captain', None, None, None) + em = HookManager.make_test_instance([hook1, hook2]) + self.assertEqual([hook1, hook2], em['captain']) + + # DriverManager + def test_driver_manager_should_use_supplied_extension(self): + em = DriverManager.make_test_instance(a_driver) + self.assertEqual([a_driver], em.extensions) + + def test_driver_manager_should_have_default_namespace(self): + em = DriverManager.make_test_instance(a_driver) + self.assertEqual(em.namespace, 'TESTING') + + def test_driver_manager_should_use_supplied_namespace(self): + namespace = 'testing.1.2.3' + em = DriverManager.make_test_instance(a_driver, namespace=namespace) + self.assertEqual(namespace, em.namespace) + + def test_instance_should_use_driver_name(self): + em = DriverManager.make_test_instance(a_driver) + self.assertEqual(['test_driver'], em.names()) + + def test_instance_call(self): + def invoke(ext, *args, **kwds): + return ext.name, args, kwds + + em = DriverManager.make_test_instance(a_driver) + result = em(invoke, 'a', b='C') + self.assertEqual(result, ('test_driver', ('a',), {'b': 'C'})) + + def test_instance_driver_property(self): + em = DriverManager.make_test_instance(a_driver) + self.assertEqual(sentinel.driver_obj, em.driver) + + # EnabledExtensionManager + def test_enabled_instance_should_use_supplied_extensions(self): + extensions = [test_extension, test_extension2] + em = EnabledExtensionManager.make_test_instance(extensions) + self.assertEqual(extensions, em.extensions) + + # DispatchExtensionManager + def test_dispatch_instance_should_use_supplied_extensions(self): + extensions = [test_extension, test_extension2] + em = DispatchExtensionManager.make_test_instance(extensions) + self.assertEqual(extensions, em.extensions) + + def test_dispatch_map_should_invoke_filter_for_extensions(self): + em = DispatchExtensionManager.make_test_instance([test_extension, + test_extension2]) + filter_func = Mock(return_value=False) + args = ('A',) + kw = {'big': 'Cheese'} + em.map(filter_func, None, *args, **kw) + filter_func.assert_any_call(test_extension, *args, **kw) + filter_func.assert_any_call(test_extension2, *args, **kw) 
+ + # NameDispatchExtensionManager + def test_name_dispatch_instance_should_use_supplied_extensions(self): + extensions = [test_extension, test_extension2] + em = NameDispatchExtensionManager.make_test_instance(extensions) + + self.assertEqual(extensions, em.extensions) + + def test_name_dispatch_instance_should_build_extension_name_map(self): + extensions = [test_extension, test_extension2] + em = NameDispatchExtensionManager.make_test_instance(extensions) + self.assertEqual(test_extension, em.by_name[test_extension.name]) + self.assertEqual(test_extension2, em.by_name[test_extension2.name]) + + def test_named_dispatch_map_should_invoke_filter_for_extensions(self): + em = NameDispatchExtensionManager.make_test_instance([test_extension, + test_extension2]) + func = Mock() + args = ('A',) + kw = {'BIGGER': 'Cheese'} + em.map(['test_extension'], func, *args, **kw) + func.assert_called_once_with(test_extension, *args, **kw) diff --git a/venv/Lib/site-packages/stevedore/tests/utils.py b/venv/Lib/site-packages/stevedore/tests/utils.py new file mode 100644 index 00000000..f452959c --- /dev/null +++ b/venv/Lib/site-packages/stevedore/tests/utils.py @@ -0,0 +1,17 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import unittest + + +class TestCase(unittest.TestCase): + pass diff --git a/venv/Lib/site-packages/virtualenv-20.0.27.dist-info/INSTALLER b/venv/Lib/site-packages/virtualenv-20.0.27.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv-20.0.27.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/Lib/site-packages/virtualenv-20.0.27.dist-info/LICENSE b/venv/Lib/site-packages/virtualenv-20.0.27.dist-info/LICENSE new file mode 100644 index 00000000..be9700d6 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv-20.0.27.dist-info/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2020-202x The virtualenv developers + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/venv/Lib/site-packages/virtualenv-20.0.27.dist-info/METADATA b/venv/Lib/site-packages/virtualenv-20.0.27.dist-info/METADATA new file mode 100644 index 00000000..e905abe5 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv-20.0.27.dist-info/METADATA @@ -0,0 +1,93 @@ +Metadata-Version: 2.1 +Name: virtualenv +Version: 20.0.27 +Summary: Virtual Python Environment builder +Home-page: https://virtualenv.pypa.io/ +Author: Bernat Gabor +Author-email: gaborjbernat@gmail.com +Maintainer: Bernat Gabor +Maintainer-email: gaborjbernat@gmail.com +License: MIT +Project-URL: Source, https://github.com/pypa/virtualenv +Project-URL: Tracker, https://github.com/pypa/virtualenv/issues +Keywords: virtual,environments,isolated +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Software Development :: Testing +Classifier: Topic :: Utilities +Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7 +Description-Content-Type: text/markdown +Requires-Dist: appdirs (<2,>=1.4.3) +Requires-Dist: distlib (<1,>=0.3.1) +Requires-Dist: filelock (<4,>=3.0.0) +Requires-Dist: six (<2,>=1.9.0) +Requires-Dist: pathlib2 (<3,>=2.3.3) ; python_version < "3.4" and sys_platform != "win32" +Requires-Dist: importlib-resources (>=1.0) ; python_version < "3.7" +Requires-Dist: importlib-metadata (<2,>=0.12) ; python_version < "3.8" +Provides-Extra: docs +Requires-Dist: sphinx (>=3) ; extra == 'docs' +Requires-Dist: sphinx-argparse (>=0.2.5) ; extra == 'docs' +Requires-Dist: sphinx-rtd-theme (>=0.4.3) ; extra == 'docs' +Requires-Dist: towncrier (>=19.9.0rc1) ; extra == 'docs' +Requires-Dist: proselint (>=0.10.2) ; extra == 'docs' +Provides-Extra: testing +Requires-Dist: pytest (>=4) ; extra == 'testing' +Requires-Dist: coverage (>=5) ; extra == 'testing' +Requires-Dist: coverage-enable-subprocess (>=1) ; extra == 'testing' +Requires-Dist: pytest-xdist (>=1.31.0) ; extra == 'testing' +Requires-Dist: pytest-mock (>=2) ; extra == 'testing' +Requires-Dist: pytest-env (>=0.6.2) ; extra == 'testing' +Requires-Dist: pytest-randomly (>=1) ; extra == 'testing' +Requires-Dist: pytest-timeout (>=1) ; extra == 'testing' +Requires-Dist: pytest-freezegun (>=0.4.1) ; extra == 'testing' +Requires-Dist: flaky (>=3) ; extra == 'testing' +Requires-Dist: packaging (>=20.0) ; (python_version > "3.4") and extra == 'testing' +Requires-Dist: xonsh (>=0.9.16) ; (python_version > "3.4" and python_version != "3.9") and extra == 'testing' + +[![PyPI](https://img.shields.io/pypi/v/virtualenv?style=flat-square)](https://pypi.org/project/virtualenv) +[![PyPI - Implementation](https://img.shields.io/pypi/implementation/virtualenv?style=flat-square)](https://pypi.org/project/virtualenv) 
+[![PyPI - Python Version](https://img.shields.io/pypi/pyversions/virtualenv?style=flat-square)](https://pypi.org/project/virtualenv) +[![Documentation](https://readthedocs.org/projects/virtualenv/badge/?version=latest&style=flat-square)](http://virtualenv.pypa.io) +[![Gitter Chat](https://img.shields.io/gitter/room/pypa/virtualenv?color=FF004F&style=flat-square)](https://gitter.im/pypa/virtualenv) +[![PyPI - Downloads](https://img.shields.io/pypi/dm/virtualenv?style=flat-square)](https://pypistats.org/packages/virtualenv) +[![PyPI - License](https://img.shields.io/pypi/l/virtualenv?style=flat-square)](https://opensource.org/licenses/MIT) +[![Build Status](https://github.com/pypa/virtualenv/workflows/check/badge.svg?branch=master&event=push)](https://github.com/pypa/virtualenv/actions?query=workflow%3Acheck) +[![codecov](https://codecov.io/gh/pypa/virtualenv/branch/master/graph/badge.svg)](https://codecov.io/gh/pypa/virtualenv) +[![Code style: +black](https://img.shields.io/badge/code%20style-black-000000.svg?style=flat-square)](https://github.com/psf/black) + +virtualenv +========== + +A tool for creating isolated ``virtual`` python environments. + +- [Installation](https://virtualenv.pypa.io/en/latest/installation.html) +- [Documentation](https://virtualenv.pypa.io) +- [Changelog](https://virtualenv.pypa.io/en/latest/changelog.html) +- [Issues](https://github.com/pypa/virtualenv/issues) +- [PyPI](https://pypi.org/project/virtualenv) +- [Github](https://github.com/pypa/virtualenv) + +Code of Conduct +--------------- + +Everyone interacting in the virtualenv project's codebases, issue trackers, chat rooms, and mailing lists is expected to +follow the [PyPA Code of Conduct](https://www.pypa.io/en/latest/code-of-conduct/). + + diff --git a/venv/Lib/site-packages/virtualenv-20.0.27.dist-info/RECORD b/venv/Lib/site-packages/virtualenv-20.0.27.dist-info/RECORD new file mode 100644 index 00000000..754c9245 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv-20.0.27.dist-info/RECORD @@ -0,0 +1,220 @@ +../../Scripts/virtualenv.exe,sha256=8bnYO4T19B3k1N3TzuNMD1pL8WSRgTlJpVNAmLd03X0,106390 +virtualenv-20.0.27.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +virtualenv-20.0.27.dist-info/LICENSE,sha256=XBWRk3jFsqqrexnOpw2M3HX3aHnjJFTkwDmfi3HRcek,1074 +virtualenv-20.0.27.dist-info/METADATA,sha256=M6Rv0LYZTpGOxxva7RcnHBQpjA8r7_zS958jR0dstP8,4934 +virtualenv-20.0.27.dist-info/RECORD,, +virtualenv-20.0.27.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110 +virtualenv-20.0.27.dist-info/entry_points.txt,sha256=1DALKzYOcffJa7Q15TQlMQu0yeFXEy5W124y0aJEfYU,1615 +virtualenv-20.0.27.dist-info/top_level.txt,sha256=JV-LVlC8YeIw1DgiYI0hEot7tgFy5IWdKVcSG7NyzaI,11 +virtualenv-20.0.27.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +virtualenv/__init__.py,sha256=aDrew9O-Yz0e738UwR7mYseLSNmBk3DIJociruvGkdU,165 +virtualenv/__main__.py,sha256=UGHwCkjrcPq0dBpJpZN4x4hB6fhv7s-GFb7QWhMcK7E,2786 +virtualenv/__pycache__/__init__.cpython-36.pyc,, +virtualenv/__pycache__/__main__.cpython-36.pyc,, +virtualenv/__pycache__/info.cpython-36.pyc,, +virtualenv/__pycache__/report.cpython-36.pyc,, +virtualenv/__pycache__/version.cpython-36.pyc,, +virtualenv/activation/__init__.py,sha256=jLIERxJXMnHq2fH49RdWqBoaiASres4CTKMdUJOeos0,480 +virtualenv/activation/__pycache__/__init__.cpython-36.pyc,, +virtualenv/activation/__pycache__/activator.cpython-36.pyc,, +virtualenv/activation/__pycache__/via_template.cpython-36.pyc,, 
+virtualenv/activation/activator.py,sha256=CXomkRvhzcAeygYlDwQdDjfPyZQG85aBab5GIVQPv2M,1341 +virtualenv/activation/bash/__init__.py,sha256=7aC1WfvyzgFrIQs13jOuESuAbuiAnTsKkOe0iReRoaE,312 +virtualenv/activation/bash/__pycache__/__init__.cpython-36.pyc,, +virtualenv/activation/bash/activate.sh,sha256=xkIDykLyI5CO3K7eeAujgPiBP2eleYDTFsqhaXsOUG4,2249 +virtualenv/activation/batch/__init__.py,sha256=K0gVfwuXV7uoaMDL7moWGCq7uTDzI64giZzQQ8s2qnU,733 +virtualenv/activation/batch/__pycache__/__init__.cpython-36.pyc,, +virtualenv/activation/batch/activate.bat,sha256=PeQnWWsjvHT-jIWhYI7hbdzkDBZx5UOstnsCmq5PYtw,1031 +virtualenv/activation/batch/deactivate.bat,sha256=6OznnO-HC2wnWUN7YAT-bj815zeKMXEPC0keyBYwKUU,510 +virtualenv/activation/batch/pydoc.bat,sha256=pVuxn8mn9P_Rd0349fiBEiwIuMvfJQSfgJ2dljUT2fA,24 +virtualenv/activation/cshell/__init__.py,sha256=pw4s5idqQhaEccPxadETEvilBcoxW-UkVQ-RNqPyVCQ,344 +virtualenv/activation/cshell/__pycache__/__init__.cpython-36.pyc,, +virtualenv/activation/cshell/activate.csh,sha256=jYwms8OTiVu9MJwXltuEm43HU09BJUqkrVqyj4sjpDA,1468 +virtualenv/activation/fish/__init__.py,sha256=hDkJq1P1wK2qm6BXydXWA9GMkBpj-TaejbKSceFnGZU,251 +virtualenv/activation/fish/__pycache__/__init__.cpython-36.pyc,, +virtualenv/activation/fish/activate.fish,sha256=V7nVwSI_nsFEMlJjSQxCayNWkjubXi1KSgSw1bEakh8,3099 +virtualenv/activation/powershell/__init__.py,sha256=EA-73s5TUMkgxAhLwucFg3gsBwW5huNh7qB4I7uEU-U,256 +virtualenv/activation/powershell/__pycache__/__init__.cpython-36.pyc,, +virtualenv/activation/powershell/activate.ps1,sha256=qXTD2IZIPXIr3HZbNahnym8l3FFwa1o_pyB8cSmxeg0,1867 +virtualenv/activation/python/__init__.py,sha256=Uv53LqOrIT_2dO1FXcUYAnwH1eypG8CJ2InhSx1GRI4,1323 +virtualenv/activation/python/__pycache__/__init__.cpython-36.pyc,, +virtualenv/activation/python/__pycache__/activate_this.cpython-36.pyc,, +virtualenv/activation/python/activate_this.py,sha256=Xpz7exdGSjmWk0KfwHLofIpDPUtazNSNGrxT0-5ZG_s,1208 +virtualenv/activation/via_template.py,sha256=B88RGc0qoGc46DmKJmLJuzcCJU_iXWhdD93r6KLuYHQ,2204 +virtualenv/activation/xonsh/__init__.py,sha256=7NUevd5EpHRMZdSyR1KgFTe9QQBO94zZOwFH6MR6zjo,355 +virtualenv/activation/xonsh/__pycache__/__init__.cpython-36.pyc,, +virtualenv/activation/xonsh/activate.xsh,sha256=qkKgWfrUjYKrgrmhf45VuBz99EMadtiNU8GMfHZZ7AU,1172 +virtualenv/app_data/__init__.py,sha256=mOgHrNxZ320NcDvl2EU9Uhaq9XSwnJwkldKAWtrMCKo,1882 +virtualenv/app_data/__pycache__/__init__.cpython-36.pyc,, +virtualenv/app_data/__pycache__/base.cpython-36.pyc,, +virtualenv/app_data/__pycache__/na.cpython-36.pyc,, +virtualenv/app_data/__pycache__/via_disk_folder.cpython-36.pyc,, +virtualenv/app_data/__pycache__/via_tempdir.cpython-36.pyc,, +virtualenv/app_data/base.py,sha256=wtoduyV2lc020XNdOwt5gu1lkUJd0n_LMrgT8nifAwo,2054 +virtualenv/app_data/na.py,sha256=fCISMfJt3opkAqCmJBZTW9tody6ECfVPXtSrY5ZJueY,1321 +virtualenv/app_data/via_disk_folder.py,sha256=bJxZIXTAqQekwRZuoYgAE1fvwQ6w1dBR2IJFe09OIjE,5404 +virtualenv/app_data/via_tempdir.py,sha256=vt4I1wAaNouOeFSTFX5DPbA3zPZ1ikVGzXBDsNLNHRM,771 +virtualenv/config/__init__.py,sha256=8ArZTco6Meo0W9i4dqnwmDO8BJYTaHX7oQx1o06vCm4,57 +virtualenv/config/__pycache__/__init__.cpython-36.pyc,, +virtualenv/config/__pycache__/convert.cpython-36.pyc,, +virtualenv/config/__pycache__/env_var.cpython-36.pyc,, +virtualenv/config/__pycache__/ini.cpython-36.pyc,, +virtualenv/config/cli/__init__.py,sha256=8ArZTco6Meo0W9i4dqnwmDO8BJYTaHX7oQx1o06vCm4,57 +virtualenv/config/cli/__pycache__/__init__.cpython-36.pyc,, +virtualenv/config/cli/__pycache__/parser.cpython-36.pyc,, 
+virtualenv/config/cli/parser.py,sha256=y5IqHccLBqFpocpE75X611nVrP8v394VW94a9GAojvE,4524 +virtualenv/config/convert.py,sha256=msrkiG2Vq9gYQXf1C5W4n3b0ZTv6weFANV-mLUq6uMo,2091 +virtualenv/config/env_var.py,sha256=48XpOurSLLjMX-kXjvOpZuAoOUP-LvnbotTlmebhhFk,844 +virtualenv/config/ini.py,sha256=xA4a9OAgTPNMlTYcWTtw2C97FONoamukIJeCCczI7Do,2789 +virtualenv/create/__init__.py,sha256=8ArZTco6Meo0W9i4dqnwmDO8BJYTaHX7oQx1o06vCm4,57 +virtualenv/create/__pycache__/__init__.cpython-36.pyc,, +virtualenv/create/__pycache__/creator.cpython-36.pyc,, +virtualenv/create/__pycache__/debug.cpython-36.pyc,, +virtualenv/create/__pycache__/describe.cpython-36.pyc,, +virtualenv/create/__pycache__/pyenv_cfg.cpython-36.pyc,, +virtualenv/create/creator.py,sha256=D2YsK817anGhHxjh_ew7MlRCGjPPzW95lroRPlzblGk,8437 +virtualenv/create/debug.py,sha256=ETOke8w4Ib8fiufAHVeOkH3v0zrztljw3WjGvZyE0Mk,3342 +virtualenv/create/describe.py,sha256=bm0V2wpFOjdN_MkzZuJAEBSttmi5YGPVwxtwGYU5zQU,3561 +virtualenv/create/pyenv_cfg.py,sha256=VsOGfzUpaVCO3J29zrhIeip4jZ4b7llbe45iOQAIRGg,1717 +virtualenv/create/via_global_ref/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +virtualenv/create/via_global_ref/__pycache__/__init__.cpython-36.pyc,, +virtualenv/create/via_global_ref/__pycache__/_virtualenv.cpython-36.pyc,, +virtualenv/create/via_global_ref/__pycache__/api.cpython-36.pyc,, +virtualenv/create/via_global_ref/__pycache__/store.cpython-36.pyc,, +virtualenv/create/via_global_ref/__pycache__/venv.cpython-36.pyc,, +virtualenv/create/via_global_ref/_virtualenv.py,sha256=vZYz3j2BIkiNZvz6DH8MnNC1jpFHk43fH4-nbKEratU,5662 +virtualenv/create/via_global_ref/api.py,sha256=uAQjwBRJ-u4d6A24QuxeB-khIrSGiP7-W_SQxNdE-Sk,4277 +virtualenv/create/via_global_ref/builtin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +virtualenv/create/via_global_ref/builtin/__pycache__/__init__.cpython-36.pyc,, +virtualenv/create/via_global_ref/builtin/__pycache__/builtin_way.cpython-36.pyc,, +virtualenv/create/via_global_ref/builtin/__pycache__/ref.cpython-36.pyc,, +virtualenv/create/via_global_ref/builtin/__pycache__/via_global_self_do.cpython-36.pyc,, +virtualenv/create/via_global_ref/builtin/builtin_way.py,sha256=hO22nT-itVoYiy8wXrXXYzHw86toCp_Uq-cURR7w6ck,546 +virtualenv/create/via_global_ref/builtin/cpython/__init__.py,sha256=8ArZTco6Meo0W9i4dqnwmDO8BJYTaHX7oQx1o06vCm4,57 +virtualenv/create/via_global_ref/builtin/cpython/__pycache__/__init__.cpython-36.pyc,, +virtualenv/create/via_global_ref/builtin/cpython/__pycache__/common.cpython-36.pyc,, +virtualenv/create/via_global_ref/builtin/cpython/__pycache__/cpython2.cpython-36.pyc,, +virtualenv/create/via_global_ref/builtin/cpython/__pycache__/cpython3.cpython-36.pyc,, +virtualenv/create/via_global_ref/builtin/cpython/__pycache__/mac_os.cpython-36.pyc,, +virtualenv/create/via_global_ref/builtin/cpython/common.py,sha256=_ycHGl1I4Pr7RfsXlUP0otJw9VQ0L744lTpPTTxPo6w,1909 +virtualenv/create/via_global_ref/builtin/cpython/cpython2.py,sha256=p41H2g6wAqhJzeUU48nH3u05-yWEbwCzhyj4pn8rnm4,3757 +virtualenv/create/via_global_ref/builtin/cpython/cpython3.py,sha256=9sY098at5Wp1Fhu1Aux1y06wH2ecbGj3bRucl4CGSQY,2473 +virtualenv/create/via_global_ref/builtin/cpython/mac_os.py,sha256=Hc9FVoQPuf8IV5j17LVj49lH7nyCep2nh_lWlZm49YI,12318 +virtualenv/create/via_global_ref/builtin/pypy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +virtualenv/create/via_global_ref/builtin/pypy/__pycache__/__init__.cpython-36.pyc,, +virtualenv/create/via_global_ref/builtin/pypy/__pycache__/common.cpython-36.pyc,, 
+virtualenv/create/via_global_ref/builtin/pypy/__pycache__/pypy2.cpython-36.pyc,, +virtualenv/create/via_global_ref/builtin/pypy/__pycache__/pypy3.cpython-36.pyc,, +virtualenv/create/via_global_ref/builtin/pypy/common.py,sha256=KagqBNEuysqqO-n-VCCpuMvBK8MiZFMJQRWbWW12c6g,1696 +virtualenv/create/via_global_ref/builtin/pypy/pypy2.py,sha256=bmMY_KJZ1iD_ifq-X9ZBOlOpJ1aN7839qigBgnWRIdA,3535 +virtualenv/create/via_global_ref/builtin/pypy/pypy3.py,sha256=ti6hmOIC4HiTBnEYKytO-d9wH-eLeMoQxQ0kZRhnNrw,1751 +virtualenv/create/via_global_ref/builtin/python2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +virtualenv/create/via_global_ref/builtin/python2/__pycache__/__init__.cpython-36.pyc,, +virtualenv/create/via_global_ref/builtin/python2/__pycache__/python2.cpython-36.pyc,, +virtualenv/create/via_global_ref/builtin/python2/__pycache__/site.cpython-36.pyc,, +virtualenv/create/via_global_ref/builtin/python2/python2.py,sha256=jkJwmkeJVTzwzo95eMIptTfdBA-qmyIqZcpt48iOitU,4276 +virtualenv/create/via_global_ref/builtin/python2/site.py,sha256=mp-Y1VLSXqhIeOojQefy8ffIylWqfq20jEfc2UwMTYU,6120 +virtualenv/create/via_global_ref/builtin/ref.py,sha256=-r9sJSW9X9rHxThk2yyJJRA5KkmJPW_axuffYBsFDIU,5501 +virtualenv/create/via_global_ref/builtin/via_global_self_do.py,sha256=9aH07lmA6SMvtynv7ZTO6Gh2aDDFdUiDQrlbtT9xze4,4057 +virtualenv/create/via_global_ref/store.py,sha256=cqLBEhQ979xHnlidqmxlDjsvj2Wr-mBo7shvGQSEBxU,685 +virtualenv/create/via_global_ref/venv.py,sha256=A6XrGu3oco_ZIHv0s-TybW7lErAomgLOYDMtgpdACc0,2919 +virtualenv/discovery/__init__.py,sha256=8ArZTco6Meo0W9i4dqnwmDO8BJYTaHX7oQx1o06vCm4,57 +virtualenv/discovery/__pycache__/__init__.cpython-36.pyc,, +virtualenv/discovery/__pycache__/builtin.cpython-36.pyc,, +virtualenv/discovery/__pycache__/cached_py_info.cpython-36.pyc,, +virtualenv/discovery/__pycache__/discover.cpython-36.pyc,, +virtualenv/discovery/__pycache__/py_info.cpython-36.pyc,, +virtualenv/discovery/__pycache__/py_spec.cpython-36.pyc,, +virtualenv/discovery/builtin.py,sha256=1c4Py9DnkiCLrbHp7bffvQtJI9HkxfVyFedt6IFMKEs,5079 +virtualenv/discovery/cached_py_info.py,sha256=vQiAFDoBjR13zrdmc5q97z4upBxWzUG6H6IHJZjqjeU,5007 +virtualenv/discovery/discover.py,sha256=evJYn4APkwjNmdolNeIBSHiOudkvN59c5oVYI2Zsjlg,1209 +virtualenv/discovery/py_info.py,sha256=PMbQIitL6ELOJpAZF3rrMxPxM8oOmTK9aDvb4YHTDEw,21783 +virtualenv/discovery/py_spec.py,sha256=wQhLzCfXoSPsAAO9nm5-I2lNolVDux4W2vPSUfJGjlc,4790 +virtualenv/discovery/windows/__init__.py,sha256=TPbnzCtRyw47pRVHTO8ikwljNcczxmSLDdWtwasxvQU,1036 +virtualenv/discovery/windows/__pycache__/__init__.cpython-36.pyc,, +virtualenv/discovery/windows/__pycache__/pep514.cpython-36.pyc,, +virtualenv/discovery/windows/pep514.py,sha256=YYiaJzo-XuMtO78BMFMAudqkeJiLQkFnUTOuQZ5lJz8,5451 +virtualenv/info.py,sha256=-2pI_kyC9fNj5OR8AQWkKjlpOk4_96Lmbco3atYYBdY,1921 +virtualenv/report.py,sha256=M2OHHCWdOHZsn74tj1MYYKmaI3QRJF8VA1FZIdkQTMQ,1594 +virtualenv/run/__init__.py,sha256=K-F_1t-y_I2GsmFZgrB6Ee1rNO-jS_7CDPu_DMB4iGQ,4908 +virtualenv/run/__pycache__/__init__.cpython-36.pyc,, +virtualenv/run/__pycache__/session.cpython-36.pyc,, +virtualenv/run/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +virtualenv/run/plugin/__pycache__/__init__.cpython-36.pyc,, +virtualenv/run/plugin/__pycache__/activators.cpython-36.pyc,, +virtualenv/run/plugin/__pycache__/base.cpython-36.pyc,, +virtualenv/run/plugin/__pycache__/creators.cpython-36.pyc,, +virtualenv/run/plugin/__pycache__/discovery.cpython-36.pyc,, 
+virtualenv/run/plugin/__pycache__/seeders.cpython-36.pyc,, +virtualenv/run/plugin/activators.py,sha256=kmHShj36eHfbnsiAJzX0U5LYvGhe0WkRYjbuKDz6gVM,2117 +virtualenv/run/plugin/base.py,sha256=-2185C01PaxOG7gnMbWWyZlo24n_FYo5J5_naeNZw8s,1934 +virtualenv/run/plugin/creators.py,sha256=5L7G5mk2GIOIaZVvDZszd2VVBSUM3jkAR4aBO0OkOXY,3517 +virtualenv/run/plugin/discovery.py,sha256=ba9szD21er0-AkuMXrdFOjFEDsIekRWljXdXkj_-8EM,994 +virtualenv/run/plugin/seeders.py,sha256=XVKaikuOC4JUO4lirHzowEJ6d0pI1pFfywCx3heE70g,1026 +virtualenv/run/session.py,sha256=mzXJyVQlXXgj_kO9IJDor44Co7WKiyFGnZ0mCTIy5x0,2547 +virtualenv/seed/__init__.py,sha256=8ArZTco6Meo0W9i4dqnwmDO8BJYTaHX7oQx1o06vCm4,57 +virtualenv/seed/__pycache__/__init__.cpython-36.pyc,, +virtualenv/seed/__pycache__/seeder.cpython-36.pyc,, +virtualenv/seed/embed/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +virtualenv/seed/embed/__pycache__/__init__.cpython-36.pyc,, +virtualenv/seed/embed/__pycache__/base_embed.cpython-36.pyc,, +virtualenv/seed/embed/__pycache__/pip_invoke.cpython-36.pyc,, +virtualenv/seed/embed/base_embed.py,sha256=j4qcALcc-ylU0bKjVxiELPqk4MJ8f0pmbe_lVYVH_5Q,4166 +virtualenv/seed/embed/pip_invoke.py,sha256=EMVwIeoW15SuorJ8z_-vBxPXwQJLS0ILA0Va9zNoOLI,2127 +virtualenv/seed/embed/via_app_data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +virtualenv/seed/embed/via_app_data/__pycache__/__init__.cpython-36.pyc,, +virtualenv/seed/embed/via_app_data/__pycache__/via_app_data.cpython-36.pyc,, +virtualenv/seed/embed/via_app_data/pip_install/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +virtualenv/seed/embed/via_app_data/pip_install/__pycache__/__init__.cpython-36.pyc,, +virtualenv/seed/embed/via_app_data/pip_install/__pycache__/base.cpython-36.pyc,, +virtualenv/seed/embed/via_app_data/pip_install/__pycache__/copy.cpython-36.pyc,, +virtualenv/seed/embed/via_app_data/pip_install/__pycache__/symlink.cpython-36.pyc,, +virtualenv/seed/embed/via_app_data/pip_install/base.py,sha256=rnR60JzM7G04cPDo2eH-aR8-iQuFXBgHJ2lQnSf0Gfs,6355 +virtualenv/seed/embed/via_app_data/pip_install/copy.py,sha256=gG2NePFHOYh-bsCf6TpsaQ_qrYhdBy67k0RWuwHSAwo,1307 +virtualenv/seed/embed/via_app_data/pip_install/symlink.py,sha256=wHCpfKobvjjaZLUSwM3FSCblZfiBFw4IQYsxwlfEEu0,2362 +virtualenv/seed/embed/via_app_data/via_app_data.py,sha256=quofMFCWc-OxvxdAJwZTWS5mOugnYB7wRqSJZxmkl-E,6053 +virtualenv/seed/seeder.py,sha256=DSGE_8Ycj01vj8mkppUBA9h7JG76XsVBMt-5MWlMF6k,1178 +virtualenv/seed/wheels/__init__.py,sha256=1J7el7lNjAwGxM4dmricrbVhSbYxs5sPzv9PTx2A6qA,226 +virtualenv/seed/wheels/__pycache__/__init__.cpython-36.pyc,, +virtualenv/seed/wheels/__pycache__/acquire.cpython-36.pyc,, +virtualenv/seed/wheels/__pycache__/bundle.cpython-36.pyc,, +virtualenv/seed/wheels/__pycache__/periodic_update.cpython-36.pyc,, +virtualenv/seed/wheels/__pycache__/util.cpython-36.pyc,, +virtualenv/seed/wheels/acquire.py,sha256=Iq_QPJgUPCyHfrS3t2EUUj8fieymfcKNve1JbJQ5Uyc,4420 +virtualenv/seed/wheels/bundle.py,sha256=xYbhh5nQEwec2vnqNJWNVtDL3VyjGkgDWVXSuXzXB-Y,1961 +virtualenv/seed/wheels/embed/__init__.py,sha256=UM6Z3KuzhGT8fILSfuHT4LN_W_rBMvc4NrGEfImSrXg,1995 +virtualenv/seed/wheels/embed/__pycache__/__init__.cpython-36.pyc,, +virtualenv/seed/wheels/embed/pip-19.1.1-py2.py3-none-any.whl,sha256=mTE08EdUcbkUUsoCnUOQ3I8pisY6cSgU8QHNG220ZnY,1360957 +virtualenv/seed/wheels/embed/pip-20.1.1-py2.py3-none-any.whl,sha256=snxN7a6MQapZEI8vo4v3jgiQ5ZBUW8js583OtLpg9uQ,1490666 
+virtualenv/seed/wheels/embed/setuptools-43.0.0-py2.py3-none-any.whl,sha256=pn-qUVGe8ozYJhr_DiIbbkw3D4-4utqKo-etiUUZmWM,583228 +virtualenv/seed/wheels/embed/setuptools-44.1.1-py2.py3-none-any.whl,sha256=J6cUwJJTE05gpvpoEw94xwN-VWLE8h-PMY8q6QDRUtU,583493 +virtualenv/seed/wheels/embed/setuptools-49.2.0-py3-none-any.whl,sha256=Jyx_SPXN3Fr1kB9CZSdMQhx-7eXIvEVKwpA9P4_DZek,789833 +virtualenv/seed/wheels/embed/wheel-0.33.6-py2.py3-none-any.whl,sha256=9NoXY9O-zy4s2SoUp8kg8PAOyjD93p6pksg2aFufryg,21556 +virtualenv/seed/wheels/embed/wheel-0.34.2-py2.py3-none-any.whl,sha256=3yd8tR5hNZq6UCII1oD5DASTrexvDoSK-UlId4rtOG4,26502 +virtualenv/seed/wheels/periodic_update.py,sha256=A9l7ZpjtWzGrq2nh39y8g5OppUryHb7_W3bCR7TAnPc,12756 +virtualenv/seed/wheels/util.py,sha256=Zdo76KEDqbNmM5u9JTuyu5uzEN_fQ4oj6qHOt0h0o1M,3960 +virtualenv/util/__init__.py,sha256=om6Hs2lH5igf5lkcSmQFiU7iMZ0Wx4dmSlMc6XW_Llg,199 +virtualenv/util/__pycache__/__init__.cpython-36.pyc,, +virtualenv/util/__pycache__/error.cpython-36.pyc,, +virtualenv/util/__pycache__/lock.cpython-36.pyc,, +virtualenv/util/__pycache__/six.cpython-36.pyc,, +virtualenv/util/__pycache__/zipapp.cpython-36.pyc,, +virtualenv/util/error.py,sha256=SRSZlXvMYQuJwxoUfNhlAyo3VwrAnIsZemSwPOxpjns,352 +virtualenv/util/lock.py,sha256=AyYqRMsz5YnLNDLNTrutjLqS8m0lkLqNNDpxW565b44,3550 +virtualenv/util/path/__init__.py,sha256=YaBAxtzGBdMu0uUtppe0ZeCHw5HhO-5zjeb3-fzyMoI,336 +virtualenv/util/path/__pycache__/__init__.cpython-36.pyc,, +virtualenv/util/path/__pycache__/_permission.cpython-36.pyc,, +virtualenv/util/path/__pycache__/_sync.cpython-36.pyc,, +virtualenv/util/path/_pathlib/__init__.py,sha256=evaey395zOuDc3FlukUeJ8SzRqLx1xWOKmg-TEPsRd4,1446 +virtualenv/util/path/_pathlib/__pycache__/__init__.cpython-36.pyc,, +virtualenv/util/path/_pathlib/__pycache__/via_os_path.cpython-36.pyc,, +virtualenv/util/path/_pathlib/via_os_path.py,sha256=wlnf0MIktEP01kG9z8iKOI86levXmkX7UX1LMLK7A-E,3701 +virtualenv/util/path/_permission.py,sha256=XpO2vGAk_92_biD4MEQcAQq2Zc8_rpm3M3n_hMUA1rw,745 +virtualenv/util/path/_sync.py,sha256=7oEmxJB5fc7NIsVgPJTJ4mpVPjNaNHAdhqlgIFSDfm0,2370 +virtualenv/util/six.py,sha256=_8KWXUWi3-AaFmz4LkdyNra-uNuf70vlxwjN7oeRo8g,1463 +virtualenv/util/subprocess/__init__.py,sha256=2H-ZQ74OKq60eSpxfRWD-8gVUydczkrKSexl18DzOXI,753 +virtualenv/util/subprocess/__pycache__/__init__.cpython-36.pyc,, +virtualenv/util/subprocess/__pycache__/_win_subprocess.cpython-36.pyc,, +virtualenv/util/subprocess/_win_subprocess.py,sha256=0-eJVcxBs1Fe76OVTQnUVLaLzB4tlJxEZ-vZECQ4xL4,5576 +virtualenv/util/zipapp.py,sha256=jtf4Vn7XBnjPs_B_ObIQv_x4pFlIlPKAWHYLFV59h6U,1054 +virtualenv/version.py,sha256=I6DdBe5W48XpWVyauQA1RLTrczVHXqmm5ykMSnETFE8,66 diff --git a/venv/Lib/site-packages/virtualenv-20.0.27.dist-info/WHEEL b/venv/Lib/site-packages/virtualenv-20.0.27.dist-info/WHEEL new file mode 100644 index 00000000..ef99c6cf --- /dev/null +++ b/venv/Lib/site-packages/virtualenv-20.0.27.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.34.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/Lib/site-packages/virtualenv-20.0.27.dist-info/entry_points.txt b/venv/Lib/site-packages/virtualenv-20.0.27.dist-info/entry_points.txt new file mode 100644 index 00000000..3effb4ba --- /dev/null +++ b/venv/Lib/site-packages/virtualenv-20.0.27.dist-info/entry_points.txt @@ -0,0 +1,32 @@ +[console_scripts] +virtualenv = virtualenv.__main__:run_with_catch + +[virtualenv.activate] +bash = virtualenv.activation.bash:BashActivator +batch = 
virtualenv.activation.batch:BatchActivator +cshell = virtualenv.activation.cshell:CShellActivator +fish = virtualenv.activation.fish:FishActivator +powershell = virtualenv.activation.powershell:PowerShellActivator +python = virtualenv.activation.python:PythonActivator +xonsh = virtualenv.activation.xonsh:XonshActivator + +[virtualenv.create] +cpython2-mac-framework = virtualenv.create.via_global_ref.builtin.cpython.mac_os:CPython2macOsFramework +cpython2-posix = virtualenv.create.via_global_ref.builtin.cpython.cpython2:CPython2Posix +cpython2-win = virtualenv.create.via_global_ref.builtin.cpython.cpython2:CPython2Windows +cpython3-mac-framework = virtualenv.create.via_global_ref.builtin.cpython.mac_os:CPython3macOsFramework +cpython3-posix = virtualenv.create.via_global_ref.builtin.cpython.cpython3:CPython3Posix +cpython3-win = virtualenv.create.via_global_ref.builtin.cpython.cpython3:CPython3Windows +pypy2-posix = virtualenv.create.via_global_ref.builtin.pypy.pypy2:PyPy2Posix +pypy2-win = virtualenv.create.via_global_ref.builtin.pypy.pypy2:Pypy2Windows +pypy3-posix = virtualenv.create.via_global_ref.builtin.pypy.pypy3:PyPy3Posix +pypy3-win = virtualenv.create.via_global_ref.builtin.pypy.pypy3:Pypy3Windows +venv = virtualenv.create.via_global_ref.venv:Venv + +[virtualenv.discovery] +builtin = virtualenv.discovery.builtin:Builtin + +[virtualenv.seed] +app-data = virtualenv.seed.embed.via_app_data.via_app_data:FromAppData +pip = virtualenv.seed.embed.pip_invoke:PipInvoke + diff --git a/venv/Lib/site-packages/virtualenv-20.0.27.dist-info/top_level.txt b/venv/Lib/site-packages/virtualenv-20.0.27.dist-info/top_level.txt new file mode 100644 index 00000000..66072c76 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv-20.0.27.dist-info/top_level.txt @@ -0,0 +1 @@ +virtualenv diff --git a/venv/Lib/site-packages/virtualenv-20.0.27.dist-info/zip-safe b/venv/Lib/site-packages/virtualenv-20.0.27.dist-info/zip-safe new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv-20.0.27.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/venv/Lib/site-packages/virtualenv/__init__.py b/venv/Lib/site-packages/virtualenv/__init__.py new file mode 100644 index 00000000..c72643f0 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/__init__.py @@ -0,0 +1,9 @@ +from __future__ import absolute_import, unicode_literals + +from .run import cli_run +from .version import __version__ + +__all__ = ( + "__version__", + "cli_run", +) diff --git a/venv/Lib/site-packages/virtualenv/__main__.py b/venv/Lib/site-packages/virtualenv/__main__.py new file mode 100644 index 00000000..16f12bff --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/__main__.py @@ -0,0 +1,78 @@ +from __future__ import absolute_import, print_function, unicode_literals + +import logging +import os +import sys +from datetime import datetime + + +def run(args=None, options=None): + start = datetime.now() + from virtualenv.util.error import ProcessCallFailed + from virtualenv.run import cli_run + + if args is None: + args = sys.argv[1:] + try: + session = cli_run(args, options) + logging.warning(LogSession(session, start)) + except ProcessCallFailed as exception: + print("subprocess call failed for {} with code {}".format(exception.cmd, exception.code)) + print(exception.out, file=sys.stdout, end="") + print(exception.err, file=sys.stderr, end="") + raise SystemExit(exception.code) + + +class LogSession(object): + def __init__(self, session, start): + self.session = session + self.start = start + + def __str__(self): 
+ from virtualenv.util.six import ensure_text + + spec = self.session.creator.interpreter.spec + elapsed = (datetime.now() - self.start).total_seconds() * 1000 + lines = [ + "created virtual environment {} in {:.0f}ms".format(spec, elapsed), + " creator {}".format(ensure_text(str(self.session.creator))), + ] + if self.session.seeder.enabled: + lines += ( + " seeder {}".format(ensure_text(str(self.session.seeder))), + " added seed packages: {}".format( + ", ".join( + sorted( + "==".join(i.stem.split("-")) + for i in self.session.creator.purelib.iterdir() + if i.suffix == ".dist-info" + ), + ), + ), + ) + if self.session.activators: + lines.append(" activators {}".format(",".join(i.__class__.__name__ for i in self.session.activators))) + return os.linesep.join(lines) + + +def run_with_catch(args=None): + from virtualenv.config.cli.parser import VirtualEnvOptions + + options = VirtualEnvOptions() + try: + run(args, options) + except (KeyboardInterrupt, SystemExit, Exception) as exception: + try: + if getattr(options, "with_traceback", False): + raise + else: + if not (isinstance(exception, SystemExit) and exception.code == 0): + logging.error("%s: %s", type(exception).__name__, exception) + code = exception.code if isinstance(exception, SystemExit) else 1 + sys.exit(code) + finally: + logging.shutdown() # force flush of log messages before the trace is printed + + +if __name__ == "__main__": # pragma: no cov + run_with_catch() # pragma: no cov diff --git a/venv/Lib/site-packages/virtualenv/activation/__init__.py b/venv/Lib/site-packages/virtualenv/activation/__init__.py new file mode 100644 index 00000000..fa2f0b4a --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/activation/__init__.py @@ -0,0 +1,19 @@ +from __future__ import absolute_import, unicode_literals + +from .bash import BashActivator +from .batch import BatchActivator +from .cshell import CShellActivator +from .fish import FishActivator +from .powershell import PowerShellActivator +from .python import PythonActivator +from .xonsh import XonshActivator + +__all__ = [ + "BashActivator", + "PowerShellActivator", + "XonshActivator", + "CShellActivator", + "PythonActivator", + "BatchActivator", + "FishActivator", +] diff --git a/venv/Lib/site-packages/virtualenv/activation/activator.py b/venv/Lib/site-packages/virtualenv/activation/activator.py new file mode 100644 index 00000000..587ac105 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/activation/activator.py @@ -0,0 +1,44 @@ +from __future__ import absolute_import, unicode_literals + +from abc import ABCMeta, abstractmethod + +from six import add_metaclass + + +@add_metaclass(ABCMeta) +class Activator(object): + """Generates an activate script for the virtual environment""" + + def __init__(self, options): + """Create a new activator generator. + + :param options: the parsed options as defined within :meth:`add_parser_arguments` + """ + self.flag_prompt = options.prompt + + @classmethod + def supports(cls, interpreter): + """Check if the activation script is supported in the given interpreter. + + :param interpreter: the interpreter we need to support + :return: ``True`` if supported, ``False`` otherwise + """ + return True + + @classmethod + def add_parser_arguments(cls, parser, interpreter): + """ + Add CLI arguments for this activation script. + + :param parser: the CLI parser + :param interpreter: the interpreter this virtual environment is based of + """ + + @abstractmethod + def generate(self, creator): + """Generate the activate script for the given creator. 
+ + :param creator: the creator (based of :class:`virtualenv.create.creator.Creator`) we used to create this \ + virtual environment + """ + raise NotImplementedError diff --git a/venv/Lib/site-packages/virtualenv/activation/bash/__init__.py b/venv/Lib/site-packages/virtualenv/activation/bash/__init__.py new file mode 100644 index 00000000..22c90c38 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/activation/bash/__init__.py @@ -0,0 +1,13 @@ +from __future__ import absolute_import, unicode_literals + +from virtualenv.util.path import Path + +from ..via_template import ViaTemplateActivator + + +class BashActivator(ViaTemplateActivator): + def templates(self): + yield Path("activate.sh") + + def as_name(self, template): + return template.stem diff --git a/venv/Lib/site-packages/virtualenv/activation/bash/activate.sh b/venv/Lib/site-packages/virtualenv/activation/bash/activate.sh new file mode 100644 index 00000000..19bf552b --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/activation/bash/activate.sh @@ -0,0 +1,84 @@ +# This file must be used with "source bin/activate" *from bash* +# you cannot run it directly + + +if [ "${BASH_SOURCE-}" = "$0" ]; then + echo "You must source this script: \$ source $0" >&2 + exit 33 +fi + +deactivate () { + unset -f pydoc >/dev/null 2>&1 + + # reset old environment variables + # ! [ -z ${VAR+_} ] returns true if VAR is declared at all + if ! [ -z "${_OLD_VIRTUAL_PATH:+_}" ] ; then + PATH="$_OLD_VIRTUAL_PATH" + export PATH + unset _OLD_VIRTUAL_PATH + fi + if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then + PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME" + export PYTHONHOME + unset _OLD_VIRTUAL_PYTHONHOME + fi + + # This should detect bash and zsh, which have a hash command that must + # be called to get it to forget past commands. Without forgetting + # past commands the $PATH changes we made may not be respected + if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then + hash -r 2>/dev/null + fi + + if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then + PS1="$_OLD_VIRTUAL_PS1" + export PS1 + unset _OLD_VIRTUAL_PS1 + fi + + unset VIRTUAL_ENV + if [ ! "${1-}" = "nondestructive" ] ; then + # Self destruct! + unset -f deactivate + fi +} + +# unset irrelevant variables +deactivate nondestructive + +VIRTUAL_ENV='__VIRTUAL_ENV__' +export VIRTUAL_ENV + +_OLD_VIRTUAL_PATH="$PATH" +PATH="$VIRTUAL_ENV/__BIN_NAME__:$PATH" +export PATH + +# unset PYTHONHOME if set +if ! [ -z "${PYTHONHOME+_}" ] ; then + _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME" + unset PYTHONHOME +fi + +if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then + _OLD_VIRTUAL_PS1="${PS1-}" + if [ "x__VIRTUAL_PROMPT__" != x ] ; then + PS1="__VIRTUAL_PROMPT__${PS1-}" + else + PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}" + fi + export PS1 +fi + +# Make sure to unalias pydoc if it's already there +alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true + +pydoc () { + python -m pydoc "$@" +} + +# This should detect bash and zsh, which have a hash command that must +# be called to get it to forget past commands. 
Without forgetting +# past commands the $PATH changes we made may not be respected +if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then + hash -r 2>/dev/null +fi diff --git a/venv/Lib/site-packages/virtualenv/activation/batch/__init__.py b/venv/Lib/site-packages/virtualenv/activation/batch/__init__.py new file mode 100644 index 00000000..4149712d --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/activation/batch/__init__.py @@ -0,0 +1,23 @@ +from __future__ import absolute_import, unicode_literals + +import os + +from virtualenv.util.path import Path + +from ..via_template import ViaTemplateActivator + + +class BatchActivator(ViaTemplateActivator): + @classmethod + def supports(cls, interpreter): + return interpreter.os == "nt" + + def templates(self): + yield Path("activate.bat") + yield Path("deactivate.bat") + yield Path("pydoc.bat") + + def instantiate_template(self, replacements, template, creator): + # ensure the text has all newlines as \r\n - required by batch + base = super(BatchActivator, self).instantiate_template(replacements, template, creator) + return base.replace(os.linesep, "\n").replace("\n", os.linesep) diff --git a/venv/Lib/site-packages/virtualenv/activation/batch/activate.bat b/venv/Lib/site-packages/virtualenv/activation/batch/activate.bat new file mode 100644 index 00000000..c0949ba8 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/activation/batch/activate.bat @@ -0,0 +1,40 @@ +@echo off + +set "VIRTUAL_ENV=__VIRTUAL_ENV__" + +if defined _OLD_VIRTUAL_PROMPT ( + set "PROMPT=%_OLD_VIRTUAL_PROMPT%" +) else ( + if not defined PROMPT ( + set "PROMPT=$P$G" + ) + if not defined VIRTUAL_ENV_DISABLE_PROMPT ( + set "_OLD_VIRTUAL_PROMPT=%PROMPT%" + ) +) +if not defined VIRTUAL_ENV_DISABLE_PROMPT ( + set "ENV_PROMPT=__VIRTUAL_PROMPT__" + if NOT DEFINED ENV_PROMPT ( + for %%d in ("%VIRTUAL_ENV%") do set "ENV_PROMPT=(%%~nxd) " + ) + ) + set "PROMPT=%ENV_PROMPT%%PROMPT%" +) + +REM Don't use () to avoid problems with them in %PATH% +if defined _OLD_VIRTUAL_PYTHONHOME goto ENDIFVHOME + set "_OLD_VIRTUAL_PYTHONHOME=%PYTHONHOME%" +:ENDIFVHOME + +set PYTHONHOME= + +REM if defined _OLD_VIRTUAL_PATH ( +if not defined _OLD_VIRTUAL_PATH goto ENDIFVPATH1 + set "PATH=%_OLD_VIRTUAL_PATH%" +:ENDIFVPATH1 +REM ) else ( +if defined _OLD_VIRTUAL_PATH goto ENDIFVPATH2 + set "_OLD_VIRTUAL_PATH=%PATH%" +:ENDIFVPATH2 + +set "PATH=%VIRTUAL_ENV%\__BIN_NAME__;%PATH%" diff --git a/venv/Lib/site-packages/virtualenv/activation/batch/deactivate.bat b/venv/Lib/site-packages/virtualenv/activation/batch/deactivate.bat new file mode 100644 index 00000000..7bbc5688 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/activation/batch/deactivate.bat @@ -0,0 +1,19 @@ +@echo off + +set VIRTUAL_ENV= + +REM Don't use () to avoid problems with them in %PATH% +if not defined _OLD_VIRTUAL_PROMPT goto ENDIFVPROMPT + set "PROMPT=%_OLD_VIRTUAL_PROMPT%" + set _OLD_VIRTUAL_PROMPT= +:ENDIFVPROMPT + +if not defined _OLD_VIRTUAL_PYTHONHOME goto ENDIFVHOME + set "PYTHONHOME=%_OLD_VIRTUAL_PYTHONHOME%" + set _OLD_VIRTUAL_PYTHONHOME= +:ENDIFVHOME + +if not defined _OLD_VIRTUAL_PATH goto ENDIFVPATH + set "PATH=%_OLD_VIRTUAL_PATH%" + set _OLD_VIRTUAL_PATH= +:ENDIFVPATH diff --git a/venv/Lib/site-packages/virtualenv/activation/batch/pydoc.bat b/venv/Lib/site-packages/virtualenv/activation/batch/pydoc.bat new file mode 100644 index 00000000..3d46a231 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/activation/batch/pydoc.bat @@ -0,0 +1 @@ +python.exe -m pydoc %* diff --git 
a/venv/Lib/site-packages/virtualenv/activation/cshell/__init__.py b/venv/Lib/site-packages/virtualenv/activation/cshell/__init__.py new file mode 100644 index 00000000..b25c602a --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/activation/cshell/__init__.py @@ -0,0 +1,14 @@ +from __future__ import absolute_import, unicode_literals + +from virtualenv.util.path import Path + +from ..via_template import ViaTemplateActivator + + +class CShellActivator(ViaTemplateActivator): + @classmethod + def supports(cls, interpreter): + return interpreter.os != "nt" + + def templates(self): + yield Path("activate.csh") diff --git a/venv/Lib/site-packages/virtualenv/activation/cshell/activate.csh b/venv/Lib/site-packages/virtualenv/activation/cshell/activate.csh new file mode 100644 index 00000000..72b2cf8e --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/activation/cshell/activate.csh @@ -0,0 +1,55 @@ +# This file must be used with "source bin/activate.csh" *from csh*. +# You cannot run it directly. +# Created by Davide Di Blasi . + +set newline='\ +' + +alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH:q" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT:q" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate && unalias pydoc' + +# Unset irrelevant variables. +deactivate nondestructive + +setenv VIRTUAL_ENV '__VIRTUAL_ENV__' + +set _OLD_VIRTUAL_PATH="$PATH:q" +setenv PATH "$VIRTUAL_ENV:q/__BIN_NAME__:$PATH:q" + + + +if ('__VIRTUAL_PROMPT__' != "") then + set env_name = '__VIRTUAL_PROMPT__' +else + set env_name = '('"$VIRTUAL_ENV:t:q"') ' +endif + +if ( $?VIRTUAL_ENV_DISABLE_PROMPT ) then + if ( $VIRTUAL_ENV_DISABLE_PROMPT == "" ) then + set do_prompt = "1" + else + set do_prompt = "0" + endif +else + set do_prompt = "1" +endif + +if ( $do_prompt == "1" ) then + # Could be in a non-interactive environment, + # in which case, $prompt is undefined and we wouldn't + # care about the prompt anyway. + if ( $?prompt ) then + set _OLD_VIRTUAL_PROMPT="$prompt:q" + if ( "$prompt:q" =~ *"$newline:q"* ) then + : + else + set prompt = "$env_name:q$prompt:q" + endif + endif +endif + +unset env_name +unset do_prompt + +alias pydoc python -m pydoc + +rehash diff --git a/venv/Lib/site-packages/virtualenv/activation/fish/__init__.py b/venv/Lib/site-packages/virtualenv/activation/fish/__init__.py new file mode 100644 index 00000000..8d0e19c2 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/activation/fish/__init__.py @@ -0,0 +1,10 @@ +from __future__ import absolute_import, unicode_literals + +from virtualenv.util.path import Path + +from ..via_template import ViaTemplateActivator + + +class FishActivator(ViaTemplateActivator): + def templates(self): + yield Path("activate.fish") diff --git a/venv/Lib/site-packages/virtualenv/activation/fish/activate.fish b/venv/Lib/site-packages/virtualenv/activation/fish/activate.fish new file mode 100644 index 00000000..faa26227 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/activation/fish/activate.fish @@ -0,0 +1,100 @@ +# This file must be used using `source bin/activate.fish` *within a running fish ( http://fishshell.com ) session*. +# Do not run it directly. 
+ +function _bashify_path -d "Converts a fish path to something bash can recognize" + set fishy_path $argv + set bashy_path $fishy_path[1] + for path_part in $fishy_path[2..-1] + set bashy_path "$bashy_path:$path_part" + end + echo $bashy_path +end + +function _fishify_path -d "Converts a bash path to something fish can recognize" + echo $argv | tr ':' '\n' +end + +function deactivate -d 'Exit virtualenv mode and return to the normal environment.' + # reset old environment variables + if test -n "$_OLD_VIRTUAL_PATH" + # https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling + if test (echo $FISH_VERSION | head -c 1) -lt 3 + set -gx PATH (_fishify_path "$_OLD_VIRTUAL_PATH") + else + set -gx PATH "$_OLD_VIRTUAL_PATH" + end + set -e _OLD_VIRTUAL_PATH + end + + if test -n "$_OLD_VIRTUAL_PYTHONHOME" + set -gx PYTHONHOME "$_OLD_VIRTUAL_PYTHONHOME" + set -e _OLD_VIRTUAL_PYTHONHOME + end + + if test -n "$_OLD_FISH_PROMPT_OVERRIDE" + and functions -q _old_fish_prompt + # Set an empty local `$fish_function_path` to allow the removal of `fish_prompt` using `functions -e`. + set -l fish_function_path + + # Erase virtualenv's `fish_prompt` and restore the original. + functions -e fish_prompt + functions -c _old_fish_prompt fish_prompt + functions -e _old_fish_prompt + set -e _OLD_FISH_PROMPT_OVERRIDE + end + + set -e VIRTUAL_ENV + + if test "$argv[1]" != 'nondestructive' + # Self-destruct! + functions -e pydoc + functions -e deactivate + functions -e _bashify_path + functions -e _fishify_path + end +end + +# Unset irrelevant variables. +deactivate nondestructive + +set -gx VIRTUAL_ENV '__VIRTUAL_ENV__' + +# https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling +if test (echo $FISH_VERSION | head -c 1) -lt 3 + set -gx _OLD_VIRTUAL_PATH (_bashify_path $PATH) +else + set -gx _OLD_VIRTUAL_PATH "$PATH" +end +set -gx PATH "$VIRTUAL_ENV"'/__BIN_NAME__' $PATH + +# Unset `$PYTHONHOME` if set. +if set -q PYTHONHOME + set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME + set -e PYTHONHOME +end + +function pydoc + python -m pydoc $argv +end + +if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" + # Copy the current `fish_prompt` function as `_old_fish_prompt`. + functions -c fish_prompt _old_fish_prompt + + function fish_prompt + # Run the user's prompt first; it might depend on (pipe)status. + set -l prompt (_old_fish_prompt) + + # Prompt override provided? + # If not, just prepend the environment name. 
+ if test -n '__VIRTUAL_PROMPT__' + printf '%s%s' '__VIRTUAL_PROMPT__' (set_color normal) + else + printf '%s(%s) ' (set_color normal) (basename "$VIRTUAL_ENV") + end + + string join -- \n $prompt # handle multi-line prompts + end + + set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" +end diff --git a/venv/Lib/site-packages/virtualenv/activation/powershell/__init__.py b/venv/Lib/site-packages/virtualenv/activation/powershell/__init__.py new file mode 100644 index 00000000..4fadc63b --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/activation/powershell/__init__.py @@ -0,0 +1,10 @@ +from __future__ import absolute_import, unicode_literals + +from virtualenv.util.path import Path + +from ..via_template import ViaTemplateActivator + + +class PowerShellActivator(ViaTemplateActivator): + def templates(self): + yield Path("activate.ps1") diff --git a/venv/Lib/site-packages/virtualenv/activation/powershell/activate.ps1 b/venv/Lib/site-packages/virtualenv/activation/powershell/activate.ps1 new file mode 100644 index 00000000..a370a63f --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/activation/powershell/activate.ps1 @@ -0,0 +1,60 @@ +$script:THIS_PATH = $myinvocation.mycommand.path +$script:BASE_DIR = Split-Path (Resolve-Path "$THIS_PATH/..") -Parent + +function global:deactivate([switch] $NonDestructive) { + if (Test-Path variable:_OLD_VIRTUAL_PATH) { + $env:PATH = $variable:_OLD_VIRTUAL_PATH + Remove-Variable "_OLD_VIRTUAL_PATH" -Scope global + } + + if (Test-Path function:_old_virtual_prompt) { + $function:prompt = $function:_old_virtual_prompt + Remove-Item function:\_old_virtual_prompt + } + + if ($env:VIRTUAL_ENV) { + Remove-Item env:VIRTUAL_ENV -ErrorAction SilentlyContinue + } + + if (!$NonDestructive) { + # Self destruct! + Remove-Item function:deactivate + Remove-Item function:pydoc + } +} + +function global:pydoc { + python -m pydoc $args +} + +# unset irrelevant variables +deactivate -nondestructive + +$VIRTUAL_ENV = $BASE_DIR +$env:VIRTUAL_ENV = $VIRTUAL_ENV + +New-Variable -Scope global -Name _OLD_VIRTUAL_PATH -Value $env:PATH + +$env:PATH = "$env:VIRTUAL_ENV/__BIN_NAME____PATH_SEP__" + $env:PATH +if (!$env:VIRTUAL_ENV_DISABLE_PROMPT) { + function global:_old_virtual_prompt { + "" + } + $function:_old_virtual_prompt = $function:prompt + + if ("__VIRTUAL_PROMPT__" -ne "") { + function global:prompt { + # Add the custom prefix to the existing prompt + $previous_prompt_value = & $function:_old_virtual_prompt + ("__VIRTUAL_PROMPT__" + $previous_prompt_value) + } + } + else { + function global:prompt { + # Add a prefix to the current prompt, but don't discard it. 
+ $previous_prompt_value = & $function:_old_virtual_prompt + $new_prompt_value = "($( Split-Path $env:VIRTUAL_ENV -Leaf )) " + ($new_prompt_value + $previous_prompt_value) + } + } +} diff --git a/venv/Lib/site-packages/virtualenv/activation/python/__init__.py b/venv/Lib/site-packages/virtualenv/activation/python/__init__.py new file mode 100644 index 00000000..9e579124 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/activation/python/__init__.py @@ -0,0 +1,35 @@ +from __future__ import absolute_import, unicode_literals + +import os +import sys +from collections import OrderedDict + +from virtualenv.util.path import Path +from virtualenv.util.six import ensure_text + +from ..via_template import ViaTemplateActivator + + +class PythonActivator(ViaTemplateActivator): + def templates(self): + yield Path("activate_this.py") + + def replacements(self, creator, dest_folder): + replacements = super(PythonActivator, self).replacements(creator, dest_folder) + lib_folders = OrderedDict((os.path.relpath(str(i), str(dest_folder)), None) for i in creator.libs) + win_py2 = creator.interpreter.platform == "win32" and creator.interpreter.version_info.major == 2 + replacements.update( + { + "__LIB_FOLDERS__": ensure_text(os.pathsep.join(lib_folders.keys())), + "__DECODE_PATH__": ("yes" if win_py2 else ""), + }, + ) + return replacements + + @staticmethod + def _repr_unicode(creator, value): + py2 = creator.interpreter.version_info.major == 2 + if py2: # on Python 2 we need to encode this into explicit utf-8, py3 supports unicode literals + start = 2 if sys.version_info[0] == 3 else 1 + value = ensure_text(repr(value.encode("utf-8"))[start:-1]) + return value diff --git a/venv/Lib/site-packages/virtualenv/activation/python/activate_this.py b/venv/Lib/site-packages/virtualenv/activation/python/activate_this.py new file mode 100644 index 00000000..29debe3e --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/activation/python/activate_this.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +"""Activate virtualenv for current interpreter: + +Use exec(open(this_file).read(), {'__file__': this_file}). + +This can be used when you must use an existing Python interpreter, not the virtualenv bin/python. 
+""" +import os +import site +import sys + +try: + abs_file = os.path.abspath(__file__) +except NameError: + raise AssertionError("You must use exec(open(this_file).read(), {'__file__': this_file}))") + +bin_dir = os.path.dirname(abs_file) +base = bin_dir[: -len("__BIN_NAME__") - 1] # strip away the bin part from the __file__, plus the path separator + +# prepend bin to PATH (this file is inside the bin directory) +os.environ["PATH"] = os.pathsep.join([bin_dir] + os.environ.get("PATH", "").split(os.pathsep)) +os.environ["VIRTUAL_ENV"] = base # virtual env is right above bin directory + +# add the virtual environments libraries to the host python import mechanism +prev_length = len(sys.path) +for lib in "__LIB_FOLDERS__".split(os.pathsep): + path = os.path.realpath(os.path.join(bin_dir, lib)) + site.addsitedir(path.decode("utf-8") if "__DECODE_PATH__" else path) +sys.path[:] = sys.path[prev_length:] + sys.path[0:prev_length] + +sys.real_prefix = sys.prefix +sys.prefix = base diff --git a/venv/Lib/site-packages/virtualenv/activation/via_template.py b/venv/Lib/site-packages/virtualenv/activation/via_template.py new file mode 100644 index 00000000..7a9d3c8e --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/activation/via_template.py @@ -0,0 +1,65 @@ +from __future__ import absolute_import, unicode_literals + +import os +import sys +from abc import ABCMeta, abstractmethod + +from six import add_metaclass + +from virtualenv.util.six import ensure_text + +from .activator import Activator + +if sys.version_info >= (3, 7): + from importlib.resources import read_text +else: + from importlib_resources import read_text + + +@add_metaclass(ABCMeta) +class ViaTemplateActivator(Activator): + @abstractmethod + def templates(self): + raise NotImplementedError + + def generate(self, creator): + dest_folder = creator.bin_dir + replacements = self.replacements(creator, dest_folder) + generated = self._generate(replacements, self.templates(), dest_folder, creator) + if self.flag_prompt is not None: + creator.pyenv_cfg["prompt"] = self.flag_prompt + return generated + + def replacements(self, creator, dest_folder): + return { + "__VIRTUAL_PROMPT__": "" if self.flag_prompt is None else self.flag_prompt, + "__VIRTUAL_ENV__": ensure_text(str(creator.dest)), + "__VIRTUAL_NAME__": creator.env_name, + "__BIN_NAME__": ensure_text(str(creator.bin_dir.relative_to(creator.dest))), + "__PATH_SEP__": ensure_text(os.pathsep), + } + + def _generate(self, replacements, templates, to_folder, creator): + generated = [] + for template in templates: + text = self.instantiate_template(replacements, template, creator) + dest = to_folder / self.as_name(template) + dest.write_text(text, encoding="utf-8") + generated.append(dest) + return generated + + def as_name(self, template): + return template.name + + def instantiate_template(self, replacements, template, creator): + # read text and do replacements + text = read_text(self.__module__, str(template), encoding="utf-8", errors="strict") + for key, value in replacements.items(): + value = self._repr_unicode(creator, value) + text = text.replace(key, value) + return text + + @staticmethod + def _repr_unicode(creator, value): + # by default we just let it be unicode + return value diff --git a/venv/Lib/site-packages/virtualenv/activation/xonsh/__init__.py b/venv/Lib/site-packages/virtualenv/activation/xonsh/__init__.py new file mode 100644 index 00000000..d92411c2 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/activation/xonsh/__init__.py @@ -0,0 +1,14 @@ +from __future__ 
import absolute_import, unicode_literals + +from virtualenv.util.path import Path + +from ..via_template import ViaTemplateActivator + + +class XonshActivator(ViaTemplateActivator): + def templates(self): + yield Path("activate.xsh") + + @classmethod + def supports(cls, interpreter): + return interpreter.version_info >= (3, 5) diff --git a/venv/Lib/site-packages/virtualenv/activation/xonsh/activate.xsh b/venv/Lib/site-packages/virtualenv/activation/xonsh/activate.xsh new file mode 100644 index 00000000..c77ea627 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/activation/xonsh/activate.xsh @@ -0,0 +1,46 @@ +"""Xonsh activate script for virtualenv""" +from xonsh.tools import get_sep as _get_sep + +def _deactivate(args): + if "pydoc" in aliases: + del aliases["pydoc"] + + if ${...}.get("_OLD_VIRTUAL_PATH", ""): + $PATH = $_OLD_VIRTUAL_PATH + del $_OLD_VIRTUAL_PATH + + if ${...}.get("_OLD_VIRTUAL_PYTHONHOME", ""): + $PYTHONHOME = $_OLD_VIRTUAL_PYTHONHOME + del $_OLD_VIRTUAL_PYTHONHOME + + if "VIRTUAL_ENV" in ${...}: + del $VIRTUAL_ENV + + if "VIRTUAL_ENV_PROMPT" in ${...}: + del $VIRTUAL_ENV_PROMPT + + if "nondestructive" not in args: + # Self destruct! + del aliases["deactivate"] + + +# unset irrelevant variables +_deactivate(["nondestructive"]) +aliases["deactivate"] = _deactivate + +$VIRTUAL_ENV = r"__VIRTUAL_ENV__" + +$_OLD_VIRTUAL_PATH = $PATH +$PATH = $PATH[:] +$PATH.add($VIRTUAL_ENV + _get_sep() + "__BIN_NAME__", front=True, replace=True) + +if ${...}.get("PYTHONHOME", ""): + # unset PYTHONHOME if set + $_OLD_VIRTUAL_PYTHONHOME = $PYTHONHOME + del $PYTHONHOME + +$VIRTUAL_ENV_PROMPT = "__VIRTUAL_PROMPT__" +if not $VIRTUAL_ENV_PROMPT: + del $VIRTUAL_ENV_PROMPT + +aliases["pydoc"] = ["python", "-m", "pydoc"] diff --git a/venv/Lib/site-packages/virtualenv/app_data/__init__.py b/venv/Lib/site-packages/virtualenv/app_data/__init__.py new file mode 100644 index 00000000..1d85745a --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/app_data/__init__.py @@ -0,0 +1,62 @@ +""" +Application data stored by virtualenv. 
+""" +from __future__ import absolute_import, unicode_literals + +import logging +import os +from argparse import Action, ArgumentError + +from appdirs import user_data_dir + +from .na import AppDataDisabled +from .via_disk_folder import AppDataDiskFolder +from .via_tempdir import TempAppData + + +class AppDataAction(Action): + def __call__(self, parser, namespace, values, option_string=None): + folder = self._check_folder(values) + if folder is None: + raise ArgumentError("app data path {} is not valid".format(values)) + setattr(namespace, self.dest, AppDataDiskFolder(folder)) + + @staticmethod + def _check_folder(folder): + folder = os.path.abspath(folder) + if not os.path.exists(folder): + try: + os.makedirs(folder) + logging.debug("created app data folder %s", folder) + except OSError as exception: + logging.info("could not create app data folder %s due to %r", folder, exception) + return None + write_enabled = os.access(folder, os.W_OK) + if write_enabled: + return folder + logging.debug("app data folder %s has no write access", folder) + return None + + @staticmethod + def default(): + for folder in AppDataAction._app_data_candidates(): + folder = AppDataAction._check_folder(folder) + if folder is not None: + return AppDataDiskFolder(folder) + return AppDataDisabled() + + @staticmethod + def _app_data_candidates(): + key = str("VIRTUALENV_OVERRIDE_APP_DATA") + if key in os.environ: + yield os.environ[key] + else: + yield user_data_dir(appname="virtualenv", appauthor="pypa") + + +__all__ = ( + "AppDataDiskFolder", + "TempAppData", + "AppDataAction", + "AppDataDisabled", +) diff --git a/venv/Lib/site-packages/virtualenv/app_data/base.py b/venv/Lib/site-packages/virtualenv/app_data/base.py new file mode 100644 index 00000000..d0da0fc1 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/app_data/base.py @@ -0,0 +1,91 @@ +""" +Application data stored by virtualenv. 
+""" +from __future__ import absolute_import, unicode_literals + +from abc import ABCMeta, abstractmethod +from contextlib import contextmanager + +import six + +from virtualenv.info import IS_ZIPAPP + + +@six.add_metaclass(ABCMeta) +class AppData(object): + """Abstract storage interface for the virtualenv application""" + + @abstractmethod + def close(self): + """called before virtualenv exits""" + + @abstractmethod + def reset(self): + """called when the user passes in the reset app data""" + + @abstractmethod + def py_info(self, path): + raise NotImplementedError + + @abstractmethod + def py_info_clear(self): + raise NotImplementedError + + @abstractmethod + def embed_update_log(self, distribution, for_py_version): + raise NotImplementedError + + @property + def house(self): + raise NotImplementedError + + @property + def transient(self): + raise NotImplementedError + + @abstractmethod + def wheel_image(self, for_py_version, name): + raise NotImplementedError + + @contextmanager + def ensure_extracted(self, path, to_folder=None): + """Some paths might be within the zipapp, unzip these to a path on the disk""" + if IS_ZIPAPP: + with self.extract(path, to_folder) as result: + yield result + else: + yield path + + @abstractmethod + @contextmanager + def extract(self, path, to_folder): + raise NotImplementedError + + @abstractmethod + @contextmanager + def locked(self, path): + raise NotImplementedError + + +@six.add_metaclass(ABCMeta) +class ContentStore(object): + @abstractmethod + def exists(self): + raise NotImplementedError + + @abstractmethod + def read(self): + raise NotImplementedError + + @abstractmethod + def write(self, content): + raise NotImplementedError + + @abstractmethod + def remove(self): + raise NotImplementedError + + @abstractmethod + @contextmanager + def locked(self): + pass diff --git a/venv/Lib/site-packages/virtualenv/app_data/na.py b/venv/Lib/site-packages/virtualenv/app_data/na.py new file mode 100644 index 00000000..937aa9a4 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/app_data/na.py @@ -0,0 +1,67 @@ +from __future__ import absolute_import, unicode_literals + +from contextlib import contextmanager + +from .base import AppData, ContentStore + + +class AppDataDisabled(AppData): + """No application cache available (most likely as we don't have write permissions)""" + + def __init__(self): + pass + + error = RuntimeError("no app data folder available, probably no write access to the folder") + + def close(self): + """do nothing""" + + def reset(self): + """do nothing""" + + def py_info(self, path): + return ContentStoreNA() + + def embed_update_log(self, distribution, for_py_version): + return ContentStoreNA() + + def extract(self, path, to_folder): + raise self.error + + @contextmanager + def locked(self, path): + """do nothing""" + yield + + @property + def house(self): + raise self.error + + def wheel_image(self, for_py_version, name): + raise self.error + + @property + def transient(self): + return True + + def py_info_clear(self): + """""" + + +class ContentStoreNA(ContentStore): + def exists(self): + return False + + def read(self): + """""" + return None + + def write(self, content): + """""" + + def remove(self): + """""" + + @contextmanager + def locked(self): + yield diff --git a/venv/Lib/site-packages/virtualenv/app_data/via_disk_folder.py b/venv/Lib/site-packages/virtualenv/app_data/via_disk_folder.py new file mode 100644 index 00000000..6b12ef8b --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/app_data/via_disk_folder.py @@ -0,0 +1,172 @@ 
+# -*- coding: utf-8 -*- +""" +A rough layout of the current storage goes as: + +virtualenv-app-data +├── py - +│   └── *.json/lock +├── wheel +│   ├── house +│ │ └── *.whl +│ └── -> 3.9 +│ ├── img- +│ │ └── image +│ │ └── -> CopyPipInstall / SymlinkPipInstall +│ │ └── -> pip-20.1.1-py2.py3-none-any +│ └── embed +│ └── 1 +│ └── *.json -> for every distribution contains data about newer embed versions and releases +└─── unzip + └── + ├── py_info.py + ├── debug.py + └── _virtualenv.py +""" +from __future__ import absolute_import, unicode_literals + +import json +import logging +from abc import ABCMeta +from contextlib import contextmanager +from hashlib import sha256 + +import six + +from virtualenv.util.lock import ReentrantFileLock +from virtualenv.util.path import safe_delete +from virtualenv.util.six import ensure_text +from virtualenv.util.zipapp import extract +from virtualenv.version import __version__ + +from .base import AppData, ContentStore + + +class AppDataDiskFolder(AppData): + """ + Store the application data on the disk within a folder layout. + """ + + def __init__(self, folder): + self.lock = ReentrantFileLock(folder) + + def __repr__(self): + return "{}".format(self.lock.path) + + @property + def transient(self): + return False + + def reset(self): + logging.debug("reset app data folder %s", self.lock.path) + safe_delete(self.lock.path) + + def close(self): + """do nothing""" + + @contextmanager + def locked(self, path): + path_lock = self.lock / path + with path_lock: + yield path_lock.path + + @contextmanager + def extract(self, path, to_folder): + if to_folder is not None: + root = ReentrantFileLock(to_folder()) + else: + root = self.lock / "unzip" / __version__ + with root.lock_for_key(path.name): + dest = root.path / path.name + if not dest.exists(): + extract(path, dest) + yield dest + + @property + def py_info_at(self): + return self.lock / "py_info" / "1" + + def py_info(self, path): + return PyInfoStoreDisk(self.py_info_at, path) + + def py_info_clear(self): + """""" + py_info_folder = self.py_info_at + with py_info_folder: + for filename in py_info_folder.path.iterdir(): + if filename.suffix == ".json": + with py_info_folder.lock_for_key(filename.stem): + if filename.exists(): + filename.unlink() + + def embed_update_log(self, distribution, for_py_version): + return EmbedDistributionUpdateStoreDisk(self.lock / "wheel" / for_py_version / "embed" / "1", distribution) + + @property + def house(self): + path = self.lock.path / "wheel" / "house" + path.mkdir(parents=True, exist_ok=True) + return path + + def wheel_image(self, for_py_version, name): + return self.lock.path / "wheel" / for_py_version / "image" / "1" / name + + +@six.add_metaclass(ABCMeta) +class JSONStoreDisk(ContentStore): + def __init__(self, in_folder, key, msg, msg_args): + self.in_folder = in_folder + self.key = key + self.msg = msg + self.msg_args = msg_args + (self.file,) + + @property + def file(self): + return self.in_folder.path / "{}.json".format(self.key) + + def exists(self): + return self.file.exists() + + def read(self): + data, bad_format = None, False + try: + data = json.loads(self.file.read_text()) + logging.debug("got {} from %s".format(self.msg), *self.msg_args) + return data + except ValueError: + bad_format = True + except Exception: # noqa + pass + if bad_format: + self.remove() + return None + + def remove(self): + self.file.unlink() + logging.debug("removed {} at %s".format(self.msg), *self.msg_args) + + @contextmanager + def locked(self): + with 
self.in_folder.lock_for_key(self.key): + yield + + def write(self, content): + folder = self.file.parent + try: + folder.mkdir(parents=True, exist_ok=True) + except OSError: + pass + self.file.write_text(ensure_text(json.dumps(content, sort_keys=True, indent=2))) + logging.debug("wrote {} at %s".format(self.msg), *self.msg_args) + + +class PyInfoStoreDisk(JSONStoreDisk): + def __init__(self, in_folder, path): + key = sha256(str(path).encode("utf-8") if six.PY3 else str(path)).hexdigest() + super(PyInfoStoreDisk, self).__init__(in_folder, key, "python info of %s", (path,)) + + +class EmbedDistributionUpdateStoreDisk(JSONStoreDisk): + def __init__(self, in_folder, distribution): + super(EmbedDistributionUpdateStoreDisk, self).__init__( + in_folder, distribution, "embed update of distribution %s", (distribution,), + ) diff --git a/venv/Lib/site-packages/virtualenv/app_data/via_tempdir.py b/venv/Lib/site-packages/virtualenv/app_data/via_tempdir.py new file mode 100644 index 00000000..e8b387c5 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/app_data/via_tempdir.py @@ -0,0 +1,28 @@ +from __future__ import absolute_import, unicode_literals + +import logging +from tempfile import mkdtemp + +from virtualenv.util.path import safe_delete + +from .via_disk_folder import AppDataDiskFolder + + +class TempAppData(AppDataDiskFolder): + def __init__(self): + super(TempAppData, self).__init__(folder=mkdtemp()) + logging.debug("created temporary app data folder %s", self.lock.path) + + def reset(self): + """this is a temporary folder, is already empty to start with""" + + def close(self): + logging.debug("remove temporary app data folder %s", self.lock.path) + safe_delete(self.lock.path) + + def embed_update_log(self, distribution, for_py_version): + return None + + @property + def transient(self): + return True diff --git a/venv/Lib/site-packages/virtualenv/config/__init__.py b/venv/Lib/site-packages/virtualenv/config/__init__.py new file mode 100644 index 00000000..01e6d4f4 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/config/__init__.py @@ -0,0 +1 @@ +from __future__ import absolute_import, unicode_literals diff --git a/venv/Lib/site-packages/virtualenv/config/cli/__init__.py b/venv/Lib/site-packages/virtualenv/config/cli/__init__.py new file mode 100644 index 00000000..01e6d4f4 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/config/cli/__init__.py @@ -0,0 +1 @@ +from __future__ import absolute_import, unicode_literals diff --git a/venv/Lib/site-packages/virtualenv/config/cli/parser.py b/venv/Lib/site-packages/virtualenv/config/cli/parser.py new file mode 100644 index 00000000..eb4db30a --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/config/cli/parser.py @@ -0,0 +1,120 @@ +from __future__ import absolute_import, unicode_literals + +from argparse import SUPPRESS, ArgumentDefaultsHelpFormatter, ArgumentParser, Namespace +from collections import OrderedDict + +from virtualenv.config.convert import get_type + +from ..env_var import get_env_var +from ..ini import IniConfig + + +class VirtualEnvOptions(Namespace): + def __init__(self, **kwargs): + super(VirtualEnvOptions, self).__init__(**kwargs) + self._src = None + self._sources = {} + + def set_src(self, key, value, src): + setattr(self, key, value) + if src.startswith("env var"): + src = "env var" + self._sources[key] = src + + def __setattr__(self, key, value): + if getattr(self, "_src", None) is not None: + self._sources[key] = self._src + super(VirtualEnvOptions, self).__setattr__(key, value) + + def get_source(self, key): + 
return self._sources.get(key) + + @property + def verbosity(self): + if not hasattr(self, "verbose") and not hasattr(self, "quiet"): + return None + return max(self.verbose - self.quiet, 0) + + def __repr__(self): + return "{}({})".format( + type(self).__name__, + ", ".join("{}={}".format(k, v) for k, v in vars(self).items() if not k.startswith("_")), + ) + + +class VirtualEnvConfigParser(ArgumentParser): + """ + Custom option parser which updates its defaults by checking the configuration files and environmental variables + """ + + def __init__(self, options=None, *args, **kwargs): + self.file_config = IniConfig() + self.epilog_list = [] + kwargs["epilog"] = self.file_config.epilog + kwargs["add_help"] = False + kwargs["formatter_class"] = HelpFormatter + kwargs["prog"] = "virtualenv" + super(VirtualEnvConfigParser, self).__init__(*args, **kwargs) + self._fixed = set() + if options is not None and not isinstance(options, VirtualEnvOptions): + raise TypeError("options must be of type VirtualEnvOptions") + self.options = VirtualEnvOptions() if options is None else options + self._interpreter = None + self._app_data = None + + def _fix_defaults(self): + for action in self._actions: + action_id = id(action) + if action_id not in self._fixed: + self._fix_default(action) + self._fixed.add(action_id) + + def _fix_default(self, action): + if hasattr(action, "default") and hasattr(action, "dest") and action.default != SUPPRESS: + as_type = get_type(action) + names = OrderedDict((i.lstrip("-").replace("-", "_"), None) for i in action.option_strings) + outcome = None + for name in names: + outcome = get_env_var(name, as_type) + if outcome is not None: + break + if outcome is None and self.file_config: + for name in names: + outcome = self.file_config.get(name, as_type) + if outcome is not None: + break + if outcome is not None: + action.default, action.default_source = outcome + else: + outcome = action.default, "default" + self.options.set_src(action.dest, *outcome) + + def enable_help(self): + self._fix_defaults() + self.add_argument("-h", "--help", action="help", default=SUPPRESS, help="show this help message and exit") + + def parse_known_args(self, args=None, namespace=None): + if namespace is None: + namespace = self.options + elif namespace is not self.options: + raise ValueError("can only pass in parser.options") + self._fix_defaults() + self.options._src = "cli" + try: + return super(VirtualEnvConfigParser, self).parse_known_args(args, namespace=namespace) + finally: + self.options._src = None + + +class HelpFormatter(ArgumentDefaultsHelpFormatter): + def __init__(self, prog): + super(HelpFormatter, self).__init__(prog, max_help_position=32, width=240) + + def _get_help_string(self, action): + # noinspection PyProtectedMember + text = super(HelpFormatter, self)._get_help_string(action) + if hasattr(action, "default_source"): + default = " (default: %(default)s)" + if text.endswith(default): + text = "{} (default: %(default)s -> from %(default_source)s)".format(text[: -len(default)]) + return text diff --git a/venv/Lib/site-packages/virtualenv/config/convert.py b/venv/Lib/site-packages/virtualenv/config/convert.py new file mode 100644 index 00000000..27821fc0 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/config/convert.py @@ -0,0 +1,81 @@ +from __future__ import absolute_import, unicode_literals + +import logging +import os + + +class TypeData(object): + def __init__(self, default_type, as_type): + self.default_type = default_type + self.as_type = as_type + + def __repr__(self): + 
return "{}(base={}, as={})".format(self.__class__.__name__, self.default_type, self.as_type) + + def convert(self, value): + return self.default_type(value) + + +class BoolType(TypeData): + BOOLEAN_STATES = { + "1": True, + "yes": True, + "true": True, + "on": True, + "0": False, + "no": False, + "false": False, + "off": False, + } + + def convert(self, value): + if value.lower() not in self.BOOLEAN_STATES: + raise ValueError("Not a boolean: %s" % value) + return self.BOOLEAN_STATES[value.lower()] + + +class NoneType(TypeData): + def convert(self, value): + if not value: + return None + return str(value) + + +class ListType(TypeData): + def _validate(self): + """""" + + def convert(self, value, flatten=True): + if isinstance(value, (str, bytes)): + value = filter(None, [x.strip() for x in value.splitlines()]) + values = list(value) + result = [] + for value in values: + sub_values = value.split(os.pathsep) + result.extend(sub_values) + converted = [self.as_type(i) for i in result] + return converted + + +def convert(value, as_type, source): + """Convert the value as a given type where the value comes from the given source""" + try: + return as_type.convert(value) + except Exception as exception: + logging.warning("%s failed to convert %r as %r because %r", source, value, as_type, exception) + raise + + +_CONVERT = {bool: BoolType, type(None): NoneType, list: ListType} + + +def get_type(action): + default_type = type(action.default) + as_type = default_type if action.type is None else action.type + return _CONVERT.get(default_type, TypeData)(default_type, as_type) + + +__all__ = ( + "convert", + "get_type", +) diff --git a/venv/Lib/site-packages/virtualenv/config/env_var.py b/venv/Lib/site-packages/virtualenv/config/env_var.py new file mode 100644 index 00000000..259399a7 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/config/env_var.py @@ -0,0 +1,29 @@ +from __future__ import absolute_import, unicode_literals + +import os + +from virtualenv.util.six import ensure_str, ensure_text + +from .convert import convert + + +def get_env_var(key, as_type): + """Get the environment variable option. 
+
+    :param key: the config key requested
+    :param as_type: the type we would like to convert it to
+    :return: the converted value and its source, or ``None`` when the env var is not set or fails to convert
+    """
+    environ_key = ensure_str("VIRTUALENV_{}".format(key.upper()))
+    if os.environ.get(environ_key):
+        value = os.environ[environ_key]
+        # noinspection PyBroadException
+        try:
+            source = "env var {}".format(ensure_text(environ_key))
+            as_type = convert(value, as_type, source)
+            return as_type, source
+        except Exception:  # note the converter already logs a warning when failures happen
+            pass
+
+
+__all__ = ("get_env_var",)
diff --git a/venv/Lib/site-packages/virtualenv/config/ini.py b/venv/Lib/site-packages/virtualenv/config/ini.py
new file mode 100644
index 00000000..c8789475
--- /dev/null
+++ b/venv/Lib/site-packages/virtualenv/config/ini.py
@@ -0,0 +1,83 @@
+from __future__ import absolute_import, unicode_literals
+
+import logging
+import os
+
+from appdirs import user_config_dir
+
+from virtualenv.info import PY3
+from virtualenv.util import ConfigParser
+from virtualenv.util.path import Path
+from virtualenv.util.six import ensure_str
+
+from .convert import convert
+
+
+class IniConfig(object):
+    VIRTUALENV_CONFIG_FILE_ENV_VAR = ensure_str("VIRTUALENV_CONFIG_FILE")
+    STATE = {None: "failed to parse", True: "active", False: "missing"}
+
+    section = "virtualenv"
+
+    def __init__(self):
+        config_file = os.environ.get(self.VIRTUALENV_CONFIG_FILE_ENV_VAR, None)
+        self.is_env_var = config_file is not None
+        config_file = (
+            Path(config_file)
+            if config_file is not None
+            else Path(user_config_dir(appname="virtualenv", appauthor="pypa")) / "virtualenv.ini"
+        )
+        self.config_file = config_file
+        self._cache = {}
+
+        exception = None
+        self.has_config_file = None
+        try:
+            self.has_config_file = self.config_file.exists()
+        except OSError as exc:
+            exception = exc
+        else:
+            if self.has_config_file:
+                self.config_file = self.config_file.resolve()
+                self.config_parser = ConfigParser.ConfigParser()
+                try:
+                    self._load()
+                    self.has_virtualenv_section = self.config_parser.has_section(self.section)
+                except Exception as exc:
+                    exception = exc
+        if exception is not None:
+            logging.error("failed to read config file %s because %r", config_file, exception)
+
+    def _load(self):
+        with self.config_file.open("rt") as file_handler:
+            reader = getattr(self.config_parser, "read_file" if PY3 else "readfp")
+            reader(file_handler)
+
+    def get(self, key, as_type):
+        cache_key = key, as_type
+        if cache_key in self._cache:
+            return self._cache[cache_key]
+        # noinspection PyBroadException
+        try:
+            source = "file"
+            raw_value = self.config_parser.get(self.section, key.lower())
+            value = convert(raw_value, as_type, source)
+            result = value, source
+        except Exception:
+            result = None
+        self._cache[cache_key] = result
+        return result
+
+    def __bool__(self):
+        return bool(self.has_config_file) and bool(self.has_virtualenv_section)
+
+    @property
+    def epilog(self):
+        msg = "{}config file {} {} (change{} via env var {})"
+        return msg.format(
+            os.linesep,
+            self.config_file,
+            self.STATE[self.has_config_file],
+            "d" if self.is_env_var else "",
+            self.VIRTUALENV_CONFIG_FILE_ENV_VAR,
+        )
diff --git a/venv/Lib/site-packages/virtualenv/create/__init__.py b/venv/Lib/site-packages/virtualenv/create/__init__.py
new file mode 100644
index 00000000..01e6d4f4
--- /dev/null
+++ b/venv/Lib/site-packages/virtualenv/create/__init__.py
@@ -0,0 +1 @@
+from __future__ import absolute_import, unicode_literals
diff --git a/venv/Lib/site-packages/virtualenv/create/creator.py b/venv/Lib/site-packages/virtualenv/create/creator.py
new file mode 100644
index 00000000..4d5b306e
--- /dev/null
+++ b/venv/Lib/site-packages/virtualenv/create/creator.py
@@ -0,0 +1,224 @@
+from __future__ import absolute_import, print_function, unicode_literals
+
+import json
+import logging
+import os
+import sys
+from abc import ABCMeta, abstractmethod
+from argparse import ArgumentTypeError
+from ast import literal_eval
+from collections import OrderedDict
+from textwrap import dedent
+
+from six import add_metaclass
+
+from virtualenv.discovery.cached_py_info import LogCmd
+from virtualenv.info import WIN_CPYTHON_2
+from virtualenv.util.path import Path, safe_delete
+from virtualenv.util.six import ensure_str, ensure_text
+from virtualenv.util.subprocess import run_cmd
+from virtualenv.version import __version__
+
+from .pyenv_cfg import PyEnvCfg
+
+HERE = Path(os.path.abspath(__file__)).parent
+DEBUG_SCRIPT = HERE / "debug.py"
+
+
+class CreatorMeta(object):
+    def __init__(self):
+        self.error = None
+
+
+@add_metaclass(ABCMeta)
+class Creator(object):
+    """A class that, given a python interpreter, creates a virtual environment"""
+
+    def __init__(self, options, interpreter):
+        """Construct a new virtual environment creator.
+
+        :param options: the CLI options as parsed from :meth:`add_parser_arguments`
+        :param interpreter: the interpreter to create virtual environment from
+        """
+        self.interpreter = interpreter
+        self._debug = None
+        self.dest = Path(options.dest)
+        self.clear = options.clear
+        self.pyenv_cfg = PyEnvCfg.from_folder(self.dest)
+        self.app_data = options.app_data
+
+    def __repr__(self):
+        return ensure_str(self.__unicode__())
+
+    def __unicode__(self):
+        return "{}({})".format(self.__class__.__name__, ", ".join("{}={}".format(k, v) for k, v in self._args()))
+
+    def _args(self):
+        return [
+            ("dest", ensure_text(str(self.dest))),
+            ("clear", self.clear),
+        ]
+
+    @classmethod
+    def can_create(cls, interpreter):
+        """Determine if we can create a virtual environment.
+
+        :param interpreter: the interpreter in question
+        :return: ``None`` if we can't create; otherwise any other object, which will be forwarded to \
+        :meth:`add_parser_arguments`
+        """
+        return True
+
+    @classmethod
+    def add_parser_arguments(cls, parser, interpreter, meta, app_data):
+        """Add CLI arguments for the creator.
+
+        :param parser: the CLI parser
+        :param app_data: the application data folder
+        :param interpreter: the interpreter we're asked to create virtual environment for
+        :param meta: value as returned by :meth:`can_create`
+        """
+        parser.add_argument(
+            "dest", help="directory to create virtualenv at", type=cls.validate_dest,
+        )
+        parser.add_argument(
+            "--clear",
+            dest="clear",
+            action="store_true",
+            help="remove the destination directory if it exists before starting (will overwrite files otherwise)",
+            default=False,
+        )
+
+    @abstractmethod
+    def create(self):
+        """Perform the virtual environment creation."""
+        raise NotImplementedError
+
+    @classmethod
+    def validate_dest(cls, raw_value):
+        """The destination must contain no path-separator character, only file-system-encodable characters, and must be write-able"""
+
+        def non_write_able(dest, value):
+            common = Path(*os.path.commonprefix([value.parts, dest.parts]))
+            raise ArgumentTypeError(
+                "the destination {} is not write-able at {}".format(dest.relative_to(common), common),
+            )
+
+        # the file system must be able to encode
+        # note in newer CPython this is always utf-8 https://www.python.org/dev/peps/pep-0529/
+        encoding = sys.getfilesystemencoding()
+        refused = OrderedDict()
+        kwargs = {"errors": "ignore"} if encoding != "mbcs" else {}
+        for char in ensure_text(raw_value):
+            try:
+                trip = char.encode(encoding, **kwargs).decode(encoding)
+                if trip == char:
+                    continue
+                raise ValueError(trip)
+            except ValueError:
+                refused[char] = None
+        if refused:
+            raise ArgumentTypeError(
+                "the file system codec ({}) cannot handle characters {!r} within {!r}".format(
+                    encoding, "".join(refused.keys()), raw_value,
+                ),
+            )
+        if os.pathsep in raw_value:
+            raise ArgumentTypeError(
+                "destination {!r} must not contain the path separator ({}) as this would break "
+                "the activation scripts".format(raw_value, os.pathsep),
+            )
+
+        value = Path(raw_value)
+        if value.exists() and value.is_file():
+            raise ArgumentTypeError("the destination {} already exists and is a file".format(value))
+        if (3, 3) <= sys.version_info <= (3, 6):
+            # pre 3.6 resolve is always strict, aka the path must exist; sidestep by using os.path operations
+            dest = Path(os.path.realpath(raw_value))
+        else:
+            dest = Path(os.path.abspath(str(value))).resolve()  # on Windows absolute does not imply resolve so use both
+        value = dest
+        while dest:
+            if dest.exists():
+                if os.access(ensure_text(str(dest)), os.W_OK):
+                    break
+                else:
+                    non_write_able(dest, value)
+            base, _ = dest.parent, dest.name
+            if base == dest:
+                non_write_able(dest, value)  # pragma: no cover
+            dest = base
+        return str(value)
+
+    def run(self):
+        if self.dest.exists() and self.clear:
+            logging.debug("delete %s", self.dest)
+            safe_delete(self.dest)
+        self.create()
+        self.set_pyenv_cfg()
+        self.setup_ignore_vcs()
+
+    def set_pyenv_cfg(self):
+        self.pyenv_cfg.content = OrderedDict()
+        self.pyenv_cfg["home"] = self.interpreter.system_exec_prefix
+        self.pyenv_cfg["implementation"] = self.interpreter.implementation
+        self.pyenv_cfg["version_info"] = ".".join(str(i) for i in self.interpreter.version_info)
+        self.pyenv_cfg["virtualenv"] = __version__
+
+    def setup_ignore_vcs(self):
+        """Generate ignore instructions for version control systems."""
+        # mark this folder to be ignored by VCS, handle https://www.python.org/dev/peps/pep-0610/#registered-vcs
+        git_ignore = self.dest / ".gitignore"
+        if not git_ignore.exists():
+            git_ignore.write_text(
+                dedent(
+                    """
+                    # created by virtualenv automatically
+                    *
+                    """,
+                ).lstrip(),
+            )
+        # Mercurial - does not support the .hgignore file inside a subdirectory directly, but only if included via the
+        # subinclude directive from root, at which point one might as well ignore the directory itself, see
+        # https://www.selenic.com/mercurial/hgignore.5.html for more details
+        # Bazaar - does not support ignore files in sub-directories, only at root level via .bzrignore
+        # Subversion - does not support ignore files, requires direct manipulation with the svn tool
+
+    @property
+    def debug(self):
+        """
+        :return: debug information about the virtual environment (only valid after :meth:`create` has run)
+        """
+        if self._debug is None and self.exe is not None:
+            self._debug = get_env_debug_info(self.exe, self.debug_script(), self.app_data)
+        return self._debug
+
+    # noinspection PyMethodMayBeStatic
+    def debug_script(self):
+        return DEBUG_SCRIPT
+
+
+def get_env_debug_info(env_exe, debug_script, app_data):
+    env = os.environ.copy()
+    env.pop(str("PYTHONPATH"), None)
+
+    with app_data.ensure_extracted(debug_script) as debug_script:
+        cmd = [str(env_exe), str(debug_script)]
+        if WIN_CPYTHON_2:
+            cmd = [ensure_text(i) for i in cmd]
+        logging.debug(str("debug via %r"), LogCmd(cmd))
+        code, out, err = run_cmd(cmd)
+
+    # noinspection PyBroadException
+    try:
+        if code != 0:
+            result = literal_eval(out)
+        else:
+            result = json.loads(out)
+        if err:
+            result["err"] = err
+    except Exception as exception:
+        return {"out": out, "err": err, "returncode": code, "exception": repr(exception)}
+    if "sys" in result and "path" in result["sys"]:
+        del result["sys"]["path"][0]
+    return result
diff --git a/venv/Lib/site-packages/virtualenv/create/debug.py b/venv/Lib/site-packages/virtualenv/create/debug.py
new file mode 100644
index 00000000..0cdaa494
--- /dev/null
+++ b/venv/Lib/site-packages/virtualenv/create/debug.py
@@ -0,0 +1,110 @@
+"""Inspect a target Python interpreter from a virtual-environment perspective"""
+import sys  # built-in
+
+PYPY2_WIN = hasattr(sys, "pypy_version_info") and sys.platform != "win32" and sys.version_info[0] == 2
+
+
+def encode_path(value):
+    if value is None:
+        return None
+    if not isinstance(value, (str, bytes)):
+        if isinstance(value, type):
+            value = repr(value)
+        else:
+            value = repr(type(value))
+    if isinstance(value, bytes) and not PYPY2_WIN:
+        value = value.decode(sys.getfilesystemencoding())
+    return value
+
+
+def encode_list_path(value):
+    return [encode_path(i) for i in value]
+
+
+def run():
+    """print debug data about the virtual environment"""
+    try:
+        from collections import OrderedDict
+    except ImportError:  # pragma: no cover
+        # this is possible if the standard library cannot be accessed
+        # noinspection PyPep8Naming
+        OrderedDict = dict  # pragma: no cover
+    result = OrderedDict([("sys", OrderedDict())])
+    path_keys = (
+        "executable",
+        "_base_executable",
+        "prefix",
+        "base_prefix",
+        "real_prefix",
+        "exec_prefix",
+        "base_exec_prefix",
+        "path",
+        "meta_path",
+    )
+    for key in path_keys:
+        value = getattr(sys, key, None)
+        if isinstance(value, list):
+            value = encode_list_path(value)
+        else:
+            value = encode_path(value)
+        result["sys"][key] = value
+    result["sys"]["fs_encoding"] = sys.getfilesystemencoding()
+    result["sys"]["io_encoding"] = getattr(sys.stdout, "encoding", None)
+    result["version"] = sys.version
+
+    try:
+        import sysconfig
+
+        # https://bugs.python.org/issue22199
+        makefile = getattr(sysconfig, "get_makefile_filename", getattr(sysconfig, "_get_makefile_filename", None))
+        result["makefile_filename"] = encode_path(makefile())
+    except ImportError:
+        pass
+
+    import os  # landmark
+
+    result["os"] = repr(os)
+
+    try:
+        # noinspection PyUnresolvedReferences
+        import site  # site
+
+        result["site"] = repr(site)
+    except ImportError as exception:  # pragma: no cover
+        result["site"] = repr(exception)  # pragma: no cover
+
+    try:
+        # noinspection PyUnresolvedReferences
+        import datetime  # site
+
+        result["datetime"] = repr(datetime)
+    except ImportError as exception:  # pragma: no cover
+        result["datetime"] = repr(exception)  # pragma: no cover
+
+    try:
+        # noinspection PyUnresolvedReferences
+        import math  # site
+
+        result["math"] = repr(math)
+    except ImportError as exception:  # pragma: no cover
+        result["math"] = repr(exception)  # pragma: no cover
+
+    # try to print out, this will validate if other core modules are available (json in this case)
+    try:
+        import json
+
+        result["json"] = repr(json)
+    except ImportError as exception:
+        result["json"] = repr(exception)
+    else:
+        try:
+            content = json.dumps(result, indent=2)
+            sys.stdout.write(content)
+        except (ValueError, TypeError) as exception:  # pragma: no cover
+            sys.stderr.write(repr(exception))
+            sys.stdout.write(repr(result))  # pragma: no cover
+            raise SystemExit(1)  # pragma: no cover
+
+
+if __name__ == "__main__":
+    run()
diff --git a/venv/Lib/site-packages/virtualenv/create/describe.py b/venv/Lib/site-packages/virtualenv/create/describe.py
new file mode 100644
index 00000000..1e59aaea
--- /dev/null
+++ b/venv/Lib/site-packages/virtualenv/create/describe.py
@@ -0,0 +1,117 @@
+from __future__ import absolute_import, print_function, unicode_literals
+
+from abc import ABCMeta
+from collections import OrderedDict
+
+from six import add_metaclass
+
+from virtualenv.info import IS_WIN
+from virtualenv.util.path import Path
+from virtualenv.util.six import ensure_text
+
+
+@add_metaclass(ABCMeta)
+class Describe(object):
+    """Given a host interpreter, tell us what the created interpreter might look like"""
+
+    suffix = ".exe" if IS_WIN else ""
+
+    def __init__(self, dest, interpreter):
+        self.interpreter = interpreter
+        self.dest = dest
+        self._stdlib = None
+        self._stdlib_platform = None
+        self._system_stdlib = None
+        self._conf_vars = None
+
+    @property
+    def bin_dir(self):
+        return self.script_dir
+
+    @property
+    def script_dir(self):
+        return self.dest / Path(self.interpreter.distutils_install["scripts"])
+
+    @property
+    def purelib(self):
+        return self.dest / self.interpreter.distutils_install["purelib"]
+
+    @property
+    def platlib(self):
+        return self.dest / self.interpreter.distutils_install["platlib"]
+
+    @property
+    def libs(self):
+        return list(OrderedDict(((self.platlib, None), (self.purelib, None))).keys())
+
+    @property
+    def stdlib(self):
+        if self._stdlib is None:
+            self._stdlib = Path(self.interpreter.sysconfig_path("stdlib", config_var=self._config_vars))
+        return self._stdlib
+
+    @property
+    def stdlib_platform(self):
+        if self._stdlib_platform is None:
+            self._stdlib_platform = Path(self.interpreter.sysconfig_path("platstdlib", config_var=self._config_vars))
+        return self._stdlib_platform
+
+    @property
+    def _config_vars(self):
+        if self._conf_vars is None:
+            self._conf_vars = self._calc_config_vars(ensure_text(str(self.dest)))
+        return self._conf_vars
+
+    def _calc_config_vars(self, to):
+        return {
+            k: (to if v.startswith(self.interpreter.prefix) else v) for k, v in self.interpreter.sysconfig_vars.items()
+        }
+
+    @classmethod
+    def can_describe(cls, interpreter):
+        """Whether this class knows how the created environment will look for this interpreter"""
+        return True
+
+    @property
+    def env_name(self):
+        return ensure_text(self.dest.parts[-1])
+
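+    # (editor's annotation, illustrative rather than upstream: `exe` below resolves to
+    # the environment's interpreter, e.g. <dest>/bin/python on POSIX or
+    # <dest>\Scripts\python.exe on Windows, built from script_dir, exe_stem() and suffix)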
@property + def exe(self): + return self.bin_dir / "{}{}".format(self.exe_stem(), self.suffix) + + @classmethod + def exe_stem(cls): + """executable name without suffix - there seems to be no standard way to get this without creating it""" + raise NotImplementedError + + def script(self, name): + return self.script_dir / "{}{}".format(name, self.suffix) + + +@add_metaclass(ABCMeta) +class Python2Supports(Describe): + @classmethod + def can_describe(cls, interpreter): + return interpreter.version_info.major == 2 and super(Python2Supports, cls).can_describe(interpreter) + + +@add_metaclass(ABCMeta) +class Python3Supports(Describe): + @classmethod + def can_describe(cls, interpreter): + return interpreter.version_info.major == 3 and super(Python3Supports, cls).can_describe(interpreter) + + +@add_metaclass(ABCMeta) +class PosixSupports(Describe): + @classmethod + def can_describe(cls, interpreter): + return interpreter.os == "posix" and super(PosixSupports, cls).can_describe(interpreter) + + +@add_metaclass(ABCMeta) +class WindowsSupports(Describe): + @classmethod + def can_describe(cls, interpreter): + return interpreter.os == "nt" and super(WindowsSupports, cls).can_describe(interpreter) diff --git a/venv/Lib/site-packages/virtualenv/create/pyenv_cfg.py b/venv/Lib/site-packages/virtualenv/create/pyenv_cfg.py new file mode 100644 index 00000000..1a8d8244 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/create/pyenv_cfg.py @@ -0,0 +1,61 @@ +from __future__ import absolute_import, unicode_literals + +import logging +from collections import OrderedDict + +from virtualenv.util.six import ensure_text + + +class PyEnvCfg(object): + def __init__(self, content, path): + self.content = content + self.path = path + + @classmethod + def from_folder(cls, folder): + return cls.from_file(folder / "pyvenv.cfg") + + @classmethod + def from_file(cls, path): + content = cls._read_values(path) if path.exists() else OrderedDict() + return PyEnvCfg(content, path) + + @staticmethod + def _read_values(path): + content = OrderedDict() + for line in path.read_text(encoding="utf-8").splitlines(): + equals_at = line.index("=") + key = line[:equals_at].strip() + value = line[equals_at + 1 :].strip() + content[key] = value + return content + + def write(self): + logging.debug("write %s", ensure_text(str(self.path))) + text = "" + for key, value in self.content.items(): + line = "{} = {}".format(key, value) + logging.debug("\t%s", line) + text += line + text += "\n" + self.path.write_text(text, encoding="utf-8") + + def refresh(self): + self.content = self._read_values(self.path) + return self.content + + def __setitem__(self, key, value): + self.content[key] = value + + def __getitem__(self, key): + return self.content[key] + + def __contains__(self, item): + return item in self.content + + def update(self, other): + self.content.update(other) + return self + + def __repr__(self): + return "{}(path={})".format(self.__class__.__name__, self.path) diff --git a/venv/Lib/site-packages/virtualenv/create/via_global_ref/__init__.py b/venv/Lib/site-packages/virtualenv/create/via_global_ref/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/virtualenv/create/via_global_ref/_virtualenv.py b/venv/Lib/site-packages/virtualenv/create/via_global_ref/_virtualenv.py new file mode 100644 index 00000000..31f9b81b --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/create/via_global_ref/_virtualenv.py @@ -0,0 +1,130 @@ +"""Patches that are applied at runtime to the virtual environment""" +# 
-*- coding: utf-8 -*-
+
+import os
+import sys
+
+VIRTUALENV_PATCH_FILE = os.path.join(__file__)
+
+
+def patch_dist(dist):
+    """
+    Distutils allows users to configure some arguments via a configuration file:
+    https://docs.python.org/3/install/index.html#distutils-configuration-files
+
+    Some of these arguments, though, don't make sense in the context of the virtual environment files, so let's fix them up.
+    """
+    # we cannot allow some install config as that would get packages installed outside of the virtual environment
+    old_parse_config_files = dist.Distribution.parse_config_files
+
+    def parse_config_files(self, *args, **kwargs):
+        result = old_parse_config_files(self, *args, **kwargs)
+        install = self.get_option_dict("install")
+
+        if "prefix" in install:  # the prefix governs where to install the libraries
+            install["prefix"] = VIRTUALENV_PATCH_FILE, os.path.abspath(sys.prefix)
+        for base in ("purelib", "platlib", "headers", "scripts", "data"):
+            key = "install_{}".format(base)
+            if key in install:  # do not allow global configs to hijack venv paths
+                install.pop(key, None)
+        return result
+
+    dist.Distribution.parse_config_files = parse_config_files
+
+
+# Import hook that patches some modules to ignore configuration values that break package installation in case
+# of virtual environments.
+_DISTUTILS_PATCH = "distutils.dist", "setuptools.dist"
+if sys.version_info > (3, 4):
+    # https://docs.python.org/3/library/importlib.html#setting-up-an-importer
+    from importlib.abc import MetaPathFinder
+    from importlib.util import find_spec
+    from functools import partial
+
+    class _Finder(MetaPathFinder):
+        """A meta path finder that allows patching the imported distutils modules"""
+
+        fullname = None
+
+        # lock[0] is threading.Lock(), but initialized lazily to avoid importing threading very early at startup,
+        # because there are gevent-based applications that need to be first to import threading by themselves.
+        # See https://github.com/pypa/virtualenv/issues/1895 for details.
+        lock = []
+
+        def find_spec(self, fullname, path, target=None):
+            if fullname in _DISTUTILS_PATCH and self.fullname is None:
+                # initialize lock[0] lazily
+                if len(self.lock) == 0:
+                    import threading
+
+                    lock = threading.Lock()
+                    # there is a possibility that two threads T1 and T2 are simultaneously running into find_spec,
+                    # observing .lock as empty, and both proceeding into this initialization. However due to the GIL,
+                    # list.append() operation is atomic and this way only one of the threads will "win" to put the lock
+                    # - that every thread will use - into .lock[0].
+ # https://docs.python.org/3/faq/library.html#what-kinds-of-global-value-mutation-are-thread-safe + self.lock.append(lock) + + with self.lock[0]: + self.fullname = fullname + try: + spec = find_spec(fullname, path) + if spec is not None: + # https://www.python.org/dev/peps/pep-0451/#how-loading-will-work + is_new_api = hasattr(spec.loader, "exec_module") + func_name = "exec_module" if is_new_api else "load_module" + old = getattr(spec.loader, func_name) + func = self.exec_module if is_new_api else self.load_module + if old is not func: + try: + setattr(spec.loader, func_name, partial(func, old)) + except AttributeError: + pass # C-Extension loaders are r/o such as zipimporter with ver >= (3, 7) or (3, 8, 3) > ver >= (3, 8)) + + +class CPython3Windows(CPythonWindows, CPython3): + """""" + + @classmethod + def setup_meta(cls, interpreter): + if is_store_python(interpreter): # store python is not supported here + return None + return super(CPython3Windows, cls).setup_meta(interpreter) + + @classmethod + def sources(cls, interpreter): + for src in super(CPython3Windows, cls).sources(interpreter): + yield src + for src in cls.include_dll_and_pyd(interpreter): + yield src + + @classmethod + def include_dll_and_pyd(cls, interpreter): + dll_folder = Path(interpreter.system_prefix) / "DLLs" + host_exe_folder = Path(interpreter.system_executable).parent + for folder in [host_exe_folder, dll_folder]: + for file in folder.iterdir(): + if file.suffix in (".pyd", ".dll"): + yield PathRefToDest(file, dest=cls.to_dll_and_pyd) + + def to_dll_and_pyd(self, src): + return self.bin_dir / src.name diff --git a/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/cpython/mac_os.py b/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/cpython/mac_os.py new file mode 100644 index 00000000..6dc3ba4e --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/cpython/mac_os.py @@ -0,0 +1,297 @@ +# -*- coding: utf-8 -*- +"""The Apple Framework builds require their own customization""" +import logging +import os +import struct +import subprocess +from abc import ABCMeta, abstractmethod +from textwrap import dedent + +from six import add_metaclass + +from virtualenv.create.via_global_ref.builtin.ref import ExePathRefToDest, PathRefToDest +from virtualenv.util.path import Path +from virtualenv.util.six import ensure_text + +from .common import CPython, CPythonPosix, is_mac_os_framework +from .cpython2 import CPython2PosixBase +from .cpython3 import CPython3 + + +@add_metaclass(ABCMeta) +class CPythonmacOsFramework(CPython): + @classmethod + def can_describe(cls, interpreter): + return is_mac_os_framework(interpreter) and super(CPythonmacOsFramework, cls).can_describe(interpreter) + + @classmethod + def sources(cls, interpreter): + for src in super(CPythonmacOsFramework, cls).sources(interpreter): + yield src + # add a symlink to the host python image + ref = PathRefToDest(cls.image_ref(interpreter), dest=lambda self, _: self.dest / ".Python", must_symlink=True) + yield ref + + def create(self): + super(CPythonmacOsFramework, self).create() + + # change the install_name of the copied python executables + target = "@executable_path/../.Python" + current = self.current_mach_o_image_path() + for src in self._sources: + if isinstance(src, ExePathRefToDest): + if src.must_copy or not self.symlinks: + exes = [self.bin_dir / src.base] + if not self.symlinks: + exes.extend(self.bin_dir / a for a in src.aliases) + for exe in exes: + fix_mach_o(str(exe), current, target, 
self.interpreter.max_size)
+
+    @classmethod
+    def _executables(cls, interpreter):
+        for _, targets in super(CPythonmacOsFramework, cls)._executables(interpreter):
+            # Make sure we use the embedded interpreter inside the framework, even if sys.executable points to the
+            # stub executable in ${sys.prefix}/bin.
+            # See http://groups.google.com/group/python-virtualenv/browse_thread/thread/17cab2f85da75951
+            fixed_host_exe = Path(interpreter.prefix) / "Resources" / "Python.app" / "Contents" / "MacOS" / "Python"
+            yield fixed_host_exe, targets
+
+    @abstractmethod
+    def current_mach_o_image_path(self):
+        raise NotImplementedError
+
+    @classmethod
+    def image_ref(cls, interpreter):
+        raise NotImplementedError
+
+
+class CPython2macOsFramework(CPythonmacOsFramework, CPython2PosixBase):
+    @classmethod
+    def image_ref(cls, interpreter):
+        return Path(interpreter.prefix) / "Python"
+
+    def current_mach_o_image_path(self):
+        return os.path.join(self.interpreter.prefix, "Python")
+
+    @classmethod
+    def sources(cls, interpreter):
+        for src in super(CPython2macOsFramework, cls).sources(interpreter):
+            yield src
+        # landmark for exec_prefix
+        exec_marker_file, to_path, _ = cls.from_stdlib(cls.mappings(interpreter), "lib-dynload")
+        yield PathRefToDest(exec_marker_file, dest=to_path)
+
+    @property
+    def reload_code(self):
+        result = super(CPython2macOsFramework, self).reload_code
+        result = dedent(
+            """
+        # the bundled site.py always adds the global site package if we're on a python framework build, escape this
+        import sysconfig
+        config = sysconfig.get_config_vars()
+        before = config["PYTHONFRAMEWORK"]
+        try:
+            config["PYTHONFRAMEWORK"] = ""
+            {}
+        finally:
+            config["PYTHONFRAMEWORK"] = before
+        """.format(
+                result,
+            ),
+        )
+        return result
+
+
+class CPython3macOsFramework(CPythonmacOsFramework, CPython3, CPythonPosix):
+    @classmethod
+    def image_ref(cls, interpreter):
+        return Path(interpreter.prefix) / "Python3"
+
+    def current_mach_o_image_path(self):
+        return "@executable_path/../../../../Python3"
+
+    @property
+    def reload_code(self):
+        result = super(CPython3macOsFramework, self).reload_code
+        result = dedent(
+            """
+        # the bundled site.py always adds the global site package if we're on a python framework build, escape this
+        import sys
+        before = sys._framework
+        try:
+            sys._framework = None
+            {}
+        finally:
+            sys._framework = before
+        """.format(
+                result,
+            ),
+        )
+        return result
+
+
+def fix_mach_o(exe, current, new, max_size):
+    """
+    https://en.wikipedia.org/wiki/Mach-O
+
+    Mach-O, short for Mach object file format, is a file format for executables, object code, shared libraries,
+    dynamically-loaded code, and core dumps. A replacement for the a.out format, Mach-O offers more extensibility and
+    faster access to information in the symbol table.
+
+    Each Mach-O file is made up of one Mach-O header, followed by a series of load commands, followed by one or more
+    segments, each of which contains between 0 and 255 sections. Mach-O uses the REL relocation format to handle
+    references to symbols. When looking up symbols Mach-O uses a two-level namespace that encodes each symbol into an
+    'object/symbol name' pair that is then linearly searched for by first the object and then the symbol name.
+
+    The basic structure—a list of variable-length "load commands" that reference pages of data elsewhere in the file—was
+    also used in the executable file format for Accent. The Accent file format was in turn, based on an idea from Spice
+    Lisp.
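+
+    (Editor's illustrative note, not part of the upstream docstring: in this module the
+    helper below is invoked as fix_mach_o(str(exe), current, target, max_size) to point a
+    copied interpreter's load command at "@executable_path/../.Python"; the replacement
+    name must be no longer than the name it overwrites.)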
+
+    With the introduction of Mac OS X 10.6 platform the Mach-O file underwent a significant modification that causes
+    binaries compiled on a computer running 10.6 or later to be (by default) executable only on computers running Mac
+    OS X 10.6 or later. The difference stems from load commands that the dynamic linker, in previous Mac OS X versions,
+    does not understand. Another significant change to the Mach-O format is the change in how the Link Edit tables
+    (found in the __LINKEDIT section) function. In 10.6 these new Link Edit tables are compressed by removing unused and
+    unneeded bits of information, however Mac OS X 10.5 and earlier cannot read this new Link Edit table format.
+    """
+    try:
+        logging.debug(u"change Mach-O for %s from %s to %s", ensure_text(exe), current, ensure_text(new))
+        _builtin_change_mach_o(max_size)(exe, current, new)
+    except Exception as e:
+        logging.warning("Could not call _builtin_change_mach_o: %s. " "Trying to call install_name_tool instead.", e)
+        try:
+            cmd = ["install_name_tool", "-change", current, new, exe]
+            subprocess.check_call(cmd)
+        except Exception:
+            logging.fatal("Could not call install_name_tool -- you must " "have Apple's development tools installed")
+            raise
+
+
+def _builtin_change_mach_o(maxint):
+    MH_MAGIC = 0xFEEDFACE
+    MH_CIGAM = 0xCEFAEDFE
+    MH_MAGIC_64 = 0xFEEDFACF
+    MH_CIGAM_64 = 0xCFFAEDFE
+    FAT_MAGIC = 0xCAFEBABE
+    BIG_ENDIAN = ">"
+    LITTLE_ENDIAN = "<"
+    LC_LOAD_DYLIB = 0xC
+
+    class FileView(object):
+        """A proxy for file-like objects that exposes a given view of a file. Modified from macholib."""
+
+        def __init__(self, file_obj, start=0, size=maxint):
+            if isinstance(file_obj, FileView):
+                self._file_obj = file_obj._file_obj
+            else:
+                self._file_obj = file_obj
+            self._start = start
+            self._end = start + size
+            self._pos = 0
+
+        def __repr__(self):
+            return "<FileView [{:d}, {:d}] {!r}>".format(self._start, self._end, self._file_obj)
+
+        def tell(self):
+            return self._pos
+
+        def _checkwindow(self, seek_to, op):
+            if not (self._start <= seek_to <= self._end):
+                msg = "{} to offset {:d} is outside window [{:d}, {:d}]".format(op, seek_to, self._start, self._end)
+                raise IOError(msg)
+
+        def seek(self, offset, whence=0):
+            seek_to = offset
+            if whence == os.SEEK_SET:
+                seek_to += self._start
+            elif whence == os.SEEK_CUR:
+                seek_to += self._start + self._pos
+            elif whence == os.SEEK_END:
+                seek_to += self._end
+            else:
+                raise IOError("Invalid whence argument to seek: {!r}".format(whence))
+            self._checkwindow(seek_to, "seek")
+            self._file_obj.seek(seek_to)
+            self._pos = seek_to - self._start
+
+        def write(self, content):
+            here = self._start + self._pos
+            self._checkwindow(here, "write")
+            self._checkwindow(here + len(content), "write")
+            self._file_obj.seek(here, os.SEEK_SET)
+            self._file_obj.write(content)
+            self._pos += len(content)
+
+        def read(self, size=maxint):
+            assert size >= 0
+            here = self._start + self._pos
+            self._checkwindow(here, "read")
+            size = min(size, self._end - here)
+            self._file_obj.seek(here, os.SEEK_SET)
+            read_bytes = self._file_obj.read(size)
+            self._pos += len(read_bytes)
+            return read_bytes
+
+    def read_data(file, endian, num=1):
+        """Read a given number of 32-bit unsigned integers from the given file with the given endianness."""
+        res = struct.unpack(endian + "L" * num, file.read(num * 4))
+        if len(res) == 1:
+            return res[0]
+        return res
+
+    def mach_o_change(at_path, what, value):
+        """Replace a given name (what) in any LC_LOAD_DYLIB command found in the given binary with a new name (value),
+        provided it's shorter."""
+
+        def
do_macho(file, bits, endian): + # Read Mach-O header (the magic number is assumed read by the caller) + cpu_type, cpu_sub_type, file_type, n_commands, size_of_commands, flags = read_data(file, endian, 6) + # 64-bits header has one more field. + if bits == 64: + read_data(file, endian) + # The header is followed by n commands + for _ in range(n_commands): + where = file.tell() + # Read command header + cmd, cmd_size = read_data(file, endian, 2) + if cmd == LC_LOAD_DYLIB: + # The first data field in LC_LOAD_DYLIB commands is the offset of the name, starting from the + # beginning of the command. + name_offset = read_data(file, endian) + file.seek(where + name_offset, os.SEEK_SET) + # Read the NUL terminated string + load = file.read(cmd_size - name_offset).decode() + load = load[: load.index("\0")] + # If the string is what is being replaced, overwrite it. + if load == what: + file.seek(where + name_offset, os.SEEK_SET) + file.write(value.encode() + b"\0") + # Seek to the next command + file.seek(where + cmd_size, os.SEEK_SET) + + def do_file(file, offset=0, size=maxint): + file = FileView(file, offset, size) + # Read magic number + magic = read_data(file, BIG_ENDIAN) + if magic == FAT_MAGIC: + # Fat binaries contain nfat_arch Mach-O binaries + n_fat_arch = read_data(file, BIG_ENDIAN) + for _ in range(n_fat_arch): + # Read arch header + cpu_type, cpu_sub_type, offset, size, align = read_data(file, BIG_ENDIAN, 5) + do_file(file, offset, size) + elif magic == MH_MAGIC: + do_macho(file, 32, BIG_ENDIAN) + elif magic == MH_CIGAM: + do_macho(file, 32, LITTLE_ENDIAN) + elif magic == MH_MAGIC_64: + do_macho(file, 64, BIG_ENDIAN) + elif magic == MH_CIGAM_64: + do_macho(file, 64, LITTLE_ENDIAN) + + assert len(what) >= len(value) + + with open(at_path, "r+b") as f: + do_file(f) + + return mach_o_change diff --git a/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/pypy/__init__.py b/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/pypy/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/pypy/common.py b/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/pypy/common.py new file mode 100644 index 00000000..90da51fe --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/pypy/common.py @@ -0,0 +1,52 @@ +from __future__ import absolute_import, unicode_literals + +import abc + +from six import add_metaclass + +from virtualenv.create.via_global_ref.builtin.ref import PathRefToDest +from virtualenv.util.path import Path + +from ..via_global_self_do import ViaGlobalRefVirtualenvBuiltin + + +@add_metaclass(abc.ABCMeta) +class PyPy(ViaGlobalRefVirtualenvBuiltin): + @classmethod + def can_describe(cls, interpreter): + return interpreter.implementation == "PyPy" and super(PyPy, cls).can_describe(interpreter) + + @classmethod + def _executables(cls, interpreter): + host = Path(interpreter.system_executable) + targets = sorted("{}{}".format(name, PyPy.suffix) for name in cls.exe_names(interpreter)) + yield host, targets + + @classmethod + def exe_names(cls, interpreter): + return { + cls.exe_stem(), + "python", + "python{}".format(interpreter.version_info.major), + "python{}.{}".format(*interpreter.version_info), + } + + @classmethod + def sources(cls, interpreter): + for src in super(PyPy, cls).sources(interpreter): + yield src + for host in cls._add_shared_libs(interpreter): + yield PathRefToDest(host, dest=lambda self, s: self.bin_dir / s.name) + + @classmethod + 
def _add_shared_libs(cls, interpreter): + # https://bitbucket.org/pypy/pypy/issue/1922/future-proofing-virtualenv + python_dir = Path(interpreter.system_executable).resolve().parent + for libname in cls._shared_libs(): + src = python_dir / libname + if src.exists(): + yield src + + @classmethod + def _shared_libs(cls): + raise NotImplementedError diff --git a/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/pypy/pypy2.py b/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/pypy/pypy2.py new file mode 100644 index 00000000..020000b3 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/pypy/pypy2.py @@ -0,0 +1,121 @@ +from __future__ import absolute_import, unicode_literals + +import abc +import logging +import os + +from six import add_metaclass + +from virtualenv.create.describe import PosixSupports, WindowsSupports +from virtualenv.create.via_global_ref.builtin.ref import PathRefToDest +from virtualenv.util.path import Path + +from ..python2.python2 import Python2 +from .common import PyPy + + +@add_metaclass(abc.ABCMeta) +class PyPy2(PyPy, Python2): + """""" + + @classmethod + def exe_stem(cls): + return "pypy" + + @classmethod + def sources(cls, interpreter): + for src in super(PyPy2, cls).sources(interpreter): + yield src + # include folder needed on Python 2 as we don't have pyenv.cfg + host_include_marker = cls.host_include_marker(interpreter) + if host_include_marker.exists(): + yield PathRefToDest(host_include_marker.parent, dest=lambda self, _: self.include) + + @classmethod + def needs_stdlib_py_module(cls): + return True + + @classmethod + def host_include_marker(cls, interpreter): + return Path(interpreter.system_include) / "PyPy.h" + + @property + def include(self): + return self.dest / self.interpreter.distutils_install["headers"] + + @classmethod + def modules(cls): + # pypy2 uses some modules before the site.py loads, so we need to include these too + return super(PyPy2, cls).modules() + [ + "os", + "copy_reg", + "genericpath", + "linecache", + "stat", + "UserDict", + "warnings", + ] + + @property + def lib_pypy(self): + return self.dest / "lib_pypy" + + def ensure_directories(self): + dirs = super(PyPy2, self).ensure_directories() + dirs.add(self.lib_pypy) + host_include_marker = self.host_include_marker(self.interpreter) + if host_include_marker.exists(): + dirs.add(self.include.parent) + else: + logging.debug("no include folders as can't find include marker %s", host_include_marker) + return dirs + + @property + def skip_rewrite(self): + """ + PyPy2 built-in imports are handled by this path entry, don't overwrite to not disable it + see: https://github.com/pypa/virtualenv/issues/1652 + """ + return 'or path.endswith("lib_pypy{}__extensions__") # PyPy2 built-in import marker'.format(os.sep) + + +class PyPy2Posix(PyPy2, PosixSupports): + """PyPy 2 on POSIX""" + + @classmethod + def modules(cls): + return super(PyPy2Posix, cls).modules() + ["posixpath"] + + @classmethod + def _shared_libs(cls): + return ["libpypy-c.so", "libpypy-c.dylib"] + + @property + def lib(self): + return self.dest / "lib" + + @classmethod + def sources(cls, interpreter): + for src in super(PyPy2Posix, cls).sources(interpreter): + yield src + host_lib = Path(interpreter.system_prefix) / "lib" + if host_lib.exists(): + yield PathRefToDest(host_lib, dest=lambda self, _: self.lib) + + +class Pypy2Windows(PyPy2, WindowsSupports): + """PyPy 2 on Windows""" + + @classmethod + def modules(cls): + return super(Pypy2Windows, cls).modules() + ["ntpath"] 
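+
+    # (editor's note, illustrative: modules() chains through super() so each mix-in only
+    # adds its own pre-site bootstrap modules; PyPy2 lists e.g. "os" and "UserDict",
+    # PyPy2Posix adds "posixpath", and this Windows flavour adds "ntpath")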
+
+    @classmethod
+    def _shared_libs(cls):
+        return ["libpypy-c.dll"]
+
+    @classmethod
+    def sources(cls, interpreter):
+        for src in super(Pypy2Windows, cls).sources(interpreter):
+            yield src
+        yield PathRefToDest(Path(interpreter.system_prefix) / "libs", dest=lambda self, s: self.dest / s.name)
diff --git a/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/pypy/pypy3.py b/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/pypy/pypy3.py
new file mode 100644
index 00000000..95887067
--- /dev/null
+++ b/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/pypy/pypy3.py
@@ -0,0 +1,63 @@
+from __future__ import absolute_import, unicode_literals
+
+import abc
+
+from six import add_metaclass
+
+from virtualenv.create.describe import PosixSupports, Python3Supports, WindowsSupports
+from virtualenv.create.via_global_ref.builtin.ref import PathRefToDest
+from virtualenv.util.path import Path
+
+from .common import PyPy
+
+
+@add_metaclass(abc.ABCMeta)
+class PyPy3(PyPy, Python3Supports):
+    @classmethod
+    def exe_stem(cls):
+        return "pypy3"
+
+    @property
+    def stdlib(self):
+        """
+        PyPy3 seems to respect sysconfig only for the host python;
+        a virtual environment's purelib is instead lib/pythonx.y
+        """
+        return self.dest / "lib" / "python{}".format(self.interpreter.version_release_str) / "site-packages"
+
+    @classmethod
+    def exe_names(cls, interpreter):
+        return super(PyPy3, cls).exe_names(interpreter) | {"pypy"}
+
+
+class PyPy3Posix(PyPy3, PosixSupports):
+    """PyPy 3 on POSIX"""
+
+    @classmethod
+    def _shared_libs(cls):
+        return ["libpypy3-c.so", "libpypy3-c.dylib"]
+
+    def to_lib(self, src):
+        return self.dest / "lib" / src.name
+
+    @classmethod
+    def sources(cls, interpreter):
+        for src in super(PyPy3Posix, cls).sources(interpreter):
+            yield src
+        host_lib = Path(interpreter.system_prefix) / "lib"
+        if host_lib.exists() and host_lib.is_dir():
+            for path in host_lib.iterdir():
+                yield PathRefToDest(path, dest=cls.to_lib)
+
+
+class Pypy3Windows(PyPy3, WindowsSupports):
+    """PyPy 3 on Windows"""
+
+    @property
+    def bin_dir(self):
+        """PyPy3 needs to fall back to the pypy definition"""
+        return self.dest / "Scripts"
+
+    @classmethod
+    def _shared_libs(cls):
+        return ["libpypy3-c.dll"]
diff --git a/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/python2/__init__.py b/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/python2/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/python2/python2.py b/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/python2/python2.py
new file mode 100644
index 00000000..cacd56ec
--- /dev/null
+++ b/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/python2/python2.py
@@ -0,0 +1,111 @@
+from __future__ import absolute_import, unicode_literals
+
+import abc
+import json
+import os
+
+from six import add_metaclass
+
+from virtualenv.create.describe import Python2Supports
+from virtualenv.create.via_global_ref.builtin.ref import PathRefToDest
+from virtualenv.info import IS_ZIPAPP
+from virtualenv.util.path import Path
+from virtualenv.util.six import ensure_text
+from virtualenv.util.zipapp import read as read_from_zipapp
+
+from ..via_global_self_do import ViaGlobalRefVirtualenvBuiltin
+
+HERE = Path(os.path.abspath(__file__)).parent
+
+
+@add_metaclass(abc.ABCMeta)
+class Python2(ViaGlobalRefVirtualenvBuiltin, Python2Supports):
+    def create(self):
+        """Perform operations needed to make the created environment work on Python 2"""
+        super(Python2, self).create()
+        # install a patched site.py; the default Python 2 site.py is not smart enough to understand pyvenv.cfg,
+        # so we inject a small shim that can do this, whose location depends on where it is on the host
+        sys_std_plat = Path(self.interpreter.system_stdlib_platform)
+        site_py_in = (
+            self.stdlib_platform
+            if ((sys_std_plat / "site.py").exists() or (sys_std_plat / "site.pyc").exists())
+            else self.stdlib
+        )
+        site_py = site_py_in / "site.py"
+
+        custom_site = get_custom_site()
+        if IS_ZIPAPP:
+            custom_site_text = read_from_zipapp(custom_site)
+        else:
+            custom_site_text = custom_site.read_text()
+        expected = json.dumps([os.path.relpath(ensure_text(str(i)), ensure_text(str(site_py))) for i in self.libs])
+
+        custom_site_text = custom_site_text.replace("___EXPECTED_SITE_PACKAGES___", expected)
+
+        reload_code = os.linesep.join("    {}".format(i) for i in self.reload_code.splitlines()).lstrip()
+        custom_site_text = custom_site_text.replace("# ___RELOAD_CODE___", reload_code)
+
+        skip_rewrite = os.linesep.join("    {}".format(i) for i in self.skip_rewrite.splitlines()).lstrip()
+        custom_site_text = custom_site_text.replace("# ___SKIP_REWRITE____", skip_rewrite)
+
+        site_py.write_text(custom_site_text)
+
+    @property
+    def reload_code(self):
+        return 'reload(sys.modules["site"])  # noqa # call system site.py to setup import libraries'
+
+    @property
+    def skip_rewrite(self):
+        return ""
+
+    @classmethod
+    def sources(cls, interpreter):
+        for src in super(Python2, cls).sources(interpreter):
+            yield src
+        # install files needed to run site.py, either from stdlib or stdlib_platform; at least the pyc, but both if they exist
+        # if neither exists, return the module file to trigger failure
+        mappings, needs_py_module = (
+            cls.mappings(interpreter),
+            cls.needs_stdlib_py_module(),
+        )
+        for req in cls.modules():
+            module_file, to_module, module_exists = cls.from_stdlib(mappings, "{}.py".format(req))
+            compiled_file, to_compiled, compiled_exists = cls.from_stdlib(mappings, "{}.pyc".format(req))
+            if needs_py_module or module_exists or not compiled_exists:
+                yield PathRefToDest(module_file, dest=to_module)
+            if compiled_exists:
+                yield PathRefToDest(compiled_file, dest=to_compiled)
+
+    @staticmethod
+    def from_stdlib(mappings, name):
+        for from_std, to_std in mappings:
+            src = from_std / name
+            if src.exists():
+                return src, to_std, True
+        # if it does not exist, fall back to the first in the list
+        return mappings[0][0] / name, mappings[0][1], False
+
+    @classmethod
+    def mappings(cls, interpreter):
+        mappings = [(Path(interpreter.system_stdlib_platform), cls.to_stdlib_platform)]
+        if interpreter.system_stdlib_platform != interpreter.system_stdlib:
+            mappings.append((Path(interpreter.system_stdlib), cls.to_stdlib))
+        return mappings
+
+    def to_stdlib(self, src):
+        return self.stdlib / src.name
+
+    def to_stdlib_platform(self, src):
+        return self.stdlib_platform / src.name
+
+    @classmethod
+    def needs_stdlib_py_module(cls):
+        raise NotImplementedError
+
+    @classmethod
+    def modules(cls):
+        return []
+
+
+def get_custom_site():
+    return HERE / "site.py"
diff --git a/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/python2/site.py b/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/python2/site.py
new file mode 100644
index 00000000..366908e7
--- /dev/null
+++ b/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/python2/site.py
@@ -0,0 +1,165 @@
+# -*- coding: utf-8 -*-
+"""
+A simple shim module to fix up things on Python 2 only.
+
+Note: until we correctly set up the paths, we can only import built-ins.
+"""
+import sys
+
+
+def main():
+    """Patch what's needed, and invoke the original site.py"""
+    config = read_pyvenv()
+    sys.real_prefix = sys.base_prefix = config["base-prefix"]
+    sys.base_exec_prefix = config["base-exec-prefix"]
+    sys.base_executable = config["base-executable"]
+    global_site_package_enabled = config.get("include-system-site-packages", False) == "true"
+    rewrite_standard_library_sys_path()
+    disable_user_site_package()
+    load_host_site()
+    if global_site_package_enabled:
+        add_global_site_package()
+
+
+def load_host_site():
+    """trigger reload of site.py - now it will use the standard library instance that will take care of init"""
+    # we have a duality here: we generate the platform and pure library paths based on what distutils.install specifies
+    # because this is what pip will be using; the host site.py though may contain its own pattern for where the
+    # platform and pure library paths should exist
+
+    # notably on Ubuntu there's a patch for getsitepackages to point to
+    # - prefix + local/lib/pythonx.y/dist-packages
+    # - prefix + lib/pythonx.y/dist-packages
+    # while distutils.install.cmd still points both of these to
+    # - prefix + lib/python2.7/site-packages
+
+    # whether the two match or not, we first reload site.py, triggering the import of the host site.py,
+    # as this will ensure that the initialization code within the host site.py runs
+
+    here = __file__  # the distutils.install patterns will be injected relative to this site.py, save it here
+
+    # ___RELOAD_CODE___
+
+    # and then if the distutils site packages are not on the sys.path we add them via add_site_dir; note we must add
+    # them by invoking add_site_dir to trigger the processing of pth files
+    import os
+
+    site_packages = r"""
+    ___EXPECTED_SITE_PACKAGES___
+    """
+    import json
+
+    add_site_dir = sys.modules["site"].addsitedir
+    for path in json.loads(site_packages):
+        full_path = os.path.abspath(os.path.join(here, path.encode("utf-8")))
+        if full_path not in sys.path:
+            add_site_dir(full_path)
+
+
+sep = "\\" if sys.platform == "win32" else "/"  # no os module here yet - poor man's version
+
+
+def read_pyvenv():
+    """read pyvenv.cfg"""
+    config_file = "{}{}pyvenv.cfg".format(sys.prefix, sep)
+    with open(config_file) as file_handler:
+        lines = file_handler.readlines()
+    config = {}
+    for line in lines:
+        try:
+            split_at = line.index("=")
+        except ValueError:
+            continue  # ignore bad/empty lines
+        else:
+            config[line[:split_at].strip()] = line[split_at + 1 :].strip()
+    return config
+
+
+def rewrite_standard_library_sys_path():
+    """Once this site file is loaded the standard library paths have already been set, fix them up"""
+    exe, prefix, exec_prefix = get_exe_prefixes(base=False)
+    base_exe, base_prefix, base_exec = get_exe_prefixes(base=True)
+    exe_dir = exe[: exe.rfind(sep)]
+    for at, path in enumerate(sys.path):
+        path = abs_path(path)  # replace old sys prefix path starts with new
+        skip_rewrite = path == exe_dir  # don't fix the current executable location, notably on Windows this gets added
+        skip_rewrite = skip_rewrite  # ___SKIP_REWRITE____
+        if not skip_rewrite:
+            sys.path[at] = map_path(path, base_exe, exe_dir, exec_prefix, base_prefix, prefix, base_exec)
+
+    # the rewrite above may have changed elements from PYTHONPATH, revert these if on
+    if sys.flags.ignore_environment:
+        return
+    import os
+
+    python_paths = []
+    if "PYTHONPATH" in os.environ and os.environ["PYTHONPATH"]:
+        for path in os.environ["PYTHONPATH"].split(os.pathsep):
+            if path not in python_paths:
+                python_paths.append(path)
+    sys.path[: len(python_paths)] = python_paths
+
+
+def get_exe_prefixes(base=False):
+    return tuple(abs_path(getattr(sys, ("base_" if base else "") + i)) for i in ("executable", "prefix", "exec_prefix"))
+
+
+def abs_path(value):
+    values, keep = value.split(sep), []
+    at = len(values) - 1
+    while at >= 0:
+        if values[at] == "..":
+            at -= 1
+        else:
+            keep.append(values[at])
+        at -= 1
+    return sep.join(keep[::-1])
+
+
+def map_path(path, base_executable, exe_dir, exec_prefix, base_prefix, prefix, base_exec_prefix):
+    if path_starts_with(path, exe_dir):
+        # content inside the exe folder needs to remap to original executables folder
+        orig_exe_folder = base_executable[: base_executable.rfind(sep)]
+        return "{}{}".format(orig_exe_folder, path[len(exe_dir) :])
+    elif path_starts_with(path, prefix):
+        return "{}{}".format(base_prefix, path[len(prefix) :])
+    elif path_starts_with(path, exec_prefix):
+        return "{}{}".format(base_exec_prefix, path[len(exec_prefix) :])
+    return path
+
+
+def path_starts_with(directory, value):
+    return directory.startswith(value if value[-1] == sep else value + sep)
+
+
+def disable_user_site_package():
+    """Flip the switch that turns off the user site package"""
+    # sys.flags is a c-extension type, so we cannot monkeypatch it; replace it with a python class to flip it
+    sys.original_flags = sys.flags
+
+    class Flags(object):
+        def __init__(self):
+            self.__dict__ = {key: getattr(sys.flags, key) for key in dir(sys.flags) if not key.startswith("_")}
+
+    sys.flags = Flags()
+    sys.flags.no_user_site = 1
+
+
+def add_global_site_package():
+    """add the global site package"""
+    import site
+
+    # add user site package
+    sys.flags = sys.original_flags  # restore original
+    site.ENABLE_USER_SITE = None  # reset user site check
+    # add the global site package to the path - use new prefix and delegate to site.py
+    orig_prefixes = None
+    try:
+        orig_prefixes = site.PREFIXES
+        site.PREFIXES = [sys.base_prefix, sys.base_exec_prefix]
+        site.main()
+    finally:
+        site.PREFIXES = orig_prefixes
+
+
+main()
diff --git a/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/ref.py b/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/ref.py
new file mode 100644
index 00000000..263da3b1
--- /dev/null
+++ b/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/ref.py
@@ -0,0 +1,163 @@
+"""
+Virtual environments in the traditional sense are built as references to the host python. This file allows declarative
+references to elements on the file system, allowing our system to automatically detect what modes it can support given
+the constraints: e.g. can the file system symlink, can the files be read, executed, etc.
+""" +from __future__ import absolute_import, unicode_literals + +import os +from abc import ABCMeta, abstractmethod +from collections import OrderedDict +from stat import S_IXGRP, S_IXOTH, S_IXUSR + +from six import add_metaclass + +from virtualenv.info import fs_is_case_sensitive, fs_supports_symlink +from virtualenv.util.path import copy, make_exe, symlink +from virtualenv.util.six import ensure_text + + +@add_metaclass(ABCMeta) +class PathRef(object): + """Base class that checks if a file reference can be symlink/copied""" + + FS_SUPPORTS_SYMLINK = fs_supports_symlink() + FS_CASE_SENSITIVE = fs_is_case_sensitive() + + def __init__(self, src, must_symlink, must_copy): + self.must_symlink = must_symlink + self.must_copy = must_copy + self.src = src + try: + self.exists = src.exists() + except OSError: + self.exists = False + self._can_read = None if self.exists else False + self._can_copy = None if self.exists else False + self._can_symlink = None if self.exists else False + if self.must_copy is True and self.must_symlink is True: + raise ValueError("can copy and symlink at the same time") + + def __repr__(self): + return "{}(src={})".format(self.__class__.__name__, self.src) + + @property + def can_read(self): + if self._can_read is None: + if self.src.is_file(): + try: + with self.src.open("rb"): + self._can_read = True + except OSError: + self._can_read = False + else: + self._can_read = os.access(ensure_text(str(self.src)), os.R_OK) + return self._can_read + + @property + def can_copy(self): + if self._can_copy is None: + if self.must_symlink: + self._can_copy = self.can_symlink + else: + self._can_copy = self.can_read + return self._can_copy + + @property + def can_symlink(self): + if self._can_symlink is None: + if self.must_copy: + self._can_symlink = self.can_copy + else: + self._can_symlink = self.FS_SUPPORTS_SYMLINK and self.can_read + return self._can_symlink + + @abstractmethod + def run(self, creator, symlinks): + raise NotImplementedError + + def method(self, symlinks): + if self.must_symlink: + return symlink + if self.must_copy: + return copy + return symlink if symlinks else copy + + +@add_metaclass(ABCMeta) +class ExePathRef(PathRef): + """Base class that checks if a executable can be references via symlink/copy""" + + def __init__(self, src, must_symlink, must_copy): + super(ExePathRef, self).__init__(src, must_symlink, must_copy) + self._can_run = None + + @property + def can_symlink(self): + if self.FS_SUPPORTS_SYMLINK: + return self.can_run + return False + + @property + def can_run(self): + if self._can_run is None: + mode = self.src.stat().st_mode + for key in [S_IXUSR, S_IXGRP, S_IXOTH]: + if mode & key: + self._can_run = True + break + else: + self._can_run = False + return self._can_run + + +class PathRefToDest(PathRef): + """Link a path on the file system""" + + def __init__(self, src, dest, must_symlink=False, must_copy=False): + super(PathRefToDest, self).__init__(src, must_symlink, must_copy) + self.dest = dest + + def run(self, creator, symlinks): + dest = self.dest(creator, self.src) + method = self.method(symlinks) + dest_iterable = dest if isinstance(dest, list) else (dest,) + if not dest.parent.exists(): + dest.parent.mkdir(parents=True, exist_ok=True) + for dst in dest_iterable: + method(self.src, dst) + + +class ExePathRefToDest(PathRefToDest, ExePathRef): + """Link a exe path on the file system""" + + def __init__(self, src, targets, dest, must_symlink=False, must_copy=False): + ExePathRef.__init__(self, src, must_symlink, must_copy) + 
PathRefToDest.__init__(self, src, dest, must_symlink, must_copy) + if not self.FS_CASE_SENSITIVE: + targets = list(OrderedDict((i.lower(), None) for i in targets).keys()) + self.base = targets[0] + self.aliases = targets[1:] + self.dest = dest + self.must_copy = must_copy + + def run(self, creator, symlinks): + bin_dir = self.dest(creator, self.src).parent + dest = bin_dir / self.base + method = self.method(symlinks) + method(self.src, dest) + if not symlinks: + make_exe(dest) + for extra in self.aliases: + link_file = bin_dir / extra + if link_file.exists(): + link_file.unlink() + if symlinks: + link_file.symlink_to(self.base) + else: + copy(self.src, link_file) + if not symlinks: + make_exe(link_file) + + def __repr__(self): + return "{}(src={}, alias={})".format(self.__class__.__name__, self.src, self.aliases) diff --git a/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/via_global_self_do.py b/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/via_global_self_do.py new file mode 100644 index 00000000..338f5d18 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/create/via_global_ref/builtin/via_global_self_do.py @@ -0,0 +1,99 @@ +from __future__ import absolute_import, unicode_literals + +from abc import ABCMeta + +from six import add_metaclass + +from virtualenv.create.via_global_ref.builtin.ref import ExePathRefToDest +from virtualenv.util.path import ensure_dir + +from ..api import ViaGlobalRefApi, ViaGlobalRefMeta +from .builtin_way import VirtualenvBuiltin + + +class BuiltinViaGlobalRefMeta(ViaGlobalRefMeta): + def __init__(self): + super(BuiltinViaGlobalRefMeta, self).__init__() + self.sources = [] + + +@add_metaclass(ABCMeta) +class ViaGlobalRefVirtualenvBuiltin(ViaGlobalRefApi, VirtualenvBuiltin): + def __init__(self, options, interpreter): + super(ViaGlobalRefVirtualenvBuiltin, self).__init__(options, interpreter) + self._sources = getattr(options.meta, "sources", None) # if we're created as a describer this might be missing + + @classmethod + def can_create(cls, interpreter): + """By default all built-in methods assume that if we can describe it we can create it""" + # first we must be able to describe it + if cls.can_describe(interpreter): + meta = cls.setup_meta(interpreter) + if meta is not None and meta: + for src in cls.sources(interpreter): + if src.exists: + if meta.can_copy and not src.can_copy: + meta.copy_error = "cannot copy {}".format(src) + if meta.can_symlink and not src.can_symlink: + meta.symlink_error = "cannot symlink {}".format(src) + if not meta.can_copy and not meta.can_symlink: + meta.error = "neither copy or symlink supported, copy: {} symlink: {}".format( + meta.copy_error, meta.symlink_error, + ) + else: + meta.error = "missing required file {}".format(src) + if meta.error: + break + meta.sources.append(src) + return meta + return None + + @classmethod + def setup_meta(cls, interpreter): + return BuiltinViaGlobalRefMeta() + + @classmethod + def sources(cls, interpreter): + is_py2 = interpreter.version_info.major == 2 + for host_exe, targets in cls._executables(interpreter): + yield ExePathRefToDest(host_exe, dest=cls.to_bin, targets=targets, must_copy=is_py2) + + def to_bin(self, src): + return self.bin_dir / src.name + + @classmethod + def _executables(cls, interpreter): + raise NotImplementedError + + def create(self): + dirs = self.ensure_directories() + for directory in list(dirs): + if any(i for i in dirs if i is not directory and directory.parts == i.parts[: len(directory.parts)]): + dirs.remove(directory) + for 
directory in sorted(dirs): + ensure_dir(directory) + + self.set_pyenv_cfg() + self.pyenv_cfg.write() + true_system_site = self.enable_system_site_package + try: + self.enable_system_site_package = False + for src in self._sources: + src.run(self, self.symlinks) + finally: + if true_system_site != self.enable_system_site_package: + self.enable_system_site_package = true_system_site + super(ViaGlobalRefVirtualenvBuiltin, self).create() + + def ensure_directories(self): + return {self.dest, self.bin_dir, self.script_dir, self.stdlib} | set(self.libs) + + def set_pyenv_cfg(self): + """ + We directly inject the base prefix and base exec prefix to avoid site.py needing to discover these + from home (which usually is done within the interpreter itself) + """ + super(ViaGlobalRefVirtualenvBuiltin, self).set_pyenv_cfg() + self.pyenv_cfg["base-prefix"] = self.interpreter.system_prefix + self.pyenv_cfg["base-exec-prefix"] = self.interpreter.system_exec_prefix + self.pyenv_cfg["base-executable"] = self.interpreter.system_executable diff --git a/venv/Lib/site-packages/virtualenv/create/via_global_ref/store.py b/venv/Lib/site-packages/virtualenv/create/via_global_ref/store.py new file mode 100644 index 00000000..134a5358 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/create/via_global_ref/store.py @@ -0,0 +1,26 @@ +from __future__ import absolute_import, unicode_literals + +from virtualenv.util.path import Path + + +def handle_store_python(meta, interpreter): + if is_store_python(interpreter): + meta.symlink_error = "Windows Store Python does not support virtual environments via symlink" + return meta + + +def is_store_python(interpreter): + parts = Path(interpreter.system_executable).parts + return ( + len(parts) > 4 + and parts[-4] == "Microsoft" + and parts[-3] == "WindowsApps" + and parts[-2].startswith("PythonSoftwareFoundation.Python.3.") + and parts[-1].startswith("python") + ) + + +__all__ = ( + "handle_store_python", + "is_store_python", +) diff --git a/venv/Lib/site-packages/virtualenv/create/via_global_ref/venv.py b/venv/Lib/site-packages/virtualenv/create/via_global_ref/venv.py new file mode 100644 index 00000000..4a4ed770 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/create/via_global_ref/venv.py @@ -0,0 +1,80 @@ +from __future__ import absolute_import, unicode_literals + +import logging +from copy import copy + +from virtualenv.create.via_global_ref.store import handle_store_python +from virtualenv.discovery.py_info import PythonInfo +from virtualenv.util.error import ProcessCallFailed +from virtualenv.util.path import ensure_dir +from virtualenv.util.subprocess import run_cmd + +from .api import ViaGlobalRefApi, ViaGlobalRefMeta + + +class Venv(ViaGlobalRefApi): + def __init__(self, options, interpreter): + self.describe = options.describe + super(Venv, self).__init__(options, interpreter) + self.can_be_inline = ( + interpreter is PythonInfo.current() and interpreter.executable == interpreter.system_executable + ) + self._context = None + + def _args(self): + return super(Venv, self)._args() + ([("describe", self.describe.__class__.__name__)] if self.describe else []) + + @classmethod + def can_create(cls, interpreter): + if interpreter.has_venv: + meta = ViaGlobalRefMeta() + if interpreter.platform == "win32" and interpreter.version_info.major == 3: + meta = handle_store_python(meta, interpreter) + return meta + return None + + def create(self): + if self.can_be_inline: + self.create_inline() + else: + self.create_via_sub_process() + for lib in self.libs: + ensure_dir(lib) 
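+        # (editor's annotation: the loop above makes sure the computed purelib/platlib
+        # directories exist even when the underlying venv machinery did not create them,
+        # before the generic post-create steps below run)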
+        super(Venv, self).create()
+
+    def create_inline(self):
+        from venv import EnvBuilder
+
+        builder = EnvBuilder(
+            system_site_packages=self.enable_system_site_package, clear=False, symlinks=self.symlinks, with_pip=False,
+        )
+        builder.create(str(self.dest))
+
+    def create_via_sub_process(self):
+        cmd = self.get_host_create_cmd()
+        logging.info("using host built-in venv to create via %s", " ".join(cmd))
+        code, out, err = run_cmd(cmd)
+        if code != 0:
+            raise ProcessCallFailed(code, out, err, cmd)
+
+    def get_host_create_cmd(self):
+        cmd = [self.interpreter.system_executable, "-m", "venv", "--without-pip"]
+        if self.enable_system_site_package:
+            cmd.append("--system-site-packages")
+        cmd.append("--symlinks" if self.symlinks else "--copies")
+        cmd.append(str(self.dest))
+        return cmd
+
+    def set_pyenv_cfg(self):
+        # prefer venv options over ours, but keep our extra
+        venv_content = copy(self.pyenv_cfg.refresh())
+        super(Venv, self).set_pyenv_cfg()
+        self.pyenv_cfg.update(venv_content)
+
+    def __getattribute__(self, item):
+        describe = object.__getattribute__(self, "describe")
+        if describe is not None and hasattr(describe, item):
+            element = getattr(describe, item)
+            if not callable(element) or item in ("script",):
+                return element
+        return object.__getattribute__(self, item)
diff --git a/venv/Lib/site-packages/virtualenv/discovery/__init__.py b/venv/Lib/site-packages/virtualenv/discovery/__init__.py
new file mode 100644
index 00000000..01e6d4f4
--- /dev/null
+++ b/venv/Lib/site-packages/virtualenv/discovery/__init__.py
@@ -0,0 +1 @@
+from __future__ import absolute_import, unicode_literals
diff --git a/venv/Lib/site-packages/virtualenv/discovery/builtin.py b/venv/Lib/site-packages/virtualenv/discovery/builtin.py
new file mode 100644
index 00000000..4d57fa58
--- /dev/null
+++ b/venv/Lib/site-packages/virtualenv/discovery/builtin.py
@@ -0,0 +1,155 @@
+from __future__ import absolute_import, unicode_literals
+
+import logging
+import os
+import sys
+
+from virtualenv.info import IS_WIN
+from virtualenv.util.six import ensure_str, ensure_text
+
+from .discover import Discover
+from .py_info import PythonInfo
+from .py_spec import PythonSpec
+
+
+class Builtin(Discover):
+    def __init__(self, options):
+        super(Builtin, self).__init__(options)
+        self.python_spec = options.python
+        self.app_data = options.app_data
+
+    @classmethod
+    def add_parser_arguments(cls, parser):
+        parser.add_argument(
+            "-p",
+            "--python",
+            dest="python",
+            metavar="py",
+            help="target interpreter for which to create a virtual environment (either absolute path or identifier string)",
+            default=sys.executable,
+        )
+
+    def run(self):
+        return get_interpreter(self.python_spec, self.app_data)
+
+    def __repr__(self):
+        return ensure_str(self.__unicode__())
+
+    def __unicode__(self):
+        return "{} discover of python_spec={!r}".format(self.__class__.__name__, self.python_spec)
+
+
+def get_interpreter(key, app_data=None):
+    spec = PythonSpec.from_string_spec(key)
+    logging.info("find interpreter for spec %r", spec)
+    proposed_paths = set()
+    for interpreter, impl_must_match in propose_interpreters(spec, app_data):
+        key = interpreter.system_executable, impl_must_match
+        if key in proposed_paths:
+            continue
+        logging.info("proposed %s", interpreter)
+        if interpreter.satisfies(spec, impl_must_match):
+            logging.debug("accepted %s", interpreter)
+            return interpreter
+        proposed_paths.add(key)
+
+
+def propose_interpreters(spec, app_data):
+    # 1. if it's a path and exists
+    if spec.path is not None:
+        try:
+            os.lstat(spec.path)  # Windows Store Python does not work with os.path.exists, but does for os.lstat
+        except OSError:
+            if spec.is_abs:
+                raise
+        else:
+            yield PythonInfo.from_exe(os.path.abspath(spec.path), app_data), True
+        if spec.is_abs:
+            return
+    else:
+        # 2. otherwise try with the current
+        yield PythonInfo.current_system(app_data), True
+
+        # 3. otherwise fallback to platform default logic
+        if IS_WIN:
+            from .windows import propose_interpreters
+
+            for interpreter in propose_interpreters(spec, app_data):
+                yield interpreter, True
+    # finally just find on PATH - its order matters (as the candidates there are less easy for the end user to control)
+    paths = get_paths()
+    tested_exes = set()
+    for pos, path in enumerate(paths):
+        path = ensure_text(path)
+        logging.debug(LazyPathDump(pos, path))
+        for candidate, match in possible_specs(spec):
+            found = check_path(candidate, path)
+            if found is not None:
+                exe = os.path.abspath(found)
+                if exe not in tested_exes:
+                    tested_exes.add(exe)
+                    interpreter = PathPythonInfo.from_exe(exe, app_data, raise_on_error=False)
+                    if interpreter is not None:
+                        yield interpreter, match
+
+
+def get_paths():
+    path = os.environ.get(str("PATH"), None)
+    if path is None:
+        try:
+            path = os.confstr("CS_PATH")
+        except (AttributeError, ValueError):
+            path = os.defpath
+    if not path:
+        paths = []
+    else:
+        paths = [p for p in path.split(os.pathsep) if os.path.exists(p)]
+    return paths
+
+
+class LazyPathDump(object):
+    def __init__(self, pos, path):
+        self.pos = pos
+        self.path = path
+
+    def __repr__(self):
+        return ensure_str(self.__unicode__())
+
+    def __unicode__(self):
+        content = "discover PATH[{}]={}".format(self.pos, self.path)
+        if os.environ.get(str("_VIRTUALENV_DEBUG")):  # opt-in, extra verbose debug output
+            content += " with =>"
+            for file_name in os.listdir(self.path):
+                try:
+                    file_path = os.path.join(self.path, file_name)
+                    if os.path.isdir(file_path) or not os.access(file_path, os.X_OK):
+                        continue
+                except OSError:
+                    pass
+                content += " "
+                content += file_name
+        return content
+
+
+def check_path(candidate, path):
+    _, ext = os.path.splitext(candidate)
+    if sys.platform == "win32" and ext != ".exe":
+        candidate = candidate + ".exe"
+    if os.path.isfile(candidate):
+        return candidate
+    candidate = os.path.join(path, candidate)
+    if os.path.isfile(candidate):
+        return candidate
+    return None
+
+
+def possible_specs(spec):
+    # 4. then maybe it's something exact on PATH - for a direct lookup the implementation no longer needs to match
+    yield spec.str_spec, False
+    # 5. or from the spec we can deduce a name on path that matches
+    for exe, match in spec.generate_names():
+        yield exe, match
+
+
+class PathPythonInfo(PythonInfo):
+    """"""
diff --git a/venv/Lib/site-packages/virtualenv/discovery/cached_py_info.py b/venv/Lib/site-packages/virtualenv/discovery/cached_py_info.py
new file mode 100644
index 00000000..13a213d7
--- /dev/null
+++ b/venv/Lib/site-packages/virtualenv/discovery/cached_py_info.py
@@ -0,0 +1,145 @@
+"""
+We acquire the python information by running an interrogation script in a child process. This operation is not
+cheap, especially not on Windows. To avoid paying this hefty cost every time, we apply multiple levels of
+caching.
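+
+The levels, in lookup order, are: an in-memory ``_CACHE`` dictionary keyed by the executable's
+path, and a per-interpreter JSON file inside the application data folder that is validated
+against the executable's ``st_mtime``.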
+""" +from __future__ import absolute_import, unicode_literals + +import logging +import os +import pipes +import sys +from collections import OrderedDict + +from virtualenv.app_data import AppDataDisabled +from virtualenv.discovery.py_info import PythonInfo +from virtualenv.info import PY2 +from virtualenv.util.path import Path +from virtualenv.util.six import ensure_text +from virtualenv.util.subprocess import Popen, subprocess + +_CACHE = OrderedDict() +_CACHE[Path(sys.executable)] = PythonInfo() + + +def from_exe(cls, app_data, exe, raise_on_error=True, ignore_cache=False): + """""" + result = _get_from_cache(cls, app_data, exe, ignore_cache=ignore_cache) + if isinstance(result, Exception): + if raise_on_error: + raise result + else: + logging.info("%s", str(result)) + result = None + return result + + +def _get_from_cache(cls, app_data, exe, ignore_cache=True): + # note here we cannot resolve symlinks, as the symlink may trigger different prefix information if there's a + # pyenv.cfg somewhere alongside on python3.4+ + exe_path = Path(exe) + if not ignore_cache and exe_path in _CACHE: # check in the in-memory cache + result = _CACHE[exe_path] + else: # otherwise go through the app data cache + py_info = _get_via_file_cache(cls, app_data, exe_path, exe) + result = _CACHE[exe_path] = py_info + # independent if it was from the file or in-memory cache fix the original executable location + if isinstance(result, PythonInfo): + result.executable = exe + return result + + +def _get_via_file_cache(cls, app_data, path, exe): + path_text = ensure_text(str(path)) + try: + path_modified = path.stat().st_mtime + except OSError: + path_modified = -1 + if app_data is None: + app_data = AppDataDisabled() + py_info, py_info_store = None, app_data.py_info(path) + with py_info_store.locked(): + if py_info_store.exists(): # if exists and matches load + data = py_info_store.read() + of_path, of_st_mtime, of_content = data["path"], data["st_mtime"], data["content"] + if of_path == path_text and of_st_mtime == path_modified: + py_info = cls._from_dict({k: v for k, v in of_content.items()}) + else: + py_info_store.remove() + if py_info is None: # if not loaded run and save + failure, py_info = _run_subprocess(cls, exe, app_data) + if failure is None: + data = {"st_mtime": path_modified, "path": path_text, "content": py_info._to_dict()} + py_info_store.write(data) + else: + py_info = failure + return py_info + + +def _run_subprocess(cls, exe, app_data): + py_info_script = Path(os.path.abspath(__file__)).parent / "py_info.py" + with app_data.ensure_extracted(py_info_script) as py_info_script: + cmd = [exe, str(py_info_script)] + # prevent sys.prefix from leaking into the child process - see https://bugs.python.org/issue22490 + env = os.environ.copy() + env.pop("__PYVENV_LAUNCHER__", None) + logging.debug("get interpreter info via cmd: %s", LogCmd(cmd)) + try: + process = Popen( + cmd, + universal_newlines=True, + stdin=subprocess.PIPE, + stderr=subprocess.PIPE, + stdout=subprocess.PIPE, + env=env, + ) + out, err = process.communicate() + code = process.returncode + except OSError as os_error: + out, err, code = "", os_error.strerror, os_error.errno + result, failure = None, None + if code == 0: + result = cls._from_json(out) + result.executable = exe # keep original executable as this may contain initialization code + else: + msg = "failed to query {} with code {}{}{}".format( + exe, code, " out: {!r}".format(out) if out else "", " err: {!r}".format(err) if err else "", + ) + failure = RuntimeError(msg) + 
+    return failure, result
+
+
+class LogCmd(object):
+    def __init__(self, cmd, env=None):
+        self.cmd = cmd
+        self.env = env
+
+    def __repr__(self):
+        def e(v):
+            return v.decode("utf-8") if isinstance(v, bytes) else v
+
+        cmd_repr = e(" ").join(pipes.quote(e(c)) for c in self.cmd)
+        if self.env is not None:
+            cmd_repr += e(" env of {!r}").format(self.env)
+        if PY2:
+            return cmd_repr.encode("utf-8")
+        return cmd_repr
+
+    def __unicode__(self):
+        raw = repr(self)
+        if PY2:
+            return raw.decode("utf-8")
+        return raw
+
+
+def clear(app_data):
+    app_data.py_info_clear()
+    _CACHE.clear()
+
+
+__all__ = (
+    "from_exe",
+    "clear",
+    "LogCmd",
+)
diff --git a/venv/Lib/site-packages/virtualenv/discovery/discover.py b/venv/Lib/site-packages/virtualenv/discovery/discover.py
new file mode 100644
index 00000000..93c3ea7a
--- /dev/null
+++ b/venv/Lib/site-packages/virtualenv/discovery/discover.py
@@ -0,0 +1,46 @@
+from __future__ import absolute_import, unicode_literals
+
+from abc import ABCMeta, abstractmethod
+
+from six import add_metaclass
+
+
+@add_metaclass(ABCMeta)
+class Discover(object):
+    """Discover and provide the requested Python interpreter"""
+
+    @classmethod
+    def add_parser_arguments(cls, parser):
+        """Add CLI arguments for this discovery mechanism.
+
+        :param parser: the CLI parser
+        """
+        raise NotImplementedError
+
+    # noinspection PyUnusedLocal
+    def __init__(self, options):
+        """Create a new discovery mechanism.
+
+        :param options: the parsed options as defined within :meth:`add_parser_arguments`
+        """
+        self._has_run = False
+        self._interpreter = None
+
+    @abstractmethod
+    def run(self):
+        """Discovers an interpreter.
+
+        :return: the interpreter ready to use for virtual environment creation
+        """
+        raise NotImplementedError
+
+    @property
+    def interpreter(self):
+        """
+        :return: the interpreter as returned by :meth:`run`, cached
+        """
+        if self._has_run is False:
+            self._interpreter = self.run()
+            self._has_run = True
+        return self._interpreter
diff --git a/venv/Lib/site-packages/virtualenv/discovery/py_info.py b/venv/Lib/site-packages/virtualenv/discovery/py_info.py
new file mode 100644
index 00000000..6f121282
--- /dev/null
+++ b/venv/Lib/site-packages/virtualenv/discovery/py_info.py
@@ -0,0 +1,489 @@
+"""
+The PythonInfo contains information about a concrete instance of a Python interpreter
+
+Note: this file is also used to query target interpreters, so can only use standard library methods
+"""
+from __future__ import absolute_import, print_function
+
+import json
+import logging
+import os
+import platform
+import re
+import sys
+import sysconfig
+from collections import OrderedDict, namedtuple
+from distutils import dist
+from distutils.command.install import SCHEME_KEYS
+from string import digits
+
+VersionInfo = namedtuple("VersionInfo", ["major", "minor", "micro", "releaselevel", "serial"])
+
+
+def _get_path_extensions():
+    return list(OrderedDict.fromkeys([""] + os.environ.get("PATHEXT", "").lower().split(os.pathsep)))
+
+
+EXTENSIONS = _get_path_extensions()
+_CONF_VAR_RE = re.compile(r"\{\w+\}")
+
+
+class PythonInfo(object):
+    """Contains information for a Python interpreter"""
+
+    def __init__(self):
+        def u(v):
+            return v.decode("utf-8") if isinstance(v, bytes) else v
+
+        def abs_path(v):
+            return None if v is None else os.path.abspath(v)  # unroll relative elements from path (e.g. ..)
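+        # note: u() normalizes bytes to text and abs_path() unrolls relative segments, so the
+        # attributes captured below are absolute, text-typed values where applicable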
+
+        # qualifies the python
+        self.platform = u(sys.platform)
+        self.implementation = u(platform.python_implementation())
+        if self.implementation == "PyPy":
+            self.pypy_version_info = tuple(u(i) for i in sys.pypy_version_info)
+
+        # sys.version_info is a plain tuple on older interpreters and a struct sequence on newer
+        # ones - unify it to our own named tuple
+        self.version_info = VersionInfo(*list(u(i) for i in sys.version_info))
+        self.architecture = 64 if sys.maxsize > 2 ** 32 else 32
+
+        self.version = u(sys.version)
+        self.os = u(os.name)
+
+        # information about the prefix - determines python home
+        self.prefix = u(abs_path(getattr(sys, "prefix", None)))  # prefix we think
+        self.base_prefix = u(abs_path(getattr(sys, "base_prefix", None)))  # venv
+        self.real_prefix = u(abs_path(getattr(sys, "real_prefix", None)))  # old virtualenv
+
+        # information about the exec prefix - dynamic stdlib modules
+        self.base_exec_prefix = u(abs_path(getattr(sys, "base_exec_prefix", None)))
+        self.exec_prefix = u(abs_path(getattr(sys, "exec_prefix", None)))
+
+        self.executable = u(abs_path(sys.executable))  # the executable we were invoked via
+        self.original_executable = u(abs_path(self.executable))  # the executable as known by the interpreter
+        self.system_executable = self._fast_get_system_executable()  # the executable we are based off (if available)
+
+        try:
+            __import__("venv")
+            has = True
+        except ImportError:
+            has = False
+        self.has_venv = has
+        self.path = [u(i) for i in sys.path]
+        self.file_system_encoding = u(sys.getfilesystemencoding())
+        self.stdout_encoding = u(getattr(sys.stdout, "encoding", None))
+
+        self.sysconfig_paths = {u(i): u(sysconfig.get_path(i, expand=False)) for i in sysconfig.get_path_names()}
+        # https://bugs.python.org/issue22199
+        makefile = getattr(sysconfig, "get_makefile_filename", getattr(sysconfig, "_get_makefile_filename", None))
+        self.sysconfig = {
+            u(k): u(v)
+            for k, v in [
+                # a list of content to store from sysconfig
+                ("makefile_filename", makefile()),
+            ]
+            if k is not None
+        }
+
+        config_var_keys = set()
+        for element in self.sysconfig_paths.values():
+            for k in _CONF_VAR_RE.findall(element):
+                config_var_keys.add(u(k[1:-1]))
+        config_var_keys.add("PYTHONFRAMEWORK")
+
+        self.sysconfig_vars = {u(i): u(sysconfig.get_config_var(i) or "") for i in config_var_keys}
+        if self.implementation == "PyPy" and sys.version_info.major == 2:
+            self.sysconfig_vars[u"implementation_lower"] = u"python"
+
+        self.distutils_install = {u(k): u(v) for k, v in self._distutils_install().items()}
+        confs = {k: (self.system_prefix if v.startswith(self.prefix) else v) for k, v in self.sysconfig_vars.items()}
+        self.system_stdlib = self.sysconfig_path("stdlib", confs)
+        self.system_stdlib_platform = self.sysconfig_path("platstdlib", confs)
+        self.max_size = getattr(sys, "maxsize", getattr(sys, "maxint", None))
+        self._creators = None
+
+    def _fast_get_system_executable(self):
+        """Try to get the system executable by just looking at properties"""
+        if self.real_prefix or (
+            self.base_prefix is not None and self.base_prefix != self.prefix
+        ):  # if this is a virtual environment
+            if self.real_prefix is None:
+                base_executable = getattr(sys, "_base_executable", None)  # some platforms may set this to help us
+                if base_executable is not None:  # use the saved system executable if present
+                    if sys.executable != base_executable:  # we know we're in a virtual environment, cannot be us
+                        return base_executable
+            return None  # in this case we just can't tell easily without poking around FS and calling them, bail
+        # if we're not in a virtual environment, this is already a system python, so return the original executable
+        # note we must choose the original and not the pure executable as shim scripts might throw us off
+        return self.original_executable
+
+    @staticmethod
+    def _distutils_install():
+        # follow https://github.com/pypa/pip/blob/master/src/pip/_internal/locations.py#L95
+        # note here we don't import Distribution directly to allow setuptools to patch it
+        d = dist.Distribution({"script_args": "--no-user-cfg"})  # conf files not parsed so they do not hijack paths
+        if hasattr(sys, "_framework"):
+            sys._framework = None  # disable macOS static paths for framework
+        i = d.get_command_obj("install", create=True)
+        i.prefix = os.sep  # paths generated are relative to prefix that contains the path sep, this makes it relative
+        i.finalize_options()
+        result = {key: (getattr(i, "install_{}".format(key))[1:]).lstrip(os.sep) for key in SCHEME_KEYS}
+        return result
+
+    @property
+    def version_str(self):
+        return ".".join(str(i) for i in self.version_info[0:3])
+
+    @property
+    def version_release_str(self):
+        return ".".join(str(i) for i in self.version_info[0:2])
+
+    @property
+    def python_name(self):
+        version_info = self.version_info
+        return "python{}.{}".format(version_info.major, version_info.minor)
+
+    @property
+    def is_old_virtualenv(self):
+        return self.real_prefix is not None
+
+    @property
+    def is_venv(self):
+        return self.base_prefix is not None and self.version_info.major == 3
+
+    def sysconfig_path(self, key, config_var=None, sep=os.sep):
+        pattern = self.sysconfig_paths[key]
+        if config_var is None:
+            config_var = self.sysconfig_vars
+        else:
+            base = {k: v for k, v in self.sysconfig_vars.items()}
+            base.update(config_var)
+            config_var = base
+        return pattern.format(**config_var).replace(u"/", sep)
+
+    def creators(self, refresh=False):
+        if self._creators is None or refresh is True:
+            from virtualenv.run.plugin.creators import CreatorSelector
+
+            self._creators = CreatorSelector.for_interpreter(self)
+        return self._creators
+
+    @property
+    def system_include(self):
+        path = self.sysconfig_path(
+            "include",
+            {k: (self.system_prefix if v.startswith(self.prefix) else v) for k, v in self.sysconfig_vars.items()},
+        )
+        if not os.path.exists(path):  # some broken packaging don't respect the sysconfig, fallback to distutils path
+            # the pattern include the distribution name too at the end, remove that via the parent call
+            fallback = os.path.join(self.prefix, os.path.dirname(self.distutils_install["headers"]))
+            if os.path.exists(fallback):
+                path = fallback
+        return path
+
+    @property
+    def system_prefix(self):
+        return self.real_prefix or self.base_prefix or self.prefix
+
+    @property
+    def system_exec_prefix(self):
+        return self.real_prefix or self.base_exec_prefix or self.exec_prefix
+
+    def __unicode__(self):
+        content = repr(self)
+        if sys.version_info[0] == 2:
+            content = content.decode("utf-8")
+        return content
+
+    def __repr__(self):
+        return "{}({!r})".format(
+            self.__class__.__name__, {k: v for k, v in self.__dict__.items() if not k.startswith("_")},
+        )
+
+    def __str__(self):
+        content = "{}({})".format(
+            self.__class__.__name__,
+            ", ".join(
+                "{}={}".format(k, v)
+                for k, v in (
+                    ("spec", self.spec),
+                    (
+                        "system"
+                        if self.system_executable is not None and self.system_executable != self.executable
+                        else None,
+                        self.system_executable,
+                    ),
+                    (
+                        "original"
+                        if (
+                            self.original_executable != self.system_executable
+                            and self.original_executable != self.executable
+                        )
+                        else None,
+                        self.original_executable,
+                    ),
("exe", self.executable), + ("platform", self.platform), + ("version", repr(self.version)), + ("encoding_fs_io", "{}-{}".format(self.file_system_encoding, self.stdout_encoding)), + ) + if k is not None + ), + ) + return content + + @property + def spec(self): + return "{}{}-{}".format(self.implementation, ".".join(str(i) for i in self.version_info), self.architecture) + + @classmethod + def clear_cache(cls, app_data): + # this method is not used by itself, so here and called functions can import stuff locally + from virtualenv.discovery.cached_py_info import clear + + clear(app_data) + cls._cache_exe_discovery.clear() + + def satisfies(self, spec, impl_must_match): + """check if a given specification can be satisfied by the this python interpreter instance""" + if spec.path: + if self.executable == os.path.abspath(spec.path): + return True # if the path is a our own executable path we're done + if not spec.is_abs: + # if path set, and is not our original executable name, this does not match + basename = os.path.basename(self.original_executable) + spec_path = spec.path + if sys.platform == "win32": + basename, suffix = os.path.splitext(basename) + if spec_path.endswith(suffix): + spec_path = spec_path[: -len(suffix)] + if basename != spec_path: + return False + + if impl_must_match: + if spec.implementation is not None and spec.implementation.lower() != self.implementation.lower(): + return False + + if spec.architecture is not None and spec.architecture != self.architecture: + return False + + for our, req in zip(self.version_info[0:3], (spec.major, spec.minor, spec.micro)): + if req is not None and our is not None and our != req: + return False + return True + + _current_system = None + _current = None + + @classmethod + def current(cls, app_data=None): + """ + This locates the current host interpreter information. This might be different than what we run into in case + the host python has been upgraded from underneath us. + """ + if cls._current is None: + cls._current = cls.from_exe(sys.executable, app_data, raise_on_error=True, resolve_to_host=False) + return cls._current + + @classmethod + def current_system(cls, app_data=None): + """ + This locates the current host interpreter information. This might be different than what we run into in case + the host python has been upgraded from underneath us. 
+ """ + if cls._current_system is None: + cls._current_system = cls.from_exe(sys.executable, app_data, raise_on_error=True, resolve_to_host=True) + return cls._current_system + + def _to_json(self): + # don't save calculated paths, as these are non primitive types + return json.dumps(self._to_dict(), indent=2) + + def _to_dict(self): + data = {var: (getattr(self, var) if var not in ("_creators",) else None) for var in vars(self)} + # noinspection PyProtectedMember + data["version_info"] = data["version_info"]._asdict() # namedtuple to dictionary + return data + + @classmethod + def from_exe(cls, exe, app_data=None, raise_on_error=True, ignore_cache=False, resolve_to_host=True): + """Given a path to an executable get the python information""" + # this method is not used by itself, so here and called functions can import stuff locally + from virtualenv.discovery.cached_py_info import from_exe + + proposed = from_exe(cls, app_data, exe, raise_on_error=raise_on_error, ignore_cache=ignore_cache) + # noinspection PyProtectedMember + if isinstance(proposed, PythonInfo) and resolve_to_host: + try: + proposed = proposed._resolve_to_system(app_data, proposed) + except Exception as exception: + if raise_on_error: + raise exception + logging.info("ignore %s due cannot resolve system due to %r", proposed.original_executable, exception) + proposed = None + return proposed + + @classmethod + def _from_json(cls, payload): + # the dictionary unroll here is to protect against pypy bug of interpreter crashing + raw = json.loads(payload) + return cls._from_dict({k: v for k, v in raw.items()}) + + @classmethod + def _from_dict(cls, data): + data["version_info"] = VersionInfo(**data["version_info"]) # restore this to a named tuple structure + result = cls() + result.__dict__ = {k: v for k, v in data.items()} + return result + + @classmethod + def _resolve_to_system(cls, app_data, target): + start_executable = target.executable + prefixes = OrderedDict() + while target.system_executable is None: + prefix = target.real_prefix or target.base_prefix or target.prefix + if prefix in prefixes: + if len(prefixes) == 1: + # if we're linking back to ourselves accept ourselves with a WARNING + logging.info("%r links back to itself via prefixes", target) + target.system_executable = target.executable + break + for at, (p, t) in enumerate(prefixes.items(), start=1): + logging.error("%d: prefix=%s, info=%r", at, p, t) + logging.error("%d: prefix=%s, info=%r", len(prefixes) + 1, prefix, target) + raise RuntimeError("prefixes are causing a circle {}".format("|".join(prefixes.keys()))) + prefixes[prefix] = target + target = target.discover_exe(app_data, prefix=prefix, exact=False) + if target.executable != target.system_executable: + target = cls.from_exe(target.system_executable, app_data) + target.executable = start_executable + return target + + _cache_exe_discovery = {} + + def discover_exe(self, app_data, prefix, exact=True): + key = prefix, exact + if key in self._cache_exe_discovery and prefix: + logging.debug("discover exe from cache %s - exact %s: %r", prefix, exact, self._cache_exe_discovery[key]) + return self._cache_exe_discovery[key] + logging.debug("discover exe for %s in %s", self, prefix) + # we don't know explicitly here, do some guess work - our executable name should tell + possible_names = self._find_possible_exe_names() + possible_folders = self._find_possible_folders(prefix) + discovered = [] + for folder in possible_folders: + for name in possible_names: + info = self._check_exe(app_data, folder, name, 
exact, discovered)
+                if info is not None:
+                    self._cache_exe_discovery[key] = info
+                    return info
+        if exact is False and discovered:
+            info = self._select_most_likely(discovered, self)
+            folders = os.pathsep.join(possible_folders)
+            self._cache_exe_discovery[key] = info
+            logging.debug("no exact match found, chosen most similar of %s within base folders %s", info, folders)
+            return info
+        msg = "failed to detect {} in {}".format("|".join(possible_names), os.pathsep.join(possible_folders))
+        raise RuntimeError(msg)
+
+    def _check_exe(self, app_data, folder, name, exact, discovered):
+        exe_path = os.path.join(folder, name)
+        if not os.path.exists(exe_path):
+            return None
+        info = self.from_exe(exe_path, app_data, resolve_to_host=False, raise_on_error=False)
+        if info is None:  # ignore if for some reason we can't query
+            return None
+        for item in ["implementation", "architecture", "version_info"]:
+            found = getattr(info, item)
+            searched = getattr(self, item)
+            if found != searched:
+                if item == "version_info":
+                    found, searched = ".".join(str(i) for i in found), ".".join(str(i) for i in searched)
+                executable = info.executable
+                logging.debug("refused interpreter %s because %s differs %s != %s", executable, item, found, searched)
+                if exact is False:
+                    discovered.append(info)
+                break
+        else:
+            return info
+        return None
+
+    @staticmethod
+    def _select_most_likely(discovered, target):
+        # no exact match found, so relax our requirements to accommodate system package upgrades that
+        # could cause this (when using the copy strategy of the host python)
+        def sort_by(info):
+            # rank by traits in this priority order:
+            # implementation, major, minor, architecture, micro, releaselevel, serial
+            matches = [
+                info.implementation == target.implementation,
+                info.version_info.major == target.version_info.major,
+                info.version_info.minor == target.version_info.minor,
+                info.architecture == target.architecture,
+                info.version_info.micro == target.version_info.micro,
+                info.version_info.releaselevel == target.version_info.releaselevel,
+                info.version_info.serial == target.version_info.serial,
+            ]
+            priority = sum((1 << pos if match else 0) for pos, match in enumerate(reversed(matches)))
+            return priority
+
+        sorted_discovered = sorted(discovered, key=sort_by, reverse=True)  # sort by priority in decreasing order
+        most_likely = sorted_discovered[0]
+        return most_likely
+
+    def _find_possible_folders(self, inside_folder):
+        candidate_folder = OrderedDict()
+        executables = OrderedDict()
+        executables[os.path.realpath(self.executable)] = None
+        executables[self.executable] = None
+        executables[os.path.realpath(self.original_executable)] = None
+        executables[self.original_executable] = None
+        for exe in executables.keys():
+            base = os.path.dirname(exe)
+            # following path pattern of the current
+            if base.startswith(self.prefix):
+                relative = base[len(self.prefix) :]
+                candidate_folder["{}{}".format(inside_folder, relative)] = None
+
+        # or at root level
+        candidate_folder[inside_folder] = None
+        return list(i for i in candidate_folder.keys() if os.path.exists(i))
+
+    def _find_possible_exe_names(self):
+        name_candidate = OrderedDict()
+        for name in self._possible_base():
+            for at in (3, 2, 1, 0):
+                version = ".".join(str(i) for i in self.version_info[:at])
+                for arch in ["-{}".format(self.architecture), ""]:
+                    for ext in EXTENSIONS:
+                        candidate = "{}{}{}{}".format(name, version, arch, ext)
+                        name_candidate[candidate] = None
+        return list(name_candidate.keys())
+
+    def _possible_base(self):
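+        # yield candidate base names: the executable's basename stripped of any trailing
+        # version digits, then the implementation name, with plain "python" always tried last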
+        possible_base = OrderedDict()
+        basename = os.path.splitext(os.path.basename(self.executable))[0].rstrip(digits)
+        possible_base[basename] = None
+        possible_base[self.implementation] = None
+        # python is always the final option as in practice is used by multiple implementation as exe name
+        if "python" in possible_base:
+            del possible_base["python"]
+        possible_base["python"] = None
+        for base in possible_base:
+            lower = base.lower()
+            yield lower
+            from virtualenv.info import fs_is_case_sensitive
+
+            if fs_is_case_sensitive():
+                if base != lower:
+                    yield base
+                upper = base.upper()
+                if upper != base:
+                    yield upper
+
+
+if __name__ == "__main__":
+    # dump a JSON representation of the current python
+    # noinspection PyProtectedMember
+    print(PythonInfo()._to_json())
diff --git a/venv/Lib/site-packages/virtualenv/discovery/py_spec.py b/venv/Lib/site-packages/virtualenv/discovery/py_spec.py
new file mode 100644
index 00000000..cb63e151
--- /dev/null
+++ b/venv/Lib/site-packages/virtualenv/discovery/py_spec.py
@@ -0,0 +1,122 @@
+"""A Python specification is an abstract requirement definition of an interpreter"""
+from __future__ import absolute_import, unicode_literals
+
+import os
+import re
+import sys
+from collections import OrderedDict
+
+from virtualenv.info import fs_is_case_sensitive
+from virtualenv.util.six import ensure_str
+
+PATTERN = re.compile(r"^(?P<impl>[a-zA-Z]+)?(?P<version>[0-9.]+)?(?:-(?P<arch>32|64))?$")
+IS_WIN = sys.platform == "win32"
+
+
+class PythonSpec(object):
+    """Contains specification about a Python Interpreter"""
+
+    def __init__(self, str_spec, implementation, major, minor, micro, architecture, path):
+        self.str_spec = str_spec
+        self.implementation = implementation
+        self.major = major
+        self.minor = minor
+        self.micro = micro
+        self.architecture = architecture
+        self.path = path
+
+    @classmethod
+    def from_string_spec(cls, string_spec):
+        impl, major, minor, micro, arch, path = None, None, None, None, None, None
+        if os.path.isabs(string_spec):
+            path = string_spec
+        else:
+            ok = False
+            match = re.match(PATTERN, string_spec)
+            if match:
+
+                def _int_or_none(val):
+                    return None if val is None else int(val)
+
+                try:
+                    groups = match.groupdict()
+                    version = groups["version"]
+                    if version is not None:
+                        versions = tuple(int(i) for i in version.split(".") if i)
+                        if len(versions) > 3:
+                            raise ValueError
+                        if len(versions) == 3:
+                            major, minor, micro = versions
+                        elif len(versions) == 2:
+                            major, minor = versions
+                        elif len(versions) == 1:
+                            version_data = versions[0]
+                            major = int(str(version_data)[0])  # first digit major
+                            if version_data > 9:
+                                minor = int(str(version_data)[1:])
+                    ok = True
+                except ValueError:
+                    pass
+                else:
+                    impl = groups["impl"]
+                    if impl == "py" or impl == "python":
+                        impl = "CPython"
+                    arch = _int_or_none(groups["arch"])
+
+            if not ok:
+                path = string_spec
+
+        return cls(string_spec, impl, major, minor, micro, arch, path)
+
+    def generate_names(self):
+        impls = OrderedDict()
+        if self.implementation:
+            # first consider implementation as it is
+            impls[self.implementation] = False
+            if fs_is_case_sensitive():
+                # for case sensitive file systems consider lower and upper case versions too
+                # trivia: MacBooks and all pre 2018 Windows-es were case insensitive by default
+                impls[self.implementation.lower()] = False
+                impls[self.implementation.upper()] = False
+        impls["python"] = True  # finally consider python as alias, implementation must match now
+        version = self.major, self.minor, self.micro
+        try:
+            version = version[: version.index(None)]
+        except ValueError:
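+            # version.index(None) found no None - major, minor and micro are all pinned,
+            # so there is nothing to trim from the tuple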
+ pass + for impl, match in impls.items(): + for at in range(len(version), -1, -1): + cur_ver = version[0:at] + spec = "{}{}".format(impl, ".".join(str(i) for i in cur_ver)) + yield spec, match + + @property + def is_abs(self): + return self.path is not None and os.path.isabs(self.path) + + def satisfies(self, spec): + """called when there's a candidate metadata spec to see if compatible - e.g. PEP-514 on Windows""" + if spec.is_abs and self.is_abs and self.path != spec.path: + return False + if spec.implementation is not None and spec.implementation.lower() != self.implementation.lower(): + return False + if spec.architecture is not None and spec.architecture != self.architecture: + return False + + for our, req in zip((self.major, self.minor, self.micro), (spec.major, spec.minor, spec.micro)): + if req is not None and our is not None and our != req: + return False + return True + + def __unicode__(self): + return "{}({})".format( + type(self).__name__, + ", ".join( + "{}={}".format(k, getattr(self, k)) + for k in ("implementation", "major", "minor", "micro", "architecture", "path") + if getattr(self, k) is not None + ), + ) + + def __repr__(self): + return ensure_str(self.__unicode__()) diff --git a/venv/Lib/site-packages/virtualenv/discovery/windows/__init__.py b/venv/Lib/site-packages/virtualenv/discovery/windows/__init__.py new file mode 100644 index 00000000..9063ab8d --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/discovery/windows/__init__.py @@ -0,0 +1,28 @@ +from __future__ import absolute_import, unicode_literals + +from ..py_info import PythonInfo +from ..py_spec import PythonSpec +from .pep514 import discover_pythons + + +class Pep514PythonInfo(PythonInfo): + """""" + + +def propose_interpreters(spec, cache_dir): + # see if PEP-514 entries are good + + # start with higher python versions in an effort to use the latest version available + existing = list(discover_pythons()) + existing.sort(key=lambda i: tuple(-1 if j is None else j for j in i[1:4]), reverse=True) + + for name, major, minor, arch, exe, _ in existing: + # pre-filter + if name in ("PythonCore", "ContinuumAnalytics"): + name = "CPython" + registry_spec = PythonSpec(None, name, major, minor, None, arch, exe) + if registry_spec.satisfies(spec): + interpreter = Pep514PythonInfo.from_exe(exe, cache_dir, raise_on_error=False) + if interpreter is not None: + if interpreter.satisfies(spec, impl_must_match=True): + yield interpreter diff --git a/venv/Lib/site-packages/virtualenv/discovery/windows/pep514.py b/venv/Lib/site-packages/virtualenv/discovery/windows/pep514.py new file mode 100644 index 00000000..048436a6 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/discovery/windows/pep514.py @@ -0,0 +1,161 @@ +"""Implement https://www.python.org/dev/peps/pep-0514/ to discover interpreters - Windows only""" +from __future__ import absolute_import, print_function, unicode_literals + +import os +import re +from logging import basicConfig, getLogger + +import six + +if six.PY3: + import winreg +else: + # noinspection PyUnresolvedReferences + import _winreg as winreg + +LOGGER = getLogger(__name__) + + +def enum_keys(key): + at = 0 + while True: + try: + yield winreg.EnumKey(key, at) + except OSError: + break + at += 1 + + +def get_value(key, value_name): + try: + return winreg.QueryValueEx(key, value_name)[0] + except OSError: + return None + + +def discover_pythons(): + for hive, hive_name, key, flags, default_arch in [ + (winreg.HKEY_CURRENT_USER, "HKEY_CURRENT_USER", r"Software\Python", 0, 64), + 
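+        # per PEP 514 also inspect both the 64-bit and the 32-bit registry views of
+        # HKEY_LOCAL_MACHINE, hence the KEY_WOW64_64KEY / KEY_WOW64_32KEY access flags below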
(winreg.HKEY_LOCAL_MACHINE, "HKEY_LOCAL_MACHINE", r"Software\Python", winreg.KEY_WOW64_64KEY, 64), + (winreg.HKEY_LOCAL_MACHINE, "HKEY_LOCAL_MACHINE", r"Software\Python", winreg.KEY_WOW64_32KEY, 32), + ]: + for spec in process_set(hive, hive_name, key, flags, default_arch): + yield spec + + +def process_set(hive, hive_name, key, flags, default_arch): + try: + with winreg.OpenKeyEx(hive, key, 0, winreg.KEY_READ | flags) as root_key: + for company in enum_keys(root_key): + if company == "PyLauncher": # reserved + continue + for spec in process_company(hive_name, company, root_key, default_arch): + yield spec + except OSError: + pass + + +def process_company(hive_name, company, root_key, default_arch): + with winreg.OpenKeyEx(root_key, company) as company_key: + for tag in enum_keys(company_key): + spec = process_tag(hive_name, company, company_key, tag, default_arch) + if spec is not None: + yield spec + + +def process_tag(hive_name, company, company_key, tag, default_arch): + with winreg.OpenKeyEx(company_key, tag) as tag_key: + version = load_version_data(hive_name, company, tag, tag_key) + if version is not None: # if failed to get version bail + major, minor, _ = version + arch = load_arch_data(hive_name, company, tag, tag_key, default_arch) + if arch is not None: + exe_data = load_exe(hive_name, company, company_key, tag) + if exe_data is not None: + exe, args = exe_data + return company, major, minor, arch, exe, args + + +def load_exe(hive_name, company, company_key, tag): + key_path = "{}/{}/{}".format(hive_name, company, tag) + try: + with winreg.OpenKeyEx(company_key, r"{}\InstallPath".format(tag)) as ip_key: + with ip_key: + exe = get_value(ip_key, "ExecutablePath") + if exe is None: + ip = get_value(ip_key, None) + if ip is None: + msg(key_path, "no ExecutablePath or default for it") + + else: + exe = os.path.join(ip, str("python.exe")) + if exe is not None and os.path.exists(exe): + args = get_value(ip_key, "ExecutableArguments") + return exe, args + else: + msg(key_path, "could not load exe with value {}".format(exe)) + except OSError: + msg("{}/{}".format(key_path, "InstallPath"), "missing") + return None + + +def load_arch_data(hive_name, company, tag, tag_key, default_arch): + arch_str = get_value(tag_key, "SysArchitecture") + if arch_str is not None: + key_path = "{}/{}/{}/SysArchitecture".format(hive_name, company, tag) + try: + return parse_arch(arch_str) + except ValueError as sys_arch: + msg(key_path, sys_arch) + return default_arch + + +def parse_arch(arch_str): + if isinstance(arch_str, six.string_types): + match = re.match(r"^(\d+)bit$", arch_str) + if match: + return int(next(iter(match.groups()))) + error = "invalid format {}".format(arch_str) + else: + error = "arch is not string: {}".format(repr(arch_str)) + raise ValueError(error) + + +def load_version_data(hive_name, company, tag, tag_key): + for candidate, key_path in [ + (get_value(tag_key, "SysVersion"), "{}/{}/{}/SysVersion".format(hive_name, company, tag)), + (tag, "{}/{}/{}".format(hive_name, company, tag)), + ]: + if candidate is not None: + try: + return parse_version(candidate) + except ValueError as sys_version: + msg(key_path, sys_version) + return None + + +def parse_version(version_str): + if isinstance(version_str, six.string_types): + match = re.match(r"^(\d+)(?:\.(\d+))?(?:\.(\d+))?$", version_str) + if match: + return tuple(int(i) if i is not None else None for i in match.groups()) + error = "invalid format {}".format(version_str) + else: + error = "version is not string: 
{}".format(repr(version_str)) + raise ValueError(error) + + +def msg(path, what): + LOGGER.warning("PEP-514 violation in Windows Registry at {} error: {}".format(path, what)) + + +def _run(): + basicConfig() + interpreters = [] + for spec in discover_pythons(): + interpreters.append(repr(spec)) + print("\n".join(sorted(interpreters))) + + +if __name__ == "__main__": + _run() diff --git a/venv/Lib/site-packages/virtualenv/info.py b/venv/Lib/site-packages/virtualenv/info.py new file mode 100644 index 00000000..afe40977 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/info.py @@ -0,0 +1,65 @@ +from __future__ import absolute_import, unicode_literals + +import logging +import os +import platform +import sys +import tempfile + +IMPLEMENTATION = platform.python_implementation() +IS_PYPY = IMPLEMENTATION == "PyPy" +IS_CPYTHON = IMPLEMENTATION == "CPython" +PY3 = sys.version_info[0] == 3 +PY2 = sys.version_info[0] == 2 +IS_WIN = sys.platform == "win32" +ROOT = os.path.realpath(os.path.join(os.path.abspath(__file__), os.path.pardir, os.path.pardir)) +IS_ZIPAPP = os.path.isfile(ROOT) +WIN_CPYTHON_2 = IS_CPYTHON and IS_WIN and PY2 + +_CAN_SYMLINK = _FS_CASE_SENSITIVE = _CFG_DIR = _DATA_DIR = None + + +def fs_is_case_sensitive(): + global _FS_CASE_SENSITIVE + + if _FS_CASE_SENSITIVE is None: + with tempfile.NamedTemporaryFile(prefix="TmP") as tmp_file: + _FS_CASE_SENSITIVE = not os.path.exists(tmp_file.name.lower()) + logging.debug("filesystem is %scase-sensitive", "" if _FS_CASE_SENSITIVE else "not ") + return _FS_CASE_SENSITIVE + + +def fs_supports_symlink(): + global _CAN_SYMLINK + + if _CAN_SYMLINK is None: + can = False + if hasattr(os, "symlink"): + if IS_WIN: + with tempfile.NamedTemporaryFile(prefix="TmP") as tmp_file: + temp_dir = os.path.dirname(tmp_file.name) + dest = os.path.join(temp_dir, "{}-{}".format(tmp_file.name, "b")) + try: + os.symlink(tmp_file.name, dest) + can = True + except (OSError, NotImplementedError): + pass + logging.debug("symlink on filesystem does%s work", "" if can else " not") + else: + can = True + _CAN_SYMLINK = can + return _CAN_SYMLINK + + +__all__ = ( + "IS_PYPY", + "IS_CPYTHON", + "PY3", + "PY2", + "IS_WIN", + "fs_is_case_sensitive", + "fs_supports_symlink", + "ROOT", + "IS_ZIPAPP", + "WIN_CPYTHON_2", +) diff --git a/venv/Lib/site-packages/virtualenv/report.py b/venv/Lib/site-packages/virtualenv/report.py new file mode 100644 index 00000000..665b293c --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/report.py @@ -0,0 +1,57 @@ +from __future__ import absolute_import, unicode_literals + +import logging +import sys + +from virtualenv.util.six import ensure_str + +LEVELS = { + 0: logging.CRITICAL, + 1: logging.ERROR, + 2: logging.WARNING, + 3: logging.INFO, + 4: logging.DEBUG, + 5: logging.NOTSET, +} + +MAX_LEVEL = max(LEVELS.keys()) +LOGGER = logging.getLogger() + + +def setup_report(verbosity, show_pid=False): + _clean_handlers(LOGGER) + if verbosity > MAX_LEVEL: + verbosity = MAX_LEVEL # pragma: no cover + level = LEVELS[verbosity] + msg_format = "%(message)s" + filelock_logger = logging.getLogger("filelock") + if level <= logging.DEBUG: + locate = "module" + msg_format = "%(relativeCreated)d {} [%(levelname)s %({})s:%(lineno)d]".format(msg_format, locate) + filelock_logger.setLevel(level) + else: + filelock_logger.setLevel(logging.WARN) + if show_pid: + msg_format = "[%(process)d] " + msg_format + formatter = logging.Formatter(ensure_str(msg_format)) + stream_handler = logging.StreamHandler(stream=sys.stdout) + stream_handler.setLevel(level) + 
LOGGER.setLevel(logging.NOTSET) + stream_handler.setFormatter(formatter) + LOGGER.addHandler(stream_handler) + level_name = logging.getLevelName(level) + logging.debug("setup logging to %s", level_name) + logging.getLogger("distlib").setLevel(logging.ERROR) + return verbosity + + +def _clean_handlers(log): + for log_handler in list(log.handlers): # remove handlers of libraries + log.removeHandler(log_handler) + + +__all__ = ( + "LEVELS", + "MAX_LEVEL", + "setup_report", +) diff --git a/venv/Lib/site-packages/virtualenv/run/__init__.py b/venv/Lib/site-packages/virtualenv/run/__init__.py new file mode 100644 index 00000000..8de7962a --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/run/__init__.py @@ -0,0 +1,134 @@ +from __future__ import absolute_import, unicode_literals + +import logging + +from ..app_data import AppDataAction, AppDataDisabled, TempAppData +from ..config.cli.parser import VirtualEnvConfigParser +from ..report import LEVELS, setup_report +from ..run.session import Session +from ..seed.wheels.periodic_update import manual_upgrade +from ..version import __version__ +from .plugin.activators import ActivationSelector +from .plugin.creators import CreatorSelector +from .plugin.discovery import get_discover +from .plugin.seeders import SeederSelector + + +def cli_run(args, options=None): + """Create a virtual environment given some command line interface arguments + + :param args: the command line arguments + :param options: passing in a ``VirtualEnvOptions`` object allows return of the parsed options + :return: the session object of the creation (its structure for now is experimental and might change on short notice) + """ + session = session_via_cli(args, options) + with session: + session.run() + return session + + +# noinspection PyProtectedMember +def session_via_cli(args, options=None): + parser, elements = build_parser(args, options) + options = parser.parse_args(args) + creator, seeder, activators = tuple(e.create(options) for e in elements) # create types + session = Session(options.verbosity, options.app_data, parser._interpreter, creator, seeder, activators) + return session + + +# noinspection PyProtectedMember +def build_parser(args=None, options=None): + parser = VirtualEnvConfigParser(options) + add_version_flag(parser) + parser.add_argument( + "--with-traceback", + dest="with_traceback", + action="store_true", + default=False, + help="on failure also display the stacktrace internals of virtualenv", + ) + _do_report_setup(parser, args) + options = load_app_data(args, parser, options) + handle_extra_commands(options) + + discover = get_discover(parser, args) + parser._interpreter = interpreter = discover.interpreter + if interpreter is None: + raise RuntimeError("failed to find interpreter for {}".format(discover)) + elements = [ + CreatorSelector(interpreter, parser), + SeederSelector(interpreter, parser), + ActivationSelector(interpreter, parser), + ] + options, _ = parser.parse_known_args(args) + for element in elements: + element.handle_selected_arg_parse(options) + parser.enable_help() + return parser, elements + + +def build_parser_only(args=None): + """Used to provide a parser for the doc generation""" + return build_parser(args)[0] + + +def handle_extra_commands(options): + if options.upgrade_embed_wheels: + result = manual_upgrade(options.app_data) + raise SystemExit(result) + + +def load_app_data(args, parser, options): + # here we need a write-able application data (e.g. 
the zipapp might need this for the discovery cache)
+    default_app_data = AppDataAction.default()
+    parser.add_argument(
+        "--app-data",
+        dest="app_data",
+        action=AppDataAction,
+        default="" if isinstance(default_app_data, AppDataDisabled) else default_app_data,
+        help="a data folder used as cache by the virtualenv",
+    )
+    parser.add_argument(
+        "--reset-app-data",
+        dest="reset_app_data",
+        action="store_true",
+        help="start with empty app data folder",
+        default=False,
+    )
+    parser.add_argument(
+        "--upgrade-embed-wheels",
+        dest="upgrade_embed_wheels",
+        action="store_true",
+        help="trigger a manual update of the embedded wheels",
+        default=False,
+    )
+    options, _ = parser.parse_known_args(args, namespace=options)
+    if options.app_data == "":
+        options.app_data = TempAppData()
+    if options.reset_app_data:
+        options.app_data.reset()
+    return options
+
+
+def add_version_flag(parser):
+    import virtualenv
+
+    parser.add_argument(
+        "--version",
+        action="version",
+        version="%(prog)s {} from {}".format(__version__, virtualenv.__file__),
+        help="display the version of the virtualenv package and its location, then exit",
+    )
+
+
+def _do_report_setup(parser, args):
+    level_map = ", ".join("{}={}".format(logging.getLevelName(l), c) for c, l in sorted(list(LEVELS.items())))
+    msg = "verbosity = verbose - quiet, default {}, mapping => {}"
+    verbosity_group = parser.add_argument_group(
+        title="verbosity", description=msg.format(logging.getLevelName(LEVELS[3]), level_map),
+    )
+    verbosity = verbosity_group.add_mutually_exclusive_group()
+    verbosity.add_argument("-v", "--verbose", action="count", dest="verbose", help="increase verbosity", default=2)
+    verbosity.add_argument("-q", "--quiet", action="count", dest="quiet", help="decrease verbosity", default=0)
+    option, _ = parser.parse_known_args(args)
+    setup_report(option.verbosity)
diff --git a/venv/Lib/site-packages/virtualenv/run/plugin/__init__.py b/venv/Lib/site-packages/virtualenv/run/plugin/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/venv/Lib/site-packages/virtualenv/run/plugin/activators.py b/venv/Lib/site-packages/virtualenv/run/plugin/activators.py
new file mode 100644
index 00000000..dea28277
--- /dev/null
+++ b/venv/Lib/site-packages/virtualenv/run/plugin/activators.py
@@ -0,0 +1,53 @@
+from __future__ import absolute_import, unicode_literals
+
+from argparse import ArgumentTypeError
+from collections import OrderedDict
+
+from .base import ComponentBuilder
+
+
+class ActivationSelector(ComponentBuilder):
+    def __init__(self, interpreter, parser):
+        self.default = None
+        possible = OrderedDict(
+            (k, v) for k, v in self.options("virtualenv.activate").items() if v.supports(interpreter)
+        )
+        super(ActivationSelector, self).__init__(interpreter, parser, "activators", possible)
+        self.parser.description = "options for activation scripts"
+        self.active = None
+
+    def add_selector_arg_parse(self, name, choices):
+        self.default = ",".join(choices)
+        self.parser.add_argument(
+            "--{}".format(name),
+            default=self.default,
+            metavar="comma_sep_list",
+            required=False,
+            help="activators to generate - default is all supported",
+            type=self._extract_activators,
+        )
+
+    def _extract_activators(self, entered_str):
+        elements = [e.strip() for e in entered_str.split(",") if e.strip()]
+        missing = [e for e in elements if e not in self.possible]
+        if missing:
+            raise ArgumentTypeError("the following activators are not available {}".format(",".join(missing)))
+        return elements
+
+    def handle_selected_arg_parse(self, options):
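+        # when argparse fell back to the default, options.activators may still be the raw
+        # comma-separated default string, so run the extraction by hand in that case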
+ selected_activators = ( + self._extract_activators(self.default) if options.activators is self.default else options.activators + ) + self.active = {k: v for k, v in self.possible.items() if k in selected_activators} + self.parser.add_argument( + "--prompt", + dest="prompt", + metavar="prompt", + help="provides an alternative prompt prefix for this environment", + default=None, + ) + for activator in self.active.values(): + activator.add_parser_arguments(self.parser, self.interpreter) + + def create(self, options): + return [activator_class(options) for activator_class in self.active.values()] diff --git a/venv/Lib/site-packages/virtualenv/run/plugin/base.py b/venv/Lib/site-packages/virtualenv/run/plugin/base.py new file mode 100644 index 00000000..ed10fe0e --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/run/plugin/base.py @@ -0,0 +1,58 @@ +from __future__ import absolute_import, unicode_literals + +import sys +from collections import OrderedDict + +if sys.version_info >= (3, 8): + from importlib.metadata import entry_points +else: + from importlib_metadata import entry_points + + +class PluginLoader(object): + _OPTIONS = None + _ENTRY_POINTS = None + + @classmethod + def entry_points_for(cls, key): + return OrderedDict((e.name, e.load()) for e in cls.entry_points().get(key, {})) + + @staticmethod + def entry_points(): + if PluginLoader._ENTRY_POINTS is None: + PluginLoader._ENTRY_POINTS = entry_points() + return PluginLoader._ENTRY_POINTS + + +class ComponentBuilder(PluginLoader): + def __init__(self, interpreter, parser, name, possible): + self.interpreter = interpreter + self.name = name + self._impl_class = None + self.possible = possible + self.parser = parser.add_argument_group(title=name) + self.add_selector_arg_parse(name, list(self.possible)) + + @classmethod + def options(cls, key): + if cls._OPTIONS is None: + cls._OPTIONS = cls.entry_points_for(key) + return cls._OPTIONS + + def add_selector_arg_parse(self, name, choices): + raise NotImplementedError + + def handle_selected_arg_parse(self, options): + selected = getattr(options, self.name) + if selected not in self.possible: + raise RuntimeError("No implementation for {}".format(self.interpreter)) + self._impl_class = self.possible[selected] + self.populate_selected_argparse(selected, options.app_data) + return selected + + def populate_selected_argparse(self, selected, app_data): + self.parser.description = "options for {} {}".format(self.name, selected) + self._impl_class.add_parser_arguments(self.parser, self.interpreter, app_data) + + def create(self, options): + return self._impl_class(options, self.interpreter) diff --git a/venv/Lib/site-packages/virtualenv/run/plugin/creators.py b/venv/Lib/site-packages/virtualenv/run/plugin/creators.py new file mode 100644 index 00000000..d6d6216f --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/run/plugin/creators.py @@ -0,0 +1,77 @@ +from __future__ import absolute_import, unicode_literals + +from collections import OrderedDict, defaultdict, namedtuple + +from virtualenv.create.describe import Describe +from virtualenv.create.via_global_ref.builtin.builtin_way import VirtualenvBuiltin + +from .base import ComponentBuilder + +CreatorInfo = namedtuple("CreatorInfo", ["key_to_class", "key_to_meta", "describe", "builtin_key"]) + + +class CreatorSelector(ComponentBuilder): + def __init__(self, interpreter, parser): + creators, self.key_to_meta, self.describe, self.builtin_key = self.for_interpreter(interpreter) + super(CreatorSelector, self).__init__(interpreter, parser, 
"creator", creators) + + @classmethod + def for_interpreter(cls, interpreter): + key_to_class, key_to_meta, builtin_key, describe = OrderedDict(), {}, None, None + errored = defaultdict(list) + for key, creator_class in cls.options("virtualenv.create").items(): + if key == "builtin": + raise RuntimeError("builtin creator is a reserved name") + meta = creator_class.can_create(interpreter) + if meta: + if meta.error: + errored[meta.error].append(creator_class) + else: + if "builtin" not in key_to_class and issubclass(creator_class, VirtualenvBuiltin): + builtin_key = key + key_to_class["builtin"] = creator_class + key_to_meta["builtin"] = meta + key_to_class[key] = creator_class + key_to_meta[key] = meta + if describe is None and issubclass(creator_class, Describe) and creator_class.can_describe(interpreter): + describe = creator_class + if not key_to_meta: + if errored: + raise RuntimeError( + "\n".join( + "{} for creators {}".format(k, ", ".join(i.__name__ for i in v)) for k, v in errored.items() + ), + ) + else: + raise RuntimeError("No virtualenv implementation for {}".format(interpreter)) + return CreatorInfo( + key_to_class=key_to_class, key_to_meta=key_to_meta, describe=describe, builtin_key=builtin_key, + ) + + def add_selector_arg_parse(self, name, choices): + # prefer the built-in venv if present, otherwise fallback to first defined type + choices = sorted(choices, key=lambda a: 0 if a == "builtin" else 1) + default_value = self._get_default(choices) + self.parser.add_argument( + "--{}".format(name), + choices=choices, + default=default_value, + required=False, + help="create environment via{}".format( + "" if self.builtin_key is None else " (builtin = {})".format(self.builtin_key), + ), + ) + + @staticmethod + def _get_default(choices): + return next(iter(choices)) + + def populate_selected_argparse(self, selected, app_data): + self.parser.description = "options for {} {}".format(self.name, selected) + self._impl_class.add_parser_arguments(self.parser, self.interpreter, self.key_to_meta[selected], app_data) + + def create(self, options): + options.meta = self.key_to_meta[getattr(options, self.name)] + if not issubclass(self._impl_class, Describe): + options.describe = self.describe(options, self.interpreter) + return super(CreatorSelector, self).create(options) diff --git a/venv/Lib/site-packages/virtualenv/run/plugin/discovery.py b/venv/Lib/site-packages/virtualenv/run/plugin/discovery.py new file mode 100644 index 00000000..e2cfe927 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/run/plugin/discovery.py @@ -0,0 +1,31 @@ +from __future__ import absolute_import, unicode_literals + +from .base import PluginLoader + + +class Discovery(PluginLoader): + """""" + + +def get_discover(parser, args): + discover_types = Discovery.entry_points_for("virtualenv.discovery") + discovery_parser = parser.add_argument_group( + title="discovery", description="discover and provide a target interpreter", + ) + discovery_parser.add_argument( + "--discovery", + choices=_get_default_discovery(discover_types), + default=next(i for i in discover_types.keys()), + required=False, + help="interpreter discovery method", + ) + options, _ = parser.parse_known_args(args) + discover_class = discover_types[options.discovery] + discover_class.add_parser_arguments(discovery_parser) + options, _ = parser.parse_known_args(args, namespace=options) + discover = discover_class(options) + return discover + + +def _get_default_discovery(discover_types): + return list(discover_types.keys()) diff --git 
a/venv/Lib/site-packages/virtualenv/run/plugin/seeders.py b/venv/Lib/site-packages/virtualenv/run/plugin/seeders.py
new file mode 100644
index 00000000..26a33850
--- /dev/null
+++ b/venv/Lib/site-packages/virtualenv/run/plugin/seeders.py
@@ -0,0 +1,31 @@
+from __future__ import absolute_import, unicode_literals
+
+from .base import ComponentBuilder
+
+
+class SeederSelector(ComponentBuilder):
+    def __init__(self, interpreter, parser):
+        possible = self.options("virtualenv.seed")
+        super(SeederSelector, self).__init__(interpreter, parser, "seeder", possible)
+
+    def add_selector_arg_parse(self, name, choices):
+        self.parser.add_argument(
+            "--{}".format(name),
+            choices=choices,
+            default=self._get_default(),
+            required=False,
+            help="seed packages install method",
+        )
+        self.parser.add_argument(
+            "--no-seed", "--without-pip", help="do not install seed packages", action="store_true", dest="no_seed",
+        )
+
+    @staticmethod
+    def _get_default():
+        return "app-data"
+
+    def handle_selected_arg_parse(self, options):
+        return super(SeederSelector, self).handle_selected_arg_parse(options)
+
+    def create(self, options):
+        return self._impl_class(options)
diff --git a/venv/Lib/site-packages/virtualenv/run/session.py b/venv/Lib/site-packages/virtualenv/run/session.py
new file mode 100644
index 00000000..c936089f
--- /dev/null
+++ b/venv/Lib/site-packages/virtualenv/run/session.py
@@ -0,0 +1,90 @@
+from __future__ import absolute_import, unicode_literals
+
+import json
+import logging
+
+from virtualenv.util.six import ensure_text
+
+
+class Session(object):
+    """Represents a virtual environment creation session"""
+
+    def __init__(self, verbosity, app_data, interpreter, creator, seeder, activators):
+        self._verbosity = verbosity
+        self._app_data = app_data
+        self._interpreter = interpreter
+        self._creator = creator
+        self._seeder = seeder
+        self._activators = activators
+
+    @property
+    def verbosity(self):
+        """The verbosity of the run"""
+        return self._verbosity
+
+    @property
+    def interpreter(self):
+        """Create a virtual environment based on this reference interpreter"""
+        return self._interpreter
+
+    @property
+    def creator(self):
+        """The creator used to build the virtual environment (must be compatible with the interpreter)"""
+        return self._creator
+
+    @property
+    def seeder(self):
+        """The mechanism used to provide the seed packages (pip, setuptools, wheel)"""
+        return self._seeder
+
+    @property
+    def activators(self):
+        """Activators used to generate activation scripts"""
+        return self._activators
+
+    def run(self):
+        self._create()
+        self._seed()
+        self._activate()
+        self.creator.pyenv_cfg.write()
+
+    def _create(self):
+        logging.info("create virtual environment via %s", ensure_text(str(self.creator)))
+        self.creator.run()
+        logging.debug(_DEBUG_MARKER)
+        logging.debug("%s", _Debug(self.creator))
+
+    def _seed(self):
+        if self.seeder is not None and self.seeder.enabled:
+            logging.info("add seed packages via %s", self.seeder)
+            self.seeder.run(self.creator)
+
+    def _activate(self):
+        if self.activators:
+            logging.info(
+                "add activators for %s", ", ".join(type(i).__name__.replace("Activator", "") for i in self.activators),
+            )
+            for activator in self.activators:
+                activator.generate(self.creator)
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self._app_data.close()
+
+
+_DEBUG_MARKER = "=" * 30 + " target debug " + "=" * 30
+
+
+class _Debug(object):
+    """lazily populate debug"""
+
+    def __init__(self, creator):
+        self.creator = creator
+
+    def
__unicode__(self): + return ensure_text(repr(self)) + + def __repr__(self): + return json.dumps(self.creator.debug, indent=2) diff --git a/venv/Lib/site-packages/virtualenv/seed/__init__.py b/venv/Lib/site-packages/virtualenv/seed/__init__.py new file mode 100644 index 00000000..01e6d4f4 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/seed/__init__.py @@ -0,0 +1 @@ +from __future__ import absolute_import, unicode_literals diff --git a/venv/Lib/site-packages/virtualenv/seed/embed/__init__.py b/venv/Lib/site-packages/virtualenv/seed/embed/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/virtualenv/seed/embed/base_embed.py b/venv/Lib/site-packages/virtualenv/seed/embed/base_embed.py new file mode 100644 index 00000000..f41b5fc4 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/seed/embed/base_embed.py @@ -0,0 +1,117 @@ +from __future__ import absolute_import, unicode_literals + +from abc import ABCMeta + +from six import add_metaclass + +from virtualenv.util.path import Path +from virtualenv.util.six import ensure_str, ensure_text + +from ..seeder import Seeder +from ..wheels import Version + +PERIODIC_UPDATE_ON_BY_DEFAULT = True + + +@add_metaclass(ABCMeta) +class BaseEmbed(Seeder): + def __init__(self, options): + super(BaseEmbed, self).__init__(options, enabled=options.no_seed is False) + + self.download = options.download + self.extra_search_dir = [i.resolve() for i in options.extra_search_dir if i.exists()] + + self.pip_version = options.pip + self.setuptools_version = options.setuptools + self.wheel_version = options.wheel + + self.no_pip = options.no_pip + self.no_setuptools = options.no_setuptools + self.no_wheel = options.no_wheel + self.app_data = options.app_data + self.periodic_update = not options.no_periodic_update + + if not self.distribution_to_versions(): + self.enabled = False + + @classmethod + def distributions(cls): + return { + "pip": Version.bundle, + "setuptools": Version.bundle, + "wheel": Version.bundle, + } + + def distribution_to_versions(self): + return { + distribution: getattr(self, "{}_version".format(distribution)) + for distribution in self.distributions() + if getattr(self, "no_{}".format(distribution)) is False + } + + @classmethod + def add_parser_arguments(cls, parser, interpreter, app_data): + group = parser.add_mutually_exclusive_group() + group.add_argument( + "--no-download", + "--never-download", + dest="download", + action="store_false", + help="pass to disable download of the latest {} from PyPI".format("/".join(cls.distributions())), + default=True, + ) + group.add_argument( + "--download", + dest="download", + action="store_true", + help="pass to enable download of the latest {} from PyPI".format("/".join(cls.distributions())), + default=False, + ) + parser.add_argument( + "--extra-search-dir", + metavar="d", + type=Path, + nargs="+", + help="a path containing wheels to extend the internal wheel list (can be set 1+ times)", + default=[], + ) + for distribution, default in cls.distributions().items(): + parser.add_argument( + "--{}".format(distribution), + dest=distribution, + metavar="version", + help="version of {} to install as seed: embed, bundle or exact version".format(distribution), + default=default, + ) + for distribution in cls.distributions(): + parser.add_argument( + "--no-{}".format(distribution), + dest="no_{}".format(distribution), + action="store_true", + help="do not install {}".format(distribution), + default=False, + ) + parser.add_argument( + "--no-periodic-update", + 
dest="no_periodic_update", + action="store_true", + help="disable the periodic (once every 14 days) update of the embedded wheels", + default=not PERIODIC_UPDATE_ON_BY_DEFAULT, + ) + + def __unicode__(self): + result = self.__class__.__name__ + result += "(" + if self.extra_search_dir: + result += "extra_search_dir={},".format(", ".join(ensure_text(str(i)) for i in self.extra_search_dir)) + result += "download={},".format(self.download) + for distribution in self.distributions(): + if getattr(self, "no_{}".format(distribution)): + continue + result += " {}{},".format( + distribution, "={}".format(getattr(self, "{}_version".format(distribution), None) or "latest"), + ) + return result[:-1] + ")" + + def __repr__(self): + return ensure_str(self.__unicode__()) diff --git a/venv/Lib/site-packages/virtualenv/seed/embed/pip_invoke.py b/venv/Lib/site-packages/virtualenv/seed/embed/pip_invoke.py new file mode 100644 index 00000000..372e140d --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/seed/embed/pip_invoke.py @@ -0,0 +1,56 @@ +from __future__ import absolute_import, unicode_literals + +import logging +from contextlib import contextmanager + +from virtualenv.discovery.cached_py_info import LogCmd +from virtualenv.seed.embed.base_embed import BaseEmbed +from virtualenv.util.subprocess import Popen + +from ..wheels import Version, get_wheel, pip_wheel_env_run + + +class PipInvoke(BaseEmbed): + def __init__(self, options): + super(PipInvoke, self).__init__(options) + + def run(self, creator): + if not self.enabled: + return + for_py_version = creator.interpreter.version_release_str + with self.get_pip_install_cmd(creator.exe, for_py_version) as cmd: + env = pip_wheel_env_run(self.extra_search_dir, self.app_data) + self._execute(cmd, env) + + @staticmethod + def _execute(cmd, env): + logging.debug("pip seed by running: %s", LogCmd(cmd, env)) + process = Popen(cmd, env=env) + process.communicate() + if process.returncode != 0: + raise RuntimeError("failed seed with code {}".format(process.returncode)) + return process + + @contextmanager + def get_pip_install_cmd(self, exe, for_py_version): + cmd = [str(exe), "-m", "pip", "-q", "install", "--only-binary", ":all:", "--disable-pip-version-check"] + if not self.download: + cmd.append("--no-index") + folders = set() + for dist, version in self.distribution_to_versions().items(): + wheel = get_wheel( + distribution=dist, + version=version, + for_py_version=for_py_version, + search_dirs=self.extra_search_dir, + download=False, + app_data=self.app_data, + do_periodic_update=self.periodic_update, + ) + if wheel is None: + raise RuntimeError("could not get wheel for distribution {}".format(dist)) + folders.add(str(wheel.path.parent)) + cmd.append(Version.as_pip_req(dist, wheel.version)) + for folder in sorted(folders): + cmd.extend(["--find-links", str(folder)]) + yield cmd diff --git a/venv/Lib/site-packages/virtualenv/seed/embed/via_app_data/__init__.py b/venv/Lib/site-packages/virtualenv/seed/embed/via_app_data/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/virtualenv/seed/embed/via_app_data/pip_install/__init__.py b/venv/Lib/site-packages/virtualenv/seed/embed/via_app_data/pip_install/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/venv/Lib/site-packages/virtualenv/seed/embed/via_app_data/pip_install/base.py b/venv/Lib/site-packages/virtualenv/seed/embed/via_app_data/pip_install/base.py new file mode 100644 index 00000000..a1d946d5 --- /dev/null +++ 
b/venv/Lib/site-packages/virtualenv/seed/embed/via_app_data/pip_install/base.py @@ -0,0 +1,158 @@ +from __future__ import absolute_import, unicode_literals + +import logging +import os +import re +import zipfile +from abc import ABCMeta, abstractmethod +from tempfile import mkdtemp + +from distlib.scripts import ScriptMaker, enquote_executable +from six import PY3, add_metaclass + +from virtualenv.util import ConfigParser +from virtualenv.util.path import Path, safe_delete +from virtualenv.util.six import ensure_text + + +@add_metaclass(ABCMeta) +class PipInstall(object): + def __init__(self, wheel, creator, image_folder): + self._wheel = wheel + self._creator = creator + self._image_dir = image_folder + self._extracted = False + self.__dist_info = None + self._console_entry_points = None + + @abstractmethod + def _sync(self, src, dst): + raise NotImplementedError + + def install(self, version_info): + self._extracted = True + # sync image + for filename in self._image_dir.iterdir(): + into = self._creator.purelib / filename.name + if into.exists(): + if into.is_dir() and not into.is_symlink(): + safe_delete(into) + else: + into.unlink() + self._sync(filename, into) + # generate console executables + consoles = set() + script_dir = self._creator.script_dir + for name, module in self._console_scripts.items(): + consoles.update(self._create_console_entry_point(name, module, script_dir, version_info)) + logging.debug("generated console scripts %s", " ".join(i.name for i in consoles)) + + def build_image(self): + # 1. first extract the wheel + logging.debug("build install image for %s to %s", self._wheel.name, self._image_dir) + with zipfile.ZipFile(str(self._wheel)) as zip_ref: + zip_ref.extractall(str(self._image_dir)) + self._extracted = True + # 2. now add additional files not present in the distribution + new_files = self._generate_new_files() + # 3. 
finally fix the records file + self._fix_records(new_files) + + def _records_text(self, files): + record_data = "\n".join( + "{},,".format(os.path.relpath(ensure_text(str(rec)), ensure_text(str(self._image_dir)))) for rec in files + ) + return record_data + + def _generate_new_files(self): + new_files = set() + installer = self._dist_info / "INSTALLER" + installer.write_text("pip\n") + new_files.add(installer) + # inject a no-op root element, as workaround for bug in https://github.com/pypa/pip/issues/7226 + marker = self._image_dir / "{}.virtualenv".format(self._dist_info.stem) + marker.write_text("") + new_files.add(marker) + folder = mkdtemp() + try: + to_folder = Path(folder) + rel = os.path.relpath(ensure_text(str(self._creator.script_dir)), ensure_text(str(self._creator.purelib))) + version_info = self._creator.interpreter.version_info + for name, module in self._console_scripts.items(): + new_files.update( + Path(os.path.normpath(ensure_text(str(self._image_dir / rel / i.name)))) + for i in self._create_console_entry_point(name, module, to_folder, version_info) + ) + finally: + safe_delete(folder) + return new_files + + @property + def _dist_info(self): + if self._extracted is False: + return None # pragma: no cover + if self.__dist_info is None: + files = [] + for filename in self._image_dir.iterdir(): + files.append(filename.name) + if filename.suffix == ".dist-info": + self.__dist_info = filename + break + else: + msg = "no .dist-info at {}, has {}".format(self._image_dir, ", ".join(files)) # pragma: no cover + raise RuntimeError(msg) # pragma: no cover + return self.__dist_info + + @abstractmethod + def _fix_records(self, extra_record_data): + raise NotImplementedError + + @property + def _console_scripts(self): + if self._extracted is False: + return None # pragma: no cover + if self._console_entry_points is None: + self._console_entry_points = {} + entry_points = self._dist_info / "entry_points.txt" + if entry_points.exists(): + parser = ConfigParser.ConfigParser() + with entry_points.open() as file_handler: + reader = getattr(parser, "read_file" if PY3 else "readfp") + reader(file_handler) + if "console_scripts" in parser.sections(): + for name, value in parser.items("console_scripts"): + match = re.match(r"(.*?)-?\d\.?\d*", name) + if match: + name = match.groups(1)[0] + self._console_entry_points[name] = value + return self._console_entry_points + + def _create_console_entry_point(self, name, value, to_folder, version_info): + result = [] + maker = ScriptMakerCustom(to_folder, version_info, self._creator.exe, name) + specification = "{} = {}".format(name, value) + new_files = maker.make(specification) + result.extend(Path(i) for i in new_files) + return result + + def clear(self): + if self._image_dir.exists(): + safe_delete(self._image_dir) + + def has_image(self): + return self._image_dir.exists() and next(self._image_dir.iterdir()) is not None + + +class ScriptMakerCustom(ScriptMaker): + def __init__(self, target_dir, version_info, executable, name): + super(ScriptMakerCustom, self).__init__(None, str(target_dir)) + self.clobber = True # overwrite + self.set_mode = True # ensure they are executable + self.executable = enquote_executable(str(executable)) + self.version_info = version_info.major, version_info.minor + self.variants = {"", "X", "X.Y"} + self._name = name + + def _write_script(self, names, shebang, script_bytes, filenames, ext): + names.add("{}{}.{}".format(self._name, *self.version_info)) + super(ScriptMakerCustom, self)._write_script(names, shebang, 
script_bytes, filenames, ext) diff --git a/venv/Lib/site-packages/virtualenv/seed/embed/via_app_data/pip_install/copy.py b/venv/Lib/site-packages/virtualenv/seed/embed/via_app_data/pip_install/copy.py new file mode 100644 index 00000000..29d0bc88 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/seed/embed/via_app_data/pip_install/copy.py @@ -0,0 +1,35 @@ +from __future__ import absolute_import, unicode_literals + +import os + +from virtualenv.util.path import Path, copy +from virtualenv.util.six import ensure_text + +from .base import PipInstall + + +class CopyPipInstall(PipInstall): + def _sync(self, src, dst): + copy(src, dst) + + def _generate_new_files(self): + # create the pyc files + new_files = super(CopyPipInstall, self)._generate_new_files() + new_files.update(self._cache_files()) + return new_files + + def _cache_files(self): + version = self._creator.interpreter.version_info + py_c_ext = ".{}-{}{}.pyc".format(self._creator.interpreter.implementation.lower(), version.major, version.minor) + for root, dirs, files in os.walk(ensure_text(str(self._image_dir)), topdown=True): + root_path = Path(root) + for name in files: + if name.endswith(".py"): + yield root_path / "{}{}".format(name[:-3], py_c_ext) + for name in dirs: + yield root_path / name / "__pycache__" + + def _fix_records(self, new_files): + extra_record_data_str = self._records_text(new_files) + with open(ensure_text(str(self._dist_info / "RECORD")), "ab") as file_handler: + file_handler.write(extra_record_data_str.encode("utf-8")) diff --git a/venv/Lib/site-packages/virtualenv/seed/embed/via_app_data/pip_install/symlink.py b/venv/Lib/site-packages/virtualenv/seed/embed/via_app_data/pip_install/symlink.py new file mode 100644 index 00000000..f958b654 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/seed/embed/via_app_data/pip_install/symlink.py @@ -0,0 +1,61 @@ +from __future__ import absolute_import, unicode_literals + +import os +import subprocess +from stat import S_IREAD, S_IRGRP, S_IROTH + +from virtualenv.util.path import safe_delete, set_tree +from virtualenv.util.six import ensure_text +from virtualenv.util.subprocess import Popen + +from .base import PipInstall + + +class SymlinkPipInstall(PipInstall): + def _sync(self, src, dst): + src_str = ensure_text(str(src)) + dest_str = ensure_text(str(dst)) + os.symlink(src_str, dest_str) + + def _generate_new_files(self): + # create the pyc files, as the build image will be R/O + process = Popen( + [ensure_text(str(self._creator.exe)), "-m", "compileall", ensure_text(str(self._image_dir))], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + process.communicate() + # the root pyc is shared, so we'll not symlink that - but still add the pyc files to the RECORD for close + root_py_cache = self._image_dir / "__pycache__" + new_files = set() + if root_py_cache.exists(): + new_files.update(root_py_cache.iterdir()) + new_files.add(root_py_cache) + safe_delete(root_py_cache) + core_new_files = super(SymlinkPipInstall, self)._generate_new_files() + # remove files that are within the image folder deeper than one level (as these will be not linked directly) + for file in core_new_files: + try: + rel = file.relative_to(self._image_dir) + if len(rel.parts) > 1: + continue + except ValueError: + pass + new_files.add(file) + return new_files + + def _fix_records(self, new_files): + new_files.update(i for i in self._image_dir.iterdir()) + extra_record_data_str = self._records_text(sorted(new_files, key=str)) + with open(ensure_text(str(self._dist_info / "RECORD")), "wb") 
as file_handler: + file_handler.write(extra_record_data_str.encode("utf-8")) + + def build_image(self): + super(SymlinkPipInstall, self).build_image() + # protect the image by making it read only + set_tree(self._image_dir, S_IREAD | S_IRGRP | S_IROTH) + + def clear(self): + if self._image_dir.exists(): + safe_delete(self._image_dir) + super(SymlinkPipInstall, self).clear() diff --git a/venv/Lib/site-packages/virtualenv/seed/embed/via_app_data/via_app_data.py b/venv/Lib/site-packages/virtualenv/seed/embed/via_app_data/via_app_data.py new file mode 100644 index 00000000..e5951280 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/seed/embed/via_app_data/via_app_data.py @@ -0,0 +1,142 @@ +"""Bootstrap""" +from __future__ import absolute_import, unicode_literals + +import logging +import sys +import traceback +from contextlib import contextmanager +from subprocess import CalledProcessError +from threading import Lock, Thread + +import six + +from virtualenv.info import fs_supports_symlink +from virtualenv.seed.embed.base_embed import BaseEmbed +from virtualenv.seed.wheels import get_wheel +from virtualenv.util.lock import _CountedFileLock +from virtualenv.util.path import Path +from virtualenv.util.six import ensure_text + +from .pip_install.copy import CopyPipInstall +from .pip_install.symlink import SymlinkPipInstall + + +class FromAppData(BaseEmbed): + def __init__(self, options): + super(FromAppData, self).__init__(options) + self.symlinks = options.symlink_app_data + + @classmethod + def add_parser_arguments(cls, parser, interpreter, app_data): + super(FromAppData, cls).add_parser_arguments(parser, interpreter, app_data) + can_symlink = app_data.transient is False and fs_supports_symlink() + parser.add_argument( + "--symlink-app-data", + dest="symlink_app_data", + action="store_true" if can_symlink else "store_false", + help="{} symlink the python packages from the app-data folder (requires seed pip>=19.3)".format( + "" if can_symlink else "not supported - ", + ), + default=False, + ) + + def run(self, creator): + if not self.enabled: + return + with self._get_seed_wheels(creator) as name_to_whl: + pip_version = name_to_whl["pip"].version_tuple if "pip" in name_to_whl else None + installer_class = self.installer_class(pip_version) + exceptions = {} + + def _install(name, wheel): + try: + logging.debug("install %s from wheel %s via %s", name, wheel, installer_class.__name__) + key = Path(installer_class.__name__) / wheel.path.stem + wheel_img = self.app_data.wheel_image(creator.interpreter.version_release_str, key) + installer = installer_class(wheel.path, creator, wheel_img) + with _CountedFileLock(ensure_text(str(wheel_img.parent / "{}.lock".format(wheel_img.name)))): + if not installer.has_image(): + installer.build_image() + installer.install(creator.interpreter.version_info) + except Exception: # noqa + exceptions[name] = sys.exc_info() + + threads = list(Thread(target=_install, args=(n, w)) for n, w in name_to_whl.items()) + for thread in threads: + thread.start() + for thread in threads: + thread.join() + if exceptions: + messages = ["failed to build image {} because:".format(", ".join(exceptions.keys()))] + for value in exceptions.values(): + exc_type, exc_value, exc_traceback = value + messages.append("".join(traceback.format_exception(exc_type, exc_value, exc_traceback))) + raise RuntimeError("\n".join(messages)) + + @contextmanager + def _get_seed_wheels(self, creator): + name_to_whl, lock, fail = {}, Lock(), {} + + def _get(distribution, version): + for_py_version = 
creator.interpreter.version_release_str
+            failure, result = None, None
+            # fallback to download in case the exact version is not available
+            for download in [True] if self.download else [False, True]:
+                failure = None
+                try:
+                    result = get_wheel(
+                        distribution=distribution,
+                        version=version,
+                        for_py_version=for_py_version,
+                        search_dirs=self.extra_search_dir,
+                        download=download,
+                        app_data=self.app_data,
+                        do_periodic_update=self.periodic_update,
+                    )
+                    if result is not None:
+                        break
+                except Exception as exception:  # noqa
+                    logging.exception("fail")
+                    failure = exception
+            if failure:
+                if isinstance(failure, CalledProcessError):
+                    msg = "failed to download {}".format(distribution)
+                    if version is not None:
+                        msg += " version {}".format(version)
+                    msg += ", pip download exit code {}".format(failure.returncode)
+                    output = failure.output if six.PY2 else (failure.output + failure.stderr)
+                    if output:
+                        msg += "\n"
+                        msg += output
+                else:
+                    msg = repr(failure)
+                logging.error(msg)
+                with lock:
+                    fail[distribution] = version
+            else:
+                with lock:
+                    name_to_whl[distribution] = result
+
+        threads = list(
+            Thread(target=_get, args=(distribution, version))
+            for distribution, version in self.distribution_to_versions().items()
+        )
+        for thread in threads:
+            thread.start()
+        for thread in threads:
+            thread.join()
+        if fail:
+            raise RuntimeError("seed failed due to failing to download wheels {}".format(", ".join(fail.keys())))
+        yield name_to_whl
+
+    def installer_class(self, pip_version_tuple):
+        if self.symlinks and pip_version_tuple:
+            # symlink support requires pip 19.3+
+            if pip_version_tuple >= (19, 3):
+                return SymlinkPipInstall
+        return CopyPipInstall
+
+    def __unicode__(self):
+        base = super(FromAppData, self).__unicode__()
+        msg = ", via={}, app_data_dir={}".format("symlink" if self.symlinks else "copy", self.app_data)
+        return base[:-1] + msg + base[-1]
diff --git a/venv/Lib/site-packages/virtualenv/seed/seeder.py b/venv/Lib/site-packages/virtualenv/seed/seeder.py
new file mode 100644
index 00000000..2bcccfc7
--- /dev/null
+++ b/venv/Lib/site-packages/virtualenv/seed/seeder.py
@@ -0,0 +1,39 @@
+from __future__ import absolute_import, unicode_literals
+
+from abc import ABCMeta, abstractmethod
+
+from six import add_metaclass
+
+
+@add_metaclass(ABCMeta)
+class Seeder(object):
+    """A seeder will install some seed packages into a virtual environment."""
+
+    # noinspection PyUnusedLocal
+    def __init__(self, options, enabled):
+        """
+
+        :param options: the parsed options as defined within :meth:`add_parser_arguments`
+        :param enabled: a flag indicating whether the seeder is enabled or not
+        """
+        self.enabled = enabled
+
+    @classmethod
+    def add_parser_arguments(cls, parser, interpreter, app_data):
+        """
+        Add CLI arguments for this seed mechanism.
+
+        :param parser: the CLI parser
+        :param app_data: the application data folder
+        :param interpreter: the interpreter this virtual environment is based on
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    def run(self, creator):
+        """Perform the seed operation.
+
+        :param creator: the creator (based on :class:`virtualenv.create.creator.Creator`) we used to create this \
+        virtual environment
+        """
+        raise NotImplementedError
diff --git a/venv/Lib/site-packages/virtualenv/seed/wheels/__init__.py b/venv/Lib/site-packages/virtualenv/seed/wheels/__init__.py
new file mode 100644
index 00000000..dbffe2e4
--- /dev/null
+++ b/venv/Lib/site-packages/virtualenv/seed/wheels/__init__.py
@@ -0,0 +1,11 @@
+from __future__ import absolute_import, unicode_literals
+
+from .acquire import get_wheel, pip_wheel_env_run
+from .util import Version, Wheel
+
+__all__ = (
+    "get_wheel",
+    "pip_wheel_env_run",
+    "Version",
+    "Wheel",
+)
diff --git a/venv/Lib/site-packages/virtualenv/seed/wheels/acquire.py b/venv/Lib/site-packages/virtualenv/seed/wheels/acquire.py
new file mode 100644
index 00000000..823d3484
--- /dev/null
+++ b/venv/Lib/site-packages/virtualenv/seed/wheels/acquire.py
@@ -0,0 +1,122 @@
+"""Bootstrap"""
+from __future__ import absolute_import, unicode_literals
+
+import logging
+import os
+import sys
+from operator import eq, lt
+
+import six
+
+from virtualenv.util.path import Path
+from virtualenv.util.six import ensure_str
+from virtualenv.util.subprocess import Popen, subprocess
+
+from .bundle import from_bundle
+from .util import Version, Wheel, discover_wheels
+
+
+def get_wheel(distribution, version, for_py_version, search_dirs, download, app_data, do_periodic_update):
+    """
+    Get a wheel with the given distribution-version-for_py_version trio, by using the extra search dir + download
+    """
+    # not all wheels are compatible with all python versions, so we need to qualify by python version
+    # 1. acquire from bundle
+    wheel = from_bundle(distribution, version, for_py_version, search_dirs, app_data, do_periodic_update)
+
+    # 2.
download from the internet + if version not in Version.non_version and download: + wheel = download_wheel( + distribution=distribution, + version_spec=Version.as_version_spec(version), + for_py_version=for_py_version, + search_dirs=search_dirs, + app_data=app_data, + to_folder=app_data.house, + ) + return wheel + + +def download_wheel(distribution, version_spec, for_py_version, search_dirs, app_data, to_folder): + to_download = "{}{}".format(distribution, version_spec or "") + logging.debug("download wheel %s %s to %s", to_download, for_py_version, to_folder) + cmd = [ + sys.executable, + "-m", + "pip", + "download", + "--progress-bar", + "off", + "--disable-pip-version-check", + "--only-binary=:all:", + "--no-deps", + "--python-version", + for_py_version, + "-d", + str(to_folder), + to_download, + ] + # pip has no interface in python - must be a new sub-process + env = pip_wheel_env_run(search_dirs, app_data) + process = Popen(cmd, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) + out, err = process.communicate() + if process.returncode != 0: + kwargs = {"output": out} + if six.PY2: + kwargs["output"] += err + else: + kwargs["stderr"] = err + raise subprocess.CalledProcessError(process.returncode, cmd, **kwargs) + result = _find_downloaded_wheel(distribution, version_spec, for_py_version, to_folder, out) + logging.debug("downloaded wheel %s", result.name) + return result + + +def _find_downloaded_wheel(distribution, version_spec, for_py_version, to_folder, out): + for line in out.splitlines(): + line = line.lstrip() + for marker in ("Saved ", "File was already downloaded "): + if line.startswith(marker): + return Wheel(Path(line[len(marker) :]).absolute()) + # if for some reason the output does not match fallback to latest version with that spec + return find_compatible_in_house(distribution, version_spec, for_py_version, to_folder) + + +def find_compatible_in_house(distribution, version_spec, for_py_version, in_folder): + wheels = discover_wheels(in_folder, distribution, None, for_py_version) + start, end = 0, len(wheels) + if version_spec is not None: + if version_spec.startswith("<"): + from_pos, op = 1, lt + elif version_spec.startswith("=="): + from_pos, op = 2, eq + else: + raise ValueError(version_spec) + version = Wheel.as_version_tuple(version_spec[from_pos:]) + start = next((at for at, w in enumerate(wheels) if op(w.version_tuple, version)), len(wheels)) + + return None if start == end else wheels[start] + + +def pip_wheel_env_run(search_dirs, app_data): + for_py_version = "{}.{}".format(*sys.version_info[0:2]) + env = os.environ.copy() + env.update( + { + ensure_str(k): str(v) # python 2 requires these to be string only (non-unicode) + for k, v in {"PIP_USE_WHEEL": "1", "PIP_USER": "0", "PIP_NO_INPUT": "1"}.items() + }, + ) + wheel = get_wheel( + distribution="pip", + version=None, + for_py_version=for_py_version, + search_dirs=search_dirs, + download=False, + app_data=app_data, + do_periodic_update=False, + ) + if wheel is None: + raise RuntimeError("could not find the embedded pip") + env[str("PYTHONPATH")] = str(wheel.path) + return env diff --git a/venv/Lib/site-packages/virtualenv/seed/wheels/bundle.py b/venv/Lib/site-packages/virtualenv/seed/wheels/bundle.py new file mode 100644 index 00000000..6ac15f9e --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/seed/wheels/bundle.py @@ -0,0 +1,51 @@ +from __future__ import absolute_import, unicode_literals + +from virtualenv.app_data import AppDataDiskFolder, TempAppData + +from ..wheels.embed 
import get_embed_wheel +from .periodic_update import periodic_update +from .util import Version, Wheel, discover_wheels + + +def from_bundle(distribution, version, for_py_version, search_dirs, app_data, do_periodic_update): + """ + Load the bundled wheel to a cache directory. + """ + of_version = Version.of_version(version) + wheel = load_embed_wheel(app_data, distribution, for_py_version, of_version) + + if version != Version.embed: + # 2. check if we have upgraded embed + if isinstance(app_data, AppDataDiskFolder) and not isinstance(app_data, TempAppData): + wheel = periodic_update(distribution, for_py_version, wheel, search_dirs, app_data, do_periodic_update) + + # 3. acquire from extra search dir + found_wheel = from_dir(distribution, of_version, for_py_version, search_dirs) + if found_wheel is not None: + if wheel is None: + wheel = found_wheel + elif found_wheel.version_tuple > wheel.version_tuple: + wheel = found_wheel + return wheel + + +def load_embed_wheel(app_data, distribution, for_py_version, version): + wheel = get_embed_wheel(distribution, for_py_version) + if wheel is not None: + version_match = version == wheel.version + if version is None or version_match: + with app_data.ensure_extracted(wheel.path, lambda: app_data.house) as wheel_path: + wheel = Wheel(wheel_path) + else: # if version does not match ignore + wheel = None + return wheel + + +def from_dir(distribution, version, for_py_version, directories): + """ + Load a compatible wheel from a given folder. + """ + for folder in directories: + for wheel in discover_wheels(folder, distribution, version, for_py_version): + return wheel + return None diff --git a/venv/Lib/site-packages/virtualenv/seed/wheels/embed/__init__.py b/venv/Lib/site-packages/virtualenv/seed/wheels/embed/__init__.py new file mode 100644 index 00000000..ef887fa8 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/seed/wheels/embed/__init__.py @@ -0,0 +1,62 @@ +from __future__ import absolute_import, unicode_literals + +from virtualenv.seed.wheels.util import Wheel +from virtualenv.util.path import Path + +BUNDLE_FOLDER = Path(__file__).absolute().parent +BUNDLE_SUPPORT = { + "3.10": { + "pip": "pip-20.1.1-py2.py3-none-any.whl", + "setuptools": "setuptools-49.2.0-py3-none-any.whl", + "wheel": "wheel-0.34.2-py2.py3-none-any.whl", + }, + "3.9": { + "pip": "pip-20.1.1-py2.py3-none-any.whl", + "setuptools": "setuptools-49.2.0-py3-none-any.whl", + "wheel": "wheel-0.34.2-py2.py3-none-any.whl", + }, + "3.8": { + "pip": "pip-20.1.1-py2.py3-none-any.whl", + "setuptools": "setuptools-49.2.0-py3-none-any.whl", + "wheel": "wheel-0.34.2-py2.py3-none-any.whl", + }, + "3.7": { + "pip": "pip-20.1.1-py2.py3-none-any.whl", + "setuptools": "setuptools-49.2.0-py3-none-any.whl", + "wheel": "wheel-0.34.2-py2.py3-none-any.whl", + }, + "3.6": { + "pip": "pip-20.1.1-py2.py3-none-any.whl", + "setuptools": "setuptools-49.2.0-py3-none-any.whl", + "wheel": "wheel-0.34.2-py2.py3-none-any.whl", + }, + "3.5": { + "pip": "pip-20.1.1-py2.py3-none-any.whl", + "setuptools": "setuptools-49.2.0-py3-none-any.whl", + "wheel": "wheel-0.34.2-py2.py3-none-any.whl", + }, + "3.4": { + "pip": "pip-19.1.1-py2.py3-none-any.whl", + "setuptools": "setuptools-43.0.0-py2.py3-none-any.whl", + "wheel": "wheel-0.33.6-py2.py3-none-any.whl", + }, + "2.7": { + "pip": "pip-20.1.1-py2.py3-none-any.whl", + "setuptools": "setuptools-44.1.1-py2.py3-none-any.whl", + "wheel": "wheel-0.34.2-py2.py3-none-any.whl", + }, +} +MAX = "3.10" + + +def get_embed_wheel(distribution, for_py_version): + path = BUNDLE_FOLDER 
/ (BUNDLE_SUPPORT.get(for_py_version, {}) or BUNDLE_SUPPORT[MAX]).get(distribution) + return Wheel.from_path(path) + + +__all__ = ( + "get_embed_wheel", + "BUNDLE_SUPPORT", + "MAX", + "BUNDLE_FOLDER", +) diff --git a/venv/Lib/site-packages/virtualenv/seed/wheels/embed/pip-19.1.1-py2.py3-none-any.whl b/venv/Lib/site-packages/virtualenv/seed/wheels/embed/pip-19.1.1-py2.py3-none-any.whl new file mode 100644 index 00000000..8476c119 Binary files /dev/null and b/venv/Lib/site-packages/virtualenv/seed/wheels/embed/pip-19.1.1-py2.py3-none-any.whl differ diff --git a/venv/Lib/site-packages/virtualenv/seed/wheels/embed/pip-20.1.1-py2.py3-none-any.whl b/venv/Lib/site-packages/virtualenv/seed/wheels/embed/pip-20.1.1-py2.py3-none-any.whl new file mode 100644 index 00000000..ea1d0f7c Binary files /dev/null and b/venv/Lib/site-packages/virtualenv/seed/wheels/embed/pip-20.1.1-py2.py3-none-any.whl differ diff --git a/venv/Lib/site-packages/virtualenv/seed/wheels/embed/setuptools-43.0.0-py2.py3-none-any.whl b/venv/Lib/site-packages/virtualenv/seed/wheels/embed/setuptools-43.0.0-py2.py3-none-any.whl new file mode 100644 index 00000000..733faa6a Binary files /dev/null and b/venv/Lib/site-packages/virtualenv/seed/wheels/embed/setuptools-43.0.0-py2.py3-none-any.whl differ diff --git a/venv/Lib/site-packages/virtualenv/seed/wheels/embed/setuptools-44.1.1-py2.py3-none-any.whl b/venv/Lib/site-packages/virtualenv/seed/wheels/embed/setuptools-44.1.1-py2.py3-none-any.whl new file mode 100644 index 00000000..bf28513c Binary files /dev/null and b/venv/Lib/site-packages/virtualenv/seed/wheels/embed/setuptools-44.1.1-py2.py3-none-any.whl differ diff --git a/venv/Lib/site-packages/virtualenv/seed/wheels/embed/setuptools-49.2.0-py3-none-any.whl b/venv/Lib/site-packages/virtualenv/seed/wheels/embed/setuptools-49.2.0-py3-none-any.whl new file mode 100644 index 00000000..25e5ef38 Binary files /dev/null and b/venv/Lib/site-packages/virtualenv/seed/wheels/embed/setuptools-49.2.0-py3-none-any.whl differ diff --git a/venv/Lib/site-packages/virtualenv/seed/wheels/embed/wheel-0.33.6-py2.py3-none-any.whl b/venv/Lib/site-packages/virtualenv/seed/wheels/embed/wheel-0.33.6-py2.py3-none-any.whl new file mode 100644 index 00000000..2a71896b Binary files /dev/null and b/venv/Lib/site-packages/virtualenv/seed/wheels/embed/wheel-0.33.6-py2.py3-none-any.whl differ diff --git a/venv/Lib/site-packages/virtualenv/seed/wheels/embed/wheel-0.34.2-py2.py3-none-any.whl b/venv/Lib/site-packages/virtualenv/seed/wheels/embed/wheel-0.34.2-py2.py3-none-any.whl new file mode 100644 index 00000000..becbee8a Binary files /dev/null and b/venv/Lib/site-packages/virtualenv/seed/wheels/embed/wheel-0.34.2-py2.py3-none-any.whl differ diff --git a/venv/Lib/site-packages/virtualenv/seed/wheels/periodic_update.py b/venv/Lib/site-packages/virtualenv/seed/wheels/periodic_update.py new file mode 100644 index 00000000..4a8e8bc8 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/seed/wheels/periodic_update.py @@ -0,0 +1,364 @@ +""" +Periodically update bundled versions. 
+""" + +from __future__ import absolute_import, unicode_literals + +import json +import logging +import os +import ssl +import subprocess +import sys +from datetime import datetime, timedelta +from itertools import groupby +from shutil import copy2 +from textwrap import dedent +from threading import Thread + +from six.moves.urllib.error import URLError +from six.moves.urllib.request import urlopen + +from virtualenv.app_data import AppDataDiskFolder +from virtualenv.info import PY2 +from virtualenv.util.path import Path +from virtualenv.util.subprocess import DETACHED_PROCESS, Popen + +from ..wheels.embed import BUNDLE_SUPPORT +from ..wheels.util import Wheel + +if PY2: + # on Python 2 datetime.strptime throws the error below if the import did not trigger on main thread + # Failed to import _strptime because the import lock is held by + try: + import _strptime # noqa + except ImportError: # pragma: no cov + pass # pragma: no cov + + +def periodic_update(distribution, for_py_version, wheel, search_dirs, app_data, do_periodic_update): + if do_periodic_update: + handle_auto_update(distribution, for_py_version, wheel, search_dirs, app_data) + + now = datetime.now() + + u_log = UpdateLog.from_app_data(app_data, distribution, for_py_version) + u_log_older_than_hour = now - u_log.completed > timedelta(hours=1) if u_log.completed is not None else False + for _, group in groupby(u_log.versions, key=lambda v: v.wheel.version_tuple[0:2]): + version = next(group) # use only latest patch version per minor, earlier assumed to be buggy + if wheel is not None and Path(version.filename).name == wheel.name: + break + if u_log.periodic is False or (u_log_older_than_hour and version.use(now)): + updated_wheel = Wheel(app_data.house / version.filename) + logging.debug("using %supdated wheel %s", "periodically " if updated_wheel else "", updated_wheel) + wheel = updated_wheel + break + + return wheel + + +def handle_auto_update(distribution, for_py_version, wheel, search_dirs, app_data): + embed_update_log = app_data.embed_update_log(distribution, for_py_version) + u_log = UpdateLog.from_dict(embed_update_log.read()) + if u_log.needs_update: + u_log.periodic = True + u_log.started = datetime.now() + embed_update_log.write(u_log.to_dict()) + trigger_update(distribution, for_py_version, wheel, search_dirs, app_data, periodic=True) + + +DATETIME_FMT = "%Y-%m-%dT%H:%M:%S.%fZ" + + +def dump_datetime(value): + return None if value is None else value.strftime(DATETIME_FMT) + + +def load_datetime(value): + return None if value is None else datetime.strptime(value, DATETIME_FMT) + + +class NewVersion(object): + def __init__(self, filename, found_date, release_date): + self.filename = filename + self.found_date = found_date + self.release_date = release_date + + @classmethod + def from_dict(cls, dictionary): + return cls( + filename=dictionary["filename"], + found_date=load_datetime(dictionary["found_date"]), + release_date=load_datetime(dictionary["release_date"]), + ) + + def to_dict(self): + return { + "filename": self.filename, + "release_date": dump_datetime(self.release_date), + "found_date": dump_datetime(self.found_date), + } + + def use(self, now): + compare_from = self.release_date or self.found_date + return now - compare_from >= timedelta(days=28) + + def __repr__(self): + return "{}(filename={}), found_date={}, release_date={})".format( + self.__class__.__name__, self.filename, self.found_date, self.release_date, + ) + + def __eq__(self, other): + return type(self) == type(other) and all( + getattr(self, k) 
== getattr(other, k) for k in ["filename", "release_date", "found_date"] + ) + + def __ne__(self, other): + return not (self == other) + + @property + def wheel(self): + return Wheel(Path(self.filename)) + + +class UpdateLog(object): + def __init__(self, started, completed, versions, periodic): + self.started = started + self.completed = completed + self.versions = versions + self.periodic = periodic + + @classmethod + def from_dict(cls, dictionary): + if dictionary is None: + dictionary = {} + return cls( + load_datetime(dictionary.get("started")), + load_datetime(dictionary.get("completed")), + [NewVersion.from_dict(v) for v in dictionary.get("versions", [])], + dictionary.get("periodic"), + ) + + @classmethod + def from_app_data(cls, app_data, distribution, for_py_version): + raw_json = app_data.embed_update_log(distribution, for_py_version).read() + return cls.from_dict(raw_json) + + def to_dict(self): + return { + "started": dump_datetime(self.started), + "completed": dump_datetime(self.completed), + "periodic": self.periodic, + "versions": [r.to_dict() for r in self.versions], + } + + @property + def needs_update(self): + now = datetime.now() + if self.completed is None: # never completed + return self._check_start(now) + else: + if now - self.completed <= timedelta(days=14): + return False + return self._check_start(now) + + def _check_start(self, now): + return self.started is None or now - self.started > timedelta(hours=1) + + +def trigger_update(distribution, for_py_version, wheel, search_dirs, app_data, periodic): + wheel_path = None if wheel is None else str(wheel.path) + cmd = [ + sys.executable, + "-c", + dedent( + """ + from virtualenv.report import setup_report, MAX_LEVEL + from virtualenv.seed.wheels.periodic_update import do_update + setup_report(MAX_LEVEL, show_pid=True) + do_update({!r}, {!r}, {!r}, {!r}, {!r}, {!r}) + """, + ) + .strip() + .format(distribution, for_py_version, wheel_path, str(app_data), [str(p) for p in search_dirs], periodic), + ] + debug = os.environ.get(str("_VIRTUALENV_PERIODIC_UPDATE_INLINE")) == str("1") + pipe = None if debug else subprocess.PIPE + kwargs = {"stdout": pipe, "stderr": pipe} + if not debug and sys.platform == "win32": + kwargs["creationflags"] = DETACHED_PROCESS + process = Popen(cmd, **kwargs) + logging.info( + "triggered periodic upgrade of %s%s (for python %s) via background process having PID %d", + distribution, + "" if wheel is None else "=={}".format(wheel.version), + for_py_version, + process.pid, + ) + if debug: + process.communicate() # on purpose not called to make it a background process + + +def do_update(distribution, for_py_version, embed_filename, app_data, search_dirs, periodic): + versions = None + try: + versions = _run_do_update(app_data, distribution, embed_filename, for_py_version, periodic, search_dirs) + finally: + logging.debug("done %s %s with %s", distribution, for_py_version, versions) + return versions + + +def _run_do_update(app_data, distribution, embed_filename, for_py_version, periodic, search_dirs): + from virtualenv.seed.wheels import acquire + + wheel_filename = None if embed_filename is None else Path(embed_filename) + embed_version = None if wheel_filename is None else Wheel(wheel_filename).version_tuple + app_data = AppDataDiskFolder(app_data) if isinstance(app_data, str) else app_data + search_dirs = [Path(p) if isinstance(p, str) else p for p in search_dirs] + wheelhouse = app_data.house + embed_update_log = app_data.embed_update_log(distribution, for_py_version) + u_log = 
UpdateLog.from_dict(embed_update_log.read()) + now = datetime.now() + if wheel_filename is not None: + dest = wheelhouse / wheel_filename.name + if not dest.exists(): + copy2(str(wheel_filename), str(wheelhouse)) + last, last_version, versions = None, None, [] + while last is None or not last.use(now): + download_time = datetime.now() + dest = acquire.download_wheel( + distribution=distribution, + version_spec=None if last_version is None else "<{}".format(last_version), + for_py_version=for_py_version, + search_dirs=search_dirs, + app_data=app_data, + to_folder=wheelhouse, + ) + if dest is None or (u_log.versions and u_log.versions[0].filename == dest.name): + break + release_date = release_date_for_wheel_path(dest.path) + last = NewVersion(filename=dest.path.name, release_date=release_date, found_date=download_time) + logging.info("detected %s in %s", last, datetime.now() - download_time) + versions.append(last) + last_wheel = Wheel(Path(last.filename)) + last_version = last_wheel.version + if embed_version is not None: + if embed_version >= last_wheel.version_tuple: # stop download if we reach the embed version + break + u_log.periodic = periodic + if not u_log.periodic: + u_log.started = now + u_log.versions = versions + u_log.versions + u_log.completed = datetime.now() + embed_update_log.write(u_log.to_dict()) + return versions + + +def release_date_for_wheel_path(dest): + wheel = Wheel(dest) + # the most accurate is to ask PyPi - e.g. https://pypi.org/pypi/pip/json, + # see https://warehouse.pypa.io/api-reference/json/ for more details + content = _pypi_get_distribution_info_cached(wheel.distribution) + if content is not None: + try: + upload_time = content["releases"][wheel.version][0]["upload_time"] + return datetime.strptime(upload_time, "%Y-%m-%dT%H:%M:%S") + except Exception as exception: + logging.error("could not load release date %s because %r", content, exception) + return None + + +def _request_context(): + yield None + # fallback to non verified HTTPS (the information we request is not sensitive, so fallback) + yield ssl._create_unverified_context() # noqa + + +_PYPI_CACHE = {} + + +def _pypi_get_distribution_info_cached(distribution): + if distribution not in _PYPI_CACHE: + _PYPI_CACHE[distribution] = _pypi_get_distribution_info(distribution) + return _PYPI_CACHE[distribution] + + +def _pypi_get_distribution_info(distribution): + content, url = None, "https://pypi.org/pypi/{}/json".format(distribution) + try: + for context in _request_context(): + try: + with urlopen(url, context=context) as file_handler: + content = json.load(file_handler) + break + except URLError as exception: + logging.error("failed to access %s because %r", url, exception) + except Exception as exception: + logging.error("failed to access %s because %r", url, exception) + return content + + +def manual_upgrade(app_data): + threads = [] + + for for_py_version, distribution_to_package in BUNDLE_SUPPORT.items(): + # load extra search dir for the given for_py + for distribution in distribution_to_package.keys(): + thread = Thread(target=_run_manual_upgrade, args=(app_data, distribution, for_py_version)) + thread.start() + threads.append(thread) + + for thread in threads: + thread.join() + + +def _run_manual_upgrade(app_data, distribution, for_py_version): + start = datetime.now() + from .bundle import from_bundle + + current = from_bundle( + distribution=distribution, + version=None, + for_py_version=for_py_version, + search_dirs=[], + app_data=app_data, + do_periodic_update=False, + ) + logging.warning( 
+ "upgrade %s for python %s with current %s", + distribution, + for_py_version, + "" if current is None else current.name, + ) + versions = do_update( + distribution=distribution, + for_py_version=for_py_version, + embed_filename=current.path, + app_data=app_data, + search_dirs=[], + periodic=False, + ) + msg = "upgraded %s for python %s in %s {}".format( + "new entries found:\n%s" if versions else "no new versions found", + ) + args = [ + distribution, + for_py_version, + datetime.now() - start, + ] + if versions: + args.append("\n".join("\t{}".format(v) for v in versions)) + logging.warning(msg, *args) + + +__all__ = ( + "periodic_update", + "do_update", + "manual_upgrade", + "NewVersion", + "UpdateLog", + "load_datetime", + "dump_datetime", + "trigger_update", + "release_date_for_wheel_path", +) diff --git a/venv/Lib/site-packages/virtualenv/seed/wheels/util.py b/venv/Lib/site-packages/virtualenv/seed/wheels/util.py new file mode 100644 index 00000000..1240eb2d --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/seed/wheels/util.py @@ -0,0 +1,116 @@ +from __future__ import absolute_import, unicode_literals + +from operator import attrgetter +from zipfile import ZipFile + +from virtualenv.util.six import ensure_text + + +class Wheel(object): + def __init__(self, path): + # https://www.python.org/dev/peps/pep-0427/#file-name-convention + # The wheel filename is {distribution}-{version}(-{build tag})?-{python tag}-{abi tag}-{platform tag}.whl + self.path = path + self._parts = path.stem.split("-") + + @classmethod + def from_path(cls, path): + if path is not None and path.suffix == ".whl" and len(path.stem.split("-")) >= 5: + return cls(path) + return None + + @property + def distribution(self): + return self._parts[0] + + @property + def version(self): + return self._parts[1] + + @property + def version_tuple(self): + return self.as_version_tuple(self.version) + + @staticmethod + def as_version_tuple(version): + result = [] + for part in version.split(".")[0:3]: + try: + result.append(int(part)) + except ValueError: + break + if not result: + raise ValueError(version) + return tuple(result) + + @property + def name(self): + return self.path.name + + def support_py(self, py_version): + name = "{}.dist-info/METADATA".format("-".join(self.path.stem.split("-")[0:2])) + with ZipFile(ensure_text(str(self.path)), "r") as zip_file: + metadata = zip_file.read(name).decode("utf-8") + marker = "Requires-Python:" + requires = next((i[len(marker) :] for i in metadata.splitlines() if i.startswith(marker)), None) + if requires is None: # if it does not specify a python requires the assumption is compatible + return True + py_version_int = tuple(int(i) for i in py_version.split(".")) + for require in (i.strip() for i in requires.split(",")): + # https://www.python.org/dev/peps/pep-0345/#version-specifiers + for operator, check in [ + ("!=", lambda v: py_version_int != v), + ("==", lambda v: py_version_int == v), + ("<=", lambda v: py_version_int <= v), + (">=", lambda v: py_version_int >= v), + ("<", lambda v: py_version_int < v), + (">", lambda v: py_version_int > v), + ]: + if require.startswith(operator): + ver_str = require[len(operator) :].strip() + version = tuple((int(i) if i != "*" else None) for i in ver_str.split("."))[0:2] + if not check(version): + return False + break + return True + + def __repr__(self): + return "{}({})".format(self.__class__.__name__, self.path) + + def __str__(self): + return str(self.path) + + +def discover_wheels(from_folder, distribution, version, for_py_version): + 
wheels = [] + for filename in from_folder.iterdir(): + wheel = Wheel.from_path(filename) + if wheel and wheel.distribution == distribution: + if version is None or wheel.version == version: + if wheel.support_py(for_py_version): + wheels.append(wheel) + return sorted(wheels, key=attrgetter("version_tuple", "distribution"), reverse=True) + + +class Version: + #: the version bundled with virtualenv + bundle = "bundle" + embed = "embed" + #: custom version handlers + non_version = ( + bundle, + embed, + ) + + @staticmethod + def of_version(value): + return None if value in Version.non_version else value + + @staticmethod + def as_pip_req(distribution, version): + return "{}{}".format(distribution, Version.as_version_spec(version)) + + @staticmethod + def as_version_spec(version): + of_version = Version.of_version(version) + return "" if of_version is None else "=={}".format(of_version) diff --git a/venv/Lib/site-packages/virtualenv/util/__init__.py b/venv/Lib/site-packages/virtualenv/util/__init__.py new file mode 100644 index 00000000..32d02925 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/util/__init__.py @@ -0,0 +1,11 @@ +from __future__ import absolute_import, unicode_literals + +import sys + +if sys.version_info[0] == 3: + import configparser as ConfigParser +else: + import ConfigParser + + +__all__ = ("ConfigParser",) diff --git a/venv/Lib/site-packages/virtualenv/util/error.py b/venv/Lib/site-packages/virtualenv/util/error.py new file mode 100644 index 00000000..ac5aa502 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/util/error.py @@ -0,0 +1,13 @@ +"""Errors""" +from __future__ import absolute_import, unicode_literals + + +class ProcessCallFailed(RuntimeError): + """Failed a process call""" + + def __init__(self, code, out, err, cmd): + super(ProcessCallFailed, self).__init__(code, out, err, cmd) + self.code = code + self.out = out + self.err = err + self.cmd = cmd diff --git a/venv/Lib/site-packages/virtualenv/util/lock.py b/venv/Lib/site-packages/virtualenv/util/lock.py new file mode 100644 index 00000000..1fb8e4e7 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/util/lock.py @@ -0,0 +1,120 @@ +"""holds locking functionality that works across processes""" +from __future__ import absolute_import, unicode_literals + +import logging +import os +from contextlib import contextmanager +from threading import Lock, RLock + +from filelock import FileLock, Timeout + +from virtualenv.util.path import Path + + +class _CountedFileLock(FileLock): + def __init__(self, lock_file): + parent = os.path.dirname(lock_file) + if not os.path.exists(parent): + try: + os.makedirs(parent) + except OSError: + pass + super(_CountedFileLock, self).__init__(lock_file) + self.count = 0 + self.thread_safe = RLock() + + def acquire(self, timeout=None, poll_intervall=0.05): + with self.thread_safe: + if self.count == 0: + super(_CountedFileLock, self).acquire(timeout=timeout, poll_intervall=poll_intervall) + self.count += 1 + + def release(self, force=False): + with self.thread_safe: + if self.count == 1: + super(_CountedFileLock, self).release() + self.count = max(self.count - 1, 0) + + +_lock_store = {} +_store_lock = Lock() + + +class ReentrantFileLock(object): + def __init__(self, folder): + self._lock = None + path = Path(folder) + self.path = path.resolve() if path.exists() else path + + def __repr__(self): + return "{}({})".format(self.__class__.__name__, self.path) + + def __div__(self, other): + return ReentrantFileLock(self.path / other) + + def __truediv__(self, other): + return 
self.__div__(other) + + def _create_lock(self, name=""): + lock_file = str(self.path / "{}.lock".format(name)) + with _store_lock: + if lock_file not in _lock_store: + _lock_store[lock_file] = _CountedFileLock(lock_file) + return _lock_store[lock_file] + + @staticmethod + def _del_lock(lock): + with _store_lock: + if lock is not None: + with lock.thread_safe: + if lock.count == 0: + _lock_store.pop(lock.lock_file, None) + + def __del__(self): + self._del_lock(self._lock) + + def __enter__(self): + self._lock = self._create_lock() + self._lock_file(self._lock) + + def __exit__(self, exc_type, exc_val, exc_tb): + self._release(self._lock) + + def _lock_file(self, lock, no_block=False): + # multiple processes might be trying to get a first lock... so we cannot check if this directory exist without + # a lock, but that lock might then become expensive, and it's not clear where that lock should live. + # Instead here we just ignore if we fail to create the directory. + try: + os.makedirs(str(self.path)) + except OSError: + pass + try: + lock.acquire(0.0001) + except Timeout: + if no_block: + raise + logging.debug("lock file %s present, will block until released", lock.lock_file) + lock.release() # release the acquire try from above + lock.acquire() + + @staticmethod + def _release(lock): + lock.release() + + @contextmanager + def lock_for_key(self, name, no_block=False): + lock = self._create_lock(name) + try: + try: + self._lock_file(lock, no_block) + yield + finally: + self._release(lock) + finally: + self._del_lock(lock) + + +__all__ = ( + "Timeout", + "ReentrantFileLock", +) diff --git a/venv/Lib/site-packages/virtualenv/util/path/__init__.py b/venv/Lib/site-packages/virtualenv/util/path/__init__.py new file mode 100644 index 00000000..a7f71634 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/util/path/__init__.py @@ -0,0 +1,16 @@ +from __future__ import absolute_import, unicode_literals + +from ._pathlib import Path +from ._permission import make_exe, set_tree +from ._sync import copy, copytree, ensure_dir, safe_delete, symlink + +__all__ = ( + "ensure_dir", + "symlink", + "copy", + "copytree", + "Path", + "make_exe", + "set_tree", + "safe_delete", +) diff --git a/venv/Lib/site-packages/virtualenv/util/path/_pathlib/__init__.py b/venv/Lib/site-packages/virtualenv/util/path/_pathlib/__init__.py new file mode 100644 index 00000000..29a8c6bf --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/util/path/_pathlib/__init__.py @@ -0,0 +1,45 @@ +from __future__ import absolute_import, unicode_literals + +import sys + +import six + +if six.PY3: + from pathlib import Path + + if sys.version_info[0:2] == (3, 4): + # no read/write text on python3.4 + BuiltinPath = Path + + class Path(type(BuiltinPath())): + def read_text(self, encoding=None, errors=None): + """ + Open the file in text mode, read it, and close the file. + """ + with self.open(mode="r", encoding=encoding, errors=errors) as f: + return f.read() + + def write_text(self, data, encoding=None, errors=None): + """ + Open the file in text mode, write to it, and close the file. 
+ """ + if not isinstance(data, str): + raise TypeError("data must be str, not %s" % data.__class__.__name__) + with self.open(mode="w", encoding=encoding, errors=errors) as f: + return f.write(data) + + def mkdir(self, mode=0o777, parents=False, exist_ok=False): + if exist_ok and self.exists(): + return + super(type(BuiltinPath()), self).mkdir(mode, parents) + + +else: + if sys.platform == "win32": + # workaround for https://github.com/mcmtroffaes/pathlib2/issues/56 + from .via_os_path import Path + else: + from pathlib2 import Path + + +__all__ = ("Path",) diff --git a/venv/Lib/site-packages/virtualenv/util/path/_pathlib/via_os_path.py b/venv/Lib/site-packages/virtualenv/util/path/_pathlib/via_os_path.py new file mode 100644 index 00000000..d11aeaaa --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/util/path/_pathlib/via_os_path.py @@ -0,0 +1,145 @@ +from __future__ import absolute_import, unicode_literals + +import os +import platform +from contextlib import contextmanager + +from virtualenv.util.six import ensure_str, ensure_text + +IS_PYPY = platform.python_implementation() == "PyPy" + + +class Path(object): + def __init__(self, path): + if isinstance(path, Path): + _path = path._path + else: + _path = ensure_text(path) + if IS_PYPY: + _path = _path.encode("utf-8") + self._path = _path + + def __repr__(self): + return ensure_str("Path({})".format(ensure_text(self._path))) + + def __unicode__(self): + return ensure_text(self._path) + + def __str__(self): + return ensure_str(self._path) + + def __div__(self, other): + if isinstance(other, Path): + right = other._path + else: + right = ensure_text(other) + if IS_PYPY: + right = right.encode("utf-8") + return Path(os.path.join(self._path, right)) + + def __truediv__(self, other): + return self.__div__(other) + + def __eq__(self, other): + return self._path == (other._path if isinstance(other, Path) else None) + + def __ne__(self, other): + return not (self == other) + + def __hash__(self): + return hash(self._path) + + def exists(self): + return os.path.exists(self._path) + + @property + def parent(self): + return Path(os.path.abspath(os.path.join(self._path, os.path.pardir))) + + def resolve(self): + return Path(os.path.realpath(self._path)) + + @property + def name(self): + return os.path.basename(self._path) + + @property + def parts(self): + return self._path.split(os.sep) + + def is_file(self): + return os.path.isfile(self._path) + + def is_dir(self): + return os.path.isdir(self._path) + + def mkdir(self, parents=True, exist_ok=True): + try: + os.makedirs(self._path) + except OSError: + if not exist_ok: + raise + + def read_text(self, encoding="utf-8"): + return self.read_bytes().decode(encoding) + + def read_bytes(self): + with open(self._path, "rb") as file_handler: + return file_handler.read() + + def write_text(self, text, encoding="utf-8"): + with open(self._path, "wb") as file_handler: + file_handler.write(text.encode(encoding)) + + def iterdir(self): + for p in os.listdir(self._path): + yield Path(os.path.join(self._path, p)) + + @property + def suffix(self): + _, ext = os.path.splitext(self.name) + return ext + + @property + def stem(self): + base, _ = os.path.splitext(self.name) + return base + + @contextmanager + def open(self, mode="r"): + with open(self._path, mode) as file_handler: + yield file_handler + + @property + def parents(self): + result = [] + parts = self.parts + for i in range(len(parts) - 1): + result.append(Path(os.sep.join(parts[0 : i + 1]))) + return result[::-1] + + def unlink(self): + 
os.remove(self._path) + + def with_name(self, name): + return self.parent / name + + def is_symlink(self): + return os.path.islink(self._path) + + def relative_to(self, other): + if not self._path.startswith(other._path): + raise ValueError("{} does not start with {}".format(self._path, other._path)) + return Path(os.sep.join(self.parts[len(other.parts) :])) + + def stat(self): + return os.stat(self._path) + + def chmod(self, mode): + os.chmod(self._path, mode) + + def absolute(self): + return Path(os.path.abspath(self._path)) + + +__all__ = ("Path",) diff --git a/venv/Lib/site-packages/virtualenv/util/path/_permission.py b/venv/Lib/site-packages/virtualenv/util/path/_permission.py new file mode 100644 index 00000000..73bb6e81 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/util/path/_permission.py @@ -0,0 +1,32 @@ +from __future__ import absolute_import, unicode_literals + +import os +from stat import S_IXGRP, S_IXOTH, S_IXUSR + +from virtualenv.util.six import ensure_text + + +def make_exe(filename): + original_mode = filename.stat().st_mode + levels = [S_IXUSR, S_IXGRP, S_IXOTH] + for at in range(len(levels), 0, -1): + try: + mode = original_mode + for level in levels[:at]: + mode |= level + filename.chmod(mode) + break + except OSError: + continue + + +def set_tree(folder, stat): + for root, _, files in os.walk(ensure_text(str(folder))): + for filename in files: + os.chmod(os.path.join(root, filename), stat) + + +__all__ = ( + "make_exe", + "set_tree", +) diff --git a/venv/Lib/site-packages/virtualenv/util/path/_sync.py b/venv/Lib/site-packages/virtualenv/util/path/_sync.py new file mode 100644 index 00000000..421eb46b --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/util/path/_sync.py @@ -0,0 +1,96 @@ +from __future__ import absolute_import, unicode_literals + +import logging +import os +import shutil +from stat import S_IWUSR + +from six import PY2 + +from virtualenv.info import IS_CPYTHON, IS_WIN +from virtualenv.util.six import ensure_text + +if PY2 and IS_CPYTHON and IS_WIN: # CPython2 on Windows supports unicode paths if passed as unicode + + def norm(src): + return ensure_text(str(src)) + + +else: + norm = str + + +def ensure_dir(path): + if not path.exists(): + logging.debug("create folder %s", ensure_text(str(path))) + os.makedirs(norm(path)) + + +def ensure_safe_to_do(src, dest): + if src == dest: + raise ValueError("source and destination is the same {}".format(src)) + if not dest.exists(): + return + if dest.is_dir() and not dest.is_symlink(): + logging.debug("remove directory %s", dest) + safe_delete(dest) + else: + logging.debug("remove file %s", dest) + dest.unlink() + + +def symlink(src, dest): + ensure_safe_to_do(src, dest) + logging.debug("symlink %s", _Debug(src, dest)) + dest.symlink_to(src, target_is_directory=src.is_dir()) + + +def copy(src, dest): + ensure_safe_to_do(src, dest) + is_dir = src.is_dir() + method = copytree if is_dir else shutil.copy + logging.debug("copy %s", _Debug(src, dest)) + method(norm(src), norm(dest)) + + +def copytree(src, dest): + for root, _, files in os.walk(src): + dest_dir = os.path.join(dest, os.path.relpath(root, src)) + if not os.path.exists(dest_dir): + os.makedirs(dest_dir) + for name in files: + src_f = os.path.join(root, name) + dest_f = os.path.join(dest_dir, name) + shutil.copy(src_f, dest_f) + + +def safe_delete(dest): + def onerror(func, path, exc_info): + if not os.access(path, os.W_OK): + os.chmod(path, S_IWUSR) + func(path) + else: + raise + + shutil.rmtree(ensure_text(str(dest)), ignore_errors=True, 
onerror=onerror) + + +class _Debug(object): + def __init__(self, src, dest): + self.src = src + self.dest = dest + + def __str__(self): + return "{}{} to {}".format( + "directory " if self.src.is_dir() else "", ensure_text(str(self.src)), ensure_text(str(self.dest)), + ) + + +__all__ = ( + "ensure_dir", + "symlink", + "copy", + "copytree", + "safe_delete", +) diff --git a/venv/Lib/site-packages/virtualenv/util/six.py b/venv/Lib/site-packages/virtualenv/util/six.py new file mode 100644 index 00000000..16f1c6c9 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/util/six.py @@ -0,0 +1,50 @@ +"""Backward compatibility layer with older versions of six. + +This is used to avoid virtualenv requiring a version of six newer than what +the system may have. +""" +from __future__ import absolute_import + +from six import PY2, PY3, binary_type, text_type + +try: + from six import ensure_text +except ImportError: + + def ensure_text(s, encoding="utf-8", errors="strict"): + """Coerce *s* to six.text_type. + For Python 2: + - `unicode` -> `unicode` + - `str` -> `unicode` + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if isinstance(s, binary_type): + return s.decode(encoding, errors) + elif isinstance(s, text_type): + return s + else: + raise TypeError("not expecting type '%s'" % type(s)) + + +try: + from six import ensure_str +except ImportError: + + def ensure_str(s, encoding="utf-8", errors="strict"): + """Coerce *s* to `str`. + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if not isinstance(s, (text_type, binary_type)): + raise TypeError("not expecting type '%s'" % type(s)) + if PY2 and isinstance(s, text_type): + s = s.encode(encoding, errors) + elif PY3 and isinstance(s, binary_type): + s = s.decode(encoding, errors) + return s diff --git a/venv/Lib/site-packages/virtualenv/util/subprocess/__init__.py b/venv/Lib/site-packages/virtualenv/util/subprocess/__init__.py new file mode 100644 index 00000000..22006da8 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/util/subprocess/__init__.py @@ -0,0 +1,36 @@ +from __future__ import absolute_import, unicode_literals + +import subprocess +import sys + +import six + +if six.PY2 and sys.platform == "win32": + from . 
import _win_subprocess + + Popen = _win_subprocess.Popen +else: + Popen = subprocess.Popen + + +DETACHED_PROCESS = 0x00000008 + + +def run_cmd(cmd): + try: + process = Popen( + cmd, universal_newlines=True, stdin=subprocess.PIPE, stderr=subprocess.PIPE, stdout=subprocess.PIPE, + ) + out, err = process.communicate() # input disabled + code = process.returncode + except OSError as os_error: + code, out, err = os_error.errno, "", os_error.strerror + return code, out, err + + +__all__ = ( + "subprocess", + "Popen", + "run_cmd", + "DETACHED_PROCESS", +) diff --git a/venv/Lib/site-packages/virtualenv/util/subprocess/_win_subprocess.py b/venv/Lib/site-packages/virtualenv/util/subprocess/_win_subprocess.py new file mode 100644 index 00000000..33c77e31 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/util/subprocess/_win_subprocess.py @@ -0,0 +1,171 @@ +# flake8: noqa +# fmt: off +## issue: https://bugs.python.org/issue19264 + +import ctypes +import os +import platform +import subprocess +from ctypes import Structure, WinError, byref, c_char_p, c_void_p, c_wchar, c_wchar_p, sizeof, windll +from ctypes.wintypes import BOOL, BYTE, DWORD, HANDLE, LPVOID, LPWSTR, WORD + +import _subprocess + +## +## Types +## + +CREATE_UNICODE_ENVIRONMENT = 0x00000400 +LPCTSTR = c_char_p +LPTSTR = c_wchar_p +LPSECURITY_ATTRIBUTES = c_void_p +LPBYTE = ctypes.POINTER(BYTE) + +class STARTUPINFOW(Structure): + _fields_ = [ + ("cb", DWORD), ("lpReserved", LPWSTR), + ("lpDesktop", LPWSTR), ("lpTitle", LPWSTR), + ("dwX", DWORD), ("dwY", DWORD), + ("dwXSize", DWORD), ("dwYSize", DWORD), + ("dwXCountChars", DWORD), ("dwYCountChars", DWORD), + ("dwFillAtrribute", DWORD), ("dwFlags", DWORD), + ("wShowWindow", WORD), ("cbReserved2", WORD), + ("lpReserved2", LPBYTE), ("hStdInput", HANDLE), + ("hStdOutput", HANDLE), ("hStdError", HANDLE), + ] + +LPSTARTUPINFOW = ctypes.POINTER(STARTUPINFOW) + + +class PROCESS_INFORMATION(Structure): + _fields_ = [ + ("hProcess", HANDLE), ("hThread", HANDLE), + ("dwProcessId", DWORD), ("dwThreadId", DWORD), + ] + +LPPROCESS_INFORMATION = ctypes.POINTER(PROCESS_INFORMATION) + + +class DUMMY_HANDLE(ctypes.c_void_p): + + def __init__(self, *a, **kw): + super(DUMMY_HANDLE, self).__init__(*a, **kw) + self.closed = False + + def Close(self): + if not self.closed: + windll.kernel32.CloseHandle(self) + self.closed = True + + def __int__(self): + return self.value + + +CreateProcessW = windll.kernel32.CreateProcessW +CreateProcessW.argtypes = [ + LPCTSTR, LPTSTR, LPSECURITY_ATTRIBUTES, + LPSECURITY_ATTRIBUTES, BOOL, DWORD, LPVOID, LPCTSTR, + LPSTARTUPINFOW, LPPROCESS_INFORMATION, +] +CreateProcessW.restype = BOOL + + +## +## Patched functions/classes +## + +def CreateProcess( + executable, args, _p_attr, _t_attr, + inherit_handles, creation_flags, env, cwd, + startup_info, +): + """Create a process supporting unicode executable and args for win32 + + Python implementation of CreateProcess using CreateProcessW for Win32 + + """ + + si = STARTUPINFOW( + dwFlags=startup_info.dwFlags, + wShowWindow=startup_info.wShowWindow, + cb=sizeof(STARTUPINFOW), + ## XXXvlab: not sure of the casting here to ints. 
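+        ## the std handles below are passed through when None, otherwise coerced to int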
+ hStdInput=startup_info.hStdInput if startup_info.hStdInput is None else int(startup_info.hStdInput), + hStdOutput=startup_info.hStdOutput if startup_info.hStdOutput is None else int(startup_info.hStdOutput), + hStdError=startup_info.hStdError if startup_info.hStdError is None else int(startup_info.hStdError), + ) + + wenv = None + if env is not None: + ## LPCWSTR seems to be c_wchar_p, so let's say CWSTR is c_wchar + env = ( + unicode("").join([ + unicode("%s=%s\0") % (k, v) + for k, v in env.items() + ]) + ) + unicode("\0") + wenv = (c_wchar * len(env))() + wenv.value = env + + pi = PROCESS_INFORMATION() + creation_flags |= CREATE_UNICODE_ENVIRONMENT + + if CreateProcessW( + executable, args, None, None, + inherit_handles, creation_flags, + wenv, cwd, byref(si), byref(pi), + ): + return ( + DUMMY_HANDLE(pi.hProcess), DUMMY_HANDLE(pi.hThread), + pi.dwProcessId, pi.dwThreadId, + ) + raise WinError() + + +class Popen(subprocess.Popen): + """This supersedes Popen and corrects a bug in the CPython 2.7 implementation""" + + def _execute_child( + self, args, executable, preexec_fn, close_fds, + cwd, env, universal_newlines, + startupinfo, creationflags, shell, to_close, + p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite, + ): + """Code from part of _execute_child from Python 2.7 (9fbb65e) + + There are only two small changes, concerning the construction of + the final string in shell mode: we preempt the creation of + the command string when shell is True, because the original function + would try to encode unicode args, which we want to avoid so that + they can be sent as-is to ``CreateProcess``. + + """ + if startupinfo is None: + startupinfo = subprocess.STARTUPINFO() + if not isinstance(args, subprocess.types.StringTypes): + args = [i if isinstance(i, bytes) else i.encode('utf-8') for i in args] + args = subprocess.list2cmdline(args) + if platform.python_implementation() == "CPython": + args = args.decode('utf-8') + startupinfo.dwFlags |= _subprocess.STARTF_USESHOWWINDOW + startupinfo.wShowWindow = _subprocess.SW_HIDE + comspec = os.environ.get("COMSPEC", unicode("cmd.exe")) + if ( + _subprocess.GetVersion() >= 0x80000000 or + os.path.basename(comspec).lower() == "command.com" + ): + w9xpopen = self._find_w9xpopen() + args = unicode('"%s" %s') % (w9xpopen, args) + creationflags |= _subprocess.CREATE_NEW_CONSOLE + + super(Popen, self)._execute_child( + args, executable, + preexec_fn, close_fds, cwd, env, universal_newlines, + startupinfo, creationflags, False, to_close, p2cread, + p2cwrite, c2pread, c2pwrite, errread, errwrite, + ) + +_subprocess.CreateProcess = CreateProcess +# fmt: on diff --git a/venv/Lib/site-packages/virtualenv/util/zipapp.py b/venv/Lib/site-packages/virtualenv/util/zipapp.py new file mode 100644 index 00000000..85d9294f --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/util/zipapp.py @@ -0,0 +1,33 @@ +from __future__ import absolute_import, unicode_literals + +import logging +import os +import zipfile + +from virtualenv.info import IS_WIN, ROOT +from virtualenv.util.six import ensure_text + + +def read(full_path): + sub_file = _get_path_within_zip(full_path) + with zipfile.ZipFile(ROOT, "r") as zip_file: + with zip_file.open(sub_file) as file_handler: + return file_handler.read().decode("utf-8") + + +def extract(full_path, dest): + logging.debug("extract %s to %s", full_path, dest) + sub_file = _get_path_within_zip(full_path) + with zipfile.ZipFile(ROOT, "r") as zip_file: + info = zip_file.getinfo(sub_file) + info.filename = dest.name + zip_file.extract(info, 
ensure_text(str(dest.parent))) + + +def _get_path_within_zip(full_path): + full_path = os.path.abspath(str(full_path)) + sub_file = full_path[len(ROOT) + 1 :] + if IS_WIN: + # paths are always UNIX separators, even on Windows, though __file__ still follows platform default + sub_file = sub_file.replace(os.sep, "/") + return sub_file diff --git a/venv/Lib/site-packages/virtualenv/version.py b/venv/Lib/site-packages/virtualenv/version.py new file mode 100644 index 00000000..ac6882ef --- /dev/null +++ b/venv/Lib/site-packages/virtualenv/version.py @@ -0,0 +1,3 @@ +from __future__ import unicode_literals; + +__version__ = "20.0.27" \ No newline at end of file diff --git a/venv/Lib/site-packages/virtualenv_clone-0.5.4.dist-info/INSTALLER b/venv/Lib/site-packages/virtualenv_clone-0.5.4.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv_clone-0.5.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/Lib/site-packages/virtualenv_clone-0.5.4.dist-info/LICENSE b/venv/Lib/site-packages/virtualenv_clone-0.5.4.dist-info/LICENSE new file mode 100644 index 00000000..35df6eff --- /dev/null +++ b/venv/Lib/site-packages/virtualenv_clone-0.5.4.dist-info/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2011, Edward George, based on code contained within the +virtualenv project. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/venv/Lib/site-packages/virtualenv_clone-0.5.4.dist-info/METADATA b/venv/Lib/site-packages/virtualenv_clone-0.5.4.dist-info/METADATA new file mode 100644 index 00000000..c0e16fa7 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv_clone-0.5.4.dist-info/METADATA @@ -0,0 +1,61 @@ +Metadata-Version: 2.1 +Name: virtualenv-clone +Version: 0.5.4 +Summary: script to clone virtualenvs. 
+Home-page: https://github.com/edwardgeorge/virtualenv-clone +Author: Edward George +Author-email: edwardgeorge@gmail.com +License: MIT +Platform: UNKNOWN +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Intended Audience :: Developers +Classifier: Development Status :: 3 - Alpha +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* +Description-Content-Type: text/markdown + +virtualenv cloning script. + +[![Build Status](https://travis-ci.org/edwardgeorge/virtualenv-clone.svg?branch=master)](https://travis-ci.org/edwardgeorge/virtualenv-clone) + +A script for cloning a non-relocatable virtualenv. + +Virtualenv provides a way to make virtualenvs relocatable, which can then be +copied as desired. However, making a virtualenv relocatable this way breaks +the no-site-packages isolation of the virtualenv and brings other potentially +undesirable side effects of relative paths and `/usr/bin/env` shebangs. + +Also, the .pth and .egg-link rewriting doesn't seem to work as intended. This +attempts to overcome these issues and provide a way to easily clone an +existing virtualenv. + +It performs the following (see the usage sketch after this list): + +- copies `sys.argv[1]` dir to `sys.argv[2]` +- updates the hardcoded `VIRTUAL_ENV` variable in the activate script to the + new repo location. (`--relocatable` doesn't touch this) +- updates the shebangs of the various scripts in bin to the new Python if + they pointed to the old Python. (version numbering is retained.) + + it can also change `/usr/bin/env python` shebangs to be absolute, + though this functionality is not exposed at present. + +- checks `sys.path` of the cloned virtualenv and if any of the paths are from + the old environment it finds any `.pth` or `.egg` link files within sys.path + located in the new environment and makes sure any absolute paths to the + old environment are updated to the new environment. + +- finally it double checks `sys.path` again and will fail if there are still + paths from the old environment present. 
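+ +As a minimal sketch (the example paths are hypothetical), the `virtualenv-clone` console script registered in `entry_points.txt` can also be driven from Python via `subprocess`: + +```python +import subprocess + +# clone an existing virtualenv into a new location; both paths are placeholders +subprocess.check_call( +    ["virtualenv-clone", "/path/to/existing_env", "/path/to/new_env"] +) +```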
+ +NOTE: This script requires Python 2.7 or 3.4+ + + diff --git a/venv/Lib/site-packages/virtualenv_clone-0.5.4.dist-info/RECORD b/venv/Lib/site-packages/virtualenv_clone-0.5.4.dist-info/RECORD new file mode 100644 index 00000000..6474d006 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv_clone-0.5.4.dist-info/RECORD @@ -0,0 +1,10 @@ +../../Scripts/virtualenv-clone.exe,sha256=WZqgVBqu_FVVlpOt_HTE171DZzOPUAsPicFmvKGWk4k,106366 +__pycache__/clonevirtualenv.cpython-36.pyc,, +clonevirtualenv.py,sha256=vMwJ1FGFN8mu9U2vp0VI3JOshu_jm4sEfx3Rwy45-dA,10716 +virtualenv_clone-0.5.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +virtualenv_clone-0.5.4.dist-info/LICENSE,sha256=3PnuvXQqZEhe8jCBwUMO2YLhMvsTmk5JIbwFotjtB0U,1114 +virtualenv_clone-0.5.4.dist-info/METADATA,sha256=ZMasD8Oq9wJAJh9LPnrPCbDAHtODr4ljRNNrB_pXluE,2569 +virtualenv_clone-0.5.4.dist-info/RECORD,, +virtualenv_clone-0.5.4.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110 +virtualenv_clone-0.5.4.dist-info/entry_points.txt,sha256=k9yE5XeeSqTq-iEPVcmhgR-LJEwq0bU9eCknEit93bc,59 +virtualenv_clone-0.5.4.dist-info/top_level.txt,sha256=7UgyrNAtFl6jcGdichUP2e0v90RtaKIWw7jDUFEJvyg,16 diff --git a/venv/Lib/site-packages/virtualenv_clone-0.5.4.dist-info/WHEEL b/venv/Lib/site-packages/virtualenv_clone-0.5.4.dist-info/WHEEL new file mode 100644 index 00000000..ef99c6cf --- /dev/null +++ b/venv/Lib/site-packages/virtualenv_clone-0.5.4.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.34.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/Lib/site-packages/virtualenv_clone-0.5.4.dist-info/entry_points.txt b/venv/Lib/site-packages/virtualenv_clone-0.5.4.dist-info/entry_points.txt new file mode 100644 index 00000000..0a28b1f0 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv_clone-0.5.4.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +virtualenv-clone = clonevirtualenv:main + diff --git a/venv/Lib/site-packages/virtualenv_clone-0.5.4.dist-info/top_level.txt b/venv/Lib/site-packages/virtualenv_clone-0.5.4.dist-info/top_level.txt new file mode 100644 index 00000000..415a89a8 --- /dev/null +++ b/venv/Lib/site-packages/virtualenv_clone-0.5.4.dist-info/top_level.txt @@ -0,0 +1 @@ +clonevirtualenv diff --git a/venv/Lib/site-packages/virtualenvwrapper-4.8.4-py3.6-nspkg.pth b/venv/Lib/site-packages/virtualenvwrapper-4.8.4-py3.6-nspkg.pth new file mode 100644 index 00000000..5a7c7cdf --- /dev/null +++ b/venv/Lib/site-packages/virtualenvwrapper-4.8.4-py3.6-nspkg.pth @@ -0,0 +1 @@ +import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('virtualenvwrapper',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('virtualenvwrapper', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('virtualenvwrapper', [os.path.dirname(p)])));m = m or sys.modules.setdefault('virtualenvwrapper', types.ModuleType('virtualenvwrapper'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p) diff --git a/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/AUTHORS b/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/AUTHORS new file mode 100644 index 00000000..56de9bf2 --- /dev/null +++ b/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/AUTHORS @@ -0,0 +1,87 @@ +Andy Dirnberger +Aron Griffis +Axel H. +Bernardo B. 
Marques +Carl Karsten +Chris L +ChrisHas35 +Christopher Arndt +Clay McClure +Daniel Hahler +Daniel Kraus +David Szotten +David Wolever +Devin Sevilla +Doug Harris +Doug Harris +Doug Hellmann +Doug Hellmann +Erick M'bwana +Erik B +Greg Haskins +Harrison Katz +Harrison Katz +Harry +Hiro Ashiya +Ismail Badawi +Ismail Sunni +Jakob Gerhard Martinussen +James Bennett +Jason Myers +Jason Myers +Jason Myers +Jason Veatch +Jeff Byrnes +Jeff Widman +Jessamyn Smith +Joel Cross +John Brewer +Justin Abrahms +Justin Barber +Kevin Deldycke +Manuel Kaufmann +Martin Etnestad Johansen +Michael A. Smith +Michael Brooks +Michael Elsdoerfer +Michael Elsdörfer +Mike Fogel +Monty Taylor +Nat Williams +Nishikar Sapkota +Oleg Broytman +OmeGak +Paul McLanahan +Peter Bittner +Radu Voicilas +Ralph Bean +Raphael Das Gupta +Robson Peixoto +Sam Brightman +Sander Smits +Scott Stevenson +Shrikant-Sharat +SpotlightKid +Stephen McQuay +Steve Steiner +Thomas Desvenain +Tobias +William McVey +Xidorn Quan +Zhiming Wang +anatoly techtonik +bwanamarko +convert-repo +dbr +humitos +jforcier +kk6 +lendenmc +lonetwin +matt@sprout.staffknex.com +noirbizarre +seth@seth-ec-laptop +t2y +t2y +wam@talyn.cisco.com +Éric Lemoine diff --git a/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/INSTALLER b/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/LICENSE b/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/LICENSE new file mode 100644 index 00000000..c0dfc2e3 --- /dev/null +++ b/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/LICENSE @@ -0,0 +1,17 @@ +Copyright Doug Hellmann, All Rights Reserved + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Doug Hellmann not be used +in advertising or publicity pertaining to distribution of the software +without specific, written prior permission. + +DOUG HELLMANN DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, +INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO +EVENT SHALL DOUG HELLMANN BE LIABLE FOR ANY SPECIAL, INDIRECT OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF +USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
diff --git a/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/METADATA b/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/METADATA new file mode 100644 index 00000000..ba6afed7 --- /dev/null +++ b/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/METADATA @@ -0,0 +1,148 @@ +Metadata-Version: 2.1 +Name: virtualenvwrapper +Version: 4.8.4 +Summary: Enhancements to virtualenv +Home-page: https://virtualenvwrapper.readthedocs.io/ +Author: Doug Hellmann +Author-email: doug@doughellmann.com +License: MIT +Keywords: virtualenv +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Intended Audience :: Developers +Classifier: Environment :: Console +Requires-Dist: virtualenv +Requires-Dist: virtualenv-clone +Requires-Dist: stevedore + +.. -*- mode: rst -*- + +################# +virtualenvwrapper +################# + +virtualenvwrapper is a set of extensions to Ian Bicking's `virtualenv +`_ tool. The extensions include +wrappers for creating and deleting virtual environments and otherwise +managing your development workflow, making it easier to work on more +than one project at a time without introducing conflicts in their +dependencies. + +**Warning:** The 4.x release includes some potentially incompatible +changes for extensions from 3.x. The python modules for extensions are +now *always* run with ``PWD=$WORKON_HOME`` (previously the value of +PWD varied depending on the hook). The *shell* portion of any hook +(anything sourced by the user's shell when the hook is run) is still +run in the same place as before. + +======== +Features +======== + +1. Organizes all of your virtual environments in one place. + +2. Wrappers for creating, copying and deleting environments, including + user-configurable hooks. + +3. Use a single command to switch between environments. + +4. Tab completion for commands that take a virtual environment as + argument. + +5. User-configurable hooks for all operations. + +6. Plugin system for creating more sharable extensions. + +Rich Leland has created a short `screencast +`__ +showing off the features of virtualenvwrapper. + +============ +Installation +============ + +See the `project documentation +`__ for +installation and setup instructions. + +Supported Shells +================ + +virtualenvwrapper is a set of shell *functions* defined in Bourne +shell compatible syntax. It is tested under ``bash``, ``ksh``, and ``zsh``. +It may work with other shells, so if you find that it does work with a +shell not listed here please let me know. If you can modify it to +work with another shell, without completely rewriting it, send a pull +request through the bitbucket project page. If you write a clone to +work with an incompatible shell, let me know and I will link to it +from this page. + +Python Versions +=============== + +virtualenvwrapper is tested under Python 2.6 - 3.6. + +======= +Support +======= + +Join the `virtualenvwrapper Google Group +`__ to discuss +issues and features. 
+ +Report bugs via the `bug tracker on Bitbucket +`__. + +Shell Aliases +============= + +Since virtualenvwrapper is largely a shell script, it uses shell +commands for a lot of its actions. If your environment makes heavy +use of shell aliases or other customizations, you may encounter +issues. Before reporting bugs in the bug tracker, please test +*without* your aliases enabled. If you can identify the alias causing +the problem, that will help make virtualenvwrapper more robust. + +========== +Change Log +========== + +The `release history`_ is part of the project documentation. + +.. _release history: https://virtualenvwrapper.readthedocs.io/en/latest/history.html + + +======= +License +======= + +Copyright Doug Hellmann, All Rights Reserved + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Doug Hellmann not be used +in advertising or publicity pertaining to distribution of the software +without specific, written prior permission. + +DOUG HELLMANN DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, +INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO +EVENT SHALL DOUG HELLMANN BE LIABLE FOR ANY SPECIAL, INDIRECT OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF +USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + + + diff --git a/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/RECORD b/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/RECORD new file mode 100644 index 00000000..bd1b1749 --- /dev/null +++ b/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/RECORD @@ -0,0 +1,20 @@ +../../Scripts/virtualenvwrapper.sh,sha256=GUY7fQFpfnVHNuhNmbjaerhTtTMPt5U3qEg5iQGiSu4,41703 +../../Scripts/virtualenvwrapper_lazy.sh,sha256=eDsBBT2GUsLdPWC7lNdJjgpPCbwzmUD0Hzy2HOcNzcw,2210 +virtualenvwrapper-4.8.4-py3.6-nspkg.pth,sha256=AKzPHyQQLovCsZqPQe9KIKV8DbctcuwmzpmibFKKK_o,595 +virtualenvwrapper-4.8.4.dist-info/AUTHORS,sha256=DsjN1N8c_jHjW32lxAw8UNO_QRJ-0zpaMEiojWiutQI,3112 +virtualenvwrapper-4.8.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +virtualenvwrapper-4.8.4.dist-info/LICENSE,sha256=Q7nymeNOCB_ySQS5uokKRcmz2hWnT2ckt1m7ktAcUe0,952 +virtualenvwrapper-4.8.4.dist-info/METADATA,sha256=Z6bkPkuPtx7VqoawV9ttSR4CqLXfqclOqI4U7FGZC9o,5044 +virtualenvwrapper-4.8.4.dist-info/RECORD,, +virtualenvwrapper-4.8.4.dist-info/WHEEL,sha256=6T3TYZE4YFi2HTS1BeZHNXAi8N52OZT4O-dJ6-ome_4,116 +virtualenvwrapper-4.8.4.dist-info/entry_points.txt,sha256=i8J8yFdSFNnXtWAhwrH6XheMxSbYW7-ck5xHZNWASpw,1663 +virtualenvwrapper-4.8.4.dist-info/namespace_packages.txt,sha256=dMyzcfQMYyC3JQ15o2vhaoSkzzB9wm6UgJ8lkmbxlHg,18 +virtualenvwrapper-4.8.4.dist-info/pbr.json,sha256=0zqRWaNgQxfEBdVk-YWPn749ijFQ8NfFPwKBHTsHT2E,46 +virtualenvwrapper-4.8.4.dist-info/top_level.txt,sha256=dMyzcfQMYyC3JQ15o2vhaoSkzzB9wm6UgJ8lkmbxlHg,18 +virtualenvwrapper-4.8.4.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +virtualenvwrapper/__pycache__/hook_loader.cpython-36.pyc,, +virtualenvwrapper/__pycache__/project.cpython-36.pyc,, +virtualenvwrapper/__pycache__/user_scripts.cpython-36.pyc,, 
+virtualenvwrapper/hook_loader.py,sha256=_LWuLU6zrHapal01hH5XSMToivqKiSv_0eGnTvbISzI,6610 +virtualenvwrapper/project.py,sha256=IptaA-j6xe5-OTGK0WBlr2oLy5DxCHPyoYXmzlD0L_U,1411 +virtualenvwrapper/user_scripts.py,sha256=-qN8zepMAFW2PusGmi0tuBPX7W9CHRMm63lRM6ftix0,9398 diff --git a/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/WHEEL b/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/WHEEL new file mode 100644 index 00000000..ef99c6cf --- /dev/null +++ b/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.34.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/entry_points.txt b/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/entry_points.txt new file mode 100644 index 00000000..7a2a485e --- /dev/null +++ b/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/entry_points.txt @@ -0,0 +1,47 @@ +[virtualenvwrapper.get_env_details] +user_scripts = virtualenvwrapper.user_scripts:get_env_details + +[virtualenvwrapper.initialize] +project = virtualenvwrapper.project:initialize +user_scripts = virtualenvwrapper.user_scripts:initialize + +[virtualenvwrapper.initialize_source] +user_scripts = virtualenvwrapper.user_scripts:initialize_source + +[virtualenvwrapper.post_activate_source] +project = virtualenvwrapper.project:post_activate_source +user_scripts = virtualenvwrapper.user_scripts:post_activate_source + +[virtualenvwrapper.post_cpvirtualenv_source] +user_scripts = virtualenvwrapper.user_scripts:post_cpvirtualenv_source + +[virtualenvwrapper.post_deactivate_source] +user_scripts = virtualenvwrapper.user_scripts:post_deactivate_source + +[virtualenvwrapper.post_mkvirtualenv_source] +user_scripts = virtualenvwrapper.user_scripts:post_mkvirtualenv_source + +[virtualenvwrapper.post_rmvirtualenv] +user_scripts = virtualenvwrapper.user_scripts:post_rmvirtualenv + +[virtualenvwrapper.pre_activate] +user_scripts = virtualenvwrapper.user_scripts:pre_activate + +[virtualenvwrapper.pre_cpvirtualenv] +user_scripts = virtualenvwrapper.user_scripts:pre_cpvirtualenv + +[virtualenvwrapper.pre_deactivate_source] +user_scripts = virtualenvwrapper.user_scripts:pre_deactivate_source + +[virtualenvwrapper.pre_mkvirtualenv] +user_scripts = virtualenvwrapper.user_scripts:pre_mkvirtualenv + +[virtualenvwrapper.pre_rmvirtualenv] +user_scripts = virtualenvwrapper.user_scripts:pre_rmvirtualenv + +[virtualenvwrapper.project.post_mkproject_source] +project = virtualenvwrapper.project:post_mkproject_source + +[virtualenvwrapper.project.pre_mkproject] +project = virtualenvwrapper.project:pre_mkproject + diff --git a/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/namespace_packages.txt b/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/namespace_packages.txt new file mode 100644 index 00000000..5a1f32b0 --- /dev/null +++ b/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/namespace_packages.txt @@ -0,0 +1 @@ +virtualenvwrapper diff --git a/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/pbr.json b/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/pbr.json new file mode 100644 index 00000000..b6d63ffc --- /dev/null +++ b/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/pbr.json @@ -0,0 +1 @@ +{"git_version": "aa2ac7f", "is_release": true} \ No newline at end of file diff --git a/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/top_level.txt 
b/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/top_level.txt new file mode 100644 index 00000000..5a1f32b0 --- /dev/null +++ b/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/top_level.txt @@ -0,0 +1 @@ +virtualenvwrapper diff --git a/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/zip-safe b/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/zip-safe new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/venv/Lib/site-packages/virtualenvwrapper-4.8.4.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/venv/Lib/site-packages/virtualenvwrapper/hook_loader.py b/venv/Lib/site-packages/virtualenvwrapper/hook_loader.py new file mode 100644 index 00000000..542ee1db --- /dev/null +++ b/venv/Lib/site-packages/virtualenvwrapper/hook_loader.py @@ -0,0 +1,223 @@ +# encoding: utf-8 +# +# Copyright (c) 2010 Doug Hellmann. All rights reserved. +# +"""Load hooks for virtualenvwrapper. +""" + +import inspect +import itertools +import logging +import logging.handlers +import optparse +import os +import sys + +from stevedore import ExtensionManager +from stevedore import NamedExtensionManager + +LOG_FORMAT = '%(asctime)s %(levelname)s %(name)s %(message)s' + + +class GroupWriteRotatingFileHandler(logging.handlers.RotatingFileHandler): + """Taken from http://stackoverflow.com/questions/1407474 + """ + def _open(self): + prevumask = os.umask(0o002) + rtv = logging.handlers.RotatingFileHandler._open(self) + os.umask(prevumask) + return rtv + + +def main(): + parser = optparse.OptionParser( + usage='usage: %prog [options] <hook> [<arguments>]', + prog='virtualenvwrapper.hook_loader', + description='Manage hooks for virtualenvwrapper', + ) + + parser.add_option( + '-S', '--script', + help='Runs "<hook>" then "<hook>_source", writing the ' + + 'result to <filename>', + dest='script_filename', + default=None, + ) + parser.add_option( + '-s', '--source', + help='Print the shell commands to be run in the current shell', + action='store_true', + dest='sourcing', + default=False, + ) + parser.add_option( + '-l', '--list', + help='Print a list of the plugins available for the given hook', + action='store_true', + default=False, + dest='listing', + ) + parser.add_option( + '-v', '--verbose', + help='Show more information on the console', + action='store_const', + const=2, + default=1, + dest='verbose_level', + ) + parser.add_option( + '-q', '--quiet', + help='Show less information on the console', + action='store_const', + const=0, + dest='verbose_level', + ) + parser.add_option( + '-n', '--name', + help='Only run the hook from the named plugin', + action='append', + dest='names', + default=[], + ) + parser.disable_interspersed_args() # stop at the first argument without a '-' + options, args = parser.parse_args() + + root_logger = logging.getLogger('virtualenvwrapper') + + # Set up logging to a file + logfile = os.environ.get('VIRTUALENVWRAPPER_LOG_FILE') + if logfile: + root_logger.setLevel(logging.DEBUG) + file_handler = GroupWriteRotatingFileHandler( + logfile, + maxBytes=10240, + backupCount=1, + ) + formatter = logging.Formatter(LOG_FORMAT) + file_handler.setFormatter(formatter) + root_logger.addHandler(file_handler) + + # Send higher-level messages to the console, too + console = logging.StreamHandler(sys.stderr) + console_level = [logging.WARNING, + logging.INFO, + logging.DEBUG, + ][options.verbose_level] + console.setLevel(console_level) + formatter = logging.Formatter('%(name)s %(message)s') + console.setFormatter(formatter) + root_logger.addHandler(console) + root_logger.setLevel(console_level) + + # 
logging.getLogger(__name__).debug('cli args %s', args) + + # Determine which hook we're running + if not args: + if options.listing: + list_hooks() + return 0 + else: + parser.error('Please specify the hook to run') + hook = args[0] + + if options.sourcing and options.script_filename: + parser.error('--source and --script are mutually exclusive.') + + if options.sourcing: + hook += '_source' + + log = logging.getLogger('virtualenvwrapper.hook_loader') + + log.debug('Running %s hooks', hook) + run_hooks(hook, options, args) + + if options.script_filename: + log.debug('Saving sourcable %s hooks to %s', + hook, options.script_filename) + options.sourcing = True + output = open(options.script_filename, "w") + try: + output.write('# %s\n' % hook) + # output.write('echo %s\n' % hook) + # output.write('set -x\n') + run_hooks(hook + '_source', options, args, output) + finally: + output.close() + + return 0 + + +def run_hooks(hook, options, args, output=None): + log = logging.getLogger('virtualenvwrapper.hook_loader') + if output is None: + output = sys.stdout + + namespace = 'virtualenvwrapper.%s' % hook + if options.names: + log.debug('looking for %s hooks %s' % (namespace, options.names)) + hook_mgr = NamedExtensionManager(namespace, options.names) + else: + log.debug('looking for %s hooks' % namespace) + hook_mgr = ExtensionManager(namespace) + + if options.listing: + def show(ext): + output.write(' %-10s -- %s\n' % + (ext.name, inspect.getdoc(ext.plugin) or '')) + try: + hook_mgr.map(show) + except RuntimeError: # no templates + output.write(' No templates installed.\n') + + elif options.sourcing: + def get_source(ext, args): + # Show the shell commands so they can + # be run in the calling shell. + log.debug('getting source instructions for %s' % ext.name) + contents = (ext.plugin(args) or '').strip() + if contents: + output.write('# %s\n' % ext.name) + output.write(contents) + output.write("\n") + try: + hook_mgr.map(get_source, args[1:]) + except RuntimeError: + pass + + else: + # Just run the plugin ourselves + def invoke(ext, args): + log.debug('running %s' % ext.name) + ext.plugin(args) + try: + hook_mgr.map(invoke, args[1:]) + except RuntimeError: + pass + + +def list_hooks(output=None): + if output is None: + output = sys.stdout + static_names = [ + 'initialize', + 'get_env_details', + 'project.pre_mkproject', + 'project.post_mkproject', + 'project.template', + ] + pre_post_hooks = ( + '_'.join(h) + for h in itertools.product(['pre', 'post'], + ['mkvirtualenv', + 'rmvirtualenv', + 'activate', + 'deactivate', + 'cpvirtualenv', + ]) + ) + for hook in itertools.chain(static_names, pre_post_hooks): + output.write(hook + '\n') + + +if __name__ == '__main__': + main() diff --git a/venv/Lib/site-packages/virtualenvwrapper/project.py b/venv/Lib/site-packages/virtualenvwrapper/project.py new file mode 100644 index 00000000..51ec50bd --- /dev/null +++ b/venv/Lib/site-packages/virtualenvwrapper/project.py @@ -0,0 +1,61 @@ +# encoding: utf-8 +# +# Copyright (c) 2010 Doug Hellmann. All rights reserved. 
+# +"""virtualenvwrapper.project +""" + +import logging +import os + +from virtualenvwrapper.user_scripts import make_hook, run_global, PERMISSIONS + +log = logging.getLogger(__name__) + +GLOBAL_HOOKS = [ + # mkproject + ("premkproject", + "This hook is run after a new project is created " + "and before it is activated.", + PERMISSIONS), + ("postmkproject", + "This hook is run after a new project is activated.", + PERMISSIONS), +] + + +def initialize(args): + """Set up user hooks + """ + for filename, comment, permissions in GLOBAL_HOOKS: + make_hook(os.path.join('$VIRTUALENVWRAPPER_HOOK_DIR', filename), + comment, permissions) + return + + +def pre_mkproject(args): + log.debug('pre_mkproject %s', str(args)) + run_global('premkproject', *args) + return + + +def post_mkproject_source(args): + return """ +# +# Run user-provided scripts +# +[ -f "$VIRTUALENVWRAPPER_HOOK_DIR/postmkproject" ] && \ + source "$VIRTUALENVWRAPPER_HOOK_DIR/postmkproject" +""" + + +def post_activate_source(args): + return """ +# +# Change to the project directory, as long as we haven't been told not to. +# +[ -f "$VIRTUAL_ENV/$VIRTUALENVWRAPPER_PROJECT_FILENAME" \ + -a "$VIRTUALENVWRAPPER_PROJECT_CD" = 1 ] && \ + virtualenvwrapper_cd \ + "$(cat \"$VIRTUAL_ENV/$VIRTUALENVWRAPPER_PROJECT_FILENAME\")" +""" diff --git a/venv/Lib/site-packages/virtualenvwrapper/user_scripts.py b/venv/Lib/site-packages/virtualenvwrapper/user_scripts.py new file mode 100644 index 00000000..8a5eedc9 --- /dev/null +++ b/venv/Lib/site-packages/virtualenvwrapper/user_scripts.py @@ -0,0 +1,340 @@ +# encoding: utf-8 +# +# Copyright (c) 2010 Doug Hellmann. All rights reserved. +# +"""Plugin to handle hooks in user-defined scripts. +""" + +import logging +import os +import re +import stat +import subprocess +import sys + + +log = logging.getLogger(__name__) + +# Are we running under msys +if sys.platform == 'win32' and \ + os.environ.get('OS') == 'Windows_NT' and \ + os.environ.get('MSYSTEM') in ('MINGW32', 'MINGW64'): + is_msys = True + script_folder = 'Scripts' +else: + is_msys = False + script_folder = 'bin' + + +def _get_msys_shell(): + if 'MSYS_HOME' in os.environ: + return [get_path(os.environ['MSYS_HOME'], 'bin', 'sh.exe')] + else: + for path in os.environ['PATH'].split(';'): + if os.path.exists(os.path.join(path, 'sh.exe')): + return [get_path(path, 'sh.exe')] + raise Exception('Could not find sh.exe') + + +def run_script(script_path, *args): + """Execute a script in a subshell. + """ + if os.path.exists(script_path): + cmd = [script_path] + list(args) + if is_msys: + cmd = _get_msys_shell() + cmd + log.debug('running %s', str(cmd)) + try: + subprocess.call(cmd) + except OSError: + _, msg, _ = sys.exc_info() + log.error('could not run "%s": %s', script_path, str(msg)) + # log.debug('Returned %s', return_code) + return + + +def run_global(script_name, *args): + """Run a script from $VIRTUALENVWRAPPER_HOOK_DIR. + """ + script_path = get_path('$VIRTUALENVWRAPPER_HOOK_DIR', script_name) + run_script(script_path, *args) + return + + +PERMISSIONS = ( + stat.S_IRWXU # read/write/execute, user + | stat.S_IRGRP # read, group + | stat.S_IXGRP # execute, group + | stat.S_IROTH # read, others + | stat.S_IXOTH # execute, others +) +PERMISSIONS_SOURCED = PERMISSIONS & ~( + # remove executable bits for + stat.S_IXUSR # ... user + | stat.S_IXGRP # ... group + | stat.S_IXOTH # ... 
others +) + + +GLOBAL_HOOKS = [ + # initialize + ("initialize", + "This hook is sourced during the startup phase " + "when loading virtualenvwrapper.sh.", + PERMISSIONS_SOURCED), + + # mkvirtualenv + ("premkvirtualenv", + "This hook is run after a new virtualenv is created " + "and before it is activated.\n" + "# argument: name of new environment", + PERMISSIONS), + ("postmkvirtualenv", + "This hook is sourced after a new virtualenv is activated.", + PERMISSIONS_SOURCED), + + # cpvirtualenv: + # precpvirtualenv (run), + # postcpvirtualenv (sourced) + + # rmvirtualenv + ("prermvirtualenv", + "This hook is run before a virtualenv is deleted.\n" + "# argument: full path to environment directory", + PERMISSIONS), + ("postrmvirtualenv", + "This hook is run after a virtualenv is deleted.\n" + "# argument: full path to environment directory", + PERMISSIONS), + + # deactivate + ("predeactivate", + "This hook is sourced before every virtualenv is deactivated.", + PERMISSIONS_SOURCED), + ("postdeactivate", + "This hook is sourced after every virtualenv is deactivated.", + PERMISSIONS_SOURCED), + + # activate + ("preactivate", + "This hook is run before every virtualenv is activated.\n" + "# argument: environment name", + PERMISSIONS), + ("postactivate", + "This hook is sourced after every virtualenv is activated.", + PERMISSIONS_SOURCED), + + # mkproject: + # premkproject (run), + # postmkproject (sourced) + + # get_env_details + ("get_env_details", + "This hook is run when the list of virtualenvs is printed " + "so each name can include details.\n" + "# argument: environment name", + PERMISSIONS), +] + + +LOCAL_HOOKS = [ + # deactivate + ("predeactivate", + "This hook is sourced before this virtualenv is deactivated.", + PERMISSIONS_SOURCED), + ("postdeactivate", + "This hook is sourced after this virtualenv is deactivated.", + PERMISSIONS_SOURCED), + + # activate + ("preactivate", + "This hook is run before this virtualenv is activated.", + PERMISSIONS), + ("postactivate", + "This hook is sourced after this virtualenv is activated.", + PERMISSIONS_SOURCED), + + # get_env_details + ("get_env_details", + "This hook is run when the list of virtualenvs is printed " + "in 'long' mode so each name can include details.\n" + "# argument: environment name", + PERMISSIONS), +] + + +def make_hook(filename, comment, permissions): + """Create a hook script. + + :param filename: The name of the file to write. + :param comment: The comment to insert into the file. 
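+    :param permissions: The permission bits (mode) applied to the newly created file.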
+ """ + filename = get_path(filename) + if not os.path.exists(filename): + log.info('creating %s', filename) + f = open(filename, 'w') + try: + # for sourced scripts, the shebang line won't be used; + # it is useful for editors to recognize the file type, though + f.write("#!%(shell)s\n# %(comment)s\n\n" % { + 'comment': comment, + 'shell': os.environ.get('SHELL', '/bin/sh'), + }) + finally: + f.close() + os.chmod(filename, permissions) + return + + +# HOOKS + + +def initialize(args): + for filename, comment, permissions in GLOBAL_HOOKS: + make_hook(get_path('$VIRTUALENVWRAPPER_HOOK_DIR', filename), + comment, permissions) + return + + +def initialize_source(args): + return """ +# +# Run user-provided scripts +# +[ -f "$VIRTUALENVWRAPPER_HOOK_DIR/initialize" ] && \ + source "$VIRTUALENVWRAPPER_HOOK_DIR/initialize" +""" + + +def pre_mkvirtualenv(args): + log.debug('pre_mkvirtualenv %s', str(args)) + envname = args[0] + for filename, comment, permissions in LOCAL_HOOKS: + make_hook(get_path('$WORKON_HOME', envname, script_folder, filename), + comment, permissions) + run_global('premkvirtualenv', *args) + return + + +def post_mkvirtualenv_source(args): + log.debug('post_mkvirtualenv_source %s', str(args)) + return """ +# +# Run user-provided scripts +# +[ -f "$VIRTUALENVWRAPPER_HOOK_DIR/postmkvirtualenv" ] && \ + source "$VIRTUALENVWRAPPER_HOOK_DIR/postmkvirtualenv" +""" + + +def pre_cpvirtualenv(args): + log.debug('pre_cpvirtualenv %s', str(args)) + envname = args[0] + for filename, comment, permissions in LOCAL_HOOKS: + make_hook(get_path('$WORKON_HOME', envname, script_folder, filename), + comment, permissions) + run_global('precpvirtualenv', *args) + return + + +def post_cpvirtualenv_source(args): + log.debug('post_cpvirtualenv_source %s', str(args)) + return """ +# +# Run user-provided scripts +# +[ -f "$VIRTUALENVWRAPPER_HOOK_DIR/postcpvirtualenv" ] && \ + source "$VIRTUALENVWRAPPER_HOOK_DIR/postcpvirtualenv" +""" + + +def pre_rmvirtualenv(args): + log.debug('pre_rmvirtualenv') + run_global('prermvirtualenv', *args) + return + + +def post_rmvirtualenv(args): + log.debug('post_rmvirtualenv') + run_global('postrmvirtualenv', *args) + return + + +def pre_activate(args): + log.debug('pre_activate') + run_global('preactivate', *args) + script_path = get_path('$WORKON_HOME', args[0], + script_folder, 'preactivate') + run_script(script_path, *args) + return + + +def post_activate_source(args): + log.debug('post_activate_source') + return """ +# +# Run user-provided scripts +# +[ -f "$VIRTUALENVWRAPPER_HOOK_DIR/postactivate" ] && \ + source "$VIRTUALENVWRAPPER_HOOK_DIR/postactivate" +[ -f "$VIRTUAL_ENV/$VIRTUALENVWRAPPER_ENV_BIN_DIR/postactivate" ] && \ + source "$VIRTUAL_ENV/$VIRTUALENVWRAPPER_ENV_BIN_DIR/postactivate" +""" + + +def pre_deactivate_source(args): + log.debug('pre_deactivate_source') + return """ +# +# Run user-provided scripts +# +[ -f "$VIRTUAL_ENV/$VIRTUALENVWRAPPER_ENV_BIN_DIR/predeactivate" ] && \ + source "$VIRTUAL_ENV/$VIRTUALENVWRAPPER_ENV_BIN_DIR/predeactivate" +[ -f "$VIRTUALENVWRAPPER_HOOK_DIR/predeactivate" ] && \ + source "$VIRTUALENVWRAPPER_HOOK_DIR/predeactivate" +""" + + +def post_deactivate_source(args): + log.debug('post_deactivate_source') + return """ +# +# Run user-provided scripts +# +VIRTUALENVWRAPPER_LAST_VIRTUAL_ENV="$WORKON_HOME/%(env_name)s" +[ -f "$WORKON_HOME/%(env_name)s/bin/postdeactivate" ] && \ + source "$WORKON_HOME/%(env_name)s/bin/postdeactivate" +[ -f "$VIRTUALENVWRAPPER_HOOK_DIR/postdeactivate" ] && \ + source 
"$VIRTUALENVWRAPPER_HOOK_DIR/postdeactivate" +unset VIRTUALENVWRAPPER_LAST_VIRTUAL_ENV +""" % {'env_name': args[0]} + + +def get_env_details(args): + log.debug('get_env_details') + run_global('get_env_details', *args) + script_path = get_path('$WORKON_HOME', args[0], + script_folder, 'get_env_details') + run_script(script_path, *args) + return + + +def get_path(*args): + ''' + Get a full path from args. + + Path separator is determined according to the os and the shell and + allow to use is_msys. + + Variables and user are expanded during the process. + ''' + path = os.path.expanduser(os.path.expandvars(os.path.join(*args))) + if is_msys: + # MSYS accept unix or Win32 and sometimes + # it drives to mixed style paths + if re.match(r'^/[a-zA-Z](/|^)', path): + # msys path could starts with '/c/'-form drive letter + path = ''.join((path[1], ':', path[2:])) + path = path.replace('/', os.sep) + + return os.path.abspath(path) diff --git a/venv/Lib/site-packages/zipp-3.1.0.dist-info/INSTALLER b/venv/Lib/site-packages/zipp-3.1.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/venv/Lib/site-packages/zipp-3.1.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/Lib/site-packages/zipp-3.1.0.dist-info/LICENSE b/venv/Lib/site-packages/zipp-3.1.0.dist-info/LICENSE new file mode 100644 index 00000000..353924be --- /dev/null +++ b/venv/Lib/site-packages/zipp-3.1.0.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright Jason R. Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/venv/Lib/site-packages/zipp-3.1.0.dist-info/METADATA b/venv/Lib/site-packages/zipp-3.1.0.dist-info/METADATA new file mode 100644 index 00000000..7f684fed --- /dev/null +++ b/venv/Lib/site-packages/zipp-3.1.0.dist-info/METADATA @@ -0,0 +1,52 @@ +Metadata-Version: 2.1 +Name: zipp +Version: 3.1.0 +Summary: Backport of pathlib-compatible object wrapper for zip files +Home-page: https://github.com/jaraco/zipp +Author: Jason R. 
Coombs +Author-email: jaraco@jaraco.com +License: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.6 +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=3.2) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Provides-Extra: testing +Requires-Dist: jaraco.itertools ; extra == 'testing' +Requires-Dist: func-timeout ; extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/zipp.svg + :target: `PyPI link`_ + +.. image:: https://img.shields.io/pypi/pyversions/zipp.svg + :target: `PyPI link`_ + +.. _PyPI link: https://pypi.org/project/zipp + +.. image:: https://dev.azure.com/jaraco/zipp/_apis/build/status/jaraco.zipp?branchName=master + :target: https://dev.azure.com/jaraco/zipp/_build/latest?definitionId=1&branchName=master + +.. image:: https://img.shields.io/travis/jaraco/zipp/master.svg + :target: https://travis-ci.org/jaraco/zipp + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + +.. image:: https://img.shields.io/appveyor/ci/jaraco/zipp/master.svg + :target: https://ci.appveyor.com/project/jaraco/zipp/branch/master + +.. .. image:: https://readthedocs.org/projects/zipp/badge/?version=latest +.. :target: https://zipp.readthedocs.io/en/latest/?badge=latest + + +A pathlib-compatible Zipfile object wrapper. A backport of the +`Path object `_. + + diff --git a/venv/Lib/site-packages/zipp-3.1.0.dist-info/RECORD b/venv/Lib/site-packages/zipp-3.1.0.dist-info/RECORD new file mode 100644 index 00000000..a1afa1bf --- /dev/null +++ b/venv/Lib/site-packages/zipp-3.1.0.dist-info/RECORD @@ -0,0 +1,8 @@ +__pycache__/zipp.cpython-36.pyc,, +zipp-3.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +zipp-3.1.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050 +zipp-3.1.0.dist-info/METADATA,sha256=EbCdCb8ZzwzYDA7RF8R830VeruipjOKnj32zpMxPsFM,1899 +zipp-3.1.0.dist-info/RECORD,, +zipp-3.1.0.dist-info/WHEEL,sha256=g4nMs7d-Xl9-xC9XovUrsDHGXt-FT0E17Yqo92DEfvY,92 +zipp-3.1.0.dist-info/top_level.txt,sha256=iAbdoSHfaGqBfVb2XuR9JqSQHCoOsOtG6y9C_LSpqFw,5 +zipp.py,sha256=o8W25XfoR5DD_krEQLbYrNMmK-x26JvaqeKuboC91YY,7121 diff --git a/venv/Lib/site-packages/zipp-3.1.0.dist-info/WHEEL b/venv/Lib/site-packages/zipp-3.1.0.dist-info/WHEEL new file mode 100644 index 00000000..b552003f --- /dev/null +++ b/venv/Lib/site-packages/zipp-3.1.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.34.2) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/Lib/site-packages/zipp-3.1.0.dist-info/top_level.txt b/venv/Lib/site-packages/zipp-3.1.0.dist-info/top_level.txt new file mode 100644 index 00000000..e82f676f --- /dev/null +++ b/venv/Lib/site-packages/zipp-3.1.0.dist-info/top_level.txt @@ -0,0 +1 @@ +zipp diff --git a/venv/Lib/site-packages/zipp.py b/venv/Lib/site-packages/zipp.py new file mode 100644 index 00000000..cc0fe5f0 --- /dev/null +++ b/venv/Lib/site-packages/zipp.py @@ -0,0 +1,285 @@ +import io +import posixpath +import zipfile +import itertools +import contextlib +import sys + +if sys.version_info < (3, 7): + from collections import OrderedDict +else: + OrderedDict = dict + + +def _parents(path): + """ + Given a path with 
elements separated by + posixpath.sep, generate all parents of that path. + + >>> list(_parents('b/d')) + ['b'] + >>> list(_parents('/b/d/')) + ['/b'] + >>> list(_parents('b/d/f/')) + ['b/d', 'b'] + >>> list(_parents('b')) + [] + >>> list(_parents('')) + [] + """ + return itertools.islice(_ancestry(path), 1, None) + + +def _ancestry(path): + """ + Given a path with elements separated by + posixpath.sep, generate all elements of that path + + >>> list(_ancestry('b/d')) + ['b/d', 'b'] + >>> list(_ancestry('/b/d/')) + ['/b/d', '/b'] + >>> list(_ancestry('b/d/f/')) + ['b/d/f', 'b/d', 'b'] + >>> list(_ancestry('b')) + ['b'] + >>> list(_ancestry('')) + [] + """ + path = path.rstrip(posixpath.sep) + while path and path != posixpath.sep: + yield path + path, tail = posixpath.split(path) + + +_dedupe = OrderedDict.fromkeys +"""Deduplicate an iterable in original order""" + + +def _difference(minuend, subtrahend): + """ + Return items in minuend not in subtrahend, retaining order + with O(1) lookup. + """ + return itertools.filterfalse(set(subtrahend).__contains__, minuend) + + +class CompleteDirs(zipfile.ZipFile): + """ + A ZipFile subclass that ensures that implied directories + are always included in the namelist. + """ + + @staticmethod + def _implied_dirs(names): + parents = itertools.chain.from_iterable(map(_parents, names)) + as_dirs = (p + posixpath.sep for p in parents) + return _dedupe(_difference(as_dirs, names)) + + def namelist(self): + names = super(CompleteDirs, self).namelist() + return names + list(self._implied_dirs(names)) + + def _name_set(self): + return set(self.namelist()) + + def resolve_dir(self, name): + """ + If the name represents a directory, return that name + as a directory (with the trailing slash). + """ + names = self._name_set() + dirname = name + '/' + dir_match = name not in names and dirname in names + return dirname if dir_match else name + + @classmethod + def make(cls, source): + """ + Given a source (filename or zipfile), return an + appropriate CompleteDirs subclass. + """ + if isinstance(source, CompleteDirs): + return source + + if not isinstance(source, zipfile.ZipFile): + return cls(_pathlib_compat(source)) + + # Only allow for FastPath when supplied zipfile is read-only + if 'r' not in source.mode: + cls = CompleteDirs + + res = cls.__new__(cls) + vars(res).update(vars(source)) + return res + + +class FastLookup(CompleteDirs): + """ + ZipFile subclass to ensure implicit + dirs exist and are resolved rapidly. + """ + def namelist(self): + with contextlib.suppress(AttributeError): + return self.__names + self.__names = super(FastLookup, self).namelist() + return self.__names + + def _name_set(self): + with contextlib.suppress(AttributeError): + return self.__lookup + self.__lookup = super(FastLookup, self)._name_set() + return self.__lookup + + +def _pathlib_compat(path): + """ + For path-like objects, convert to a filename for compatibility + on Python 3.6.1 and earlier. + """ + try: + return path.__fspath__() + except AttributeError: + return str(path) + + +class Path: + """ + A pathlib-compatible interface for zip files. + + Consider a zip file with this structure:: + + . 
+ ├── a.txt + └── b + ├── c.txt + └── d + └── e.txt + + >>> data = io.BytesIO() + >>> zf = zipfile.ZipFile(data, 'w') + >>> zf.writestr('a.txt', 'content of a') + >>> zf.writestr('b/c.txt', 'content of c') + >>> zf.writestr('b/d/e.txt', 'content of e') + >>> zf.filename = 'abcde.zip' + + Path accepts the zipfile object itself or a filename + + >>> root = Path(zf) + + From there, several path operations are available. + + Directory iteration (including the zip file itself): + + >>> a, b = root.iterdir() + >>> a + Path('abcde.zip', 'a.txt') + >>> b + Path('abcde.zip', 'b/') + + name property: + + >>> b.name + 'b' + + join with divide operator: + + >>> c = b / 'c.txt' + >>> c + Path('abcde.zip', 'b/c.txt') + >>> c.name + 'c.txt' + + Read text: + + >>> c.read_text() + 'content of c' + + existence: + + >>> c.exists() + True + >>> (b / 'missing.txt').exists() + False + + Coercion to string: + + >>> str(c) + 'abcde.zip/b/c.txt' + """ + + __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})" + + def __init__(self, root, at=""): + self.root = FastLookup.make(root) + self.at = at + + def open(self, mode='r', *args, pwd=None, **kwargs): + """ + Open this entry as text or binary following the semantics + of ``pathlib.Path.open()`` by passing arguments through + to io.TextIOWrapper(). + """ + if self.is_dir(): + raise IsADirectoryError(self) + zip_mode = mode[0] + if not self.exists() and zip_mode == 'r': + raise FileNotFoundError(self) + stream = self.root.open(self.at, zip_mode, pwd=pwd) + if 'b' in mode: + if args or kwargs: + raise ValueError("encoding args invalid for binary operation") + return stream + return io.TextIOWrapper(stream, *args, **kwargs) + + @property + def name(self): + return posixpath.basename(self.at.rstrip("/")) + + def read_text(self, *args, **kwargs): + with self.open('r', *args, **kwargs) as strm: + return strm.read() + + def read_bytes(self): + with self.open('rb') as strm: + return strm.read() + + def _is_child(self, path): + return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/") + + def _next(self, at): + return Path(self.root, at) + + def is_dir(self): + return not self.at or self.at.endswith("/") + + def is_file(self): + return not self.is_dir() + + def exists(self): + return self.at in self.root._name_set() + + def iterdir(self): + if not self.is_dir(): + raise ValueError("Can't listdir a file") + subs = map(self._next, self.root.namelist()) + return filter(self._is_child, subs) + + def __str__(self): + return posixpath.join(self.root.filename, self.at) + + def __repr__(self): + return self.__repr.format(self=self) + + def joinpath(self, add): + next = posixpath.join(self.at, _pathlib_compat(add)) + return self._next(self.root.resolve_dir(next)) + + __truediv__ = joinpath + + @property + def parent(self): + parent_at = posixpath.dirname(self.at.rstrip('/')) + if parent_at: + parent_at += '/' + return self._next(parent_at) diff --git a/venv/Scripts/pbr.exe b/venv/Scripts/pbr.exe new file mode 100644 index 00000000..27424a7a Binary files /dev/null and b/venv/Scripts/pbr.exe differ diff --git a/venv/Scripts/virtualenv-clone.exe b/venv/Scripts/virtualenv-clone.exe new file mode 100644 index 00000000..1bad161a Binary files /dev/null and b/venv/Scripts/virtualenv-clone.exe differ diff --git a/venv/Scripts/virtualenv.exe b/venv/Scripts/virtualenv.exe new file mode 100644 index 00000000..1b3fa210 Binary files /dev/null and b/venv/Scripts/virtualenv.exe differ diff --git a/venv/Scripts/virtualenvwrapper.sh b/venv/Scripts/virtualenvwrapper.sh 
new file mode 100644
index 00000000..ade8ddd2
--- /dev/null
+++ b/venv/Scripts/virtualenvwrapper.sh
@@ -0,0 +1,1358 @@
+# -*- mode: shell-script -*-
+#
+# Shell functions to act as wrapper for Ian Bicking's virtualenv
+# (http://pypi.python.org/pypi/virtualenv)
+#
+#
+# Copyright Doug Hellmann, All Rights Reserved
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose and without fee is hereby granted,
+# provided that the above copyright notice appear in all copies and that
+# both that copyright notice and this permission notice appear in
+# supporting documentation, and that the name of Doug Hellmann not be used
+# in advertising or publicity pertaining to distribution of the software
+# without specific, written prior permission.
+#
+# DOUG HELLMANN DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
+# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
+# EVENT SHALL DOUG HELLMANN BE LIABLE FOR ANY SPECIAL, INDIRECT OR
+# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF
+# USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+# OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+#
+#
+# Project home page: http://www.doughellmann.com/projects/virtualenvwrapper/
+#
+#
+# Setup:
+#
+#  1. Create a directory to hold the virtual environments.
+#     (mkdir $HOME/.virtualenvs).
+#  2. Add a line like "export WORKON_HOME=$HOME/.virtualenvs"
+#     to your .bashrc.
+#  3. Add a line like "source /path/to/this/file/virtualenvwrapper.sh"
+#     to your .bashrc.
+#  4. Run: source ~/.bashrc
+#  5. Run: workon
+#  6. A list of environments, empty, is printed.
+#  7. Run: mkvirtualenv temp
+#  8. Run: workon
+#  9. This time, the "temp" environment is included.
+# 10. Run: workon temp
+# 11. The virtual environment is activated.
+#
+
+# Locate the global Python where virtualenvwrapper is installed.
+if [ "${VIRTUALENVWRAPPER_PYTHON:-}" = "" ]
+then
+    VIRTUALENVWRAPPER_PYTHON="$(command \which python)"
+fi
+
+# Set the name of the virtualenv app to use.
+if [ "${VIRTUALENVWRAPPER_VIRTUALENV:-}" = "" ]
+then
+    VIRTUALENVWRAPPER_VIRTUALENV="virtualenv"
+fi
+
+# Set the name of the virtualenv-clone app to use.
+if [ "${VIRTUALENVWRAPPER_VIRTUALENV_CLONE:-}" = "" ]
+then
+    VIRTUALENVWRAPPER_VIRTUALENV_CLONE="virtualenv-clone"
+fi
+
+# Define the script folder depending on the platform (Win32/Unix)
+VIRTUALENVWRAPPER_ENV_BIN_DIR="bin"
+if [ "${OS:-}" = "Windows_NT" ] && ([ "${MSYSTEM:-}" = "MINGW32" ] || [ "${MSYSTEM:-}" = "MINGW64" ])
+then
+    # Only assign this for msys; cygwin uses standard Unix paths
+    # and its own python installation
+    VIRTUALENVWRAPPER_ENV_BIN_DIR="Scripts"
+fi
+
+# Let the user override the name of the file that holds the project
+# directory name.
+if [ "${VIRTUALENVWRAPPER_PROJECT_FILENAME:-}" = "" ]
+then
+    export VIRTUALENVWRAPPER_PROJECT_FILENAME=".project"
+fi
+
+# Let the user tell us they never want to cd to projects
+# automatically.
+export VIRTUALENVWRAPPER_WORKON_CD=${VIRTUALENVWRAPPER_WORKON_CD:-1}
+
+# Remember where we are running from.
+if [ -z "${VIRTUALENVWRAPPER_SCRIPT:-}" ]
+then
+    if [ -n "$BASH" ]
+    then
+        export VIRTUALENVWRAPPER_SCRIPT="$BASH_SOURCE"
+    elif [ -n "$ZSH_VERSION" ]
+    then
+        export VIRTUALENVWRAPPER_SCRIPT="$0"
+    else
+        export VIRTUALENVWRAPPER_SCRIPT="${.sh.file}"
+    fi
+fi
+
+# Portable shell scripting is hard, let's go shopping.
+#
+# People insist on aliasing commands like 'cd', either with a real
+# alias or even a shell function. Under bash and zsh, "builtin" forces
+# the use of a command that is part of the shell itself instead of an
+# alias, function, or external command, while "command" does something
+# similar but allows external commands. Under ksh "builtin" registers
+# a new command from a shared library, but "command" will pick up
+# existing builtin commands. We need to use a builtin for cd because
+# we are trying to change the state of the current shell, so we use
+# "builtin" for bash and zsh but "command" under ksh.
+function virtualenvwrapper_cd {
+    if [ -n "${BASH:-}" ]
+    then
+        builtin \cd "$@"
+    elif [ -n "${ZSH_VERSION:-}" ]
+    then
+        builtin \cd -q "$@"
+    else
+        command \cd "$@"
+    fi
+}
+
+function virtualenvwrapper_expandpath {
+    if [ "$1" = "" ]; then
+        return 1
+    else
+        "$VIRTUALENVWRAPPER_PYTHON" -c "import os,sys; sys.stdout.write(os.path.normpath(os.path.expanduser(os.path.expandvars(\"$1\")))+'\n')"
+        return 0
+    fi
+}
+
+function virtualenvwrapper_absolutepath {
+    if [ "$1" = "" ]; then
+        return 1
+    else
+        "$VIRTUALENVWRAPPER_PYTHON" -c "import os,sys; sys.stdout.write(os.path.abspath(\"$1\")+'\n')"
+        return 0
+    fi
+}
+
+function virtualenvwrapper_derive_workon_home {
+    typeset workon_home_dir="$WORKON_HOME"
+
+    # Make sure there is a default value for WORKON_HOME.
+    # You can override this setting in your .bashrc.
+    if [ "$workon_home_dir" = "" ]
+    then
+        workon_home_dir="$HOME/.virtualenvs"
+    fi
+
+    # If the path is relative, prefix it with $HOME
+    # (note: for compatibility)
+    if echo "$workon_home_dir" | (unset GREP_OPTIONS; command \grep '^[^/~]' > /dev/null)
+    then
+        workon_home_dir="$HOME/$WORKON_HOME"
+    fi
+
+    # Only call on Python to fix the path if it looks like the
+    # path might contain stuff to expand.
+    # (it might be possible to do this in shell, but I don't know a
+    # cross-shell-safe way of doing it -wolever)
+    if echo "$workon_home_dir" | (unset GREP_OPTIONS; command \egrep '([\$~]|//)' >/dev/null)
+    then
+        # This will normalize the path by:
+        # - Removing extra slashes (e.g., when TMPDIR ends in a slash)
+        # - Expanding variables (e.g., $foo)
+        # - Converting ~s to complete paths (e.g., ~/ to /home/brian/ and ~arthur to /home/arthur)
+        workon_home_dir="$(virtualenvwrapper_expandpath "$workon_home_dir")"
+    fi
+
+    echo "$workon_home_dir"
+    return 0
+}
+
+# Check if the WORKON_HOME directory exists and create it if it does
+# not. This is kept separate from creating the files in it because
+# this used to just error out, and other things may rely on the
+# directory existing before that happens.
+function virtualenvwrapper_verify_workon_home {
+    RC=0
+    if [ ! -d "$WORKON_HOME/" ]
+    then
+        if [ "$1" != "-q" ]
+        then
+            echo "NOTE: Virtual environments directory $WORKON_HOME does not exist. Creating..." 1>&2
+        fi
+        mkdir -p "$WORKON_HOME"
+        RC=$?
+    fi
+    return $RC
+}
+
+#HOOK_VERBOSE_OPTION="-q"
+
+# Function to wrap mktemp so tests can replace it for error condition
+# testing.
+function virtualenvwrapper_mktemp {
+    command \mktemp "$@"
+}
+
+# Expects 1 argument, the suffix for the new file.
+function virtualenvwrapper_tempfile {
+    # Note: the 'X's must come last
+    typeset suffix=${1:-hook}
+    typeset file
+
+    file="$(virtualenvwrapper_mktemp -t virtualenvwrapper-$suffix-XXXXXXXXXX)"
+    touch "$file"
+    if [ $? -ne 0 ] || [ -z "$file" ] || [ ! -f "$file" ]
+    then
+        echo "ERROR: virtualenvwrapper could not create a temporary file name." 1>&2
+        return 1
+    fi
+    echo $file
+    return 0
+}
+
+# Run the hooks
+function virtualenvwrapper_run_hook {
+    typeset hook_script
+    typeset result
+
+    hook_script="$(virtualenvwrapper_tempfile ${1}-hook)" || return 1
+
+    # Use a subshell to run the python interpreter with hook_loader so
+    # we can change the working directory. This avoids having the
+    # Python 3 interpreter decide that its "prefix" is the virtualenv
+    # if we happen to be inside the virtualenv when we start.
+    ( \
+        virtualenvwrapper_cd "$WORKON_HOME" &&
+        "$VIRTUALENVWRAPPER_PYTHON" -m 'virtualenvwrapper.hook_loader' \
+            ${HOOK_VERBOSE_OPTION:-} --script "$hook_script" "$@" \
+    )
+    result=$?
+
+    if [ $result -eq 0 ]
+    then
+        if [ ! -f "$hook_script" ]
+        then
+            echo "ERROR: virtualenvwrapper_run_hook could not find temporary file $hook_script" 1>&2
+            command \rm -f "$hook_script"
+            return 2
+        fi
+        # cat "$hook_script"
+        source "$hook_script"
+    elif [ "${1}" = "initialize" ]
+    then
+        cat - 1>&2 <<EOF
+virtualenvwrapper.sh: There was a problem running the initialization hooks.
+
+If Python could not import the module virtualenvwrapper.hook_loader,
+check that virtualenvwrapper has been installed for
+VIRTUALENVWRAPPER_PYTHON=$VIRTUALENVWRAPPER_PYTHON and that PATH is
+set properly.
+EOF
+    fi
+
+    command \rm -f "$hook_script"
+    return $result
+}
+
+# Verify that the passed resource is in the path and exists.
+function virtualenvwrapper_verify_resource {
+    typeset exe_path="$(command \which "$1" | (unset GREP_OPTIONS; command \grep -v "not found"))"
+    if [ "$exe_path" = "" ]
+    then
+        echo "ERROR: virtualenvwrapper could not find $1 in your path" >&2
+        return 1
+    fi
+    if [ ! -e "$exe_path" ]
+    then
+        echo "ERROR: Found $1 in path as \"$exe_path\" but that does not exist" >&2
+        return 1
+    fi
+    return 0
+}
+
+
+# Verify that virtualenv is installed and visible
+function virtualenvwrapper_verify_virtualenv {
+    virtualenvwrapper_verify_resource $VIRTUALENVWRAPPER_VIRTUALENV
+}
+
+
+function virtualenvwrapper_verify_virtualenv_clone {
+    virtualenvwrapper_verify_resource $VIRTUALENVWRAPPER_VIRTUALENV_CLONE
+}
+
+
+# Verify that the requested environment exists
+function virtualenvwrapper_verify_workon_environment {
+    typeset env_name="$1"
+    if [ ! -d "$WORKON_HOME/$env_name" ]
+    then
+        echo "ERROR: Environment '$env_name' does not exist. Create it with 'mkvirtualenv $env_name'." >&2
+        return 1
+    fi
+    return 0
+}
+
+# Verify that the active environment exists
+function virtualenvwrapper_verify_active_environment {
+    if [ ! -n "${VIRTUAL_ENV}" ] || [ ! -d "${VIRTUAL_ENV}" ]
+    then
+        echo "ERROR: no virtualenv active, or active virtualenv is missing" >&2
+        return 1
+    fi
+    return 0
+}
+
+# Help text for mkvirtualenv
+function virtualenvwrapper_mkvirtualenv_help {
+    echo "Usage: mkvirtualenv [-a project_path] [-i package] [-r requirements_file] [virtualenv options] env_name"
+    echo
+    echo " -a project_path"
+    echo
+    echo "    Provide a full path to a project directory to associate with"
+    echo "    the new environment."
+    echo
+    echo " -i package"
+    echo
+    echo "    Install a package after the environment is created."
+    echo "    This option may be repeated."
+    echo
+    echo " -r requirements_file"
+    echo
+    echo "    Provide a pip requirements file to install a base set of packages"
+    echo "    into the new environment."
+    echo;
+    echo 'virtualenv help:';
+    echo;
+    "$VIRTUALENVWRAPPER_VIRTUALENV" $@;
+}
+
+# Create a new environment, in the WORKON_HOME.
+#
+# Usage: mkvirtualenv [options] ENVNAME
+# (where the options are passed directly to virtualenv)
+#
+#:help:mkvirtualenv: Create a new virtualenv in $WORKON_HOME
+function mkvirtualenv {
+    typeset -a in_args
+    typeset -a out_args
+    typeset -i i
+    typeset tst
+    typeset a
+    typeset envname
+    typeset requirements
+    typeset packages
+    typeset interpreter
+    typeset project
+
+    in_args=( "$@" )
+
+    if [ -n "$ZSH_VERSION" ]
+    then
+        i=1
+        tst="-le"
+    else
+        i=0
+        tst="-lt"
+    fi
+    while [ $i $tst $# ]
+    do
+        a="${in_args[$i]}"
+        # echo "arg $i : $a"
+        case "$a" in
+            -a)
+                i=$(( $i + 1 ))
+                project="${in_args[$i]}"
+                if [ !
-d "$project" ] + then + echo "Cannot associate project with $project, it is not a directory" 1>&2 + return 1 + fi + project="$(virtualenvwrapper_absolutepath ${project})";; + -h|--help) + virtualenvwrapper_mkvirtualenv_help $a; + return;; + -i) + i=$(( $i + 1 )); + packages="$packages ${in_args[$i]}";; + -p|--python*) + if echo "$a" | grep -q "=" + then + interpreter="$(echo "$a" | cut -f2 -d=)" + else + i=$(( $i + 1 )) + interpreter="${in_args[$i]}" + fi;; + -r) + i=$(( $i + 1 )); + requirements="${in_args[$i]}"; + requirements="$(virtualenvwrapper_expandpath "$requirements")";; + *) + if [ ${#out_args} -gt 0 ] + then + out_args=( "${out_args[@]-}" "$a" ) + else + out_args=( "$a" ) + fi;; + esac + i=$(( $i + 1 )) + done + + if [ ! -z $interpreter ] + then + out_args=( "--python=$interpreter" ${out_args[@]} ) + fi; + + set -- "${out_args[@]}" + + eval "envname=\$$#" + virtualenvwrapper_verify_workon_home || return 1 + virtualenvwrapper_verify_virtualenv || return 1 + ( + [ -n "$ZSH_VERSION" ] && setopt SH_WORD_SPLIT + virtualenvwrapper_cd "$WORKON_HOME" && + "$VIRTUALENVWRAPPER_VIRTUALENV" $VIRTUALENVWRAPPER_VIRTUALENV_ARGS "$@" && + [ -d "$WORKON_HOME/$envname" ] && \ + virtualenvwrapper_run_hook "pre_mkvirtualenv" "$envname" + ) + typeset RC=$? + [ $RC -ne 0 ] && return $RC + + # If they passed a help option or got an error from virtualenv, + # the environment won't exist. Use that to tell whether + # we should switch to the environment and run the hook. + [ ! -d "$WORKON_HOME/$envname" ] && return 0 + + # If they gave us a project directory, set it up now + # so the activate hooks can find it. + if [ ! -z "$project" ] + then + setvirtualenvproject "$WORKON_HOME/$envname" "$project" + RC=$? + [ $RC -ne 0 ] && return $RC + fi + + # Now activate the new environment + workon "$envname" + + if [ ! -z "$requirements" ] + then + pip install -r "$requirements" + fi + + for a in $packages + do + pip install $a + done + + virtualenvwrapper_run_hook "post_mkvirtualenv" +} + +#:help:rmvirtualenv: Remove a virtualenv +function rmvirtualenv { + virtualenvwrapper_verify_workon_home || return 1 + if [ ${#@} = 0 ] + then + echo "Please specify an environment." >&2 + return 1 + fi + + # support to remove several environments + typeset env_name + # Must quote the parameters, as environments could have spaces in their names + for env_name in "$@" + do + echo "Removing $env_name..." + typeset env_dir="$WORKON_HOME/$env_name" + if [ "$VIRTUAL_ENV" = "$env_dir" ] + then + echo "ERROR: You cannot remove the active environment ('$env_name')." >&2 + echo "Either switch to another environment, or run 'deactivate'." >&2 + return 1 + fi + + if [ ! -d "$env_dir" ]; then + echo "Did not find environment $env_dir to remove." >&2 + fi + + # Move out of the current directory to one known to be + # safe, in case we are inside the environment somewhere. + typeset prior_dir="$(pwd)" + virtualenvwrapper_cd "$WORKON_HOME" + + virtualenvwrapper_run_hook "pre_rmvirtualenv" "$env_name" + command \rm -rf "$env_dir" + virtualenvwrapper_run_hook "post_rmvirtualenv" "$env_name" + + # If the directory we used to be in still exists, move back to it. + if [ -d "$prior_dir" ] + then + virtualenvwrapper_cd "$prior_dir" + fi + done +} + +# List the available environments. +function virtualenvwrapper_show_workon_options { + virtualenvwrapper_verify_workon_home || return 1 + # NOTE: DO NOT use ls or cd here because colorized versions spew control + # characters into the output list. 
+ # echo seems a little faster than find, even with -depth 3. + # Note that this is a little tricky, as there may be spaces in the path. + # + # 1. Look for environments by finding the activate scripts. + # Use a subshell so we can suppress the message printed + # by zsh if the glob pattern fails to match any files. + # This yields a single, space-separated line containing all matches. + # 2. Replace the trailing newline with a space, so every + # possible env has a space following it. + # 3. Strip the bindir/activate script suffix, replacing it with + # a slash, as that is an illegal character in a directory name. + # This yields a slash-separated list of possible env names. + # 4. Replace each slash with a newline to show the output one name per line. + # 5. Eliminate any lines with * on them because that means there + # were no envs. + (virtualenvwrapper_cd "$WORKON_HOME" && echo */$VIRTUALENVWRAPPER_ENV_BIN_DIR/activate) 2>/dev/null \ + | command \tr "\n" " " \ + | command \sed "s|/$VIRTUALENVWRAPPER_ENV_BIN_DIR/activate |/|g" \ + | command \tr "/" "\n" \ + | command \sed "/^\s*$/d" \ + | (unset GREP_OPTIONS; command \egrep -v '^\*$') 2>/dev/null +} + +function _lsvirtualenv_usage { + echo "lsvirtualenv [-blh]" + echo " -b -- brief mode" + echo " -l -- long mode" + echo " -h -- this help message" +} + +#:help:lsvirtualenv: list virtualenvs +function lsvirtualenv { + + typeset long_mode=true + if command -v "getopts" >/dev/null 2>&1 + then + # Use getopts when possible + OPTIND=1 + while getopts ":blh" opt "$@" + do + case "$opt" in + l) long_mode=true;; + b) long_mode=false;; + h) _lsvirtualenv_usage; + return 1;; + ?) echo "Invalid option: -$OPTARG" >&2; + _lsvirtualenv_usage; + return 1;; + esac + done + else + # fallback on getopt for other shell + typeset -a args + args=($(getopt blh "$@")) + if [ $? != 0 ] + then + _lsvirtualenv_usage + return 1 + fi + for opt in $args + do + case "$opt" in + -l) long_mode=true;; + -b) long_mode=false;; + -h) _lsvirtualenv_usage; + return 1;; + esac + done + fi + + if $long_mode + then + allvirtualenv showvirtualenv "$env_name" + else + virtualenvwrapper_show_workon_options + fi +} + +#:help:showvirtualenv: show details of a single virtualenv +function showvirtualenv { + typeset env_name="$1" + if [ -z "$env_name" ] + then + if [ -z "$VIRTUAL_ENV" ] + then + echo "showvirtualenv [env]" + return 1 + fi + env_name=$(basename "$VIRTUAL_ENV") + fi + + virtualenvwrapper_run_hook "get_env_details" "$env_name" + echo +} + +# Show help for workon +function virtualenvwrapper_workon_help { + echo "Usage: workon env_name" + echo "" + echo " Deactivate any currently activated virtualenv" + echo " and activate the named environment, triggering" + echo " any hooks in the process." + echo "" + echo " workon" + echo "" + echo " Print a list of available environments." + echo " (See also lsvirtualenv -b)" + echo "" + echo " workon (-h|--help)" + echo "" + echo " Show this help message." + echo "" + echo " workon (-c|--cd) envname" + echo "" + echo " After activating the environment, cd to the associated" + echo " project directory if it is set." + echo "" + echo " workon (-n|--no-cd) envname" + echo "" + echo " After activating the environment, do not cd to the" + echo " associated project directory." 
+ echo "" +} + +#:help:workon: list or change working virtualenvs +function workon { + typeset -a in_args + typeset -a out_args + + in_args=( "$@" ) + + if [ -n "$ZSH_VERSION" ] + then + i=1 + tst="-le" + else + i=0 + tst="-lt" + fi + typeset cd_after_activate=$VIRTUALENVWRAPPER_WORKON_CD + while [ $i $tst $# ] + do + a="${in_args[$i]}" + case "$a" in + -h|--help) + virtualenvwrapper_workon_help; + return 0;; + -n|--no-cd) + cd_after_activate=0;; + -c|--cd) + cd_after_activate=1;; + *) + if [ ${#out_args} -gt 0 ] + then + out_args=( "${out_args[@]-}" "$a" ) + else + out_args=( "$a" ) + fi;; + esac + i=$(( $i + 1 )) + done + + set -- "${out_args[@]}" + + typeset env_name="$1" + if [ "$env_name" = "" ] + then + lsvirtualenv -b + return 1 + elif [ "$env_name" = "." ] + then + # The IFS default of breaking on whitespace causes issues if there + # are spaces in the env_name, so change it. + IFS='%' + env_name="$(basename $(pwd))" + unset IFS + fi + + virtualenvwrapper_verify_workon_home || return 1 + virtualenvwrapper_verify_workon_environment "$env_name" || return 1 + + activate="$WORKON_HOME/$env_name/$VIRTUALENVWRAPPER_ENV_BIN_DIR/activate" + if [ ! -f "$activate" ] + then + echo "ERROR: Environment '$WORKON_HOME/$env_name' does not contain an activate script." >&2 + return 1 + fi + + # Deactivate any current environment "destructively" + # before switching so we use our override function, + # if it exists, but make sure it's the deactivate function + # we set up + type deactivate >/dev/null 2>&1 + if [ $? -eq 0 ] + then + typeset -f deactivate | grep 'typeset env_postdeactivate_hook' >/dev/null 2>&1 + if [ $? -eq 0 ] + then + deactivate + unset -f deactivate >/dev/null 2>&1 + fi + fi + + virtualenvwrapper_run_hook "pre_activate" "$env_name" + + source "$activate" + + # Save the deactivate function from virtualenv under a different name + virtualenvwrapper_original_deactivate=`typeset -f deactivate | sed 's/deactivate/virtualenv_deactivate/g'` + eval "$virtualenvwrapper_original_deactivate" + unset -f deactivate >/dev/null 2>&1 + + # Replace the deactivate() function with a wrapper. + eval 'deactivate () { + typeset env_postdeactivate_hook + typeset old_env + + # Call the local hook before the global so we can undo + # any settings made by the local postactivate first. + virtualenvwrapper_run_hook "pre_deactivate" + + env_postdeactivate_hook="$VIRTUAL_ENV/$VIRTUALENVWRAPPER_ENV_BIN_DIR/postdeactivate" + old_env=$(basename "$VIRTUAL_ENV") + + # Call the original function. + virtualenv_deactivate $1 + + virtualenvwrapper_run_hook "post_deactivate" "$old_env" + + if [ ! "$1" = "nondestructive" ] + then + # Remove this function + unset -f virtualenv_deactivate >/dev/null 2>&1 + unset -f deactivate >/dev/null 2>&1 + fi + + }' + + VIRTUALENVWRAPPER_PROJECT_CD=$cd_after_activate virtualenvwrapper_run_hook "post_activate" + + return 0 +} + + +# Prints the Python version string for the current interpreter. +function virtualenvwrapper_get_python_version { + # Uses the Python from the virtualenv rather than + # VIRTUALENVWRAPPER_PYTHON because we're trying to determine the + # version installed there so we can build up the path to the + # site-packages directory. + "$VIRTUAL_ENV/$VIRTUALENVWRAPPER_ENV_BIN_DIR/python" -V 2>&1 | cut -f2 -d' ' | cut -f-2 -d. +} + +# Prints the path to the site-packages directory for the current environment. 
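+#
+# Hypothetical example session (the path shown is illustrative; the
+# real result depends on $WORKON_HOME and the interpreter version):
+#
+#   $ workon myenv
+#   $ virtualenvwrapper_get_site_packages_dir
+#   /home/user/.virtualenvs/myenv/lib/python3.8/site-packages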
+function virtualenvwrapper_get_site_packages_dir { + "$VIRTUAL_ENV/$VIRTUALENVWRAPPER_ENV_BIN_DIR/python" -c "import distutils.sysconfig; print(distutils.sysconfig.get_python_lib())" +} + +# Path management for packages outside of the virtual env. +# Based on a contribution from James Bennett and Jannis Leidel. +# +# add2virtualenv directory1 directory2 ... +# +# Adds the specified directories to the Python path for the +# currently-active virtualenv. This will be done by placing the +# directory names in a path file named +# "virtualenv_path_extensions.pth" inside the virtualenv's +# site-packages directory; if this file does not exist, it will be +# created first. +# +#:help:add2virtualenv: add directory to the import path +function add2virtualenv { + virtualenvwrapper_verify_workon_home || return 1 + virtualenvwrapper_verify_active_environment || return 1 + + site_packages="`virtualenvwrapper_get_site_packages_dir`" + + if [ ! -d "${site_packages}" ] + then + echo "ERROR: currently-active virtualenv does not appear to have a site-packages directory" >&2 + return 1 + fi + + # Prefix with _ to ensure we are loaded as early as possible, + # and at least before easy_install.pth. + path_file="$site_packages/_virtualenv_path_extensions.pth" + + if [ "$*" = "" ] + then + echo "Usage: add2virtualenv dir [dir ...]" + if [ -f "$path_file" ] + then + echo + echo "Existing paths:" + cat "$path_file" | grep -v "^import" + fi + return 1 + fi + + remove=0 + if [ "$1" = "-d" ] + then + remove=1 + shift + fi + + if [ ! -f "$path_file" ] + then + echo "import sys; sys.__plen = len(sys.path)" > "$path_file" || return 1 + echo "import sys; new=sys.path[sys.__plen:]; del sys.path[sys.__plen:]; p=getattr(sys,'__egginsert',0); sys.path[p:p]=new; sys.__egginsert = p+len(new)" >> "$path_file" || return 1 + fi + + for pydir in "$@" + do + absolute_path="$(virtualenvwrapper_absolutepath "$pydir")" + if [ "$absolute_path" != "$pydir" ] + then + echo "Warning: Converting \"$pydir\" to \"$absolute_path\"" 1>&2 + fi + + if [ $remove -eq 1 ] + then + sed -i.tmp "\:^$absolute_path$: d" "$path_file" + else + sed -i.tmp '1 a\ +'"$absolute_path"' +' "$path_file" + fi + rm -f "${path_file}.tmp" + done + return 0 +} + +# Does a ``cd`` to the site-packages directory of the currently-active +# virtualenv. +#:help:cdsitepackages: change to the site-packages directory +function cdsitepackages { + virtualenvwrapper_verify_workon_home || return 1 + virtualenvwrapper_verify_active_environment || return 1 + typeset site_packages="`virtualenvwrapper_get_site_packages_dir`" + virtualenvwrapper_cd "$site_packages/$1" +} + +# Does a ``cd`` to the root of the currently-active virtualenv. 
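+#
+# Hypothetical example session ("myenv" is a placeholder name):
+#
+#   $ workon myenv
+#   $ cdvirtualenv bin    # lands in $VIRTUAL_ENV/bin
+#   $ cdvirtualenv        # back to $VIRTUAL_ENV itself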
+#:help:cdvirtualenv: change to the $VIRTUAL_ENV directory +function cdvirtualenv { + virtualenvwrapper_verify_workon_home || return 1 + virtualenvwrapper_verify_active_environment || return 1 + virtualenvwrapper_cd "$VIRTUAL_ENV/$1" +} + +# Shows the content of the site-packages directory of the currently-active +# virtualenv +#:help:lssitepackages: list contents of the site-packages directory +function lssitepackages { + virtualenvwrapper_verify_workon_home || return 1 + virtualenvwrapper_verify_active_environment || return 1 + typeset site_packages="`virtualenvwrapper_get_site_packages_dir`" + ls $@ "$site_packages" + + path_file="$site_packages/_virtualenv_path_extensions.pth" + if [ -f "$path_file" ] + then + echo + echo "_virtualenv_path_extensions.pth:" + cat "$path_file" + fi +} + +# Toggles the currently-active virtualenv between having and not having +# access to the global site-packages. +#:help:toggleglobalsitepackages: turn access to global site-packages on/off +function toggleglobalsitepackages { + virtualenvwrapper_verify_workon_home || return 1 + virtualenvwrapper_verify_active_environment || return 1 + typeset no_global_site_packages_file="`virtualenvwrapper_get_site_packages_dir`/../no-global-site-packages.txt" + if [ -f $no_global_site_packages_file ]; then + rm $no_global_site_packages_file + [ "$1" = "-q" ] || echo "Enabled global site-packages" + else + touch $no_global_site_packages_file + [ "$1" = "-q" ] || echo "Disabled global site-packages" + fi +} + +#:help:cpvirtualenv: duplicate the named virtualenv to make a new one +function cpvirtualenv { + virtualenvwrapper_verify_workon_home || return 1 + virtualenvwrapper_verify_virtualenv_clone || return 1 + + typeset src_name="$1" + typeset trg_name="$2" + typeset src + typeset trg + + # without a source there is nothing to do + if [ "$src_name" = "" ]; then + echo "Please provide a valid virtualenv to copy." + return 1 + else + # see if it's already in workon + if [ ! -e "$WORKON_HOME/$src_name" ]; then + # so it's a virtualenv we are importing + # make sure we have a full path + # and get the name + src="$(virtualenvwrapper_expandpath "$src_name")" + # final verification + if [ ! -e "$src" ]; then + echo "Please provide a valid virtualenv to copy." + return 1 + fi + src_name="$(basename "$src")" + else + src="$WORKON_HOME/$src_name" + fi + fi + + if [ "$trg_name" = "" ]; then + # target not given, assume + # same as source + trg="$WORKON_HOME/$src_name" + trg_name="$src_name" + else + trg="$WORKON_HOME/$trg_name" + fi + trg="$(virtualenvwrapper_expandpath "$trg")" + + # validate trg does not already exist + # catch copying virtualenv in workon home + # to workon home + if [ -e "$trg" ]; then + echo "$trg_name virtualenv already exists." + return 1 + fi + + echo "Copying $src_name as $trg_name..." + ( + [ -n "$ZSH_VERSION" ] && setopt SH_WORD_SPLIT + virtualenvwrapper_cd "$WORKON_HOME" && + "$VIRTUALENVWRAPPER_VIRTUALENV_CLONE" "$src" "$trg" + [ -d "$trg" ] && + virtualenvwrapper_run_hook "pre_cpvirtualenv" "$src" "$trg_name" && + virtualenvwrapper_run_hook "pre_mkvirtualenv" "$trg_name" + ) + typeset RC=$? + [ $RC -ne 0 ] && return $RC + + [ ! 
-d "$WORKON_HOME/$trg_name" ] && return 1 + + # Now activate the new environment + workon "$trg_name" + + virtualenvwrapper_run_hook "post_mkvirtualenv" + virtualenvwrapper_run_hook "post_cpvirtualenv" +} + +# +# virtualenvwrapper project functions +# + +# Verify that the PROJECT_HOME directory exists +function virtualenvwrapper_verify_project_home { + if [ -z "$PROJECT_HOME" ] + then + echo "ERROR: Set the PROJECT_HOME shell variable to the name of the directory where projects should be created." >&2 + return 1 + fi + if [ ! -d "$PROJECT_HOME" ] + then + [ "$1" != "-q" ] && echo "ERROR: Project directory '$PROJECT_HOME' does not exist. Create it or set PROJECT_HOME to an existing directory." >&2 + return 1 + fi + return 0 +} + +# Given a virtualenv directory and a project directory, +# set the virtualenv up to be associated with the +# project +#:help:setvirtualenvproject: associate a project directory with a virtualenv +function setvirtualenvproject { + typeset venv="$1" + typeset prj="$2" + if [ -z "$venv" ] + then + venv="$VIRTUAL_ENV" + fi + if [ -z "$prj" ] + then + prj="$(pwd)" + else + prj=$(virtualenvwrapper_absolutepath "${prj}") + fi + + # If what we were given isn't a directory, see if it is under + # $WORKON_HOME. + if [ ! -d "$venv" ] + then + venv="$WORKON_HOME/$venv" + fi + if [ ! -d "$venv" ] + then + echo "No virtualenv $(basename $venv)" 1>&2 + return 1 + fi + + # Make sure we have a valid project setting + if [ ! -d "$prj" ] + then + echo "Cannot associate virtualenv with \"$prj\", it is not a directory" 1>&2 + return 1 + fi + + echo "Setting project for $(basename $venv) to $prj" + echo "$prj" > "$venv/$VIRTUALENVWRAPPER_PROJECT_FILENAME" +} + +# Show help for mkproject +function virtualenvwrapper_mkproject_help { + echo "Usage: mkproject [-f|--force] [-t template] [virtualenv options] project_name" + echo + echo "-f, --force Create the virtualenv even if the project directory" + echo " already exists" + echo + echo "Multiple templates may be selected. They are applied in the order" + echo "specified on the command line." + echo + echo "mkvirtualenv help:" + echo + mkvirtualenv -h + echo + echo "Available project templates:" + echo + "$VIRTUALENVWRAPPER_PYTHON" -c 'from virtualenvwrapper.hook_loader import main; main()' -l project.template +} + +#:help:mkproject: create a new project directory and its associated virtualenv +function mkproject { + typeset -a in_args + typeset -a out_args + typeset -i i + typeset tst + typeset a + typeset t + typeset force + typeset templates + + in_args=( "$@" ) + force=0 + + if [ -n "$ZSH_VERSION" ] + then + i=1 + tst="-le" + else + i=0 + tst="-lt" + fi + while [ $i $tst $# ] + do + a="${in_args[$i]}" + case "$a" in + -h|--help) + virtualenvwrapper_mkproject_help; + return;; + -f|--force) + force=1;; + -t) + i=$(( $i + 1 )); + templates="$templates ${in_args[$i]}";; + *) + if [ ${#out_args} -gt 0 ] + then + out_args=( "${out_args[@]-}" "$a" ) + else + out_args=( "$a" ) + fi;; + esac + i=$(( $i + 1 )) + done + + set -- "${out_args[@]}" + + # echo "templates $templates" + # echo "remainder $@" + # return 0 + + eval "typeset envname=\$$#" + virtualenvwrapper_verify_project_home || return 1 + + if [ -d "$PROJECT_HOME/$envname" -a $force -eq 0 ] + then + echo "Project $envname already exists." 
>&2
+        return 1
+    fi
+
+    mkvirtualenv "$@" || return 1
+
+    virtualenvwrapper_cd "$PROJECT_HOME"
+
+    virtualenvwrapper_run_hook "project.pre_mkproject" $envname
+
+    echo "Creating $PROJECT_HOME/$envname"
+    mkdir -p "$PROJECT_HOME/$envname"
+    setvirtualenvproject "$VIRTUAL_ENV" "$PROJECT_HOME/$envname"
+
+    virtualenvwrapper_cd "$PROJECT_HOME/$envname"
+
+    for t in $templates
+    do
+        echo
+        echo "Applying template $t"
+        # For some reason zsh insists on prefixing the template
+        # names with a space, so strip them out before passing
+        # the value to the hook loader.
+        virtualenvwrapper_run_hook --name $(echo $t | sed 's/^ //') "project.template" "$envname" "$PROJECT_HOME/$envname"
+    done
+
+    virtualenvwrapper_run_hook "project.post_mkproject"
+}
+
+#:help:cdproject: change directory to the active project
+function cdproject {
+    virtualenvwrapper_verify_workon_home || return 1
+    virtualenvwrapper_verify_active_environment || return 1
+    if [ -f "$VIRTUAL_ENV/$VIRTUALENVWRAPPER_PROJECT_FILENAME" ]
+    then
+        typeset project_dir="$(cat "$VIRTUAL_ENV/$VIRTUALENVWRAPPER_PROJECT_FILENAME")"
+        if [ ! -z "$project_dir" ]
+        then
+            virtualenvwrapper_cd "$project_dir"
+        else
+            echo "Project directory $project_dir does not exist" 1>&2
+            return 1
+        fi
+    else
+        echo "No project set in $VIRTUAL_ENV/$VIRTUALENVWRAPPER_PROJECT_FILENAME" 1>&2
+        return 1
+    fi
+    return 0
+}
+
+#
+# Temporary virtualenv
+#
+# Originally part of virtualenvwrapper.tmpenv plugin
+#
+#:help:mktmpenv: create a temporary virtualenv
+function mktmpenv {
+    typeset tmpenvname
+    typeset RC
+    typeset -a in_args
+    typeset -a out_args
+
+    in_args=( "$@" )
+
+    if [ -n "$ZSH_VERSION" ]
+    then
+        i=1
+        tst="-le"
+    else
+        i=0
+        tst="-lt"
+    fi
+    typeset cd_after_activate=$VIRTUALENVWRAPPER_WORKON_CD
+    while [ $i $tst $# ]
+    do
+        a="${in_args[$i]}"
+        case "$a" in
+            -n|--no-cd)
+                cd_after_activate=0;;
+            -c|--cd)
+                cd_after_activate=1;;
+            *)
+                if [ ${#out_args} -gt 0 ]
+                then
+                    out_args=( "${out_args[@]-}" "$a" )
+                else
+                    out_args=( "$a" )
+                fi;;
+        esac
+        i=$(( $i + 1 ))
+    done
+
+    set -- "${out_args[@]}"
+
+    # Generate a unique temporary name
+    tmpenvname=$("$VIRTUALENVWRAPPER_PYTHON" -c 'import uuid,sys; sys.stdout.write(uuid.uuid4().hex+"\n")' 2>/dev/null)
+    if [ -z "$tmpenvname" ]
+    then
+        # This python does not support uuid
+        tmpenvname=$("$VIRTUALENVWRAPPER_PYTHON" -c 'import random,sys; sys.stdout.write(hex(random.getrandbits(64))[2:-1]+"\n")' 2>/dev/null)
+    fi
+    tmpenvname="tmp-$tmpenvname"
+
+    # Create the environment
+    mkvirtualenv "$@" "$tmpenvname"
+    RC=$?
+    if [ $RC -ne 0 ]
+    then
+        return $RC
+    fi
+
+    # Change working directory
+    [ "$cd_after_activate" = "1" ] && cdvirtualenv
+
+    # Create the tmpenv marker file
+    echo "This is a temporary environment. It will be deleted when you run 'deactivate'." | tee "$VIRTUAL_ENV/README.tmpenv"
+
+    # Update the postdeactivate script
+    cat - >> "$VIRTUAL_ENV/$VIRTUALENVWRAPPER_ENV_BIN_DIR/postdeactivate" <<EOF
+if [ -f "$VIRTUAL_ENV/README.tmpenv" ]
+then
+    cd "$WORKON_HOME"
+    rmvirtualenv ${tmpenvname}
+fi
+EOF
+}
+
+#
+# Remove all installed packages from the env
+#
+#:help:wipeenv: remove all packages installed in the current virtualenv
+function wipeenv {
+    virtualenvwrapper_verify_workon_home || return 1
+    virtualenvwrapper_verify_active_environment || return 1
+
+    # Capture the installed package set before uninstalling.
+    typeset req_file="$(virtualenvwrapper_tempfile "requirements.txt")"
+    pip freeze > "$req_file"
+    if [ -n "$(cat "$req_file")" ]
+    then
+        echo "Uninstalling packages:"
+        cat "$req_file"
+        echo
+        pip uninstall -y $(cat "$req_file" | grep -v '^-f' | sed 's/>/=/g' | cut -f1 -d=)
+    else
+        echo "Nothing to remove."
+    fi
+    rm -f "$req_file"
+}
+
+#
+# Run a command in each virtualenv
+#
+#:help:allvirtualenv: run a command in all virtualenvs
+function allvirtualenv {
+    virtualenvwrapper_verify_workon_home || return 1
+    typeset d
+
+    # The IFS default of breaking on whitespace causes issues if there
+    # are spaces in the env_name, so change it.
+    IFS='%'
+    virtualenvwrapper_show_workon_options | while read d
+    do
+        [ ! -d "$WORKON_HOME/$d" ] && continue
+        echo "$d"
+        echo "$d" | sed 's/./=/g'
+        # Activate the environment, but not with workon
+        # because we don't want to trigger any hooks.
+        (source "$WORKON_HOME/$d/$VIRTUALENVWRAPPER_ENV_BIN_DIR/activate";
+         virtualenvwrapper_cd "$VIRTUAL_ENV";
+         "$@")
+        echo
+    done
+    unset IFS
+}
+
+#:help:virtualenvwrapper: show this help message
+function virtualenvwrapper {
+    cat <<EOF
+
+virtualenvwrapper is a set of extensions to virtualenv that make it
+easier to create, delete, and switch between virtual environments
+while working on multiple projects.
+
+Commands available:
+
+EOF
+    typeset helpmarker="#:help:"
+    cat "$VIRTUALENVWRAPPER_SCRIPT" \
+        | grep "^$helpmarker" \
+        | sed -e "s/^$helpmarker//g" \
+        | sort
+}
diff --git a/venv/Scripts/virtualenvwrapper_lazy.sh b/venv/Scripts/virtualenvwrapper_lazy.sh
new file mode 100644
--- /dev/null
+++ b/venv/Scripts/virtualenvwrapper_lazy.sh
+# -*- mode: shell-script -*-
+#
+# Lazy loader for the virtualenvwrapper API: defines stub functions
+# that source the full virtualenvwrapper.sh on first use.
+
+export _VIRTUALENVWRAPPER_API="mkvirtualenv rmvirtualenv lsvirtualenv showvirtualenv workon add2virtualenv cdsitepackages cdvirtualenv lssitepackages toggleglobalsitepackages cpvirtualenv setvirtualenvproject mkproject cdproject mktmpenv wipeenv allvirtualenv virtualenvwrapper"
+
+if [ -z "$VIRTUALENVWRAPPER_SCRIPT" ]
+then
+    export VIRTUALENVWRAPPER_SCRIPT="$(command \which virtualenvwrapper.sh)"
+fi
+if [ -z "$VIRTUALENVWRAPPER_SCRIPT" ]
+then
+    echo "ERROR: virtualenvwrapper_lazy.sh: Could not find virtualenvwrapper.sh" >&2
+fi
+
+# Load the real implementation of the API from virtualenvwrapper.sh
+function virtualenvwrapper_load {
+    # Only source the script once.
+    # We might get called multiple times, because not all of _VIRTUALENVWRAPPER_API gets
+    # a real completion.
+    if [ -z $VIRTUALENVWRAPPER_LAZY_LOADED ]
+    then
+        # NOTE: For Zsh, I have tried to unset any auto-load completion
+        # (via `compctl + $(echo ${_VIRTUALENVWRAPPER_API})`.
+        # But this does not appear to work / triggers a crash.
+        source "$VIRTUALENVWRAPPER_SCRIPT"
+        VIRTUALENVWRAPPER_LAZY_LOADED=1
+    fi
+}
+
+# Set up "alias" functions based on the API definition.
+function virtualenvwrapper_setup_lazy_loader {
+    typeset venvw_name
+    for venvw_name in $(echo ${_VIRTUALENVWRAPPER_API})
+    do
+        eval "
+function $venvw_name {
+    virtualenvwrapper_load
+    ${venvw_name} \"\$@\"
+}
+"
+    done
+}
+
+# Set up completion functions to virtualenvwrapper_load
+function virtualenvwrapper_setup_lazy_completion {
+    if [ -n "$BASH" ] ; then
+        function virtualenvwrapper_lazy_load {
+            virtualenvwrapper_load
+            return 124
+        }
+        complete -o nospace -F virtualenvwrapper_lazy_load $(echo ${_VIRTUALENVWRAPPER_API})
+    elif [ -n "$ZSH_VERSION" ] ; then
+        compctl -K virtualenvwrapper_load $(echo ${_VIRTUALENVWRAPPER_API})
+    fi
+}
+
+virtualenvwrapper_setup_lazy_loader
+# Cannot be reset in zsh to fall back to files (e.g. mkvirtualenv).
+virtualenvwrapper_setup_lazy_completion
+
+unset virtualenvwrapper_setup_lazy_loader
+unset virtualenvwrapper_setup_lazy_completion
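+
+# Sketch of a .bashrc setup for the lazy loader (the script path and
+# WORKON_HOME value below are assumptions; adjust for your install):
+#
+#   export WORKON_HOME="$HOME/.virtualenvs"
+#   source /usr/local/bin/virtualenvwrapper_lazy.sh
+#
+# Each API command is then a stub; the first call (e.g. "workon")
+# runs virtualenvwrapper_load, which sources the full
+# virtualenvwrapper.sh and replaces the stubs with the real functions.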