mirror of https://github.com/sgoudham/Enso-Bot.git
commit 5307a578c4 (parent 985ab8da19): Installing virtualenv
@@ -0,0 +1 @@
pip
@@ -0,0 +1,23 @@
# This is the MIT license

Copyright (c) 2010 ActiveState Software Inc.

Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -0,0 +1,264 @@
Metadata-Version: 2.1
Name: appdirs
Version: 1.4.4
Summary: A small Python module for determining appropriate platform-specific dirs, e.g. a "user data dir".
Home-page: http://github.com/ActiveState/appdirs
Author: Trent Mick
Author-email: trentm@gmail.com
Maintainer: Jeff Rouse
Maintainer-email: jr@its.to
License: MIT
Keywords: application directory log cache user
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Topic :: Software Development :: Libraries :: Python Modules


.. image:: https://secure.travis-ci.org/ActiveState/appdirs.png
    :target: http://travis-ci.org/ActiveState/appdirs

the problem
===========

What directory should your app use for storing user data? If running on Mac OS X, you
should use::

    ~/Library/Application Support/<AppName>

If on Windows (at least English Win XP) that should be::

    C:\Documents and Settings\<User>\Application Data\Local Settings\<AppAuthor>\<AppName>

or possibly::

    C:\Documents and Settings\<User>\Application Data\<AppAuthor>\<AppName>

for `roaming profiles <http://bit.ly/9yl3b6>`_ but that is another story.

On Linux (and other Unices) the dir, according to the `XDG
spec <http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_, is::

    ~/.local/share/<AppName>


``appdirs`` to the rescue
=========================

This kind of thing is what the ``appdirs`` module is for. ``appdirs`` will
help you choose an appropriate:

- user data dir (``user_data_dir``)
- user config dir (``user_config_dir``)
- user cache dir (``user_cache_dir``)
- site data dir (``site_data_dir``)
- site config dir (``site_config_dir``)
- user log dir (``user_log_dir``)

and also:

- is a single module so other Python packages can include their own private copy
- is slightly opinionated on the directory names used. Look for "OPINION" in
  documentation and code for when an opinion is being applied.


some example output
===================

On Mac OS X::

    >>> from appdirs import *
    >>> appname = "SuperApp"
    >>> appauthor = "Acme"
    >>> user_data_dir(appname, appauthor)
    '/Users/trentm/Library/Application Support/SuperApp'
    >>> site_data_dir(appname, appauthor)
    '/Library/Application Support/SuperApp'
    >>> user_cache_dir(appname, appauthor)
    '/Users/trentm/Library/Caches/SuperApp'
    >>> user_log_dir(appname, appauthor)
    '/Users/trentm/Library/Logs/SuperApp'

On Windows 7::

    >>> from appdirs import *
    >>> appname = "SuperApp"
    >>> appauthor = "Acme"
    >>> user_data_dir(appname, appauthor)
    'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp'
    >>> user_data_dir(appname, appauthor, roaming=True)
    'C:\\Users\\trentm\\AppData\\Roaming\\Acme\\SuperApp'
    >>> user_cache_dir(appname, appauthor)
    'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Cache'
    >>> user_log_dir(appname, appauthor)
    'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Logs'

On Linux::

    >>> from appdirs import *
    >>> appname = "SuperApp"
    >>> appauthor = "Acme"
    >>> user_data_dir(appname, appauthor)
    '/home/trentm/.local/share/SuperApp'
    >>> site_data_dir(appname, appauthor)
    '/usr/local/share/SuperApp'
    >>> site_data_dir(appname, appauthor, multipath=True)
    '/usr/local/share/SuperApp:/usr/share/SuperApp'
    >>> user_cache_dir(appname, appauthor)
    '/home/trentm/.cache/SuperApp'
    >>> user_log_dir(appname, appauthor)
    '/home/trentm/.cache/SuperApp/log'
    >>> user_config_dir(appname)
    '/home/trentm/.config/SuperApp'
    >>> site_config_dir(appname)
    '/etc/xdg/SuperApp'
    >>> os.environ['XDG_CONFIG_DIRS'] = '/etc:/usr/local/etc'
    >>> site_config_dir(appname, multipath=True)
    '/etc/SuperApp:/usr/local/etc/SuperApp'


``AppDirs`` for convenience
===========================

::

    >>> from appdirs import AppDirs
    >>> dirs = AppDirs("SuperApp", "Acme")
    >>> dirs.user_data_dir
    '/Users/trentm/Library/Application Support/SuperApp'
    >>> dirs.site_data_dir
    '/Library/Application Support/SuperApp'
    >>> dirs.user_cache_dir
    '/Users/trentm/Library/Caches/SuperApp'
    >>> dirs.user_log_dir
    '/Users/trentm/Library/Logs/SuperApp'
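
The same ``AppDirs`` instance also exposes the config and state variants
defined later in the module. A minimal illustrative sketch (not from the
upstream README; the value assumes the same Mac OS X user as above, where
the config dir equals the data dir)::

    >>> dirs.user_config_dir
    '/Users/trentm/Library/Application Support/SuperApp'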


Per-version isolation
=====================

If you have multiple versions of your app in use that you want to be
able to run side-by-side, then you may want version-isolation for these
dirs::

    >>> from appdirs import AppDirs
    >>> dirs = AppDirs("SuperApp", "Acme", version="1.0")
    >>> dirs.user_data_dir
    '/Users/trentm/Library/Application Support/SuperApp/1.0'
    >>> dirs.site_data_dir
    '/Library/Application Support/SuperApp/1.0'
    >>> dirs.user_cache_dir
    '/Users/trentm/Library/Caches/SuperApp/1.0'
    >>> dirs.user_log_dir
    '/Users/trentm/Library/Logs/SuperApp/1.0'


appdirs Changelog
=================

appdirs 1.4.4
-------------
- [PR #92] Don't import appdirs from setup.py

Project officially classified as Stable which is important
for inclusion in other distros such as ActivePython.

First of several incremental releases to catch up on maintenance.

appdirs 1.4.3
-------------
- [PR #76] Python 3.6 invalid escape sequence deprecation fixes
- Fix for Python 3.6 support

appdirs 1.4.2
-------------
- [PR #84] Allow installing without setuptools
- [PR #86] Fix string delimiters in setup.py description
- Add Python 3.6 support

appdirs 1.4.1
-------------
- [issue #38] Fix _winreg import on Windows Py3
- [issue #55] Make appname optional

appdirs 1.4.0
-------------
- [PR #42] AppAuthor is now optional on Windows
- [issue 41] Support Jython on Windows, Mac, and Unix-like platforms. Windows
  support requires `JNA <https://github.com/twall/jna>`_.
- [PR #44] Fix incorrect behaviour of the site_config_dir method

appdirs 1.3.0
-------------
- [Unix, issue 16] Conform to XDG standard, instead of breaking it for
  everybody
- [Unix] Removes gratuitous case mangling, since \*nix-es are
  usually case sensitive, so mangling is not wise
- [Unix] Fixes the utterly wrong behaviour in ``site_data_dir``: return a
  result based on XDG_DATA_DIRS and make room for respecting the standard,
  which specifies XDG_DATA_DIRS is a multiple-value variable
- [Issue 6] Add ``*_config_dir``, which is distinct on \*nix-es, according to
  XDG specs; on Windows and Mac return the corresponding ``*_data_dir``

appdirs 1.2.0
-------------

- [Unix] Put ``user_log_dir`` under the *cache* dir on Unix. Seems to be more
  typical.
- [issue 9] Make ``unicode`` work on py3k.

appdirs 1.1.0
-------------

- [issue 4] Add ``AppDirs.user_log_dir``.
- [Unix, issue 2, issue 7] appdirs now conforms to `XDG base directory spec
  <http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_.
- [Mac, issue 5] Fix ``site_data_dir()`` on Mac.
- [Mac] Drop use of 'Carbon' module in favour of hardcoded paths; supports
  Python3 now.
- [Windows] Append "Cache" to ``user_cache_dir`` on Windows by default. Use
  ``opinion=False`` option to disable this.
- Add ``appdirs.AppDirs`` convenience class. Usage:

    >>> dirs = AppDirs("SuperApp", "Acme", version="1.0")
    >>> dirs.user_data_dir
    '/Users/trentm/Library/Application Support/SuperApp/1.0'

- [Windows] Cherry-pick Komodo's change to downgrade paths to the Windows short
  paths if there are high bit chars.
- [Linux] Change default ``user_cache_dir()`` on Linux to be singular, e.g.
  "~/.superapp/cache".
- [Windows] Add ``roaming`` option to ``user_data_dir()`` (for use on Windows only)
  and change the default ``user_data_dir`` behaviour to use a *non*-roaming
  profile dir (``CSIDL_LOCAL_APPDATA`` instead of ``CSIDL_APPDATA``). Why? Because
  a large roaming profile can cause login speed issues. The "only syncs on
  logout" behaviour can cause surprises in appdata info.


appdirs 1.0.1 (never released)
------------------------------

Started this changelog 27 July 2010. Before that this module originated in the
`Komodo <http://www.activestate.com/komodo>`_ product as ``applib.py`` and then
as `applib/location.py
<http://github.com/ActiveState/applib/blob/master/applib/location.py>`_ (used by
`PyPM <http://code.activestate.com/pypm/>`_ in `ActivePython
<http://www.activestate.com/activepython>`_). This is basically a fork of
applib.py 1.0.1 and applib/location.py 1.0.1.
@@ -0,0 +1,8 @@
__pycache__/appdirs.cpython-36.pyc,,
appdirs-1.4.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
appdirs-1.4.4.dist-info/LICENSE.txt,sha256=Nt200KdFqTqyAyA9cZCBSxuJcn0lTK_0jHp6-71HAAs,1097
appdirs-1.4.4.dist-info/METADATA,sha256=k5TVfXMNKGHTfp2wm6EJKTuGwGNuoQR5TqQgH8iwG8M,8981
appdirs-1.4.4.dist-info/RECORD,,
appdirs-1.4.4.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
appdirs-1.4.4.dist-info/top_level.txt,sha256=nKncE8CUqZERJ6VuQWL4_bkunSPDNfn7KZqb4Tr5YEM,8
appdirs.py,sha256=g99s2sXhnvTEm79oj4bWI0Toapc-_SmKKNXvOXHkVic,24720
@@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.34.2)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any
@@ -0,0 +1 @@
appdirs
@@ -0,0 +1,608 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2005-2010 ActiveState Software Inc.
# Copyright (c) 2013 Eddy Petrișor

"""Utilities for determining application-specific dirs.

See <http://github.com/ActiveState/appdirs> for details and usage.
"""
# Dev Notes:
# - MSDN on where to store app data files:
#   http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html

__version__ = "1.4.4"
__version_info__ = tuple(int(segment) for segment in __version__.split("."))


import sys
import os

PY3 = sys.version_info[0] == 3

if PY3:
    unicode = str

if sys.platform.startswith('java'):
    import platform
    os_name = platform.java_ver()[3][0]
    if os_name.startswith('Windows'):  # "Windows XP", "Windows 7", etc.
        system = 'win32'
    elif os_name.startswith('Mac'):  # "Mac OS X", etc.
        system = 'darwin'
    else:  # "Linux", "SunOS", "FreeBSD", etc.
        # Setting this to "linux2" is not ideal, but only Windows or Mac
        # are actually checked for and the rest of the module expects
        # *sys.platform* style strings.
        system = 'linux2'
else:
    system = sys.platform


def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        Mac OS X:               ~/Library/Application Support/<AppName>
        Unix:                   ~/.local/share/<AppName>    # or in $XDG_DATA_HOME, if defined
        Win XP (not roaming):   C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
        Win XP (roaming):       C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
        Win 7  (not roaming):   C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
        Win 7  (roaming):       C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>

    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
    That means, by default "~/.local/share/<AppName>".
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
        path = os.path.normpath(_get_win_folder(const))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Application Support/')
        if appname:
            path = os.path.join(path, appname)
    else:
        path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path
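
# Illustrative note (added in this mirror; not part of the upstream appdirs
# source): on a Linux host with XDG_DATA_HOME unset,
#   user_data_dir("SuperApp", "Acme", version="1.0")
# resolves to os.path.expanduser("~/.local/share") + "/SuperApp/1.0".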


def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
    r"""Return full path to the user-shared data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "multipath" is an optional parameter only applicable to *nix
            which indicates that the entire list of data dirs should be
            returned. By default, the first item from XDG_DATA_DIRS is
            returned, or '/usr/local/share/<AppName>',
            if XDG_DATA_DIRS is not set

    Typical site data directories are:
        Mac OS X:   /Library/Application Support/<AppName>
        Unix:       /usr/local/share/<AppName> or /usr/share/<AppName>
        Win XP:     C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
        Win 7:      C:\ProgramData\<AppAuthor>\<AppName>   # Hidden, but writeable on Win 7.

    For Unix, this is using the $XDG_DATA_DIRS[0] default.

    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
    elif system == 'darwin':
        path = os.path.expanduser('/Library/Application Support')
        if appname:
            path = os.path.join(path, appname)
    else:
        # XDG default for $XDG_DATA_DIRS
        # only first, if multipath is False
        path = os.getenv('XDG_DATA_DIRS',
                         os.pathsep.join(['/usr/local/share', '/usr/share']))
        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
        if appname:
            if version:
                appname = os.path.join(appname, version)
            pathlist = [os.sep.join([x, appname]) for x in pathlist]

        if multipath:
            path = os.pathsep.join(pathlist)
        else:
            path = pathlist[0]
        return path

    if appname and version:
        path = os.path.join(path, version)
    return path
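
# Illustrative note (added in this mirror; not upstream): with XDG_DATA_DIRS
# unset on Linux, site_data_dir("SuperApp", "Acme", multipath=True) returns
# '/usr/local/share/SuperApp:/usr/share/SuperApp', as in the README example.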


def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific config dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user config directories are:
        Mac OS X:               same as user_data_dir
        Unix:                   ~/.config/<AppName>     # or in $XDG_CONFIG_HOME, if defined
        Win *:                  same as user_data_dir

    For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
    That means, by default "~/.config/<AppName>".
    """
    if system in ["win32", "darwin"]:
        path = user_data_dir(appname, appauthor, None, roaming)
    else:
        path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
    r"""Return full path to the user-shared data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "multipath" is an optional parameter only applicable to *nix
            which indicates that the entire list of config dirs should be
            returned. By default, the first item from XDG_CONFIG_DIRS is
            returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set

    Typical site config directories are:
        Mac OS X:   same as site_data_dir
        Unix:       /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
                    $XDG_CONFIG_DIRS
        Win *:      same as site_data_dir
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)

    For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False

    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
    """
    if system in ["win32", "darwin"]:
        path = site_data_dir(appname, appauthor)
        if appname and version:
            path = os.path.join(path, version)
    else:
        # XDG default for $XDG_CONFIG_DIRS
        # only first, if multipath is False
        path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
        if appname:
            if version:
                appname = os.path.join(appname, version)
            pathlist = [os.sep.join([x, appname]) for x in pathlist]

        if multipath:
            path = os.pathsep.join(pathlist)
        else:
            path = pathlist[0]
    return path
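
# Illustrative note (added in this mirror; not upstream): after
# os.environ['XDG_CONFIG_DIRS'] = '/etc:/usr/local/etc',
# site_config_dir("SuperApp", multipath=True) returns
# '/etc/SuperApp:/usr/local/etc/SuperApp', matching the README example.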


def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific cache dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "opinion" (boolean) can be False to disable the appending of
            "Cache" to the base app data dir for Windows. See
            discussion below.

    Typical user cache directories are:
        Mac OS X:   ~/Library/Caches/<AppName>
        Unix:       ~/.cache/<AppName> (XDG default)
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache

    On Windows the only suggestion in the MSDN docs is that local settings go in
    the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
    app data dir (the default returned by `user_data_dir` above). Apps typically
    put cache data somewhere *under* the given dir here. Some examples:
        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
        ...\Acme\SuperApp\Cache\1.0
    OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
    This can be disabled with the `opinion=False` option.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
            if opinion:
                path = os.path.join(path, "Cache")
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Caches')
        if appname:
            path = os.path.join(path, appname)
    else:
        path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path
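
# Illustrative note (added in this mirror; not upstream): the trailing "Cache"
# on Windows is an OPINION; user_cache_dir("SuperApp", "Acme", opinion=False)
# stops at ...\Acme\SuperApp instead of ...\Acme\SuperApp\Cache.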


def user_state_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific state dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user state directories are:
        Mac OS X:   same as user_data_dir
        Unix:       ~/.local/state/<AppName>   # or in $XDG_STATE_HOME, if defined
        Win *:      same as user_data_dir

    For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state>
    to extend the XDG spec and support $XDG_STATE_HOME.

    That means, by default "~/.local/state/<AppName>".
    """
    if system in ["win32", "darwin"]:
        path = user_data_dir(appname, appauthor, None, roaming)
    else:
        path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path
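
# Illustrative note (added in this mirror; not upstream): with XDG_STATE_HOME
# unset, user_state_dir("SuperApp") resolves to "~/.local/state/SuperApp",
# per the Debian proposal cited in the docstring above.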


def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific log dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "opinion" (boolean) can be False to disable the appending of
            "Logs" to the base app data dir for Windows, and "log" to the
            base cache dir for Unix. See discussion below.

    Typical user log directories are:
        Mac OS X:   ~/Library/Logs/<AppName>
        Unix:       ~/.cache/<AppName>/log  # or under $XDG_CACHE_HOME if defined
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs

    On Windows the only suggestion in the MSDN docs is that local settings
    go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
    examples of what some windows apps use for a logs dir.)

    OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
    value for Windows and appends "log" to the user cache dir for Unix.
    This can be disabled with the `opinion=False` option.
    """
    if system == "darwin":
        path = os.path.join(
            os.path.expanduser('~/Library/Logs'),
            appname)
    elif system == "win32":
        path = user_data_dir(appname, appauthor, version)
        version = False
        if opinion:
            path = os.path.join(path, "Logs")
    else:
        path = user_cache_dir(appname, appauthor, version)
        version = False
        if opinion:
            path = os.path.join(path, "log")
    if appname and version:
        path = os.path.join(path, version)
    return path
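
# Illustrative note (added in this mirror; not upstream): on Unix the log dir
# is an opinionated "log" subdir of the cache dir, so
# user_log_dir("SuperApp", "Acme") -> "~/.cache/SuperApp/log" by default.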


class AppDirs(object):
    """Convenience wrapper for getting application dirs."""
    def __init__(self, appname=None, appauthor=None, version=None,
            roaming=False, multipath=False):
        self.appname = appname
        self.appauthor = appauthor
        self.version = version
        self.roaming = roaming
        self.multipath = multipath

    @property
    def user_data_dir(self):
        return user_data_dir(self.appname, self.appauthor,
                             version=self.version, roaming=self.roaming)

    @property
    def site_data_dir(self):
        return site_data_dir(self.appname, self.appauthor,
                             version=self.version, multipath=self.multipath)

    @property
    def user_config_dir(self):
        return user_config_dir(self.appname, self.appauthor,
                               version=self.version, roaming=self.roaming)

    @property
    def site_config_dir(self):
        return site_config_dir(self.appname, self.appauthor,
                               version=self.version, multipath=self.multipath)

    @property
    def user_cache_dir(self):
        return user_cache_dir(self.appname, self.appauthor,
                              version=self.version)

    @property
    def user_state_dir(self):
        return user_state_dir(self.appname, self.appauthor,
                              version=self.version)

    @property
    def user_log_dir(self):
        return user_log_dir(self.appname, self.appauthor,
                            version=self.version)


#---- internal support stuff

def _get_win_folder_from_registry(csidl_name):
    """This is a fallback technique at best. I'm not sure if using the
    registry for this guarantees us the correct answer for all CSIDL_*
    names.
    """
    if PY3:
        import winreg as _winreg
    else:
        import _winreg

    shell_folder_name = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
    }[csidl_name]

    key = _winreg.OpenKey(
        _winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
    )
    dir, type = _winreg.QueryValueEx(key, shell_folder_name)
    return dir


def _get_win_folder_with_pywin32(csidl_name):
    from win32com.shell import shellcon, shell
    dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
    # Try to make this a unicode path because SHGetFolderPath does
    # not return unicode strings when there is unicode data in the
    # path.
    try:
        dir = unicode(dir)

        # Downgrade to short path name if have highbit chars. See
        # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
        has_high_char = False
        for c in dir:
            if ord(c) > 255:
                has_high_char = True
                break
        if has_high_char:
            try:
                import win32api
                dir = win32api.GetShortPathName(dir)
            except ImportError:
                pass
    except UnicodeError:
        pass
    return dir


def _get_win_folder_with_ctypes(csidl_name):
    import ctypes

    csidl_const = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
    }[csidl_name]

    buf = ctypes.create_unicode_buffer(1024)
    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in buf:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf2 = ctypes.create_unicode_buffer(1024)
        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
            buf = buf2

    return buf.value

def _get_win_folder_with_jna(csidl_name):
    import array
    from com.sun import jna
    from com.sun.jna.platform import win32

    buf_size = win32.WinDef.MAX_PATH * 2
    buf = array.zeros('c', buf_size)
    shell = win32.Shell32.INSTANCE
    shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
    dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in dir:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf = array.zeros('c', buf_size)
        kernel = win32.Kernel32.INSTANCE
        if kernel.GetShortPathName(dir, buf, buf_size):
            dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    return dir

if system == "win32":
    try:
        import win32com.shell
        _get_win_folder = _get_win_folder_with_pywin32
    except ImportError:
        try:
            from ctypes import windll
            _get_win_folder = _get_win_folder_with_ctypes
        except ImportError:
            try:
                import com.sun.jna
                _get_win_folder = _get_win_folder_with_jna
            except ImportError:
                _get_win_folder = _get_win_folder_from_registry


#---- self test code

if __name__ == "__main__":
    appname = "MyApp"
    appauthor = "MyCompany"

    props = ("user_data_dir",
             "user_config_dir",
             "user_cache_dir",
             "user_state_dir",
             "user_log_dir",
             "site_data_dir",
             "site_config_dir")

    print("-- app dirs %s --" % __version__)

    print("-- app dirs (with optional 'version')")
    dirs = AppDirs(appname, appauthor, version="1.0")
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (without optional 'version')")
    dirs = AppDirs(appname, appauthor)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (without optional 'appauthor')")
    dirs = AppDirs(appname)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (with disabled 'appauthor')")
    dirs = AppDirs(appname, appauthor=False)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))
@@ -0,0 +1,318 @@
#!/usr/bin/env python
from __future__ import with_statement

import logging
import optparse
import os
import os.path
import re
import shutil
import subprocess
import sys
import itertools

__version__ = '0.5.4'


logger = logging.getLogger()


env_bin_dir = 'bin'
if sys.platform == 'win32':
    env_bin_dir = 'Scripts'


class UserError(Exception):
    pass


def _dirmatch(path, matchwith):
    """Check if path is within matchwith's tree.

    >>> _dirmatch('/home/foo/bar', '/home/foo/bar')
    True
    >>> _dirmatch('/home/foo/bar/', '/home/foo/bar')
    True
    >>> _dirmatch('/home/foo/bar/etc', '/home/foo/bar')
    True
    >>> _dirmatch('/home/foo/bar2', '/home/foo/bar')
    False
    >>> _dirmatch('/home/foo/bar2/etc', '/home/foo/bar')
    False
    """
    matchlen = len(matchwith)
    if (path.startswith(matchwith)
            and path[matchlen:matchlen + 1] in [os.sep, '']):
        return True
    return False


def _virtualenv_sys(venv_path):
    "obtain version and path info from a virtualenv."
    executable = os.path.join(venv_path, env_bin_dir, 'python')
    # Must use "executable" as the first argument rather than as the
    # keyword argument "executable" to get correct value from sys.path
    p = subprocess.Popen([executable,
                          '-c', 'import sys;'
                                'print (sys.version[:3]);'
                                'print ("\\n".join(sys.path));'],
                         env={},
                         stdout=subprocess.PIPE)
    stdout, err = p.communicate()
    assert not p.returncode and stdout
    lines = stdout.decode('utf-8').splitlines()
    return lines[0], list(filter(bool, lines[1:]))
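
# Illustrative note (added in this mirror; not part of the upstream
# virtualenv-clone source): _virtualenv_sys('/path/to/venv') returns a pair
# such as ('3.6', ['/path/to/venv/lib/python3.6', ...]) -- the interpreter's
# short version string followed by its non-empty sys.path entries.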


def clone_virtualenv(src_dir, dst_dir):
    if not os.path.exists(src_dir):
        raise UserError('src dir %r does not exist' % src_dir)
    if os.path.exists(dst_dir):
        raise UserError('dest dir %r exists' % dst_dir)
    #sys_path = _virtualenv_syspath(src_dir)
    logger.info('cloning virtualenv \'%s\' => \'%s\'...' %
                (src_dir, dst_dir))
    shutil.copytree(src_dir, dst_dir, symlinks=True,
                    ignore=shutil.ignore_patterns('*.pyc'))
    version, sys_path = _virtualenv_sys(dst_dir)
    logger.info('fixing scripts in bin...')
    fixup_scripts(src_dir, dst_dir, version)

    has_old = lambda s: any(i for i in s if _dirmatch(i, src_dir))

    if has_old(sys_path):
        # only need to fix stuff in sys.path if we have old
        # paths in the sys.path of new python env. right?
        logger.info('fixing paths in sys.path...')
        fixup_syspath_items(sys_path, src_dir, dst_dir)
    v_sys = _virtualenv_sys(dst_dir)
    remaining = has_old(v_sys[1])
    assert not remaining, v_sys
    fix_symlink_if_necessary(src_dir, dst_dir)


def fix_symlink_if_necessary(src_dir, dst_dir):
    # sometimes the source virtual environment has symlinks that point to itself;
    # one example is $OLD_VIRTUAL_ENV/local/lib points to $OLD_VIRTUAL_ENV/lib.
    # This function makes sure
    # $NEW_VIRTUAL_ENV/local/lib will point to $NEW_VIRTUAL_ENV/lib.
    # Usually this goes unnoticed unless one tries to upgrade a package through pip, so this bug is hard to find.
    logger.info("scanning for internal symlinks that point to the original virtual env")
    for dirpath, dirnames, filenames in os.walk(dst_dir):
        for a_file in itertools.chain(filenames, dirnames):
            full_file_path = os.path.join(dirpath, a_file)
            if os.path.islink(full_file_path):
                target = os.path.realpath(full_file_path)
                if target.startswith(src_dir):
                    new_target = target.replace(src_dir, dst_dir)
                    logger.debug('fixing symlink in %s' % (full_file_path,))
                    os.remove(full_file_path)
                    os.symlink(new_target, full_file_path)


def fixup_scripts(old_dir, new_dir, version, rewrite_env_python=False):
    bin_dir = os.path.join(new_dir, env_bin_dir)
    root, dirs, files = next(os.walk(bin_dir))
    pybinre = re.compile(r'pythonw?([0-9]+(\.[0-9]+(\.[0-9]+)?)?)?$')
    for file_ in files:
        filename = os.path.join(root, file_)
        if file_ in ['python', 'python%s' % version, 'activate_this.py']:
            continue
        elif file_.startswith('python') and pybinre.match(file_):
            # ignore other possible python binaries
            continue
        elif file_.endswith('.pyc'):
            # ignore compiled files
            continue
        elif file_ == 'activate' or file_.startswith('activate.'):
            fixup_activate(os.path.join(root, file_), old_dir, new_dir)
        elif os.path.islink(filename):
            fixup_link(filename, old_dir, new_dir)
        elif os.path.isfile(filename):
            fixup_script_(root, file_, old_dir, new_dir, version,
                          rewrite_env_python=rewrite_env_python)


def fixup_script_(root, file_, old_dir, new_dir, version,
                  rewrite_env_python=False):
    old_shebang = '#!%s/bin/python' % os.path.normcase(os.path.abspath(old_dir))
    new_shebang = '#!%s/bin/python' % os.path.normcase(os.path.abspath(new_dir))
    env_shebang = '#!/usr/bin/env python'

    filename = os.path.join(root, file_)
    with open(filename, 'rb') as f:
        if f.read(2) != b'#!':
            # no shebang
            return
        f.seek(0)
        lines = f.readlines()

    if not lines:
        # warn: empty script
        return

    def rewrite_shebang(version=None):
        logger.debug('fixing %s' % filename)
        shebang = new_shebang
        if version:
            shebang = shebang + version
        shebang = (shebang + '\n').encode('utf-8')
        with open(filename, 'wb') as f:
            f.write(shebang)
            f.writelines(lines[1:])

    try:
        bang = lines[0].decode('utf-8').strip()
    except UnicodeDecodeError:
        # binary file
        return

    # This takes care of the scheme in which shebang is of type
    # '#!/venv/bin/python3' while the version of system python
    # is of type 3.x e.g. 3.5.
    short_version = bang[len(old_shebang):]

    if not bang.startswith('#!'):
        return
    elif bang == old_shebang:
        rewrite_shebang()
    elif (bang.startswith(old_shebang)
          and bang[len(old_shebang):] == version):
        rewrite_shebang(version)
    elif (bang.startswith(old_shebang)
          and short_version
          and bang[len(old_shebang):] == short_version):
        rewrite_shebang(short_version)
    elif rewrite_env_python and bang.startswith(env_shebang):
        if bang == env_shebang:
            rewrite_shebang()
        elif bang[len(env_shebang):] == version:
            rewrite_shebang(version)
    else:
        # can't do anything
        return


def fixup_activate(filename, old_dir, new_dir):
    logger.debug('fixing %s' % filename)
    with open(filename, 'rb') as f:
        data = f.read().decode('utf-8')

    data = data.replace(old_dir, new_dir)
    with open(filename, 'wb') as f:
        f.write(data.encode('utf-8'))


def fixup_link(filename, old_dir, new_dir, target=None):
    logger.debug('fixing %s' % filename)
    if target is None:
        target = os.readlink(filename)

    origdir = os.path.dirname(os.path.abspath(filename)).replace(
        new_dir, old_dir)
    if not os.path.isabs(target):
        target = os.path.abspath(os.path.join(origdir, target))
        rellink = True
    else:
        rellink = False

    if _dirmatch(target, old_dir):
        if rellink:
            # keep relative links, but don't keep original in case it
            # traversed up out of, then back into the venv.
            # so, recreate a relative link from absolute.
            target = target[len(origdir):].lstrip(os.sep)
        else:
            target = target.replace(old_dir, new_dir, 1)

    # else: links outside the venv, replaced with absolute path to target.
    _replace_symlink(filename, target)


def _replace_symlink(filename, newtarget):
    tmpfn = "%s.new" % filename
    os.symlink(newtarget, tmpfn)
    os.rename(tmpfn, filename)
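
# Illustrative note (added in this mirror; not upstream): the link is swapped
# by creating "<name>.new" first and renaming it over the original, so an
# interrupted run never leaves the symlink missing; on POSIX filesystems
# os.rename replaces the destination atomically.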


def fixup_syspath_items(syspath, old_dir, new_dir):
    for path in syspath:
        if not os.path.isdir(path):
            continue
        path = os.path.normcase(os.path.abspath(path))
        if _dirmatch(path, old_dir):
            path = path.replace(old_dir, new_dir, 1)
            if not os.path.exists(path):
                continue
        elif not _dirmatch(path, new_dir):
            continue
        root, dirs, files = next(os.walk(path))
        for file_ in files:
            filename = os.path.join(root, file_)
            if filename.endswith('.pth'):
                fixup_pth_file(filename, old_dir, new_dir)
            elif filename.endswith('.egg-link'):
                fixup_egglink_file(filename, old_dir, new_dir)


def fixup_pth_file(filename, old_dir, new_dir):
    logger.debug('fixup_pth_file %s' % filename)

    with open(filename, 'r') as f:
        lines = f.readlines()

    has_change = False

    for num, line in enumerate(lines):
        line = (line.decode('utf-8') if hasattr(line, 'decode') else line).strip()

        if not line or line.startswith('#') or line.startswith('import '):
            continue
        elif _dirmatch(line, old_dir):
            lines[num] = line.replace(old_dir, new_dir, 1)
            has_change = True

    if has_change:
        with open(filename, 'w') as f:
            payload = os.linesep.join([l.strip() for l in lines]) + os.linesep
            f.write(payload)


def fixup_egglink_file(filename, old_dir, new_dir):
    logger.debug('fixing %s' % filename)
    with open(filename, 'rb') as f:
        link = f.read().decode('utf-8').strip()
    if _dirmatch(link, old_dir):
        link = link.replace(old_dir, new_dir, 1)
        with open(filename, 'wb') as f:
            link = (link + '\n').encode('utf-8')
            f.write(link)


def main():
    parser = optparse.OptionParser("usage: %prog [options]"
                                   " /path/to/existing/venv /path/to/cloned/venv")
    parser.add_option('-v',
                      action="count",
                      dest='verbose',
                      default=False,
                      help='verbosity')
    options, args = parser.parse_args()
    try:
        old_dir, new_dir = args
    except ValueError:
        print("virtualenv-clone %s" % (__version__,))
        parser.error("not enough arguments given.")
    old_dir = os.path.realpath(old_dir)
    new_dir = os.path.realpath(new_dir)
    loglevel = (logging.WARNING, logging.INFO, logging.DEBUG)[min(2,
        options.verbose)]
    logging.basicConfig(level=loglevel, format='%(message)s')
    try:
        clone_virtualenv(old_dir, new_dir)
    except UserError:
        e = sys.exc_info()[1]
        parser.error(str(e))


if __name__ == '__main__':
    main()
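
# Illustrative usage (added in this mirror; the exact console-script name is
# an assumption about how this vendored module is normally packaged):
#   $ python clonevirtualenv.py -v /path/to/existing/venv /path/to/cloned/venv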
@@ -0,0 +1 @@
pip
@@ -0,0 +1,24 @@
Metadata-Version: 1.1
Name: distlib
Version: 0.3.1
Summary: Distribution utilities
Description: Low-level components of distutils2/packaging, augmented with higher-level APIs for making packaging easier.
Home-page: https://bitbucket.org/pypa/distlib
Author: Vinay Sajip
Author-email: vinay_sajip@red-dove.com
License: Python license
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Console
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: Python Software Foundation License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Download-URL: https://bitbucket.org/pypa/distlib/downloads/distlib-0.3.1.zip
@@ -0,0 +1,45 @@
distlib-0.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
distlib-0.3.1.dist-info/METADATA,sha256=i6wrPilVkro9BXvaHkwVsaemMZCx5xbWc8jS9oR_ZJw,1128
distlib-0.3.1.dist-info/RECORD,,
distlib-0.3.1.dist-info/WHEEL,sha256=R4LNelR33E9ZPEGiPwrdPrrHnwkFEjiMPbVCAWVjsxI,106
distlib/__init__.py,sha256=3veAk2rPznOB2gsK6tjbbh0TQMmGE5P82eE9wXq6NIk,581
distlib/__pycache__/__init__.cpython-36.pyc,,
distlib/__pycache__/compat.cpython-36.pyc,,
distlib/__pycache__/database.cpython-36.pyc,,
distlib/__pycache__/index.cpython-36.pyc,,
distlib/__pycache__/locators.cpython-36.pyc,,
distlib/__pycache__/manifest.cpython-36.pyc,,
distlib/__pycache__/markers.cpython-36.pyc,,
distlib/__pycache__/metadata.cpython-36.pyc,,
distlib/__pycache__/resources.cpython-36.pyc,,
distlib/__pycache__/scripts.cpython-36.pyc,,
distlib/__pycache__/util.cpython-36.pyc,,
distlib/__pycache__/version.cpython-36.pyc,,
distlib/__pycache__/wheel.cpython-36.pyc,,
distlib/_backport/__init__.py,sha256=bqS_dTOH6uW9iGgd0uzfpPjo6vZ4xpPZ7kyfZJ2vNaw,274
distlib/_backport/__pycache__/__init__.cpython-36.pyc,,
distlib/_backport/__pycache__/misc.cpython-36.pyc,,
distlib/_backport/__pycache__/shutil.cpython-36.pyc,,
distlib/_backport/__pycache__/sysconfig.cpython-36.pyc,,
distlib/_backport/__pycache__/tarfile.cpython-36.pyc,,
distlib/_backport/misc.py,sha256=KWecINdbFNOxSOP1fGF680CJnaC6S4fBRgEtaYTw0ig,971
distlib/_backport/shutil.py,sha256=IX_G2NPqwecJibkIDje04bqu0xpHkfSQ2GaGdEVqM5Y,25707
distlib/_backport/sysconfig.cfg,sha256=swZKxq9RY5e9r3PXCrlvQPMsvOdiWZBTHLEbqS8LJLU,2617
distlib/_backport/sysconfig.py,sha256=BQHFlb6pubCl_dvT1NjtzIthylofjKisox239stDg0U,26854
distlib/_backport/tarfile.py,sha256=Ihp7rXRcjbIKw8COm9wSePV9ARGXbSF9gGXAMn2Q-KU,92628
distlib/compat.py,sha256=ADA56xiAxar3mU6qemlBhNbsrFPosXRhO44RzsbJPqk,41408
distlib/database.py,sha256=Kl0YvPQKc4OcpVi7k5cFziydM1xOK8iqdxLGXgbZHV4,51059
distlib/index.py,sha256=SXKzpQCERctxYDMp_OLee2f0J0e19ZhGdCIoMlUfUQM,21066
distlib/locators.py,sha256=c9E4cDEacJ_uKbuE5BqAVocoWp6rsuBGTkiNDQq3zV4,52100
distlib/manifest.py,sha256=nQEhYmgoreaBZzyFzwYsXxJARu3fo4EkunU163U16iE,14811
distlib/markers.py,sha256=6Ac3cCfFBERexiESWIOXmg-apIP8l2esafNSX3KMy-8,4387
distlib/metadata.py,sha256=z2KPy3h3tcDnb9Xs7nAqQ5Oz0bqjWAUFmKWcFKRoodg,38962
distlib/resources.py,sha256=2FGv0ZHF14KXjLIlL0R991lyQQGcewOS4mJ-5n-JVnc,10766
distlib/scripts.py,sha256=_MAj3sMuv56kuM8FsiIWXqbT0gmumPGaOR_atOzn4a4,17180
distlib/t32.exe,sha256=NS3xBCVAld35JVFNmb-1QRyVtThukMrwZVeXn4LhaEQ,96768
distlib/t64.exe,sha256=oAqHes78rUWVM0OtVqIhUvequl_PKhAhXYQWnUf7zR0,105984
distlib/util.py,sha256=f2jZCPrcLCt6LcnC0gUy-Fur60tXD8reA7k4rDpHMDw,59845
distlib/version.py,sha256=_n7F6juvQGAcn769E_SHa7fOcf5ERlEVymJ_EjPRwGw,23391
distlib/w32.exe,sha256=lJtnZdeUxTZWya_EW5DZos_K5rswRECGspIl8ZJCIXs,90112
distlib/w64.exe,sha256=0aRzoN2BO9NWW4ENy4_4vHkHR4qZTFZNVSAJJYlODTI,99840
distlib/wheel.py,sha256=v6DnwTqhNHwrEVFr8_YeiTW6G4ftP_evsywNgrmdb2o,41144
@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: distlib 0.3.1.dev0
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any
@ -0,0 +1,23 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2019 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import logging

__version__ = '0.3.1'

class DistlibException(Exception):
    pass

try:
    from logging import NullHandler
except ImportError:  # pragma: no cover
    class NullHandler(logging.Handler):
        def handle(self, record): pass
        def emit(self, record): pass
        def createLock(self): self.lock = None

logger = logging.getLogger(__name__)
logger.addHandler(NullHandler())
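# Illustrative sketch (not part of the vendored file): the NullHandler
# attached above keeps the library silent by default; an application that
# wants to see distlib's log records opts in with the standard logging
# machinery, e.g.:
#
#   >>> import logging
#   >>> logging.basicConfig(level=logging.DEBUG)
#   >>> logging.getLogger('distlib').debug('now visible')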
@ -0,0 +1,6 @@
"""Modules copied from Python 3 standard libraries, for internal use only.

Individual classes and functions are found in d2._backport.misc.  Intended
usage is to always import things missing from 3.1 from that module: the
built-in/stdlib objects will be used if found.
"""
@ -0,0 +1,41 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Backports for individual classes and functions."""

import os
import sys

__all__ = ['cache_from_source', 'callable', 'fsencode']


try:
    from imp import cache_from_source
except ImportError:
    def cache_from_source(py_file, debug=__debug__):
        ext = debug and 'c' or 'o'
        return py_file + ext


try:
    callable = callable
except NameError:
    from collections import Callable

    def callable(obj):
        return isinstance(obj, Callable)


try:
    fsencode = os.fsencode
except AttributeError:
    def fsencode(filename):
        if isinstance(filename, bytes):
            return filename
        elif isinstance(filename, str):
            return filename.encode(sys.getfilesystemencoding())
        else:
            raise TypeError("expect bytes or str, not %s" %
                            type(filename).__name__)
@ -0,0 +1,764 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Utility functions for copying and archiving files and directory trees.

XXX The functions here don't copy the resource fork or other metadata on Mac.

"""

import os
import sys
import stat
from os.path import abspath
import fnmatch
try:
    from collections.abc import Callable
except ImportError:
    from collections import Callable
import errno
from . import tarfile

try:
    import bz2
    _BZ2_SUPPORTED = True
except ImportError:
    _BZ2_SUPPORTED = False

try:
    from pwd import getpwnam
except ImportError:
    getpwnam = None

try:
    from grp import getgrnam
except ImportError:
    getgrnam = None

__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
           "copytree", "move", "rmtree", "Error", "SpecialFileError",
           "ExecError", "make_archive", "get_archive_formats",
           "register_archive_format", "unregister_archive_format",
           "get_unpack_formats", "register_unpack_format",
           "unregister_unpack_format", "unpack_archive", "ignore_patterns"]

class Error(EnvironmentError):
    pass

class SpecialFileError(EnvironmentError):
    """Raised when trying to do a kind of operation (e.g. copying) which is
    not supported on a special file (e.g. a named pipe)"""

class ExecError(EnvironmentError):
    """Raised when a command could not be executed"""

class ReadError(EnvironmentError):
    """Raised when an archive cannot be read"""

class RegistryError(Exception):
    """Raised when a registry operation with the archiving
    and unpacking registries fails"""


try:
    WindowsError
except NameError:
    WindowsError = None

def copyfileobj(fsrc, fdst, length=16*1024):
    """copy data from file-like object fsrc to file-like object fdst"""
    while 1:
        buf = fsrc.read(length)
        if not buf:
            break
        fdst.write(buf)

def _samefile(src, dst):
    # Macintosh, Unix.
    if hasattr(os.path, 'samefile'):
        try:
            return os.path.samefile(src, dst)
        except OSError:
            return False

    # All other platforms: check for same pathname.
    return (os.path.normcase(os.path.abspath(src)) ==
            os.path.normcase(os.path.abspath(dst)))

def copyfile(src, dst):
    """Copy data from src to dst"""
    if _samefile(src, dst):
        raise Error("`%s` and `%s` are the same file" % (src, dst))

    for fn in [src, dst]:
        try:
            st = os.stat(fn)
        except OSError:
            # File most likely does not exist
            pass
        else:
            # XXX What about other special files? (sockets, devices...)
            if stat.S_ISFIFO(st.st_mode):
                raise SpecialFileError("`%s` is a named pipe" % fn)

    with open(src, 'rb') as fsrc:
        with open(dst, 'wb') as fdst:
            copyfileobj(fsrc, fdst)

def copymode(src, dst):
    """Copy mode bits from src to dst"""
    if hasattr(os, 'chmod'):
        st = os.stat(src)
        mode = stat.S_IMODE(st.st_mode)
        os.chmod(dst, mode)

def copystat(src, dst):
    """Copy all stat info (mode bits, atime, mtime, flags) from src to dst"""
    st = os.stat(src)
    mode = stat.S_IMODE(st.st_mode)
    if hasattr(os, 'utime'):
        os.utime(dst, (st.st_atime, st.st_mtime))
    if hasattr(os, 'chmod'):
        os.chmod(dst, mode)
    if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
        try:
            os.chflags(dst, st.st_flags)
        except OSError as why:
            if (not hasattr(errno, 'EOPNOTSUPP') or
                why.errno != errno.EOPNOTSUPP):
                raise

def copy(src, dst):
    """Copy data and mode bits ("cp src dst").

    The destination may be a directory.

    """
    if os.path.isdir(dst):
        dst = os.path.join(dst, os.path.basename(src))
    copyfile(src, dst)
    copymode(src, dst)

def copy2(src, dst):
    """Copy data and all stat info ("cp -p src dst").

    The destination may be a directory.

    """
    if os.path.isdir(dst):
        dst = os.path.join(dst, os.path.basename(src))
    copyfile(src, dst)
    copystat(src, dst)

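# Illustrative sketch (not part of the vendored file): copy() preserves
# permission bits only, while copy2() also preserves timestamps, mirroring
# "cp" versus "cp -p".  The paths below are hypothetical.
#
#   >>> copy('settings.conf', '/tmp/settings.conf')     # mode bits only
#   >>> copy2('settings.conf', '/tmp/backups/')         # mode bits + times
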
def ignore_patterns(*patterns):
    """Function that can be used as copytree() ignore parameter.

    Patterns is a sequence of glob-style patterns
    that are used to exclude files"""
    def _ignore_patterns(path, names):
        ignored_names = []
        for pattern in patterns:
            ignored_names.extend(fnmatch.filter(names, pattern))
        return set(ignored_names)
    return _ignore_patterns

def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2,
             ignore_dangling_symlinks=False):
    """Recursively copy a directory tree.

    The destination directory must not already exist.
    If exception(s) occur, an Error is raised with a list of reasons.

    If the optional symlinks flag is true, symbolic links in the
    source tree result in symbolic links in the destination tree; if
    it is false, the contents of the files pointed to by symbolic
    links are copied. If the file pointed to by the symlink doesn't
    exist, an exception will be added in the list of errors raised in
    an Error exception at the end of the copy process.

    You can set the optional ignore_dangling_symlinks flag to true if you
    want to silence this exception. Notice that this has no effect on
    platforms that don't support os.symlink.

    The optional ignore argument is a callable. If given, it
    is called with the `src` parameter, which is the directory
    being visited by copytree(), and `names` which is the list of
    `src` contents, as returned by os.listdir():

        callable(src, names) -> ignored_names

    Since copytree() is called recursively, the callable will be
    called once for each directory that is copied. It returns a
    list of names relative to the `src` directory that should
    not be copied.

    The optional copy_function argument is a callable that will be used
    to copy each file. It will be called with the source path and the
    destination path as arguments. By default, copy2() is used, but any
    function that supports the same signature (like copy()) can be used.

    """
    names = os.listdir(src)
    if ignore is not None:
        ignored_names = ignore(src, names)
    else:
        ignored_names = set()

    os.makedirs(dst)
    errors = []
    for name in names:
        if name in ignored_names:
            continue
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if os.path.islink(srcname):
                linkto = os.readlink(srcname)
                if symlinks:
                    os.symlink(linkto, dstname)
                else:
                    # ignore dangling symlink if the flag is on
                    if not os.path.exists(linkto) and ignore_dangling_symlinks:
                        continue
                    # otherwise let the copy occur; copy2 will raise an error
                    copy_function(srcname, dstname)
            elif os.path.isdir(srcname):
                copytree(srcname, dstname, symlinks, ignore, copy_function)
            else:
                # Will raise a SpecialFileError for unsupported file types
                copy_function(srcname, dstname)
        # catch the Error from the recursive copytree so that we can
        # continue with other files
        except Error as err:
            errors.extend(err.args[0])
        except EnvironmentError as why:
            errors.append((srcname, dstname, str(why)))
    try:
        copystat(src, dst)
    except OSError as why:
        if WindowsError is not None and isinstance(why, WindowsError):
            # Copying file access times may fail on Windows
            pass
        else:
            errors.extend((src, dst, str(why)))
    if errors:
        raise Error(errors)

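# Illustrative sketch (not part of the vendored file): mirror a source tree
# while skipping bytecode and editor backups; ignore_patterns() builds the
# callable that copytree() consults in every directory.  Paths hypothetical.
#
#   >>> copytree('src', 'build/src',
#   ...          ignore=ignore_patterns('*.pyc', '*~', '__pycache__'))
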
def rmtree(path, ignore_errors=False, onerror=None):
    """Recursively delete a directory tree.

    If ignore_errors is set, errors are ignored; otherwise, if onerror
    is set, it is called to handle the error with arguments (func,
    path, exc_info) where func is os.listdir, os.remove, or os.rmdir;
    path is the argument to that function that caused it to fail; and
    exc_info is a tuple returned by sys.exc_info().  If ignore_errors
    is false and onerror is None, an exception is raised.

    """
    if ignore_errors:
        def onerror(*args):
            pass
    elif onerror is None:
        def onerror(*args):
            raise
    try:
        if os.path.islink(path):
            # symlinks to directories are forbidden, see bug #1669
            raise OSError("Cannot call rmtree on a symbolic link")
    except OSError:
        onerror(os.path.islink, path, sys.exc_info())
        # can't continue even if onerror hook returns
        return
    names = []
    try:
        names = os.listdir(path)
    except os.error:
        onerror(os.listdir, path, sys.exc_info())
    for name in names:
        fullname = os.path.join(path, name)
        try:
            mode = os.lstat(fullname).st_mode
        except os.error:
            mode = 0
        if stat.S_ISDIR(mode):
            rmtree(fullname, ignore_errors, onerror)
        else:
            try:
                os.remove(fullname)
            except os.error:
                onerror(os.remove, fullname, sys.exc_info())
    try:
        os.rmdir(path)
    except os.error:
        onerror(os.rmdir, path, sys.exc_info())

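# Illustrative sketch (not part of the vendored file): an onerror hook that
# clears the read-only bit and retries, a common workaround on Windows.
#
#   >>> def _force_remove(func, path, exc_info):
#   ...     os.chmod(path, stat.S_IWRITE)   # make writable, then retry
#   ...     func(path)
#   >>> rmtree('build', onerror=_force_remove)
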
def _basename(path):
    # A basename() variant which first strips the trailing slash, if present.
    # Thus we always get the last component of the path, even for directories.
    return os.path.basename(path.rstrip(os.path.sep))

def move(src, dst):
    """Recursively move a file or directory to another location. This is
    similar to the Unix "mv" command.

    If the destination is a directory or a symlink to a directory, the source
    is moved inside the directory. The destination path must not already
    exist.

    If the destination already exists but is not a directory, it may be
    overwritten depending on os.rename() semantics.

    If the destination is on our current filesystem, then rename() is used.
    Otherwise, src is copied to the destination and then removed.
    A lot more could be done here...  A look at a mv.c shows a lot of
    the issues this implementation glosses over.

    """
    real_dst = dst
    if os.path.isdir(dst):
        if _samefile(src, dst):
            # We might be on a case insensitive filesystem,
            # perform the rename anyway.
            os.rename(src, dst)
            return

        real_dst = os.path.join(dst, _basename(src))
        if os.path.exists(real_dst):
            raise Error("Destination path '%s' already exists" % real_dst)
    try:
        os.rename(src, real_dst)
    except OSError:
        if os.path.isdir(src):
            if _destinsrc(src, dst):
                raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst))
            copytree(src, real_dst, symlinks=True)
            rmtree(src)
        else:
            copy2(src, real_dst)
            os.unlink(src)

def _destinsrc(src, dst):
    src = abspath(src)
    dst = abspath(dst)
    if not src.endswith(os.path.sep):
        src += os.path.sep
    if not dst.endswith(os.path.sep):
        dst += os.path.sep
    return dst.startswith(src)

def _get_gid(name):
    """Returns a gid, given a group name."""
    if getgrnam is None or name is None:
        return None
    try:
        result = getgrnam(name)
    except KeyError:
        result = None
    if result is not None:
        return result[2]
    return None

def _get_uid(name):
    """Returns a uid, given a user name."""
    if getpwnam is None or name is None:
        return None
    try:
        result = getpwnam(name)
    except KeyError:
        result = None
    if result is not None:
        return result[2]
    return None

def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0,
                  owner=None, group=None, logger=None):
    """Create a (possibly compressed) tar file from all the files under
    'base_dir'.

    'compress' must be "gzip" (the default), "bzip2", or None.

    'owner' and 'group' can be used to define an owner and a group for the
    archive that is being built. If not provided, the current owner and group
    will be used.

    The output tar file will be named 'base_name' + ".tar", possibly plus
    the appropriate compression extension (".gz", or ".bz2").

    Returns the output filename.
    """
    tar_compression = {'gzip': 'gz', None: ''}
    compress_ext = {'gzip': '.gz'}

    if _BZ2_SUPPORTED:
        tar_compression['bzip2'] = 'bz2'
        compress_ext['bzip2'] = '.bz2'

    # flags for compression program, each element of list will be an argument
    if compress is not None and compress not in compress_ext:
        raise ValueError("bad value for 'compress', or compression format not "
                         "supported : {0}".format(compress))

    archive_name = base_name + '.tar' + compress_ext.get(compress, '')
    archive_dir = os.path.dirname(archive_name)

    if not os.path.exists(archive_dir):
        if logger is not None:
            logger.info("creating %s", archive_dir)
        if not dry_run:
            os.makedirs(archive_dir)

    # creating the tarball
    if logger is not None:
        logger.info('Creating tar archive')

    uid = _get_uid(owner)
    gid = _get_gid(group)

    def _set_uid_gid(tarinfo):
        if gid is not None:
            tarinfo.gid = gid
            tarinfo.gname = group
        if uid is not None:
            tarinfo.uid = uid
            tarinfo.uname = owner
        return tarinfo

    if not dry_run:
        tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
        try:
            tar.add(base_dir, filter=_set_uid_gid)
        finally:
            tar.close()

    return archive_name

def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False):
    # XXX see if we want to keep an external call here
    if verbose:
        zipoptions = "-r"
    else:
        zipoptions = "-rq"
    from distutils.errors import DistutilsExecError
    from distutils.spawn import spawn
    try:
        spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run)
    except DistutilsExecError:
        # XXX really should distinguish between "couldn't find
        # external 'zip' command" and "zip failed".
        raise ExecError("unable to create zip file '%s': "
                        "could neither import the 'zipfile' module nor "
                        "find a standalone zip utility" % zip_filename)

def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None):
    """Create a zip file from all the files under 'base_dir'.

    The output zip file will be named 'base_name' + ".zip".  Uses either the
    "zipfile" Python module (if available) or the InfoZIP "zip" utility
    (if installed and found on the default search path).  If neither tool is
    available, raises ExecError.  Returns the name of the output zip
    file.
    """
    zip_filename = base_name + ".zip"
    archive_dir = os.path.dirname(base_name)

    if not os.path.exists(archive_dir):
        if logger is not None:
            logger.info("creating %s", archive_dir)
        if not dry_run:
            os.makedirs(archive_dir)

    # If zipfile module is not available, try spawning an external 'zip'
    # command.
    try:
        import zipfile
    except ImportError:
        zipfile = None

    if zipfile is None:
        _call_external_zip(base_dir, zip_filename, verbose, dry_run)
    else:
        if logger is not None:
            logger.info("creating '%s' and adding '%s' to it",
                        zip_filename, base_dir)

        if not dry_run:
            zip = zipfile.ZipFile(zip_filename, "w",
                                  compression=zipfile.ZIP_DEFLATED)

            for dirpath, dirnames, filenames in os.walk(base_dir):
                for name in filenames:
                    path = os.path.normpath(os.path.join(dirpath, name))
                    if os.path.isfile(path):
                        zip.write(path, path)
                        if logger is not None:
                            logger.info("adding '%s'", path)
            zip.close()

    return zip_filename

_ARCHIVE_FORMATS = {
    'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
    'bztar': (_make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"),
    'tar':   (_make_tarball, [('compress', None)], "uncompressed tar file"),
    'zip':   (_make_zipfile, [], "ZIP file"),
    }

if _BZ2_SUPPORTED:
    _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')],
                                 "bzip2'ed tar-file")

def get_archive_formats():
    """Returns a list of supported formats for archiving and unarchiving.

    Each element of the returned sequence is a tuple (name, description)
    """
    formats = [(name, registry[2]) for name, registry in
               _ARCHIVE_FORMATS.items()]
    formats.sort()
    return formats

def register_archive_format(name, function, extra_args=None, description=''):
    """Registers an archive format.

    name is the name of the format. function is the callable that will be
    used to create archives. If provided, extra_args is a sequence of
    (name, value) tuples that will be passed as arguments to the callable.
    description can be provided to describe the format, and will be returned
    by the get_archive_formats() function.
    """
    if extra_args is None:
        extra_args = []
    if not isinstance(function, Callable):
        raise TypeError('The %s object is not callable' % function)
    if not isinstance(extra_args, (tuple, list)):
        raise TypeError('extra_args needs to be a sequence')
    for element in extra_args:
        if not isinstance(element, (tuple, list)) or len(element) != 2:
            raise TypeError('extra_args elements are : (arg_name, value)')

    _ARCHIVE_FORMATS[name] = (function, extra_args, description)

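# Illustrative sketch (not part of the vendored file): registering a
# hypothetical xz-compressed tar maker.  The maker takes (base_name,
# base_dir) plus the keyword arguments make_archive() forwards to it.
#
#   >>> def _make_xztar(base_name, base_dir, **kwargs):
#   ...     import tarfile as tf              # 'w:xz' needs Python 3.3+
#   ...     archive_name = base_name + '.tar.xz'
#   ...     with tf.open(archive_name, 'w:xz') as tar:
#   ...         tar.add(base_dir)
#   ...     return archive_name
#   >>> register_archive_format('xztar', _make_xztar, [], "xz'ed tar-file")
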
def unregister_archive_format(name):
    del _ARCHIVE_FORMATS[name]

def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0,
                 dry_run=0, owner=None, group=None, logger=None):
    """Create an archive file (eg. zip or tar).

    'base_name' is the name of the file to create, minus any format-specific
    extension; 'format' is the archive format: one of "zip", "tar", "bztar"
    or "gztar".

    'root_dir' is a directory that will be the root directory of the
    archive; ie. we typically chdir into 'root_dir' before creating the
    archive.  'base_dir' is the directory where we start archiving from;
    ie. 'base_dir' will be the common prefix of all files and
    directories in the archive.  'root_dir' and 'base_dir' both default
    to the current directory.  Returns the name of the archive file.

    'owner' and 'group' are used when creating a tar archive. By default,
    uses the current owner and group.
    """
    save_cwd = os.getcwd()
    if root_dir is not None:
        if logger is not None:
            logger.debug("changing into '%s'", root_dir)
        base_name = os.path.abspath(base_name)
        if not dry_run:
            os.chdir(root_dir)

    if base_dir is None:
        base_dir = os.curdir

    kwargs = {'dry_run': dry_run, 'logger': logger}

    try:
        format_info = _ARCHIVE_FORMATS[format]
    except KeyError:
        raise ValueError("unknown archive format '%s'" % format)

    func = format_info[0]
    for arg, val in format_info[1]:
        kwargs[arg] = val

    if format != 'zip':
        kwargs['owner'] = owner
        kwargs['group'] = group

    try:
        filename = func(base_name, base_dir, **kwargs)
    finally:
        if root_dir is not None:
            if logger is not None:
                logger.debug("changing back to '%s'", save_cwd)
            os.chdir(save_cwd)

    return filename

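# Illustrative sketch (not part of the vendored file): pack ./project into
# /tmp/project-1.0.tar.gz.  root_dir is chdir'ed into first, so the archive
# members are all rooted at "project/".  Paths hypothetical.
#
#   >>> make_archive('/tmp/project-1.0', 'gztar',
#   ...              root_dir='.', base_dir='project')
#   '/tmp/project-1.0.tar.gz'
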
def get_unpack_formats():
    """Returns a list of supported formats for unpacking.

    Each element of the returned sequence is a tuple
    (name, extensions, description)
    """
    formats = [(name, info[0], info[3]) for name, info in
               _UNPACK_FORMATS.items()]
    formats.sort()
    return formats

def _check_unpack_options(extensions, function, extra_args):
    """Checks what gets registered as an unpacker."""
    # first make sure no other unpacker is registered for this extension
    existing_extensions = {}
    for name, info in _UNPACK_FORMATS.items():
        for ext in info[0]:
            existing_extensions[ext] = name

    for extension in extensions:
        if extension in existing_extensions:
            msg = '%s is already registered for "%s"'
            raise RegistryError(msg % (extension,
                                       existing_extensions[extension]))

    if not isinstance(function, Callable):
        raise TypeError('The registered function must be a callable')


def register_unpack_format(name, extensions, function, extra_args=None,
                           description=''):
    """Registers an unpack format.

    `name` is the name of the format. `extensions` is a list of extensions
    corresponding to the format.

    `function` is the callable that will be
    used to unpack archives. The callable will receive archives to unpack.
    If it's unable to handle an archive, it needs to raise a ReadError
    exception.

    If provided, `extra_args` is a sequence of
    (name, value) tuples that will be passed as arguments to the callable.
    description can be provided to describe the format, and will be returned
    by the get_unpack_formats() function.
    """
    if extra_args is None:
        extra_args = []
    _check_unpack_options(extensions, function, extra_args)
    _UNPACK_FORMATS[name] = extensions, function, extra_args, description

def unregister_unpack_format(name):
    """Removes the unpack format from the registry."""
    del _UNPACK_FORMATS[name]

def _ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    dirname = os.path.dirname(path)
    if not os.path.isdir(dirname):
        os.makedirs(dirname)

def _unpack_zipfile(filename, extract_dir):
    """Unpack zip `filename` to `extract_dir`
    """
    try:
        import zipfile
    except ImportError:
        raise ReadError('zlib not supported, cannot unpack this archive.')

    if not zipfile.is_zipfile(filename):
        raise ReadError("%s is not a zip file" % filename)

    zip = zipfile.ZipFile(filename)
    try:
        for info in zip.infolist():
            name = info.filename

            # don't extract absolute paths or ones with .. in them
            if name.startswith('/') or '..' in name:
                continue

            target = os.path.join(extract_dir, *name.split('/'))
            if not target:
                continue

            _ensure_directory(target)
            if not name.endswith('/'):
                # file
                data = zip.read(info.filename)
                f = open(target, 'wb')
                try:
                    f.write(data)
                finally:
                    f.close()
                    del data
    finally:
        zip.close()

def _unpack_tarfile(filename, extract_dir):
    """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
    """
    try:
        tarobj = tarfile.open(filename)
    except tarfile.TarError:
        raise ReadError(
            "%s is not a compressed or uncompressed tar file" % filename)
    try:
        tarobj.extractall(extract_dir)
    finally:
        tarobj.close()

_UNPACK_FORMATS = {
    'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"),
    'tar':   (['.tar'], _unpack_tarfile, [], "uncompressed tar file"),
    'zip':   (['.zip'], _unpack_zipfile, [], "ZIP file")
    }

if _BZ2_SUPPORTED:
    _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [],
                                "bzip2'ed tar-file")

def _find_unpack_format(filename):
    for name, info in _UNPACK_FORMATS.items():
        for extension in info[0]:
            if filename.endswith(extension):
                return name
    return None

def unpack_archive(filename, extract_dir=None, format=None):
    """Unpack an archive.

    `filename` is the name of the archive.

    `extract_dir` is the name of the target directory, where the archive
    is unpacked. If not provided, the current working directory is used.

    `format` is the archive format: one of "zip", "tar", or "gztar". Or any
    other registered format. If not provided, unpack_archive will use the
    filename extension and see if an unpacker was registered for that
    extension.

    In case none is found, a ValueError is raised.
    """
    if extract_dir is None:
        extract_dir = os.getcwd()

    if format is not None:
        try:
            format_info = _UNPACK_FORMATS[format]
        except KeyError:
            raise ValueError("Unknown unpack format '{0}'".format(format))

        func = format_info[1]
        func(filename, extract_dir, **dict(format_info[2]))
    else:
        # we need to look at the registered unpackers supported extensions
        format = _find_unpack_format(filename)
        if format is None:
            raise ReadError("Unknown archive format '{0}'".format(filename))

        func = _UNPACK_FORMATS[format][1]
        kwargs = dict(_UNPACK_FORMATS[format][2])
        func(filename, extract_dir, **kwargs)

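# Illustrative sketch (not part of the vendored file): the format is
# normally inferred from the extension via _find_unpack_format(), but can
# be forced with format=.  Paths hypothetical.
#
#   >>> unpack_archive('/tmp/project-1.0.tar.gz', '/tmp/unpacked')
#   >>> unpack_archive('/tmp/blob.bin', '/tmp/unpacked', format='zip')
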
@ -0,0 +1,84 @@
[posix_prefix]
# Configuration directories.  Some of these come straight out of the
# configure script.  They are for implementing the other variables, not to
# be used directly in [resource_locations].
confdir = /etc
datadir = /usr/share
libdir = /usr/lib
statedir = /var
# User resource directory
local = ~/.local/{distribution.name}

stdlib = {base}/lib/python{py_version_short}
platstdlib = {platbase}/lib/python{py_version_short}
purelib = {base}/lib/python{py_version_short}/site-packages
platlib = {platbase}/lib/python{py_version_short}/site-packages
include = {base}/include/python{py_version_short}{abiflags}
platinclude = {platbase}/include/python{py_version_short}{abiflags}
data = {base}

[posix_home]
stdlib = {base}/lib/python
platstdlib = {base}/lib/python
purelib = {base}/lib/python
platlib = {base}/lib/python
include = {base}/include/python
platinclude = {base}/include/python
scripts = {base}/bin
data = {base}

[nt]
stdlib = {base}/Lib
platstdlib = {base}/Lib
purelib = {base}/Lib/site-packages
platlib = {base}/Lib/site-packages
include = {base}/Include
platinclude = {base}/Include
scripts = {base}/Scripts
data = {base}

[os2]
stdlib = {base}/Lib
platstdlib = {base}/Lib
purelib = {base}/Lib/site-packages
platlib = {base}/Lib/site-packages
include = {base}/Include
platinclude = {base}/Include
scripts = {base}/Scripts
data = {base}

[os2_home]
stdlib = {userbase}/lib/python{py_version_short}
platstdlib = {userbase}/lib/python{py_version_short}
purelib = {userbase}/lib/python{py_version_short}/site-packages
platlib = {userbase}/lib/python{py_version_short}/site-packages
include = {userbase}/include/python{py_version_short}
scripts = {userbase}/bin
data = {userbase}

[nt_user]
stdlib = {userbase}/Python{py_version_nodot}
platstdlib = {userbase}/Python{py_version_nodot}
purelib = {userbase}/Python{py_version_nodot}/site-packages
platlib = {userbase}/Python{py_version_nodot}/site-packages
include = {userbase}/Python{py_version_nodot}/Include
scripts = {userbase}/Scripts
data = {userbase}

[posix_user]
stdlib = {userbase}/lib/python{py_version_short}
platstdlib = {userbase}/lib/python{py_version_short}
purelib = {userbase}/lib/python{py_version_short}/site-packages
platlib = {userbase}/lib/python{py_version_short}/site-packages
include = {userbase}/include/python{py_version_short}
scripts = {userbase}/bin
data = {userbase}

[osx_framework_user]
stdlib = {userbase}/lib/python
platstdlib = {userbase}/lib/python
purelib = {userbase}/lib/python/site-packages
platlib = {userbase}/lib/python/site-packages
include = {userbase}/include
scripts = {userbase}/bin
data = {userbase}

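# Illustrative note (not part of the vendored file): the {base}, {userbase},
# {py_version_short} and similar placeholders are expanded at runtime by the
# sysconfig backport that follows, e.g. on a hypothetical POSIX system with
# base=/usr and py_version_short=3.6, the posix_prefix purelib entry expands
# to /usr/lib/python3.6/site-packages.
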
@ -0,0 +1,786 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Access to Python's configuration information."""

import codecs
import os
import re
import sys
from os.path import pardir, realpath
try:
    import configparser
except ImportError:
    import ConfigParser as configparser


__all__ = [
    'get_config_h_filename',
    'get_config_var',
    'get_config_vars',
    'get_makefile_filename',
    'get_path',
    'get_path_names',
    'get_paths',
    'get_platform',
    'get_python_version',
    'get_scheme_names',
    'parse_config_h',
]


def _safe_realpath(path):
    try:
        return realpath(path)
    except OSError:
        return path


if sys.executable:
    _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable))
else:
    # sys.executable can be empty if argv[0] has been changed and Python is
    # unable to retrieve the real program name
    _PROJECT_BASE = _safe_realpath(os.getcwd())

if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower():
    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir))
# PC/VS7.1
if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower():
    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
# PC/AMD64
if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower():
    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))


def is_python_build():
    for fn in ("Setup.dist", "Setup.local"):
        if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)):
            return True
    return False

_PYTHON_BUILD = is_python_build()

_cfg_read = False

def _ensure_cfg_read():
    global _cfg_read
    if not _cfg_read:
        from ..resources import finder
        backport_package = __name__.rsplit('.', 1)[0]
        _finder = finder(backport_package)
        _cfgfile = _finder.find('sysconfig.cfg')
        assert _cfgfile, 'sysconfig.cfg exists'
        with _cfgfile.as_stream() as s:
            _SCHEMES.readfp(s)
        if _PYTHON_BUILD:
            for scheme in ('posix_prefix', 'posix_home'):
                _SCHEMES.set(scheme, 'include', '{srcdir}/Include')
                _SCHEMES.set(scheme, 'platinclude', '{projectbase}/.')

        _cfg_read = True


_SCHEMES = configparser.RawConfigParser()
_VAR_REPL = re.compile(r'\{([^{]*?)\}')

def _expand_globals(config):
    _ensure_cfg_read()
    if config.has_section('globals'):
        globals = config.items('globals')
    else:
        globals = tuple()

    sections = config.sections()
    for section in sections:
        if section == 'globals':
            continue
        for option, value in globals:
            if config.has_option(section, option):
                continue
            config.set(section, option, value)
    config.remove_section('globals')

    # now expanding local variables defined in the cfg file
    #
    for section in config.sections():
        variables = dict(config.items(section))

        def _replacer(matchobj):
            name = matchobj.group(1)
            if name in variables:
                return variables[name]
            return matchobj.group(0)

        for option, value in config.items(section):
            config.set(section, option, _VAR_REPL.sub(_replacer, value))

#_expand_globals(_SCHEMES)

_PY_VERSION = '%s.%s.%s' % sys.version_info[:3]
_PY_VERSION_SHORT = '%s.%s' % sys.version_info[:2]
_PY_VERSION_SHORT_NO_DOT = '%s%s' % sys.version_info[:2]
_PREFIX = os.path.normpath(sys.prefix)
_EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
_CONFIG_VARS = None
_USER_BASE = None


def _subst_vars(path, local_vars):
    """In the string `path`, replace tokens like {some.thing} with the
    corresponding value from the map `local_vars`.

    If there is no corresponding value, leave the token unchanged.
    """
    def _replacer(matchobj):
        name = matchobj.group(1)
        if name in local_vars:
            return local_vars[name]
        elif name in os.environ:
            return os.environ[name]
        return matchobj.group(0)
    return _VAR_REPL.sub(_replacer, path)


def _extend_dict(target_dict, other_dict):
    target_keys = target_dict.keys()
    for key, value in other_dict.items():
        if key in target_keys:
            continue
        target_dict[key] = value


def _expand_vars(scheme, vars):
    res = {}
    if vars is None:
        vars = {}
    _extend_dict(vars, get_config_vars())

    for key, value in _SCHEMES.items(scheme):
        if os.name in ('posix', 'nt'):
            value = os.path.expanduser(value)
        res[key] = os.path.normpath(_subst_vars(value, vars))
    return res


def format_value(value, vars):
    def _replacer(matchobj):
        name = matchobj.group(1)
        if name in vars:
            return vars[name]
        return matchobj.group(0)
    return _VAR_REPL.sub(_replacer, value)


def _get_default_scheme():
    if os.name == 'posix':
        # the default scheme for posix is posix_prefix
        return 'posix_prefix'
    return os.name


def _getuserbase():
    env_base = os.environ.get("PYTHONUSERBASE", None)

    def joinuser(*args):
        return os.path.expanduser(os.path.join(*args))

    # what about 'os2emx', 'riscos' ?
    if os.name == "nt":
        base = os.environ.get("APPDATA") or "~"
        if env_base:
            return env_base
        else:
            return joinuser(base, "Python")

    if sys.platform == "darwin":
        framework = get_config_var("PYTHONFRAMEWORK")
        if framework:
            if env_base:
                return env_base
            else:
                return joinuser("~", "Library", framework, "%d.%d" %
                                sys.version_info[:2])

    if env_base:
        return env_base
    else:
        return joinuser("~", ".local")

def _parse_makefile(filename, vars=None):
    """Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    """
    # Regexes needed for parsing Makefile (and similar syntaxes,
    # like old-style Setup files).
    _variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
    _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
    _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")

    if vars is None:
        vars = {}
    done = {}
    notdone = {}

    with codecs.open(filename, encoding='utf-8', errors="surrogateescape") as f:
        lines = f.readlines()

    for line in lines:
        if line.startswith('#') or line.strip() == '':
            continue
        m = _variable_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            v = v.strip()
            # `$$' is a literal `$' in make
            tmpv = v.replace('$$', '')

            if "$" in tmpv:
                notdone[n] = v
            else:
                try:
                    v = int(v)
                except ValueError:
                    # insert literal `$'
                    done[n] = v.replace('$$', '$')
                else:
                    done[n] = v

    # do variable interpolation here
    variables = list(notdone.keys())

    # Variables with a 'PY_' prefix in the makefile. These need to
    # be made available without that prefix through sysconfig.
    # Special care is needed to ensure that variable expansion works, even
    # if the expansion uses the name without a prefix.
    renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')

    while len(variables) > 0:
        for name in tuple(variables):
            value = notdone[name]
            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
            if m is not None:
                n = m.group(1)
                found = True
                if n in done:
                    item = str(done[n])
                elif n in notdone:
                    # get it on a subsequent round
                    found = False
                elif n in os.environ:
                    # do it like make: fall back to environment
                    item = os.environ[n]

                elif n in renamed_variables:
                    if (name.startswith('PY_') and
                        name[3:] in renamed_variables):
                        item = ""

                    elif 'PY_' + n in notdone:
                        found = False

                    else:
                        item = str(done['PY_' + n])

                else:
                    done[n] = item = ""

                if found:
                    after = value[m.end():]
                    value = value[:m.start()] + item + after
                    if "$" in after:
                        notdone[name] = value
                    else:
                        try:
                            value = int(value)
                        except ValueError:
                            done[name] = value.strip()
                        else:
                            done[name] = value
                        variables.remove(name)

                        if (name.startswith('PY_') and
                            name[3:] in renamed_variables):

                            name = name[3:]
                            if name not in done:
                                done[name] = value

            else:
                # bogus variable reference (e.g. "prefix=$/opt/python");
                # just drop it since we can't deal
                done[name] = value
                variables.remove(name)

    # strip spurious spaces
    for k, v in done.items():
        if isinstance(v, str):
            done[k] = v.strip()

    # save the results in the global dictionary
    vars.update(done)
    return vars

def get_makefile_filename():
    """Return the path of the Makefile."""
    if _PYTHON_BUILD:
        return os.path.join(_PROJECT_BASE, "Makefile")
    if hasattr(sys, 'abiflags'):
        config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags)
    else:
        config_dir_name = 'config'
    return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile')


def _init_posix(vars):
    """Initialize the module as appropriate for POSIX systems."""
    # load the installed Makefile:
    makefile = get_makefile_filename()
    try:
        _parse_makefile(makefile, vars)
    except IOError as e:
        msg = "invalid Python installation: unable to open %s" % makefile
        if hasattr(e, "strerror"):
            msg = msg + " (%s)" % e.strerror
        raise IOError(msg)
    # load the installed pyconfig.h:
    config_h = get_config_h_filename()
    try:
        with open(config_h) as f:
            parse_config_h(f, vars)
    except IOError as e:
        msg = "invalid Python installation: unable to open %s" % config_h
        if hasattr(e, "strerror"):
            msg = msg + " (%s)" % e.strerror
        raise IOError(msg)
    # On AIX, there are wrong paths to the linker scripts in the Makefile
    # -- these paths are relative to the Python source, but when installed
    # the scripts are in another directory.
    if _PYTHON_BUILD:
        vars['LDSHARED'] = vars['BLDSHARED']


def _init_non_posix(vars):
    """Initialize the module as appropriate for NT"""
    # set basic install directories
    vars['LIBDEST'] = get_path('stdlib')
    vars['BINLIBDEST'] = get_path('platstdlib')
    vars['INCLUDEPY'] = get_path('include')
    vars['SO'] = '.pyd'
    vars['EXE'] = '.exe'
    vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT
    vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable))

#
# public APIs
#

def parse_config_h(fp, vars=None):
    """Parse a config.h-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    """
    if vars is None:
        vars = {}
    define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
    undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n")

    while True:
        line = fp.readline()
        if not line:
            break
        m = define_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            try:
                v = int(v)
            except ValueError:
                pass
            vars[n] = v
        else:
            m = undef_rx.match(line)
            if m:
                vars[m.group(1)] = 0
    return vars

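# Illustrative sketch (not part of the vendored file): parse the installed
# pyconfig.h into a dict mapping macro names to values.
#
#   >>> with open(get_config_h_filename()) as f:
#   ...     config = parse_config_h(f)
#   >>> config.get('HAVE_FORK')                # 1 on most POSIX systems
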
def get_config_h_filename():
    """Return the path of pyconfig.h."""
    if _PYTHON_BUILD:
        if os.name == "nt":
            inc_dir = os.path.join(_PROJECT_BASE, "PC")
        else:
            inc_dir = _PROJECT_BASE
    else:
        inc_dir = get_path('platinclude')
    return os.path.join(inc_dir, 'pyconfig.h')


def get_scheme_names():
    """Return a tuple containing the scheme names."""
    return tuple(sorted(_SCHEMES.sections()))


def get_path_names():
    """Return a tuple containing the path names."""
    # xxx see if we want a static list
    return _SCHEMES.options('posix_prefix')

def get_paths(scheme=_get_default_scheme(), vars=None, expand=True):
    """Return a mapping containing an install scheme.

    ``scheme`` is the install scheme name. If not provided, it will
    return the default scheme for the current platform.
    """
    _ensure_cfg_read()
    if expand:
        return _expand_vars(scheme, vars)
    else:
        return dict(_SCHEMES.items(scheme))


def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True):
    """Return a path corresponding to the scheme.

    ``scheme`` is the install scheme name.
    """
    return get_paths(scheme, vars, expand)[name]

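# Illustrative sketch (not part of the vendored file): values shown are for
# a hypothetical /usr install of Python 3.6 under the posix_prefix scheme.
#
#   >>> get_path('purelib')
#   '/usr/lib/python3.6/site-packages'
#   >>> 'stdlib' in get_paths()                # full mapping for the scheme
#   True
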
def get_config_vars(*args):
|
||||||
|
"""With no arguments, return a dictionary of all configuration
|
||||||
|
    variables relevant for the current platform.

    On Unix, this means every variable defined in Python's installed Makefile;
    On Windows and Mac OS it's a much smaller set.

    With arguments, return a list of values that result from looking up
    each argument in the configuration variable dictionary.
    """
    global _CONFIG_VARS
    if _CONFIG_VARS is None:
        _CONFIG_VARS = {}
        # Normalized versions of prefix and exec_prefix are handy to have;
        # in fact, these are the standard versions used most places in the
        # distutils2 module.
        _CONFIG_VARS['prefix'] = _PREFIX
        _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX
        _CONFIG_VARS['py_version'] = _PY_VERSION
        _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT
        _CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2]
        _CONFIG_VARS['base'] = _PREFIX
        _CONFIG_VARS['platbase'] = _EXEC_PREFIX
        _CONFIG_VARS['projectbase'] = _PROJECT_BASE
        try:
            _CONFIG_VARS['abiflags'] = sys.abiflags
        except AttributeError:
            # sys.abiflags may not be defined on all platforms.
            _CONFIG_VARS['abiflags'] = ''

        if os.name in ('nt', 'os2'):
            _init_non_posix(_CONFIG_VARS)
        if os.name == 'posix':
            _init_posix(_CONFIG_VARS)
        # Setting 'userbase' is done below the call to the
        # init function to enable using 'get_config_var' in
        # the init-function.
        if sys.version >= '2.6':
            _CONFIG_VARS['userbase'] = _getuserbase()

        if 'srcdir' not in _CONFIG_VARS:
            _CONFIG_VARS['srcdir'] = _PROJECT_BASE
        else:
            _CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir'])

        # Convert srcdir into an absolute path if it appears necessary.
        # Normally it is relative to the build directory. However, during
        # testing, for example, we might be running a non-installed python
        # from a different directory.
        if _PYTHON_BUILD and os.name == "posix":
            base = _PROJECT_BASE
            try:
                cwd = os.getcwd()
            except OSError:
                cwd = None
            if (not os.path.isabs(_CONFIG_VARS['srcdir']) and
                    base != cwd):
                # srcdir is relative and we are not in the same directory
                # as the executable. Assume executable is in the build
                # directory and make srcdir absolute.
                srcdir = os.path.join(base, _CONFIG_VARS['srcdir'])
                _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir)

        if sys.platform == 'darwin':
            kernel_version = os.uname()[2]  # Kernel version (8.4.3)
            major_version = int(kernel_version.split('.')[0])

            if major_version < 8:
                # On Mac OS X before 10.4, check if -arch and -isysroot
                # are in CFLAGS or LDFLAGS and remove them if they are.
                # This is needed when building extensions on a 10.3 system
                # using a universal build of python.
                for key in ('LDFLAGS', 'BASECFLAGS',
                            # a number of derived variables. These need to be
                            # patched up as well.
                            'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
                    flags = _CONFIG_VARS[key]
                    flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
                    flags = re.sub('-isysroot [^ \t]*', ' ', flags)
                    _CONFIG_VARS[key] = flags
            else:
                # Allow the user to override the architecture flags using
                # an environment variable.
                # NOTE: This name was introduced by Apple in OSX 10.5 and
                # is used by several scripting languages distributed with
                # that OS release.
                if 'ARCHFLAGS' in os.environ:
                    arch = os.environ['ARCHFLAGS']
                    for key in ('LDFLAGS', 'BASECFLAGS',
                                # a number of derived variables. These need to be
                                # patched up as well.
                                'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):

                        flags = _CONFIG_VARS[key]
                        flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
                        flags = flags + ' ' + arch
                        _CONFIG_VARS[key] = flags

                # If we're on OSX 10.5 or later and the user tries to
                # compile an extension using an SDK that is not present
                # on the current machine it is better to not use an SDK
                # than to fail.
                #
                # The major usecase for this is users using a Python.org
                # binary installer on OSX 10.6: that installer uses
                # the 10.4u SDK, but that SDK is not installed by default
                # when you install Xcode.
                #
                CFLAGS = _CONFIG_VARS.get('CFLAGS', '')
                m = re.search(r'-isysroot\s+(\S+)', CFLAGS)
                if m is not None:
                    sdk = m.group(1)
                    if not os.path.exists(sdk):
                        for key in ('LDFLAGS', 'BASECFLAGS',
                                    # a number of derived variables. These need to be
                                    # patched up as well.
                                    'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):

                            flags = _CONFIG_VARS[key]
                            flags = re.sub(r'-isysroot\s+\S+(\s|$)', ' ', flags)
                            _CONFIG_VARS[key] = flags

    if args:
        vals = []
        for name in args:
            vals.append(_CONFIG_VARS.get(name))
        return vals
    else:
        return _CONFIG_VARS
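
# Illustrative usage (not part of the original module; the values shown are
# assumptions and vary by interpreter and platform):
#
#     get_config_vars('prefix', 'py_version_short')  # e.g. ['/usr', '2.7']
#     get_config_vars().get('userbase')              # e.g. '/home/user/.local'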


def get_config_var(name):
    """Return the value of a single variable using the dictionary returned by
    'get_config_vars()'.

    Equivalent to get_config_vars().get(name)
    """
    return get_config_vars().get(name)


def get_platform():
    """Return a string that identifies the current platform.

    This is used mainly to distinguish platform-specific build directories and
    platform-specific built distributions. Typically includes the OS name
    and version and the architecture (as supplied by 'os.uname()'),
    although the exact information included depends on the OS; e.g. for IRIX
    the architecture isn't particularly important (IRIX only runs on SGI
    hardware), but for Linux the kernel version isn't particularly
    important.

    Examples of returned values:
        linux-i586
        linux-alpha (?)
        solaris-2.6-sun4u
        irix-5.3
        irix64-6.2

    Windows will return one of:
        win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc.))
        win-ia64 (64bit Windows on Itanium)
        win32 (all others - specifically, sys.platform is returned)

    For other non-POSIX platforms, currently just returns 'sys.platform'.
    """
    if os.name == 'nt':
        # sniff sys.version for architecture.
        prefix = " bit ("
        i = sys.version.find(prefix)
        if i == -1:
            return sys.platform
        j = sys.version.find(")", i)
        look = sys.version[i+len(prefix):j].lower()
        if look == 'amd64':
            return 'win-amd64'
        if look == 'itanium':
            return 'win-ia64'
        return sys.platform

    if os.name != "posix" or not hasattr(os, 'uname'):
        # XXX what about the architecture? NT is Intel or Alpha,
        # Mac OS is M68k or PPC, etc.
        return sys.platform

    # Try to distinguish various flavours of Unix
    osname, host, release, version, machine = os.uname()

    # Convert the OS name to lowercase, remove '/' characters
    # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh")
    osname = osname.lower().replace('/', '')
    machine = machine.replace(' ', '_')
    machine = machine.replace('/', '-')

    if osname[:5] == "linux":
        # At least on Linux/Intel, 'machine' is the processor --
        # i386, etc.
        # XXX what about Alpha, SPARC, etc?
        return "%s-%s" % (osname, machine)
    elif osname[:5] == "sunos":
        if release[0] >= "5":           # SunOS 5 == Solaris 2
            osname = "solaris"
            release = "%d.%s" % (int(release[0]) - 3, release[2:])
        # fall through to standard osname-release-machine representation
    elif osname[:4] == "irix":          # could be "irix64"!
        return "%s-%s" % (osname, release)
    elif osname[:3] == "aix":
        return "%s-%s.%s" % (osname, version, release)
    elif osname[:6] == "cygwin":
        osname = "cygwin"
        rel_re = re.compile(r'[\d.]+')
        m = rel_re.match(release)
        if m:
            release = m.group()
    elif osname[:6] == "darwin":
        #
        # For our purposes, we'll assume that the system version from
        # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
        # to. This makes the compatibility story a bit more sane because the
        # machine is going to compile and link as if it were
        # MACOSX_DEPLOYMENT_TARGET.
        cfgvars = get_config_vars()
        macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')

        if True:
            # Always calculate the release of the running machine,
            # needed to determine if we can build fat binaries or not.

            macrelease = macver
            # Get the system version. Reading this plist is a documented
            # way to get the system version (see the documentation for
            # the Gestalt Manager)
            try:
                f = open('/System/Library/CoreServices/SystemVersion.plist')
            except IOError:
                # We're on a plain darwin box, fall back to the default
                # behaviour.
                pass
            else:
                try:
                    m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
                                  r'<string>(.*?)</string>', f.read())
                finally:
                    f.close()
                if m is not None:
                    macrelease = '.'.join(m.group(1).split('.')[:2])
                # else: fall back to the default behaviour

        if not macver:
            macver = macrelease

        if macver:
            release = macver
            osname = "macosx"

            if ((macrelease + '.') >= '10.4.' and
                    '-arch' in get_config_vars().get('CFLAGS', '').strip()):
                # The universal build will build fat binaries, but not on
                # systems before 10.4
                #
                # Try to detect 4-way universal builds, those have machine-type
                # 'universal' instead of 'fat'.

                machine = 'fat'
                cflags = get_config_vars().get('CFLAGS')

                archs = re.findall(r'-arch\s+(\S+)', cflags)
                archs = tuple(sorted(set(archs)))

                if len(archs) == 1:
                    machine = archs[0]
                elif archs == ('i386', 'ppc'):
                    machine = 'fat'
                elif archs == ('i386', 'x86_64'):
                    machine = 'intel'
                elif archs == ('i386', 'ppc', 'x86_64'):
                    machine = 'fat3'
                elif archs == ('ppc64', 'x86_64'):
                    machine = 'fat64'
                elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
                    machine = 'universal'
                else:
                    raise ValueError(
                        "Don't know machine value for archs=%r" % (archs,))

            elif machine == 'i386':
                # On OSX the machine type returned by uname is always the
                # 32-bit variant, even if the executable architecture is
                # the 64-bit variant
                if sys.maxsize >= 2**32:
                    machine = 'x86_64'

            elif machine in ('PowerPC', 'Power_Macintosh'):
                # Pick a sane name for the PPC architecture.
                # See 'i386' case
                if sys.maxsize >= 2**32:
                    machine = 'ppc64'
                else:
                    machine = 'ppc'

    return "%s-%s-%s" % (osname, release, machine)
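
# Illustrative return values (assumptions; the result depends on the
# machine this runs on):
#
#     get_platform()  # e.g. 'linux-x86_64' on 64-bit Linux,
#                     # 'macosx-10.6-intel' on an OS X universal build,
#                     # 'win-amd64' on 64-bit Windows.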


def get_python_version():
    return _PY_VERSION_SHORT


def _print_dict(title, data):
    for index, (key, value) in enumerate(sorted(data.items())):
        if index == 0:
            print('%s: ' % (title))
        print('\t%s = "%s"' % (key, value))


def _main():
    """Display all information sysconfig contains."""
    print('Platform: "%s"' % get_platform())
    print('Python version: "%s"' % get_python_version())
    print('Current installation scheme: "%s"' % _get_default_scheme())
    print()
    _print_dict('Paths', get_paths())
    print()
    _print_dict('Variables', get_config_vars())


if __name__ == '__main__':
    _main()
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -0,0 +1,516 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import hashlib
import logging
import os
import shutil
import subprocess
import tempfile
try:
    from threading import Thread
except ImportError:
    from dummy_threading import Thread

from . import DistlibException
from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr,
                     urlparse, build_opener, string_types)
from .util import cached_property, zip_dir, ServerProxy

logger = logging.getLogger(__name__)

DEFAULT_INDEX = 'https://pypi.org/pypi'
DEFAULT_REALM = 'pypi'


class PackageIndex(object):
    """
    This class represents a package index compatible with PyPI, the Python
    Package Index.
    """

    boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$'

    def __init__(self, url=None):
        """
        Initialise an instance.

        :param url: The URL of the index. If not specified, the URL for PyPI is
                    used.
        """
        self.url = url or DEFAULT_INDEX
        self.read_configuration()
        scheme, netloc, path, params, query, frag = urlparse(self.url)
        if params or query or frag or scheme not in ('http', 'https'):
            raise DistlibException('invalid repository: %s' % self.url)
        self.password_handler = None
        self.ssl_verifier = None
        self.gpg = None
        self.gpg_home = None
        with open(os.devnull, 'w') as sink:
            # Use gpg by default rather than gpg2, as gpg2 insists on
            # prompting for passwords
            for s in ('gpg', 'gpg2'):
                try:
                    rc = subprocess.check_call([s, '--version'], stdout=sink,
                                               stderr=sink)
                    if rc == 0:
                        self.gpg = s
                        break
                except OSError:
                    pass
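
    # Minimal usage sketch (illustrative only; the credentials are
    # hypothetical and would normally come from ~/.pypirc via
    # read_configuration()):
    #
    #     index = PackageIndex()        # defaults to DEFAULT_INDEX
    #     index.username = 'alice'
    #     index.password = 'secret'
    #     index.check_credentials()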

    def _get_pypirc_command(self):
        """
        Get the distutils command for interacting with PyPI configurations.
        :return: the command.
        """
        from distutils.core import Distribution
        from distutils.config import PyPIRCCommand
        d = Distribution()
        return PyPIRCCommand(d)

    def read_configuration(self):
        """
        Read the PyPI access configuration as supported by distutils, getting
        PyPI to do the actual work. This populates ``username``, ``password``,
        ``realm`` and ``url`` attributes from the configuration.
        """
        # get distutils to do the work
        c = self._get_pypirc_command()
        c.repository = self.url
        cfg = c._read_pypirc()
        self.username = cfg.get('username')
        self.password = cfg.get('password')
        self.realm = cfg.get('realm', 'pypi')
        self.url = cfg.get('repository', self.url)

    def save_configuration(self):
        """
        Save the PyPI access configuration. You must have set ``username`` and
        ``password`` attributes before calling this method.

        Again, distutils is used to do the actual work.
        """
        self.check_credentials()
        # get distutils to do the work
        c = self._get_pypirc_command()
        c._store_pypirc(self.username, self.password)

    def check_credentials(self):
        """
        Check that ``username`` and ``password`` have been set, and raise an
        exception if not.
        """
        if self.username is None or self.password is None:
            raise DistlibException('username and password must be set')
        pm = HTTPPasswordMgr()
        _, netloc, _, _, _, _ = urlparse(self.url)
        pm.add_password(self.realm, netloc, self.username, self.password)
        self.password_handler = HTTPBasicAuthHandler(pm)

    def register(self, metadata):
        """
        Register a distribution on PyPI, using the provided metadata.

        :param metadata: A :class:`Metadata` instance defining at least a name
                         and version number for the distribution to be
                         registered.
        :return: The HTTP response received from PyPI upon submission of the
                 request.
        """
        self.check_credentials()
        metadata.validate()
        d = metadata.todict()
        d[':action'] = 'verify'
        request = self.encode_request(d.items(), [])
        response = self.send_request(request)
        d[':action'] = 'submit'
        request = self.encode_request(d.items(), [])
        return self.send_request(request)

    def _reader(self, name, stream, outbuf):
        """
        Thread runner for reading lines from a subprocess into a buffer.

        :param name: The logical name of the stream (used for logging only).
        :param stream: The stream to read from. This will typically be a pipe
                       connected to the output stream of a subprocess.
        :param outbuf: The list to append the read lines to.
        """
        while True:
            s = stream.readline()
            if not s:
                break
            s = s.decode('utf-8').rstrip()
            outbuf.append(s)
            logger.debug('%s: %s' % (name, s))
        stream.close()

    def get_sign_command(self, filename, signer, sign_password,
                         keystore=None):
        """
        Return a suitable command for signing a file.

        :param filename: The pathname to the file to be signed.
        :param signer: The identifier of the signer of the file.
        :param sign_password: The passphrase for the signer's
                              private key used for signing.
        :param keystore: The path to a directory which contains the keys
                         used in verification. If not specified, the
                         instance's ``gpg_home`` attribute is used instead.
        :return: The signing command as a list suitable to be
                 passed to :class:`subprocess.Popen`.
        """
        cmd = [self.gpg, '--status-fd', '2', '--no-tty']
        if keystore is None:
            keystore = self.gpg_home
        if keystore:
            cmd.extend(['--homedir', keystore])
        if sign_password is not None:
            cmd.extend(['--batch', '--passphrase-fd', '0'])
        td = tempfile.mkdtemp()
        sf = os.path.join(td, os.path.basename(filename) + '.asc')
        cmd.extend(['--detach-sign', '--armor', '--local-user',
                    signer, '--output', sf, filename])
        logger.debug('invoking: %s', ' '.join(cmd))
        return cmd, sf
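
    # Sketch of the command built above (assuming gpg was found and a
    # hypothetical key id 'KEYID'); sf is the temporary path that will
    # receive the armoured signature:
    #
    #     cmd, sf = index.get_sign_command('pkg-1.0.tar.gz', 'KEYID', 'pw')
    #     # cmd ~ ['gpg', '--status-fd', '2', '--no-tty', '--batch',
    #     #        '--passphrase-fd', '0', '--detach-sign', '--armor',
    #     #        '--local-user', 'KEYID', '--output', sf, 'pkg-1.0.tar.gz']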

    def run_command(self, cmd, input_data=None):
        """
        Run a command in a child process, passing it any input data specified.

        :param cmd: The command to run.
        :param input_data: If specified, this must be a byte string containing
                           data to be sent to the child process.
        :return: A tuple consisting of the subprocess' exit code, a list of
                 lines read from the subprocess' ``stdout``, and a list of
                 lines read from the subprocess' ``stderr``.
        """
        kwargs = {
            'stdout': subprocess.PIPE,
            'stderr': subprocess.PIPE,
        }
        if input_data is not None:
            kwargs['stdin'] = subprocess.PIPE
        stdout = []
        stderr = []
        p = subprocess.Popen(cmd, **kwargs)
        # We don't use communicate() here because we may need to
        # get clever with interacting with the command
        t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout))
        t1.start()
        t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr))
        t2.start()
        if input_data is not None:
            p.stdin.write(input_data)
            p.stdin.close()

        p.wait()
        t1.join()
        t2.join()
        return p.returncode, stdout, stderr
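
    # Illustrative call (assumes a gpg binary is on PATH):
    #
    #     rc, out, err = index.run_command(['gpg', '--version'])
    #     # rc is the exit code; out and err are lists of decoded lines.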

    def sign_file(self, filename, signer, sign_password, keystore=None):
        """
        Sign a file.

        :param filename: The pathname to the file to be signed.
        :param signer: The identifier of the signer of the file.
        :param sign_password: The passphrase for the signer's
                              private key used for signing.
        :param keystore: The path to a directory which contains the keys
                         used in signing. If not specified, the instance's
                         ``gpg_home`` attribute is used instead.
        :return: The absolute pathname of the file where the signature is
                 stored.
        """
        cmd, sig_file = self.get_sign_command(filename, signer, sign_password,
                                              keystore)
        rc, stdout, stderr = self.run_command(cmd,
                                              sign_password.encode('utf-8'))
        if rc != 0:
            raise DistlibException('sign command failed with error '
                                   'code %s' % rc)
        return sig_file

    def upload_file(self, metadata, filename, signer=None, sign_password=None,
                    filetype='sdist', pyversion='source', keystore=None):
        """
        Upload a release file to the index.

        :param metadata: A :class:`Metadata` instance defining at least a name
                         and version number for the file to be uploaded.
        :param filename: The pathname of the file to be uploaded.
        :param signer: The identifier of the signer of the file.
        :param sign_password: The passphrase for the signer's
                              private key used for signing.
        :param filetype: The type of the file being uploaded. This is the
                         distutils command which produced that file, e.g.
                         ``sdist`` or ``bdist_wheel``.
        :param pyversion: The version of Python which the release relates
                          to. For code compatible with any Python, this would
                          be ``source``, otherwise it would be e.g. ``3.2``.
        :param keystore: The path to a directory which contains the keys
                         used in signing. If not specified, the instance's
                         ``gpg_home`` attribute is used instead.
        :return: The HTTP response received from PyPI upon submission of the
                 request.
        """
        self.check_credentials()
        if not os.path.exists(filename):
            raise DistlibException('not found: %s' % filename)
        metadata.validate()
        d = metadata.todict()
        sig_file = None
        if signer:
            if not self.gpg:
                logger.warning('no signing program available - not signed')
            else:
                sig_file = self.sign_file(filename, signer, sign_password,
                                          keystore)
        with open(filename, 'rb') as f:
            file_data = f.read()
        md5_digest = hashlib.md5(file_data).hexdigest()
        sha256_digest = hashlib.sha256(file_data).hexdigest()
        d.update({
            ':action': 'file_upload',
            'protocol_version': '1',
            'filetype': filetype,
            'pyversion': pyversion,
            'md5_digest': md5_digest,
            'sha256_digest': sha256_digest,
        })
        files = [('content', os.path.basename(filename), file_data)]
        if sig_file:
            with open(sig_file, 'rb') as f:
                sig_data = f.read()
            files.append(('gpg_signature', os.path.basename(sig_file),
                          sig_data))
            shutil.rmtree(os.path.dirname(sig_file))
        request = self.encode_request(d.items(), files)
        return self.send_request(request)
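
    # Hypothetical upload flow (metadata, paths and key id are assumptions):
    #
    #     md = Metadata(...)  # with at least name and version set
    #     index.upload_file(md, 'dist/pkg-1.0.tar.gz',
    #                       signer='KEYID', sign_password='pw')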

    def upload_documentation(self, metadata, doc_dir):
        """
        Upload documentation to the index.

        :param metadata: A :class:`Metadata` instance defining at least a name
                         and version number for the documentation to be
                         uploaded.
        :param doc_dir: The pathname of the directory which contains the
                        documentation. This should be the directory that
                        contains the ``index.html`` for the documentation.
        :return: The HTTP response received from PyPI upon submission of the
                 request.
        """
        self.check_credentials()
        if not os.path.isdir(doc_dir):
            raise DistlibException('not a directory: %r' % doc_dir)
        fn = os.path.join(doc_dir, 'index.html')
        if not os.path.exists(fn):
            raise DistlibException('not found: %r' % fn)
        metadata.validate()
        name, version = metadata.name, metadata.version
        zip_data = zip_dir(doc_dir).getvalue()
        fields = [(':action', 'doc_upload'),
                  ('name', name), ('version', version)]
        files = [('content', name, zip_data)]
        request = self.encode_request(fields, files)
        return self.send_request(request)

    def get_verify_command(self, signature_filename, data_filename,
                           keystore=None):
        """
        Return a suitable command for verifying a file.

        :param signature_filename: The pathname to the file containing the
                                   signature.
        :param data_filename: The pathname to the file containing the
                              signed data.
        :param keystore: The path to a directory which contains the keys
                         used in verification. If not specified, the
                         instance's ``gpg_home`` attribute is used instead.
        :return: The verifying command as a list suitable to be
                 passed to :class:`subprocess.Popen`.
        """
        cmd = [self.gpg, '--status-fd', '2', '--no-tty']
        if keystore is None:
            keystore = self.gpg_home
        if keystore:
            cmd.extend(['--homedir', keystore])
        cmd.extend(['--verify', signature_filename, data_filename])
        logger.debug('invoking: %s', ' '.join(cmd))
        return cmd

    def verify_signature(self, signature_filename, data_filename,
                         keystore=None):
        """
        Verify a signature for a file.

        :param signature_filename: The pathname to the file containing the
                                   signature.
        :param data_filename: The pathname to the file containing the
                              signed data.
        :param keystore: The path to a directory which contains the keys
                         used in verification. If not specified, the
                         instance's ``gpg_home`` attribute is used instead.
        :return: True if the signature was verified, else False.
        """
        if not self.gpg:
            raise DistlibException('verification unavailable because gpg '
                                   'unavailable')
        cmd = self.get_verify_command(signature_filename, data_filename,
                                      keystore)
        rc, stdout, stderr = self.run_command(cmd)
        if rc not in (0, 1):
            raise DistlibException('verify command failed with error '
                                   'code %s' % rc)
        return rc == 0

    def download_file(self, url, destfile, digest=None, reporthook=None):
        """
        This is a convenience method for downloading a file from a URL.
        Normally, this will be a file from the index, though currently
        no check is made for this (i.e. a file can be downloaded from
        anywhere).

        The method is just like the :func:`urlretrieve` function in the
        standard library, except that it allows digest computation to be
        done during download and checks that the downloaded data
        matches any expected value.

        :param url: The URL of the file to be downloaded (assumed to be
                    available via an HTTP GET request).
        :param destfile: The pathname where the downloaded file is to be
                         saved.
        :param digest: If specified, this must be a (hasher, value)
                       tuple, where hasher is the algorithm used (e.g.
                       ``'md5'``) and ``value`` is the expected value.
        :param reporthook: The same as for :func:`urlretrieve` in the
                           standard library.
        """
        if digest is None:
            digester = None
            logger.debug('No digest specified')
        else:
            if isinstance(digest, (list, tuple)):
                hasher, digest = digest
            else:
                hasher = 'md5'
            digester = getattr(hashlib, hasher)()
            logger.debug('Digest specified: %s' % digest)
        # The following code is equivalent to urlretrieve.
        # We need to do it this way so that we can compute the
        # digest of the file as we go.
        with open(destfile, 'wb') as dfp:
            # addinfourl is not a context manager on 2.x
            # so we have to use try/finally
            sfp = self.send_request(Request(url))
            try:
                headers = sfp.info()
                blocksize = 8192
                size = -1
                read = 0
                blocknum = 0
                if "content-length" in headers:
                    size = int(headers["Content-Length"])
                if reporthook:
                    reporthook(blocknum, blocksize, size)
                while True:
                    block = sfp.read(blocksize)
                    if not block:
                        break
                    read += len(block)
                    dfp.write(block)
                    if digester:
                        digester.update(block)
                    blocknum += 1
                    if reporthook:
                        reporthook(blocknum, blocksize, size)
            finally:
                sfp.close()

        # check that we got the whole file, if we can
        if size >= 0 and read < size:
            raise DistlibException(
                'retrieval incomplete: got only %d out of %d bytes'
                % (read, size))
        # if we have a digest, it must match.
        if digester:
            actual = digester.hexdigest()
            if digest != actual:
                raise DistlibException('%s digest mismatch for %s: expected '
                                       '%s, got %s' % (hasher, destfile,
                                                       digest, actual))
            logger.debug('Digest verified: %s', digest)
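
    # Illustrative call; the URL and digest pair are assumptions and would
    # normally come from the index metadata:
    #
    #     index.download_file('https://example.com/pkg-1.0.tar.gz',
    #                         '/tmp/pkg-1.0.tar.gz',
    #                         digest=('sha256', expected_hex))
    #     # raises DistlibException on a short read or digest mismatch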

    def send_request(self, req):
        """
        Send a standard library :class:`Request` to PyPI and return its
        response.

        :param req: The request to send.
        :return: The HTTP response from PyPI (a standard library HTTPResponse).
        """
        handlers = []
        if self.password_handler:
            handlers.append(self.password_handler)
        if self.ssl_verifier:
            handlers.append(self.ssl_verifier)
        opener = build_opener(*handlers)
        return opener.open(req)

    def encode_request(self, fields, files):
        """
        Encode fields and files for posting to an HTTP server.

        :param fields: The fields to send as a list of (fieldname, value)
                       tuples.
        :param files: The files to send as a list of (fieldname, filename,
                      file_bytes) tuples.
        """
        # Adapted from packaging, which in turn was adapted from
        # http://code.activestate.com/recipes/146306

        parts = []
        boundary = self.boundary
        for k, values in fields:
            if not isinstance(values, (list, tuple)):
                values = [values]

            for v in values:
                parts.extend((
                    b'--' + boundary,
                    ('Content-Disposition: form-data; name="%s"' %
                     k).encode('utf-8'),
                    b'',
                    v.encode('utf-8')))
        for key, filename, value in files:
            parts.extend((
                b'--' + boundary,
                ('Content-Disposition: form-data; name="%s"; filename="%s"' %
                 (key, filename)).encode('utf-8'),
                b'',
                value))

        parts.extend((b'--' + boundary + b'--', b''))

        body = b'\r\n'.join(parts)
        ct = b'multipart/form-data; boundary=' + boundary
        headers = {
            'Content-type': ct,
            'Content-length': str(len(body))
        }
        return Request(self.url, body, headers)
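
    # Minimal sketch of the multipart encoding (field and file values are
    # made up):
    #
    #     req = index.encode_request([(':action', 'verify')],
    #                                [('content', 'pkg-1.0.tar.gz', b'...')])
    #     # req is a compat.Request whose Content-type is
    #     # 'multipart/form-data; boundary=...' with a computed Content-length.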

    def search(self, terms, operator=None):
        if isinstance(terms, string_types):
            terms = {'name': terms}
        rpc_proxy = ServerProxy(self.url, timeout=3.0)
        try:
            return rpc_proxy.search(terms, operator or 'and')
        finally:
            rpc_proxy('close')()
File diff suppressed because it is too large
@ -0,0 +1,393 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2013 Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""
Class representing the list of files in a distribution.

Equivalent to distutils.filelist, but fixes some problems.
"""
import fnmatch
import logging
import os
import re
import sys

from . import DistlibException
from .compat import fsdecode
from .util import convert_path


__all__ = ['Manifest']

logger = logging.getLogger(__name__)

# a \ followed by some spaces + EOL
_COLLAPSE_PATTERN = re.compile('\\\\w*\n', re.M)
_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S)

#
# Due to the different results returned by fnmatch.translate, we need
# to do slightly different processing for Python 2.7 and 3.2 ... this needed
# to be brought in for Python 3.6 onwards.
#
_PYTHON_VERSION = sys.version_info[:2]


class Manifest(object):
    """A list of files built by exploring the filesystem and filtered by
    applying various patterns to what we find there.
    """

    def __init__(self, base=None):
        """
        Initialise an instance.

        :param base: The base directory to explore under.
        """
        self.base = os.path.abspath(os.path.normpath(base or os.getcwd()))
        self.prefix = self.base + os.sep
        self.allfiles = None
        self.files = set()

    #
    # Public API
    #

    def findall(self):
        """Find all files under the base and set ``allfiles`` to the absolute
        pathnames of files found.
        """
        from stat import S_ISREG, S_ISDIR, S_ISLNK

        self.allfiles = allfiles = []
        root = self.base
        stack = [root]
        pop = stack.pop
        push = stack.append

        while stack:
            root = pop()
            names = os.listdir(root)

            for name in names:
                fullname = os.path.join(root, name)

                # Avoid excess stat calls -- just one will do, thank you!
                stat = os.stat(fullname)
                mode = stat.st_mode
                if S_ISREG(mode):
                    allfiles.append(fsdecode(fullname))
                elif S_ISDIR(mode) and not S_ISLNK(mode):
                    push(fullname)

    def add(self, item):
        """
        Add a file to the manifest.

        :param item: The pathname to add. This can be relative to the base.
        """
        if not item.startswith(self.prefix):
            item = os.path.join(self.base, item)
        self.files.add(os.path.normpath(item))

    def add_many(self, items):
        """
        Add a list of files to the manifest.

        :param items: The pathnames to add. These can be relative to the base.
        """
        for item in items:
            self.add(item)
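
    # Sketch (paths are assumptions): relative names are joined onto the
    # manifest base before being stored.
    #
    #     m = Manifest('/src/project')
    #     m.add('README')                  # stored as /src/project/README
    #     m.add_many(['setup.py', 'pkg/__init__.py'])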

    def sorted(self, wantdirs=False):
        """
        Return sorted files in directory order
        """

        def add_dir(dirs, d):
            dirs.add(d)
            logger.debug('add_dir added %s', d)
            if d != self.base:
                parent, _ = os.path.split(d)
                assert parent not in ('', '/')
                add_dir(dirs, parent)

        result = set(self.files)    # make a copy!
        if wantdirs:
            dirs = set()
            for f in result:
                add_dir(dirs, os.path.dirname(f))
            result |= dirs
        return [os.path.join(*path_tuple) for path_tuple in
                sorted(os.path.split(path) for path in result)]

    def clear(self):
        """Clear all collected files."""
        self.files = set()
        self.allfiles = []

    def process_directive(self, directive):
        """
        Process a directive which either adds some files from ``allfiles`` to
        ``files``, or removes some files from ``files``.

        :param directive: The directive to process. This should be in a format
                          compatible with distutils ``MANIFEST.in`` files:

                          http://docs.python.org/distutils/sourcedist.html#commands
        """
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words. 'action' is always
        # defined: it's the first word of the line. Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dirpattern).
        action, patterns, thedir, dirpattern = self._parse_directive(directive)

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.
        if action == 'include':
            for pattern in patterns:
                if not self._include_pattern(pattern, anchor=True):
                    logger.warning('no files found matching %r', pattern)

        elif action == 'exclude':
            for pattern in patterns:
                found = self._exclude_pattern(pattern, anchor=True)
                #if not found:
                #    logger.warning('no previously-included files '
                #                   'found matching %r', pattern)

        elif action == 'global-include':
            for pattern in patterns:
                if not self._include_pattern(pattern, anchor=False):
                    logger.warning('no files found matching %r '
                                   'anywhere in distribution', pattern)

        elif action == 'global-exclude':
            for pattern in patterns:
                found = self._exclude_pattern(pattern, anchor=False)
                #if not found:
                #    logger.warning('no previously-included files '
                #                   'matching %r found anywhere in '
                #                   'distribution', pattern)

        elif action == 'recursive-include':
            for pattern in patterns:
                if not self._include_pattern(pattern, prefix=thedir):
                    logger.warning('no files found matching %r '
                                   'under directory %r', pattern, thedir)

        elif action == 'recursive-exclude':
            for pattern in patterns:
                found = self._exclude_pattern(pattern, prefix=thedir)
                #if not found:
                #    logger.warning('no previously-included files '
                #                   'matching %r found under directory %r',
                #                   pattern, thedir)

        elif action == 'graft':
            if not self._include_pattern(None, prefix=dirpattern):
                logger.warning('no directories found matching %r',
                               dirpattern)

        elif action == 'prune':
            if not self._exclude_pattern(None, prefix=dirpattern):
                logger.warning('no previously-included directories found '
                               'matching %r', dirpattern)
        else:   # pragma: no cover
            # This should never happen, as it should be caught in
            # _parse_template_line
            raise DistlibException(
                'invalid action %r' % action)
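
    # Directives use MANIFEST.in syntax; a sketch with assumed patterns:
    #
    #     m.process_directive('include *.txt')
    #     m.process_directive('recursive-include pkg *.py')
    #     m.process_directive('prune tests')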

    #
    # Private API
    #

    def _parse_directive(self, directive):
        """
        Validate a directive.
        :param directive: The directive to validate.
        :return: A tuple of action, patterns, thedir, dir_patterns
        """
        words = directive.split()
        if len(words) == 1 and words[0] not in ('include', 'exclude',
                                                'global-include',
                                                'global-exclude',
                                                'recursive-include',
                                                'recursive-exclude',
                                                'graft', 'prune'):
            # no action given, let's use the default 'include'
            words.insert(0, 'include')

        action = words[0]
        patterns = thedir = dir_pattern = None

        if action in ('include', 'exclude',
                      'global-include', 'global-exclude'):
            if len(words) < 2:
                raise DistlibException(
                    '%r expects <pattern1> <pattern2> ...' % action)

            patterns = [convert_path(word) for word in words[1:]]

        elif action in ('recursive-include', 'recursive-exclude'):
            if len(words) < 3:
                raise DistlibException(
                    '%r expects <dir> <pattern1> <pattern2> ...' % action)

            thedir = convert_path(words[1])
            patterns = [convert_path(word) for word in words[2:]]

        elif action in ('graft', 'prune'):
            if len(words) != 2:
                raise DistlibException(
                    '%r expects a single <dir_pattern>' % action)

            dir_pattern = convert_path(words[1])

        else:
            raise DistlibException('unknown action %r' % action)

        return action, patterns, thedir, dir_pattern

    def _include_pattern(self, pattern, anchor=True, prefix=None,
                         is_regex=False):
        """Select strings (presumably filenames) from 'self.files' that
        match 'pattern', a Unix-style wildcard (glob) pattern.

        Patterns are not quite the same as implemented by the 'fnmatch'
        module: '*' and '?' match non-special characters, where "special"
        is platform-dependent: slash on Unix; colon, slash, and backslash on
        DOS/Windows; and colon on Mac OS.

        If 'anchor' is true (the default), then the pattern match is more
        stringent: "*.py" will match "foo.py" but not "foo/bar.py". If
        'anchor' is false, both of these will match.

        If 'prefix' is supplied, then only filenames starting with 'prefix'
        (itself a pattern) and ending with 'pattern', with anything in between
        them, will match. 'anchor' is ignored in this case.

        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
        'pattern' is assumed to be either a string containing a regex or a
        regex object -- no translation is done, the regex is just compiled
        and used as-is.

        Selected strings will be added to self.files.

        Return True if files are found.
        """
        # XXX docstring lying about what the special chars are?
        found = False
        pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)

        # delayed loading of allfiles list
        if self.allfiles is None:
            self.findall()

        for name in self.allfiles:
            if pattern_re.search(name):
                self.files.add(name)
                found = True
        return found

    def _exclude_pattern(self, pattern, anchor=True, prefix=None,
                         is_regex=False):
        """Remove strings (presumably filenames) from 'files' that match
        'pattern'.

        Other parameters are the same as for 'include_pattern()', above.
        The list 'self.files' is modified in place. Return True if files are
        found.

        This API is public to allow e.g. exclusion of SCM subdirs, e.g. when
        packaging source distributions
        """
        found = False
        pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
        for f in list(self.files):
            if pattern_re.search(f):
                self.files.remove(f)
                found = True
        return found

    def _translate_pattern(self, pattern, anchor=True, prefix=None,
                           is_regex=False):
        """Translate a shell-like wildcard pattern to a compiled regular
        expression.

        Return the compiled regex. If 'is_regex' true,
        then 'pattern' is directly compiled to a regex (if it's a string)
        or just returned as-is (assumes it's a regex object).
        """
        if is_regex:
            if isinstance(pattern, str):
                return re.compile(pattern)
            else:
                return pattern

        if _PYTHON_VERSION > (3, 2):
            # ditch start and end characters
            start, _, end = self._glob_to_re('_').partition('_')

        if pattern:
            pattern_re = self._glob_to_re(pattern)
            if _PYTHON_VERSION > (3, 2):
                assert pattern_re.startswith(start) and pattern_re.endswith(end)
        else:
            pattern_re = ''

        base = re.escape(os.path.join(self.base, ''))
        if prefix is not None:
            # ditch end of pattern character
            if _PYTHON_VERSION <= (3, 2):
                empty_pattern = self._glob_to_re('')
                prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)]
            else:
                prefix_re = self._glob_to_re(prefix)
                assert prefix_re.startswith(start) and prefix_re.endswith(end)
                prefix_re = prefix_re[len(start): len(prefix_re) - len(end)]
            sep = os.sep
            if os.sep == '\\':
                sep = r'\\'
            if _PYTHON_VERSION <= (3, 2):
                pattern_re = '^' + base + sep.join((prefix_re,
                                                    '.*' + pattern_re))
            else:
                pattern_re = pattern_re[len(start): len(pattern_re) - len(end)]
                pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep,
                                                  pattern_re, end)
        else:  # no prefix -- respect anchor flag
            if anchor:
                if _PYTHON_VERSION <= (3, 2):
                    pattern_re = '^' + base + pattern_re
                else:
                    pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):])

        return re.compile(pattern_re)
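
    # Behavioural sketch (the exact regex text depends on the Python version
    # branch above; the paths are assumptions):
    #
    #     m._translate_pattern('*.py', anchor=True)
    #     # matches /src/project/foo.py but not /src/project/pkg/foo.py
    #     m._translate_pattern('*.py', anchor=False)
    #     # matches both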

    def _glob_to_re(self, pattern):
        """Translate a shell-like glob pattern to a regular expression.

        Return a string containing the regex. Differs from
        'fnmatch.translate()' in that '*' does not match "special characters"
        (which are platform-specific).
        """
        pattern_re = fnmatch.translate(pattern)

        # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
        # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
        # and by extension they shouldn't match such "special characters" under
        # any OS. So change all non-escaped dots in the RE to match any
        # character except the special characters (currently: just os.sep).
        sep = os.sep
        if os.sep == '\\':
            # we're using a regex to manipulate a regex, so we need
            # to escape the backslash twice
            sep = r'\\\\'
        escaped = r'\1[^%s]' % sep
        pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
        return pattern_re
@ -0,0 +1,131 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2017 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""
Parser for the environment markers micro-language defined in PEP 508.
"""

# Note: In PEP 345, the micro-language was Python compatible, so the ast
# module could be used to parse it. However, PEP 508 introduced operators such
# as ~= and === which aren't in Python, necessitating a different approach.

import os
import sys
import platform
import re

from .compat import python_implementation, urlparse, string_types
from .util import in_venv, parse_marker

__all__ = ['interpret']


def _is_literal(o):
    if not isinstance(o, string_types) or not o:
        return False
    return o[0] in '\'"'


class Evaluator(object):
    """
    This class is used to evaluate marker expressions.
    """

    operations = {
        '==': lambda x, y: x == y,
        '===': lambda x, y: x == y,
        '~=': lambda x, y: x == y or x > y,
        '!=': lambda x, y: x != y,
        '<': lambda x, y: x < y,
        '<=': lambda x, y: x == y or x < y,
        '>': lambda x, y: x > y,
        '>=': lambda x, y: x == y or x > y,
        'and': lambda x, y: x and y,
        'or': lambda x, y: x or y,
        'in': lambda x, y: x in y,
        'not in': lambda x, y: x not in y,
    }

    def evaluate(self, expr, context):
        """
        Evaluate a marker expression returned by the :func:`parse_requirement`
        function in the specified context.
        """
        if isinstance(expr, string_types):
            if expr[0] in '\'"':
                result = expr[1:-1]
            else:
                if expr not in context:
                    raise SyntaxError('unknown variable: %s' % expr)
                result = context[expr]
        else:
            assert isinstance(expr, dict)
            op = expr['op']
            if op not in self.operations:
                raise NotImplementedError('op not implemented: %s' % op)
            elhs = expr['lhs']
            erhs = expr['rhs']
            if _is_literal(expr['lhs']) and _is_literal(expr['rhs']):
                raise SyntaxError('invalid comparison: %s %s %s' % (elhs, op, erhs))

            lhs = self.evaluate(elhs, context)
            rhs = self.evaluate(erhs, context)
            result = self.operations[op](lhs, rhs)
        return result


def default_context():
    def format_full_version(info):
        version = '%s.%s.%s' % (info.major, info.minor, info.micro)
        kind = info.releaselevel
        if kind != 'final':
            version += kind[0] + str(info.serial)
        return version

    if hasattr(sys, 'implementation'):
        implementation_version = format_full_version(sys.implementation.version)
        implementation_name = sys.implementation.name
    else:
        implementation_version = '0'
        implementation_name = ''

    result = {
        'implementation_name': implementation_name,
        'implementation_version': implementation_version,
        'os_name': os.name,
        'platform_machine': platform.machine(),
        'platform_python_implementation': platform.python_implementation(),
        'platform_release': platform.release(),
        'platform_system': platform.system(),
        'platform_version': platform.version(),
        'platform_in_venv': str(in_venv()),
        'python_full_version': platform.python_version(),
        'python_version': platform.python_version()[:3],
        'sys_platform': sys.platform,
    }
    return result


DEFAULT_CONTEXT = default_context()
del default_context


evaluator = Evaluator()


def interpret(marker, execution_context=None):
    """
    Interpret a marker and return a result depending on environment.

    :param marker: The marker to interpret.
    :type marker: str
    :param execution_context: The context used for name lookup.
    :type execution_context: mapping
    """
    try:
        expr, rest = parse_marker(marker)
    except Exception as e:
        raise SyntaxError('Unable to interpret marker syntax: %s: %s' % (marker, e))
    if rest and rest[0] != '#':
        raise SyntaxError('unexpected trailing data in marker: %s: %s' % (marker, rest))
    context = dict(DEFAULT_CONTEXT)
    if execution_context:
        context.update(execution_context)
    return evaluator.evaluate(expr, context)
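
# Illustrative evaluations (outcomes depend on the running interpreter,
# so the results shown are assumptions):
#
#     interpret('python_version >= "2.7"')   # True on a 2.7+ interpreter
#     interpret('sys_platform == "win32"')   # False except on Windows
#     interpret('os_name == "posix"')        # True on Unix-like systems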
File diff suppressed because it is too large
@ -0,0 +1,355 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2017 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from __future__ import unicode_literals

import bisect
import io
import logging
import os
import pkgutil
import shutil
import sys
import types
import zipimport

from . import DistlibException
from .util import cached_property, get_cache_base, path_to_cache_dir, Cache

logger = logging.getLogger(__name__)


cache = None    # created when needed


class ResourceCache(Cache):
    def __init__(self, base=None):
        if base is None:
            # Use native string to avoid issues on 2.x: see Python #20140.
            base = os.path.join(get_cache_base(), str('resource-cache'))
        super(ResourceCache, self).__init__(base)

    def is_stale(self, resource, path):
        """
        Is the cache stale for the given resource?

        :param resource: The :class:`Resource` being cached.
        :param path: The path of the resource in the cache.
        :return: True if the cache is stale.
        """
        # Cache invalidation is a hard problem :-)
        return True

    def get(self, resource):
        """
        Get a resource into the cache.

        :param resource: A :class:`Resource` instance.
        :return: The pathname of the resource in the cache.
        """
        prefix, path = resource.finder.get_cache_info(resource)
        if prefix is None:
            result = path
        else:
            result = os.path.join(self.base, self.prefix_to_dir(prefix), path)
            dirname = os.path.dirname(result)
            if not os.path.isdir(dirname):
                os.makedirs(dirname)
            if not os.path.exists(result):
                stale = True
            else:
                stale = self.is_stale(resource, path)
            if stale:
                # write the bytes of the resource to the cache location
                with open(result, 'wb') as f:
                    f.write(resource.bytes)
        return result


class ResourceBase(object):
    def __init__(self, finder, name):
        self.finder = finder
        self.name = name


class Resource(ResourceBase):
    """
    A class representing an in-package resource, such as a data file. This is
    not normally instantiated by user code, but rather by a
    :class:`ResourceFinder` which manages the resource.
    """
    is_container = False        # Backwards compatibility

    def as_stream(self):
        """
        Get the resource as a stream.

        This is not a property to make it obvious that it returns a new stream
        each time.
        """
        return self.finder.get_stream(self)

    @cached_property
    def file_path(self):
        global cache
        if cache is None:
            cache = ResourceCache()
        return cache.get(self)

    @cached_property
    def bytes(self):
        return self.finder.get_bytes(self)

    @cached_property
    def size(self):
        return self.finder.get_size(self)


class ResourceContainer(ResourceBase):
    is_container = True     # Backwards compatibility

    @cached_property
    def resources(self):
        return self.finder.get_resources(self)


class ResourceFinder(object):
    """
    Resource finder for file system resources.
    """

    if sys.platform.startswith('java'):
        skipped_extensions = ('.pyc', '.pyo', '.class')
    else:
        skipped_extensions = ('.pyc', '.pyo')

    def __init__(self, module):
        self.module = module
        self.loader = getattr(module, '__loader__', None)
        self.base = os.path.dirname(getattr(module, '__file__', ''))

    def _adjust_path(self, path):
        return os.path.realpath(path)

    def _make_path(self, resource_name):
        # Issue #50: need to preserve type of path on Python 2.x
        # like os.path._get_sep
        if isinstance(resource_name, bytes):    # should only happen on 2.x
            sep = b'/'
        else:
            sep = '/'
        parts = resource_name.split(sep)
        parts.insert(0, self.base)
        result = os.path.join(*parts)
        return self._adjust_path(result)

    def _find(self, path):
        return os.path.exists(path)

    def get_cache_info(self, resource):
        return None, resource.path

    def find(self, resource_name):
        path = self._make_path(resource_name)
        if not self._find(path):
            result = None
        else:
            if self._is_directory(path):
                result = ResourceContainer(self, resource_name)
            else:
                result = Resource(self, resource_name)
            result.path = path
        return result

    def get_stream(self, resource):
        return open(resource.path, 'rb')

    def get_bytes(self, resource):
        with open(resource.path, 'rb') as f:
            return f.read()

    def get_size(self, resource):
        return os.path.getsize(resource.path)

    def get_resources(self, resource):
        def allowed(f):
            return (f != '__pycache__' and not
                    f.endswith(self.skipped_extensions))
        return set([f for f in os.listdir(resource.path) if allowed(f)])

    def is_container(self, resource):
        return self._is_directory(resource.path)

    _is_directory = staticmethod(os.path.isdir)

    def iterator(self, resource_name):
        resource = self.find(resource_name)
        if resource is not None:
            todo = [resource]
            while todo:
                resource = todo.pop(0)
                yield resource
                if resource.is_container:
                    rname = resource.name
                    for name in resource.resources:
                        if not rname:
                            new_name = name
                        else:
                            new_name = '/'.join([rname, name])
                        child = self.find(new_name)
                        if child.is_container:
                            todo.append(child)
                        else:
                            yield child
class ZipResourceFinder(ResourceFinder):
|
||||||
|
"""
|
||||||
|
Resource finder for resources in .zip files.
|
||||||
|
"""
|
||||||
|
def __init__(self, module):
|
||||||
|
super(ZipResourceFinder, self).__init__(module)
|
||||||
|
archive = self.loader.archive
|
||||||
|
self.prefix_len = 1 + len(archive)
|
||||||
|
# PyPy doesn't have a _files attr on zipimporter, and you can't set one
|
||||||
|
if hasattr(self.loader, '_files'):
|
||||||
|
self._files = self.loader._files
|
||||||
|
else:
|
||||||
|
self._files = zipimport._zip_directory_cache[archive]
|
||||||
|
self.index = sorted(self._files)
|
||||||
|
|
||||||
|
def _adjust_path(self, path):
|
||||||
|
return path
|
||||||
|
|
||||||
|
def _find(self, path):
|
||||||
|
path = path[self.prefix_len:]
|
||||||
|
if path in self._files:
|
||||||
|
result = True
|
||||||
|
else:
|
||||||
|
if path and path[-1] != os.sep:
|
||||||
|
path = path + os.sep
|
||||||
|
i = bisect.bisect(self.index, path)
|
||||||
|
try:
|
||||||
|
result = self.index[i].startswith(path)
|
||||||
|
except IndexError:
|
||||||
|
result = False
|
||||||
|
if not result:
|
||||||
|
logger.debug('_find failed: %r %r', path, self.loader.prefix)
|
||||||
|
else:
|
||||||
|
logger.debug('_find worked: %r %r', path, self.loader.prefix)
|
||||||
|
return result
|
||||||
|
|
||||||
|
def get_cache_info(self, resource):
|
||||||
|
prefix = self.loader.archive
|
||||||
|
path = resource.path[1 + len(prefix):]
|
||||||
|
return prefix, path
|
||||||
|
|
||||||
|
def get_bytes(self, resource):
|
||||||
|
return self.loader.get_data(resource.path)
|
||||||
|
|
||||||
|
def get_stream(self, resource):
|
||||||
|
return io.BytesIO(self.get_bytes(resource))
|
||||||
|
|
||||||
|
def get_size(self, resource):
|
||||||
|
path = resource.path[self.prefix_len:]
|
||||||
|
return self._files[path][3]
|
||||||
|
|
||||||
|
def get_resources(self, resource):
|
||||||
|
path = resource.path[self.prefix_len:]
|
||||||
|
if path and path[-1] != os.sep:
|
||||||
|
path += os.sep
|
||||||
|
plen = len(path)
|
||||||
|
result = set()
|
||||||
|
i = bisect.bisect(self.index, path)
|
||||||
|
while i < len(self.index):
|
||||||
|
if not self.index[i].startswith(path):
|
||||||
|
break
|
||||||
|
s = self.index[i][plen:]
|
||||||
|
result.add(s.split(os.sep, 1)[0]) # only immediate children
|
||||||
|
i += 1
|
||||||
|
return result
|
||||||
|
|
||||||
|
def _is_directory(self, path):
|
||||||
|
path = path[self.prefix_len:]
|
||||||
|
if path and path[-1] != os.sep:
|
||||||
|
path += os.sep
|
||||||
|
i = bisect.bisect(self.index, path)
|
||||||
|
try:
|
||||||
|
result = self.index[i].startswith(path)
|
||||||
|
except IndexError:
|
||||||
|
result = False
|
||||||
|
return result
|
||||||
|
|
||||||
|
_finder_registry = {
|
||||||
|
type(None): ResourceFinder,
|
||||||
|
zipimport.zipimporter: ZipResourceFinder
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
# In Python 3.6, _frozen_importlib -> _frozen_importlib_external
|
||||||
|
try:
|
||||||
|
import _frozen_importlib_external as _fi
|
||||||
|
except ImportError:
|
||||||
|
import _frozen_importlib as _fi
|
||||||
|
_finder_registry[_fi.SourceFileLoader] = ResourceFinder
|
||||||
|
_finder_registry[_fi.FileFinder] = ResourceFinder
|
||||||
|
del _fi
|
||||||
|
except (ImportError, AttributeError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def register_finder(loader, finder_maker):
|
||||||
|
_finder_registry[type(loader)] = finder_maker
|
||||||
|
|
||||||
|
_finder_cache = {}
|
||||||
|
|
||||||
|
|
||||||
|
def finder(package):
|
||||||
|
"""
|
||||||
|
Return a resource finder for a package.
|
||||||
|
:param package: The name of the package.
|
||||||
|
:return: A :class:`ResourceFinder` instance for the package.
|
||||||
|
"""
|
||||||
|
if package in _finder_cache:
|
||||||
|
result = _finder_cache[package]
|
||||||
|
else:
|
||||||
|
if package not in sys.modules:
|
||||||
|
__import__(package)
|
||||||
|
module = sys.modules[package]
|
||||||
|
path = getattr(module, '__path__', None)
|
||||||
|
if path is None:
|
||||||
|
raise DistlibException('You cannot get a finder for a module, '
|
||||||
|
'only for a package')
|
||||||
|
loader = getattr(module, '__loader__', None)
|
||||||
|
finder_maker = _finder_registry.get(type(loader))
|
||||||
|
if finder_maker is None:
|
||||||
|
raise DistlibException('Unable to locate finder for %r' % package)
|
||||||
|
result = finder_maker(module)
|
||||||
|
_finder_cache[package] = result
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
_dummy_module = types.ModuleType(str('__dummy__'))
|
||||||
|
|
||||||
|
|
||||||
|
def finder_for_path(path):
|
||||||
|
"""
|
||||||
|
Return a resource finder for a path, which should represent a container.
|
||||||
|
|
||||||
|
:param path: The path.
|
||||||
|
:return: A :class:`ResourceFinder` instance for the path.
|
||||||
|
"""
|
||||||
|
result = None
|
||||||
|
# calls any path hooks, gets importer into cache
|
||||||
|
pkgutil.get_importer(path)
|
||||||
|
loader = sys.path_importer_cache.get(path)
|
||||||
|
finder = _finder_registry.get(type(loader))
|
||||||
|
if finder:
|
||||||
|
module = _dummy_module
|
||||||
|
module.__file__ = os.path.join(path, '')
|
||||||
|
module.__loader__ = loader
|
||||||
|
result = finder(module)
|
||||||
|
return result
|
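
A minimal usage sketch of the resources API above (the module normally ships as `distlib.resources`; the package name `mypackage` and file name `data.txt` are made-up examples):

```Python
# Sketch only: 'mypackage' and 'data.txt' are hypothetical names.
from distlib.resources import finder

f = finder('mypackage')                # ResourceFinder (or ZipResourceFinder)
r = f.find('data.txt')                 # Resource, ResourceContainer, or None
if r is not None and not r.is_container:
    payload = r.bytes                  # whole resource as bytes
    with r.as_stream() as stream:      # or as a fresh binary stream
        head = stream.read(16)
```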
@ -0,0 +1,419 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2015 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from io import BytesIO
import logging
import os
import re
import struct
import sys

from .compat import sysconfig, detect_encoding, ZipFile
from .resources import finder
from .util import (FileOperator, get_export_entry, convert_path,
                   get_executable, in_venv)

logger = logging.getLogger(__name__)

_DEFAULT_MANIFEST = '''
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
 <assemblyIdentity version="1.0.0.0"
 processorArchitecture="X86"
 name="%s"
 type="win32"/>

 <!-- Identify the application security requirements. -->
 <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
 <security>
 <requestedPrivileges>
 <requestedExecutionLevel level="asInvoker" uiAccess="false"/>
 </requestedPrivileges>
 </security>
 </trustInfo>
</assembly>'''.strip()

# check if Python is called on the first line with this expression
FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$')
SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*-
import re
import sys
from %(module)s import %(import_name)s
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
'''

def enquote_executable(executable):
    if ' ' in executable:
        # make sure we quote only the executable in case of env
        # for example /usr/bin/env "/dir with spaces/bin/jython"
        # instead of "/usr/bin/env /dir with spaces/bin/jython"
        # otherwise the whole command line would be quoted
        if executable.startswith('/usr/bin/env '):
            env, _executable = executable.split(' ', 1)
            if ' ' in _executable and not _executable.startswith('"'):
                executable = '%s "%s"' % (env, _executable)
        else:
            if not executable.startswith('"'):
                executable = '"%s"' % executable
    return executable


# Keep the old name around (for now), as there is at least one project using it!
_enquote_executable = enquote_executable

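# Editor's sketch of enquote_executable's behaviour (results derived from the
# code above; the paths are made-up examples):
#   enquote_executable('/usr/bin/env /opt/my python/bin/python')
#       -> '/usr/bin/env "/opt/my python/bin/python"'
#   enquote_executable('C:\\Program Files\\Python\\python.exe')
#       -> '"C:\\Program Files\\Python\\python.exe"'
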
class ScriptMaker(object):
    """
    A class to copy or create scripts from source scripts or callable
    specifications.
    """
    script_template = SCRIPT_TEMPLATE

    executable = None   # for shebangs

    def __init__(self, source_dir, target_dir, add_launchers=True,
                 dry_run=False, fileop=None):
        self.source_dir = source_dir
        self.target_dir = target_dir
        self.add_launchers = add_launchers
        self.force = False
        self.clobber = False
        # It only makes sense to set mode bits on POSIX.
        self.set_mode = (os.name == 'posix') or (os.name == 'java' and
                                                 os._name == 'posix')
        self.variants = set(('', 'X.Y'))
        self._fileop = fileop or FileOperator(dry_run)

        self._is_nt = os.name == 'nt' or (
            os.name == 'java' and os._name == 'nt')
        self.version_info = sys.version_info

    def _get_alternate_executable(self, executable, options):
        if options.get('gui', False) and self._is_nt:  # pragma: no cover
            dn, fn = os.path.split(executable)
            fn = fn.replace('python', 'pythonw')
            executable = os.path.join(dn, fn)
        return executable

    if sys.platform.startswith('java'):  # pragma: no cover
        def _is_shell(self, executable):
            """
            Determine if the specified executable is a script
            (contains a #! line)
            """
            try:
                with open(executable) as fp:
                    return fp.read(2) == '#!'
            except (OSError, IOError):
                logger.warning('Failed to open %s', executable)
                return False

        def _fix_jython_executable(self, executable):
            if self._is_shell(executable):
                # Workaround for Jython is not needed on Linux systems.
                import java

                if java.lang.System.getProperty('os.name') == 'Linux':
                    return executable
            elif executable.lower().endswith('jython.exe'):
                # Use wrapper exe for Jython on Windows
                return executable
            return '/usr/bin/env %s' % executable

    def _build_shebang(self, executable, post_interp):
        """
        Build a shebang line. In the simple case (on Windows, or a shebang line
        which is not too long or contains spaces) use a simple formulation for
        the shebang. Otherwise, use /bin/sh as the executable, with a contrived
        shebang which allows the script to run either under Python or sh, using
        suitable quoting. Thanks to Harald Nordgren for his input.

        See also: http://www.in-ulm.de/~mascheck/various/shebang/#length
                  https://hg.mozilla.org/mozilla-central/file/tip/mach
        """
        if os.name != 'posix':
            simple_shebang = True
        else:
            # Add 3 for '#!' prefix and newline suffix.
            shebang_length = len(executable) + len(post_interp) + 3
            if sys.platform == 'darwin':
                max_shebang_length = 512
            else:
                max_shebang_length = 127
            simple_shebang = ((b' ' not in executable) and
                              (shebang_length <= max_shebang_length))

        if simple_shebang:
            result = b'#!' + executable + post_interp + b'\n'
        else:
            result = b'#!/bin/sh\n'
            result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n'
            result += b"' '''"
        return result

    def _get_shebang(self, encoding, post_interp=b'', options=None):
        enquote = True
        if self.executable:
            executable = self.executable
            enquote = False     # assume this will be taken care of
        elif not sysconfig.is_python_build():
            executable = get_executable()
        elif in_venv():  # pragma: no cover
            executable = os.path.join(sysconfig.get_path('scripts'),
                                      'python%s' % sysconfig.get_config_var('EXE'))
        else:  # pragma: no cover
            executable = os.path.join(
                sysconfig.get_config_var('BINDIR'),
                'python%s%s' % (sysconfig.get_config_var('VERSION'),
                                sysconfig.get_config_var('EXE')))
        if options:
            executable = self._get_alternate_executable(executable, options)

        if sys.platform.startswith('java'):  # pragma: no cover
            executable = self._fix_jython_executable(executable)

        # Normalise case for Windows - COMMENTED OUT
        # executable = os.path.normcase(executable)
        # N.B. The normalising operation above has been commented out: See
        # issue #124. Although paths in Windows are generally case-insensitive,
        # they aren't always. For example, a path containing a ẞ (which is a
        # LATIN CAPITAL LETTER SHARP S - U+1E9E) is normcased to ß (which is a
        # LATIN SMALL LETTER SHARP S' - U+00DF). The two are not considered by
        # Windows as equivalent in path names.

        # If the user didn't specify an executable, it may be necessary to
        # cater for executable paths with spaces (not uncommon on Windows)
        if enquote:
            executable = enquote_executable(executable)
        # Issue #51: don't use fsencode, since we later try to
        # check that the shebang is decodable using utf-8.
        executable = executable.encode('utf-8')
        # in case of IronPython, play safe and enable frames support
        if (sys.platform == 'cli' and '-X:Frames' not in post_interp
                and '-X:FullFrames' not in post_interp):  # pragma: no cover
            post_interp += b' -X:Frames'
        shebang = self._build_shebang(executable, post_interp)
        # Python parser starts to read a script using UTF-8 until
        # it gets a #coding:xxx cookie. The shebang has to be the
        # first line of a file, the #coding:xxx cookie cannot be
        # written before. So the shebang has to be decodable from
        # UTF-8.
        try:
            shebang.decode('utf-8')
        except UnicodeDecodeError:  # pragma: no cover
            raise ValueError(
                'The shebang (%r) is not decodable from utf-8' % shebang)
        # If the script is encoded to a custom encoding (use a
        # #coding:xxx cookie), the shebang has to be decodable from
        # the script encoding too.
        if encoding != 'utf-8':
            try:
                shebang.decode(encoding)
            except UnicodeDecodeError:  # pragma: no cover
                raise ValueError(
                    'The shebang (%r) is not decodable '
                    'from the script encoding (%r)' % (shebang, encoding))
        return shebang

    def _get_script_text(self, entry):
        return self.script_template % dict(module=entry.prefix,
                                           import_name=entry.suffix.split('.')[0],
                                           func=entry.suffix)

    manifest = _DEFAULT_MANIFEST

    def get_manifest(self, exename):
        base = os.path.basename(exename)
        return self.manifest % base

    def _write_script(self, names, shebang, script_bytes, filenames, ext):
        use_launcher = self.add_launchers and self._is_nt
        linesep = os.linesep.encode('utf-8')
        if not shebang.endswith(linesep):
            shebang += linesep
        if not use_launcher:
            script_bytes = shebang + script_bytes
        else:  # pragma: no cover
            if ext == 'py':
                launcher = self._get_launcher('t')
            else:
                launcher = self._get_launcher('w')
            stream = BytesIO()
            with ZipFile(stream, 'w') as zf:
                zf.writestr('__main__.py', script_bytes)
            zip_data = stream.getvalue()
            script_bytes = launcher + shebang + zip_data
        for name in names:
            outname = os.path.join(self.target_dir, name)
            if use_launcher:  # pragma: no cover
                n, e = os.path.splitext(outname)
                if e.startswith('.py'):
                    outname = n
                outname = '%s.exe' % outname
                try:
                    self._fileop.write_binary_file(outname, script_bytes)
                except Exception:
                    # Failed writing an executable - it might be in use.
                    logger.warning('Failed to write executable - trying to '
                                   'use .deleteme logic')
                    dfname = '%s.deleteme' % outname
                    if os.path.exists(dfname):
                        os.remove(dfname)       # Not allowed to fail here
                    os.rename(outname, dfname)  # nor here
                    self._fileop.write_binary_file(outname, script_bytes)
                    logger.debug('Able to replace executable using '
                                 '.deleteme logic')
                    try:
                        os.remove(dfname)
                    except Exception:
                        pass    # still in use - ignore error
            else:
                if self._is_nt and not outname.endswith('.' + ext):  # pragma: no cover
                    outname = '%s.%s' % (outname, ext)
                if os.path.exists(outname) and not self.clobber:
                    logger.warning('Skipping existing file %s', outname)
                    continue
                self._fileop.write_binary_file(outname, script_bytes)
                if self.set_mode:
                    self._fileop.set_executable_mode([outname])
            filenames.append(outname)

    def _make_script(self, entry, filenames, options=None):
        post_interp = b''
        if options:
            args = options.get('interpreter_args', [])
            if args:
                args = ' %s' % ' '.join(args)
                post_interp = args.encode('utf-8')
        shebang = self._get_shebang('utf-8', post_interp, options=options)
        script = self._get_script_text(entry).encode('utf-8')
        name = entry.name
        scriptnames = set()
        if '' in self.variants:
            scriptnames.add(name)
        if 'X' in self.variants:
            scriptnames.add('%s%s' % (name, self.version_info[0]))
        if 'X.Y' in self.variants:
            scriptnames.add('%s-%s.%s' % (name, self.version_info[0],
                                          self.version_info[1]))
        if options and options.get('gui', False):
            ext = 'pyw'
        else:
            ext = 'py'
        self._write_script(scriptnames, shebang, script, filenames, ext)

    def _copy_script(self, script, filenames):
        adjust = False
        script = os.path.join(self.source_dir, convert_path(script))
        outname = os.path.join(self.target_dir, os.path.basename(script))
        if not self.force and not self._fileop.newer(script, outname):
            logger.debug('not copying %s (up-to-date)', script)
            return

        # Always open the file, but ignore failures in dry-run mode --
        # that way, we'll get accurate feedback if we can read the
        # script.
        try:
            f = open(script, 'rb')
        except IOError:  # pragma: no cover
            if not self.dry_run:
                raise
            f = None
        else:
            first_line = f.readline()
            if not first_line:  # pragma: no cover
                logger.warning('%s: %s is an empty file (skipping)',
                               self.get_command_name(), script)
                return

            match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n'))
            if match:
                adjust = True
                post_interp = match.group(1) or b''

        if not adjust:
            if f:
                f.close()
            self._fileop.copy_file(script, outname)
            if self.set_mode:
                self._fileop.set_executable_mode([outname])
            filenames.append(outname)
        else:
            logger.info('copying and adjusting %s -> %s', script,
                        self.target_dir)
            if not self._fileop.dry_run:
                encoding, lines = detect_encoding(f.readline)
                f.seek(0)
                shebang = self._get_shebang(encoding, post_interp)
                if b'pythonw' in first_line:  # pragma: no cover
                    ext = 'pyw'
                else:
                    ext = 'py'
                n = os.path.basename(outname)
                self._write_script([n], shebang, f.read(), filenames, ext)
            if f:
                f.close()

    @property
    def dry_run(self):
        return self._fileop.dry_run

    @dry_run.setter
    def dry_run(self, value):
        self._fileop.dry_run = value

    if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'):  # pragma: no cover
        # Executable launcher support.
        # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/

        def _get_launcher(self, kind):
            if struct.calcsize('P') == 8:   # 64-bit
                bits = '64'
            else:
                bits = '32'
            name = '%s%s.exe' % (kind, bits)
            # Issue 31: don't hardcode an absolute package name, but
            # determine it relative to the current package
            distlib_package = __name__.rsplit('.', 1)[0]
            resource = finder(distlib_package).find(name)
            if not resource:
                msg = ('Unable to find resource %s in package %s' % (name,
                       distlib_package))
                raise ValueError(msg)
            return resource.bytes

    # Public API follows

    def make(self, specification, options=None):
        """
        Make a script.

        :param specification: The specification, which is either a valid export
                              entry specification (to make a script from a
                              callable) or a filename (to make a script by
                              copying from a source location).
        :param options: A dictionary of options controlling script generation.
        :return: A list of all absolute pathnames written to.
        """
        filenames = []
        entry = get_export_entry(specification)
        if entry is None:
            self._copy_script(specification, filenames)
        else:
            self._make_script(entry, filenames, options=options)
        return filenames

    def make_multiple(self, specifications, options=None):
        """
        Take a list of specifications and make scripts from them.

        :param specifications: A list of specifications.
        :return: A list of all absolute pathnames written to.
        """
        filenames = []
        for specification in specifications:
            filenames.extend(self.make(specification, options))
        return filenames
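
A minimal usage sketch for the public ScriptMaker API above; the export entry `hello = myproj.cli:main` and the target directory are made-up examples:

```Python
# Sketch only: 'myproj.cli:main' and '/tmp/bin' are hypothetical.
from distlib.scripts import ScriptMaker

maker = ScriptMaker(source_dir=None, target_dir='/tmp/bin')
maker.clobber = True              # overwrite any existing scripts
maker.variants = set([''])        # generate just 'hello', not 'hello-X.Y'
written = maker.make('hello = myproj.cli:main')
print(written)                    # absolute paths of the scripts written
```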
@ -0,0 +1,736 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2017 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""
Implementation of a flexible versioning scheme providing support for PEP-440,
setuptools-compatible and semantic versioning.
"""

import logging
import re

from .compat import string_types
from .util import parse_requirement

__all__ = ['NormalizedVersion', 'NormalizedMatcher',
           'LegacyVersion', 'LegacyMatcher',
           'SemanticVersion', 'SemanticMatcher',
           'UnsupportedVersionError', 'get_scheme']

logger = logging.getLogger(__name__)


class UnsupportedVersionError(ValueError):
    """This is an unsupported version."""
    pass

class Version(object):
    def __init__(self, s):
        self._string = s = s.strip()
        self._parts = parts = self.parse(s)
        assert isinstance(parts, tuple)
        assert len(parts) > 0

    def parse(self, s):
        raise NotImplementedError('please implement in a subclass')

    def _check_compatible(self, other):
        if type(self) != type(other):
            raise TypeError('cannot compare %r and %r' % (self, other))

    def __eq__(self, other):
        self._check_compatible(other)
        return self._parts == other._parts

    def __ne__(self, other):
        return not self.__eq__(other)

    def __lt__(self, other):
        self._check_compatible(other)
        return self._parts < other._parts

    def __gt__(self, other):
        return not (self.__lt__(other) or self.__eq__(other))

    def __le__(self, other):
        return self.__lt__(other) or self.__eq__(other)

    def __ge__(self, other):
        return self.__gt__(other) or self.__eq__(other)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    def __hash__(self):
        return hash(self._parts)

    def __repr__(self):
        return "%s('%s')" % (self.__class__.__name__, self._string)

    def __str__(self):
        return self._string

    @property
    def is_prerelease(self):
        raise NotImplementedError('Please implement in subclasses.')


class Matcher(object):
    version_class = None

    # value is either a callable or the name of a method
    _operators = {
        '<': lambda v, c, p: v < c,
        '>': lambda v, c, p: v > c,
        '<=': lambda v, c, p: v == c or v < c,
        '>=': lambda v, c, p: v == c or v > c,
        '==': lambda v, c, p: v == c,
        '===': lambda v, c, p: v == c,
        # by default, compatible => >=.
        '~=': lambda v, c, p: v == c or v > c,
        '!=': lambda v, c, p: v != c,
    }

    # this is a method only to support alternative implementations
    # via overriding
    def parse_requirement(self, s):
        return parse_requirement(s)

    def __init__(self, s):
        if self.version_class is None:
            raise ValueError('Please specify a version class')
        self._string = s = s.strip()
        r = self.parse_requirement(s)
        if not r:
            raise ValueError('Not valid: %r' % s)
        self.name = r.name
        self.key = self.name.lower()    # for case-insensitive comparisons
        clist = []
        if r.constraints:
            # import pdb; pdb.set_trace()
            for op, s in r.constraints:
                if s.endswith('.*'):
                    if op not in ('==', '!='):
                        raise ValueError('\'.*\' not allowed for '
                                         '%r constraints' % op)
                    # Could be a partial version (e.g. for '2.*') which
                    # won't parse as a version, so keep it as a string
                    vn, prefix = s[:-2], True
                    # Just to check that vn is a valid version
                    self.version_class(vn)
                else:
                    # Should parse as a version, so we can create an
                    # instance for the comparison
                    vn, prefix = self.version_class(s), False
                clist.append((op, vn, prefix))
        self._parts = tuple(clist)

    def match(self, version):
        """
        Check if the provided version matches the constraints.

        :param version: The version to match against this instance.
        :type version: String or :class:`Version` instance.
        """
        if isinstance(version, string_types):
            version = self.version_class(version)
        for operator, constraint, prefix in self._parts:
            f = self._operators.get(operator)
            if isinstance(f, string_types):
                f = getattr(self, f)
            if not f:
                msg = ('%r not implemented '
                       'for %s' % (operator, self.__class__.__name__))
                raise NotImplementedError(msg)
            if not f(version, constraint, prefix):
                return False
        return True

    @property
    def exact_version(self):
        result = None
        if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='):
            result = self._parts[0][1]
        return result

    def _check_compatible(self, other):
        if type(self) != type(other) or self.name != other.name:
            raise TypeError('cannot compare %s and %s' % (self, other))

    def __eq__(self, other):
        self._check_compatible(other)
        return self.key == other.key and self._parts == other._parts

    def __ne__(self, other):
        return not self.__eq__(other)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    def __hash__(self):
        return hash(self.key) + hash(self._parts)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._string)

    def __str__(self):
        return self._string

PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?'
                               r'(\.(post)(\d+))?(\.(dev)(\d+))?'
                               r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$')


def _pep_440_key(s):
    s = s.strip()
    m = PEP440_VERSION_RE.match(s)
    if not m:
        raise UnsupportedVersionError('Not a valid version: %s' % s)
    groups = m.groups()
    nums = tuple(int(v) for v in groups[1].split('.'))
    while len(nums) > 1 and nums[-1] == 0:
        nums = nums[:-1]

    if not groups[0]:
        epoch = 0
    else:
        # groups[0] matches r'\d+!', so the trailing '!' must be stripped
        # before conversion (int('1!') would raise ValueError)
        epoch = int(groups[0][:-1])
    pre = groups[4:6]
    post = groups[7:9]
    dev = groups[10:12]
    local = groups[13]
    if pre == (None, None):
        pre = ()
    else:
        pre = pre[0], int(pre[1])
    if post == (None, None):
        post = ()
    else:
        post = post[0], int(post[1])
    if dev == (None, None):
        dev = ()
    else:
        dev = dev[0], int(dev[1])
    if local is None:
        local = ()
    else:
        parts = []
        for part in local.split('.'):
            # to ensure that numeric compares as > lexicographic, avoid
            # comparing them directly, but encode a tuple which ensures
            # correct sorting
            if part.isdigit():
                part = (1, int(part))
            else:
                part = (0, part)
            parts.append(part)
        local = tuple(parts)
    if not pre:
        # either before pre-release, or final release and after
        if not post and dev:
            # before pre-release
            pre = ('a', -1)     # to sort before a0
        else:
            pre = ('z',)        # to sort after all pre-releases
    # now look at the state of post and dev.
    if not post:
        post = ('_',)   # sort before 'a'
    if not dev:
        dev = ('final',)

    #print('%s -> %s' % (s, m.groups()))
    return epoch, nums, pre, post, dev, local


_normalized_key = _pep_440_key

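# Editor's note: the key tuples returned above sort PEP 440 versions in the
# intended order, e.g.:
#   _pep_440_key('1.0a1') < _pep_440_key('1.0') < _pep_440_key('1.0.post1')
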
class NormalizedVersion(Version):
    """A rational version.

    Good:
        1.2         # equivalent to "1.2.0"
        1.2.0
        1.2a1
        1.2.3a2
        1.2.3b1
        1.2.3c1
        1.2.3.4
        TODO: fill this out

    Bad:
        1           # minimum two numbers
        1.2a        # release level must have a release serial
        1.2.3b
    """
    def parse(self, s):
        result = _normalized_key(s)
        # _normalized_key loses trailing zeroes in the release
        # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0
        # However, PEP 440 prefix matching needs it: for example,
        # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0).
        m = PEP440_VERSION_RE.match(s)      # must succeed
        groups = m.groups()
        self._release_clause = tuple(int(v) for v in groups[1].split('.'))
        return result

    PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev'])

    @property
    def is_prerelease(self):
        return any(t[0] in self.PREREL_TAGS for t in self._parts if t)

def _match_prefix(x, y):
    x = str(x)
    y = str(y)
    if x == y:
        return True
    if not x.startswith(y):
        return False
    n = len(y)
    return x[n] == '.'

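# Editor's note: _match_prefix implements whole-component prefix matching on
# dotted version strings, e.g.:
#   _match_prefix('1.4.5', '1.4')  -> True   ('1.4.5' starts with '1.4.')
#   _match_prefix('1.45', '1.4')   -> False  ('.45' is not a sub-component)
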

class NormalizedMatcher(Matcher):
    version_class = NormalizedVersion

    # value is either a callable or the name of a method
    _operators = {
        '~=': '_match_compatible',
        '<': '_match_lt',
        '>': '_match_gt',
        '<=': '_match_le',
        '>=': '_match_ge',
        '==': '_match_eq',
        '===': '_match_arbitrary',
        '!=': '_match_ne',
    }

    def _adjust_local(self, version, constraint, prefix):
        if prefix:
            strip_local = '+' not in constraint and version._parts[-1]
        else:
            # both constraint and version are
            # NormalizedVersion instances.
            # If constraint does not have a local component,
            # ensure the version doesn't, either.
            strip_local = not constraint._parts[-1] and version._parts[-1]
        if strip_local:
            s = version._string.split('+', 1)[0]
            version = self.version_class(s)
        return version, constraint

    def _match_lt(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if version >= constraint:
            return False
        release_clause = constraint._release_clause
        pfx = '.'.join([str(i) for i in release_clause])
        return not _match_prefix(version, pfx)

    def _match_gt(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if version <= constraint:
            return False
        release_clause = constraint._release_clause
        pfx = '.'.join([str(i) for i in release_clause])
        return not _match_prefix(version, pfx)

    def _match_le(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        return version <= constraint

    def _match_ge(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        return version >= constraint

    def _match_eq(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if not prefix:
            result = (version == constraint)
        else:
            result = _match_prefix(version, constraint)
        return result

    def _match_arbitrary(self, version, constraint, prefix):
        return str(version) == str(constraint)

    def _match_ne(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if not prefix:
            result = (version != constraint)
        else:
            result = not _match_prefix(version, constraint)
        return result

    def _match_compatible(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if version == constraint:
            return True
        if version < constraint:
            return False
        # if not prefix:
        #     return True
        release_clause = constraint._release_clause
        if len(release_clause) > 1:
            release_clause = release_clause[:-1]
        pfx = '.'.join([str(i) for i in release_clause])
        return _match_prefix(version, pfx)

_REPLACEMENTS = (
    (re.compile('[.+-]$'), ''),                     # remove trailing puncts
    (re.compile(r'^[.](\d)'), r'0.\1'),             # .N -> 0.N at start
    (re.compile('^[.-]'), ''),                      # remove leading puncts
    (re.compile(r'^\((.*)\)$'), r'\1'),             # remove parentheses
    (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'),    # remove leading v(ersion)
    (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'),        # remove leading r(ev)
    (re.compile('[.]{2,}'), '.'),                   # multiple runs of '.'
    (re.compile(r'\b(alfa|apha)\b'), 'alpha'),      # misspelt alpha
    (re.compile(r'\b(pre-alpha|prealpha)\b'),
     'pre.alpha'),                                  # standardise
    (re.compile(r'\(beta\)$'), 'beta'),             # remove parentheses
)

_SUFFIX_REPLACEMENTS = (
    (re.compile('^[:~._+-]+'), ''),     # remove leading puncts
    (re.compile('[,*")([\\]]'), ''),    # remove unwanted chars
    (re.compile('[~:+_ -]'), '.'),      # replace illegal chars
    (re.compile('[.]{2,}'), '.'),       # multiple runs of '.'
    (re.compile(r'\.$'), ''),           # trailing '.'
)

_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)')

def _suggest_semantic_version(s):
    """
    Try to suggest a semantic form for a version for which
    _suggest_normalized_version couldn't come up with anything.
    """
    result = s.strip().lower()
    for pat, repl in _REPLACEMENTS:
        result = pat.sub(repl, result)
    if not result:
        result = '0.0.0'

    # Now look for numeric prefix, and separate it out from
    # the rest.
    #import pdb; pdb.set_trace()
    m = _NUMERIC_PREFIX.match(result)
    if not m:
        prefix = '0.0.0'
        suffix = result
    else:
        prefix = m.groups()[0].split('.')
        prefix = [int(i) for i in prefix]
        while len(prefix) < 3:
            prefix.append(0)
        if len(prefix) == 3:
            suffix = result[m.end():]
        else:
            suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():]
            prefix = prefix[:3]
    prefix = '.'.join([str(i) for i in prefix])
    suffix = suffix.strip()
    if suffix:
        #import pdb; pdb.set_trace()
        # massage the suffix.
        for pat, repl in _SUFFIX_REPLACEMENTS:
            suffix = pat.sub(repl, suffix)

    if not suffix:
        result = prefix
    else:
        sep = '-' if 'dev' in suffix else '+'
        result = prefix + sep + suffix
    if not is_semver(result):
        result = None
    return result

def _suggest_normalized_version(s):
    """Suggest a normalized version close to the given version string.

    If you have a version string that isn't rational (i.e. NormalizedVersion
    doesn't like it) then you might be able to get an equivalent (or close)
    rational version from this function.

    This does a number of simple normalizations to the given string, based
    on observation of versions currently in use on PyPI. Given a dump of
    those versions during PyCon 2009, 4287 of them:
    - 2312 (53.93%) match NormalizedVersion without change
      with the automatic suggestion
    - 3474 (81.04%) match when using this suggestion method

    @param s {str} An irrational version string.
    @returns A rational version string, or None, if couldn't determine one.
    """
    try:
        _normalized_key(s)
        return s    # already rational
    except UnsupportedVersionError:
        pass

    rs = s.lower()

    # part of this could use maketrans
    for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
                       ('beta', 'b'), ('rc', 'c'), ('-final', ''),
                       ('-pre', 'c'),
                       ('-release', ''), ('.release', ''), ('-stable', ''),
                       ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''),
                       ('final', '')):
        rs = rs.replace(orig, repl)

    # if something ends with dev or pre, we add a 0
    rs = re.sub(r"pre$", r"pre0", rs)
    rs = re.sub(r"dev$", r"dev0", rs)

    # if we have something like "b-2" or "a.2" at the end of the
    # version, that is probably beta, alpha, etc
    # let's remove the dash or dot
    rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs)

    # 1.0-dev-r371 -> 1.0.dev371
    # 0.1-dev-r79 -> 0.1.dev79
    rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)

    # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
    rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)

    # Clean: v0.3, v1.0
    if rs.startswith('v'):
        rs = rs[1:]

    # Clean leading '0's on numbers.
    #TODO: unintended side-effect on, e.g., "2003.05.09"
    # PyPI stats: 77 (~2%) better
    rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)

    # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
    # zero.
    # PyPI stats: 245 (7.56%) better
    rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs)

    # the 'dev-rNNN' tag is a dev tag
    rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs)

    # clean the - when used as a pre delimiter
    rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs)

    # a terminal "dev" or "devel" can be changed into ".dev0"
    rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs)

    # a terminal "dev" can be changed into ".dev0"
    rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs)

    # a terminal "final" or "stable" can be removed
    rs = re.sub(r"(final|stable)$", "", rs)

    # The 'r' and the '-' tags are post release tags
    #   0.4a1.r10       ->  0.4a1.post10
    #   0.9.33-17222    ->  0.9.33.post17222
    #   0.9.33-r17222   ->  0.9.33.post17222
    rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs)

    # Clean 'r' instead of 'dev' usage:
    #   0.9.33+r17222   ->  0.9.33.dev17222
    #   1.0dev123       ->  1.0.dev123
    #   1.0.git123      ->  1.0.dev123
    #   1.0.bzr123      ->  1.0.dev123
    #   0.1a0dev.123    ->  0.1a0.dev123
    # PyPI stats: ~150 (~4%) better
    rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs)

    # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage:
    #   0.2.pre1        ->  0.2c1
    #   0.2-c1          ->  0.2c1
    #   1.0preview123   ->  1.0c123
    # PyPI stats: ~21 (0.62%) better
    rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs)

    # Tcl/Tk uses "px" for their post release markers
    rs = re.sub(r"p(\d+)$", r".post\1", rs)

    try:
        _normalized_key(rs)
    except UnsupportedVersionError:
        rs = None
    return rs

#
# Legacy version processing (distribute-compatible)
#

_VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I)
_VERSION_REPLACE = {
    'pre': 'c',
    'preview': 'c',
    '-': 'final-',
    'rc': 'c',
    'dev': '@',
    '': None,
    '.': None,
}


def _legacy_key(s):
    def get_parts(s):
        result = []
        for p in _VERSION_PART.split(s.lower()):
            p = _VERSION_REPLACE.get(p, p)
            if p:
                if '0' <= p[:1] <= '9':
                    p = p.zfill(8)
                else:
                    p = '*' + p
                result.append(p)
        result.append('*final')
        return result

    result = []
    for p in get_parts(s):
        if p.startswith('*'):
            if p < '*final':
                while result and result[-1] == '*final-':
                    result.pop()
            while result and result[-1] == '00000000':
                result.pop()
        result.append(p)
    return tuple(result)

class LegacyVersion(Version):
    def parse(self, s):
        return _legacy_key(s)

    @property
    def is_prerelease(self):
        result = False
        for x in self._parts:
            if (isinstance(x, string_types) and x.startswith('*') and
                    x < '*final'):
                result = True
                break
        return result


class LegacyMatcher(Matcher):
    version_class = LegacyVersion

    _operators = dict(Matcher._operators)
    _operators['~='] = '_match_compatible'

    numeric_re = re.compile(r'^(\d+(\.\d+)*)')

    def _match_compatible(self, version, constraint, prefix):
        if version < constraint:
            return False
        m = self.numeric_re.match(str(constraint))
        if not m:
            logger.warning('Cannot compute compatible match for version %s '
                           'and constraint %s', version, constraint)
            return True
        s = m.groups()[0]
        if '.' in s:
            s = s.rsplit('.', 1)[0]
        return _match_prefix(version, s)

#
# Semantic versioning
#

_SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)'
                        r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?'
                        r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I)


def is_semver(s):
    return _SEMVER_RE.match(s)


def _semantic_key(s):
    def make_tuple(s, absent):
        if s is None:
            result = (absent,)
        else:
            parts = s[1:].split('.')
            # We can't compare ints and strings on Python 3, so fudge it
            # by zero-filling numeric values to simulate a numeric comparison
            result = tuple([p.zfill(8) if p.isdigit() else p for p in parts])
        return result

    m = is_semver(s)
    if not m:
        raise UnsupportedVersionError(s)
    groups = m.groups()
    major, minor, patch = [int(i) for i in groups[:3]]
    # choose the '|' and '*' so that versions sort correctly
    pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*')
    return (major, minor, patch), pre, build

class SemanticVersion(Version):
    def parse(self, s):
        return _semantic_key(s)

    @property
    def is_prerelease(self):
        return self._parts[1][0] != '|'


class SemanticMatcher(Matcher):
    version_class = SemanticVersion


class VersionScheme(object):
    def __init__(self, key, matcher, suggester=None):
        self.key = key
        self.matcher = matcher
        self.suggester = suggester

    def is_valid_version(self, s):
        try:
            self.matcher.version_class(s)
            result = True
        except UnsupportedVersionError:
            result = False
        return result

    def is_valid_matcher(self, s):
        try:
            self.matcher(s)
            result = True
        except UnsupportedVersionError:
            result = False
        return result

    def is_valid_constraint_list(self, s):
        """
        Used for processing some metadata fields
        """
        return self.is_valid_matcher('dummy_name (%s)' % s)

    def suggest(self, s):
        if self.suggester is None:
            result = None
        else:
            result = self.suggester(s)
        return result


_SCHEMES = {
    'normalized': VersionScheme(_normalized_key, NormalizedMatcher,
                                _suggest_normalized_version),
    'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda self, s: s),
    'semantic': VersionScheme(_semantic_key, SemanticMatcher,
                              _suggest_semantic_version),
}

_SCHEMES['default'] = _SCHEMES['normalized']


def get_scheme(name):
    if name not in _SCHEMES:
        raise ValueError('unknown scheme name: %r' % name)
    return _SCHEMES[name]
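
A short usage sketch for the versioning API above, assuming the module is importable as `distlib.version`; the requirement string is a made-up example:

```Python
# Sketch only: 'requests (>= 1.0, < 2.0)' is a hypothetical requirement.
from distlib.version import get_scheme

scheme = get_scheme('normalized')               # PEP 440 semantics
matcher = scheme.matcher('requests (>= 1.0, < 2.0)')
print(matcher.match('1.4.5'))                   # True
print(matcher.match('2.1'))                     # False
print(scheme.is_valid_version('1.0.post1'))     # True
```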
@ -0,0 +1 @@
pip
@ -0,0 +1,24 @@
This is free and unencumbered software released into the public domain.

Anyone is free to copy, modify, publish, use, compile, sell, or
distribute this software, either in source code form or as a compiled
binary, for any purpose, commercial or non-commercial, and by any
means.

In jurisdictions that recognize copyright laws, the author or authors
of this software dedicate any and all copyright interest in the
software to the public domain. We make this dedication for the benefit
of the public at large and to the detriment of our heirs and
successors. We intend this dedication to be an overt act of
relinquishment in perpetuity of all present and future rights to this
software under copyright law.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.

For more information, please refer to <http://unlicense.org>
@ -0,0 +1,156 @@
Metadata-Version: 2.1
Name: filelock
Version: 3.0.12
Summary: A platform independent file lock.
Home-page: https://github.com/benediktschmitt/py-filelock
Author: Benedikt Schmitt
Author-email: benedikt@benediktschmitt.de
License: Public Domain <http://unlicense.org>
Download-URL: https://github.com/benediktschmitt/py-filelock/archive/master.zip
Platform: UNKNOWN
Classifier: License :: Public Domain
Classifier: Development Status :: 5 - Production/Stable
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Intended Audience :: Developers
Classifier: Topic :: System
Classifier: Topic :: Internet
Classifier: Topic :: Software Development :: Libraries
Description-Content-Type: text/markdown

# py-filelock

![travis-ci](https://travis-ci.org/benediktschmitt/py-filelock.svg?branch=master)

This package contains a single module implementing a platform-independent
file lock in Python, which provides a simple way of inter-process communication:

```Python
from filelock import Timeout, FileLock

lock = FileLock("high_ground.txt.lock")
with lock:
    open("high_ground.txt", "a").write("You were the chosen one.")
```

**Don't use** a *FileLock* to lock the file you want to write to; instead, create
a separate *.lock* file as shown above.

![animated example](https://raw.githubusercontent.com/benediktschmitt/py-filelock/master/example/example.gif)


## Similar libraries

Perhaps you are looking for something like:

* https://pypi.python.org/pypi/pid/2.1.1
* https://docs.python.org/3.6/library/msvcrt.html#msvcrt.locking
* or https://docs.python.org/3/library/fcntl.html#fcntl.flock


## Installation

*py-filelock* is available via PyPI:

```
$ pip3 install filelock
```


## Documentation

The documentation for the API is available on
[readthedocs.org](https://filelock.readthedocs.io/).


### Examples

A *FileLock* is used to signal to other instances of your application that a
resource or working directory is currently in use. To do so, create a
*FileLock* first:

```Python
from filelock import Timeout, FileLock

file_path = "high_ground.txt"
lock_path = "high_ground.txt.lock"

lock = FileLock(lock_path, timeout=1)
```

The lock object supports multiple ways of acquiring the lock, including the
ones used to acquire standard Python thread locks:

```Python
with lock:
    open(file_path, "a").write("Hello there!")

lock.acquire()
try:
    open(file_path, "a").write("General Kenobi!")
finally:
    lock.release()
```

The *acquire()* method also accepts a *timeout* parameter. If the lock cannot be
acquired within *timeout* seconds, a *Timeout* exception is raised:

```Python
try:
    with lock.acquire(timeout=10):
        open(file_path, "a").write("I have a bad feeling about this.")
except Timeout:
    print("Another instance of this application currently holds the lock.")
```

The lock objects are recursive locks, which means that once acquired, they will
not block on successive lock requests:

```Python
def cite1():
    with lock:
        open(file_path, "a").write("I hate it when he does that.")

def cite2():
    with lock:
        open(file_path, "a").write("You don't want to sell me death sticks.")

# The lock is acquired here.
with lock:
    cite1()
    cite2()

# And released here.
```

## FileLock vs SoftFileLock

The *FileLock* is platform dependent, while the *SoftFileLock* is not. Use the
*FileLock* if all instances of your application run on the same host and a
*SoftFileLock* otherwise.

The *SoftFileLock* only watches for the existence of the lock file. This makes
it ultra portable, but also more prone to deadlocks if the application crashes.
You can simply delete the lock file in such cases.
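
For illustration, a minimal sketch of the same pattern with a *SoftFileLock*
(the file names here are made up; the interface is identical to *FileLock*):

```Python
from filelock import SoftFileLock, Timeout

# Only the locking mechanism differs: lock-file existence
# instead of an OS-level lock.
lock = SoftFileLock("shared_resource.lock", timeout=5)

try:
    with lock:
        open("shared_resource.txt", "a").write("Hello from a soft lock.")
except Timeout:
    # A stale .lock file left behind by a crashed process
    # can simply be deleted by hand.
    print("Another process holds the lock (or left a stale lock file).")
```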


## Contributions

Contributions are always welcome; please make sure they pass all tests before
creating a pull request. Never hesitate to open a new issue, although it may
take some time for me to respond.


## License

This package is [public domain](./LICENSE.rst).

@ -0,0 +1,8 @@
__pycache__/filelock.cpython-36.pyc,,
filelock-3.0.12.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
filelock-3.0.12.dist-info/LICENSE,sha256=iNm062BXnBkew5HKBMFhMFctfu3EqG2qWL8oxuFMm80,1210
filelock-3.0.12.dist-info/METADATA,sha256=gjzbv9nxtD-Rj2ysjUuG7SLZCHUQl5hMy68Jij8soPw,4343
filelock-3.0.12.dist-info/RECORD,,
filelock-3.0.12.dist-info/WHEEL,sha256=S8S5VL-stOTSZDYxHyf0KP7eds0J72qrK0Evu3TfyAY,92
filelock-3.0.12.dist-info/top_level.txt,sha256=NDrf9i5BNogz4hEdsr6Hi7Ws3TlSSKY4Q2Y9_-i2GwU,9
filelock.py,sha256=5DQTtOaQq7-vgLkZzvOhqhVMh_umfydWgSA8Vuzmf8M,13229
@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.33.4)
Root-Is-Purelib: true
Tag: py3-none-any

@ -0,0 +1 @@
filelock
@ -0,0 +1,451 @@
# This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
#
# In jurisdictions that recognize copyright laws, the author or authors
# of this software dedicate any and all copyright interest in the
# software to the public domain. We make this dedication for the benefit
# of the public at large and to the detriment of our heirs and
# successors. We intend this dedication to be an overt act of
# relinquishment in perpetuity of all present and future rights to this
# software under copyright law.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# For more information, please refer to <http://unlicense.org>

"""
A platform independent file lock that supports the with-statement.
"""


# Modules
# ------------------------------------------------
import logging
import os
import threading
import time
try:
    import warnings
except ImportError:
    warnings = None

try:
    import msvcrt
except ImportError:
    msvcrt = None

try:
    import fcntl
except ImportError:
    fcntl = None


# Backward compatibility
# ------------------------------------------------
try:
    TimeoutError
except NameError:
    TimeoutError = OSError


# Data
# ------------------------------------------------
__all__ = [
    "Timeout",
    "BaseFileLock",
    "WindowsFileLock",
    "UnixFileLock",
    "SoftFileLock",
    "FileLock"
]

__version__ = "3.0.12"


_logger = None
def logger():
    """Returns the logger instance used in this module."""
    global _logger
    _logger = _logger or logging.getLogger(__name__)
    return _logger


# Exceptions
# ------------------------------------------------
class Timeout(TimeoutError):
    """
    Raised when the lock could not be acquired in *timeout*
    seconds.
    """

    def __init__(self, lock_file):
        """
        """
        #: The path of the file lock.
        self.lock_file = lock_file
        return None

    def __str__(self):
        temp = "The file lock '{}' could not be acquired."\
               .format(self.lock_file)
        return temp


# Classes
# ------------------------------------------------

# This is a helper class which is returned by :meth:`BaseFileLock.acquire`
# and wraps the lock to make sure __enter__ is not called twice when entering
# the with statement.
# If we would simply return *self*, the lock would be acquired again
# in the *__enter__* method of the BaseFileLock, but not released again
# automatically.
#
# :seealso: issue #37 (memory leak)
class _Acquire_ReturnProxy(object):

    def __init__(self, lock):
        self.lock = lock
        return None

    def __enter__(self):
        return self.lock

    def __exit__(self, exc_type, exc_value, traceback):
        self.lock.release()
        return None


class BaseFileLock(object):
    """
    Implements the base class of a file lock.
    """

    def __init__(self, lock_file, timeout = -1):
        """
        """
        # The path to the lock file.
        self._lock_file = lock_file

        # The file descriptor for the *_lock_file* as it is returned by the
        # os.open() function.
        # This file lock is only NOT None, if the object currently holds the
        # lock.
        self._lock_file_fd = None

        # The default timeout value.
        self.timeout = timeout

        # We use this lock primarily for the lock counter.
        self._thread_lock = threading.Lock()

        # The lock counter is used for implementing the nested locking
        # mechanism. Whenever the lock is acquired, the counter is increased and
        # the lock is only released, when this value is 0 again.
        self._lock_counter = 0
        return None

    @property
    def lock_file(self):
        """
        The path to the lock file.
        """
        return self._lock_file

    @property
    def timeout(self):
        """
        You can set a default timeout for the filelock. It will be used as
        fallback value in the acquire method, if no timeout value (*None*) is
        given.

        If you want to disable the timeout, set it to a negative value.

        A timeout of 0 means, that there is exactly one attempt to acquire the
        file lock.

        .. versionadded:: 2.0.0
        """
        return self._timeout

    @timeout.setter
    def timeout(self, value):
        """
        """
        self._timeout = float(value)
        return None

    # Platform dependent locking
    # --------------------------------------------

    def _acquire(self):
        """
        Platform dependent. If the file lock could be
        acquired, self._lock_file_fd holds the file descriptor
        of the lock file.
        """
        raise NotImplementedError()

    def _release(self):
        """
        Releases the lock and sets self._lock_file_fd to None.
        """
        raise NotImplementedError()

    # Platform independent methods
    # --------------------------------------------

    @property
    def is_locked(self):
        """
        True, if the object holds the file lock.

        .. versionchanged:: 2.0.0

            This was previously a method and is now a property.
        """
        return self._lock_file_fd is not None

    def acquire(self, timeout=None, poll_intervall=0.05):
        """
        Acquires the file lock or fails with a :exc:`Timeout` error.

        .. code-block:: python

            # You can use this method in the context manager (recommended)
            with lock.acquire():
                pass

            # Or use an equivalent try-finally construct:
            lock.acquire()
            try:
                pass
            finally:
                lock.release()

        :arg float timeout:
            The maximum time waited for the file lock.
            If ``timeout < 0``, there is no timeout and this method will
            block until the lock could be acquired.
            If ``timeout`` is None, the default :attr:`~timeout` is used.

        :arg float poll_intervall:
            We check once in *poll_intervall* seconds if we can acquire the
            file lock.

        :raises Timeout:
            if the lock could not be acquired in *timeout* seconds.

        .. versionchanged:: 2.0.0

            This method now returns a *proxy* object instead of *self*,
            so that it can be used in a with statement without side effects.
        """
        # Use the default timeout, if no timeout is provided.
        if timeout is None:
            timeout = self.timeout

        # Increment the number right at the beginning.
        # We can still undo it, if something fails.
        with self._thread_lock:
            self._lock_counter += 1

        lock_id = id(self)
        lock_filename = self._lock_file
        start_time = time.time()
        try:
            while True:
                with self._thread_lock:
                    if not self.is_locked:
                        logger().debug('Attempting to acquire lock %s on %s', lock_id, lock_filename)
                        self._acquire()

                if self.is_locked:
                    logger().info('Lock %s acquired on %s', lock_id, lock_filename)
                    break
                elif timeout >= 0 and time.time() - start_time > timeout:
                    logger().debug('Timeout on acquiring lock %s on %s', lock_id, lock_filename)
                    raise Timeout(self._lock_file)
                else:
                    logger().debug(
                        'Lock %s not acquired on %s, waiting %s seconds ...',
                        lock_id, lock_filename, poll_intervall
                    )
                    time.sleep(poll_intervall)
        except:
            # Something did go wrong, so decrement the counter.
            with self._thread_lock:
                self._lock_counter = max(0, self._lock_counter - 1)

            raise
        return _Acquire_ReturnProxy(lock = self)

    def release(self, force = False):
        """
        Releases the file lock.

        Please note, that the lock is only completely released, if the lock
        counter is 0.

        Also note, that the lock file itself is not automatically deleted.

        :arg bool force:
            If true, the lock counter is ignored and the lock is released in
            every case.
        """
        with self._thread_lock:

            if self.is_locked:
                self._lock_counter -= 1

                if self._lock_counter == 0 or force:
                    lock_id = id(self)
                    lock_filename = self._lock_file

                    logger().debug('Attempting to release lock %s on %s', lock_id, lock_filename)
                    self._release()
                    self._lock_counter = 0
                    logger().info('Lock %s released on %s', lock_id, lock_filename)

        return None

    def __enter__(self):
        self.acquire()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.release()
        return None

    def __del__(self):
        self.release(force = True)
        return None


# Windows locking mechanism
# ~~~~~~~~~~~~~~~~~~~~~~~~~

class WindowsFileLock(BaseFileLock):
    """
    Uses the :func:`msvcrt.locking` function to hard lock the lock file on
    windows systems.
    """

    def _acquire(self):
        open_mode = os.O_RDWR | os.O_CREAT | os.O_TRUNC

        try:
            fd = os.open(self._lock_file, open_mode)
        except OSError:
            pass
        else:
            try:
                msvcrt.locking(fd, msvcrt.LK_NBLCK, 1)
            except (IOError, OSError):
                os.close(fd)
            else:
                self._lock_file_fd = fd
        return None

    def _release(self):
        fd = self._lock_file_fd
        self._lock_file_fd = None
        msvcrt.locking(fd, msvcrt.LK_UNLCK, 1)
        os.close(fd)

        try:
            os.remove(self._lock_file)
        # Probably another instance of the application
        # that acquired the file lock.
        except OSError:
            pass
        return None

# Unix locking mechanism
# ~~~~~~~~~~~~~~~~~~~~~~

class UnixFileLock(BaseFileLock):
    """
    Uses the :func:`fcntl.flock` to hard lock the lock file on unix systems.
    """

    def _acquire(self):
        open_mode = os.O_RDWR | os.O_CREAT | os.O_TRUNC
        fd = os.open(self._lock_file, open_mode)

        try:
            fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except (IOError, OSError):
            os.close(fd)
        else:
            self._lock_file_fd = fd
        return None

    def _release(self):
        # Do not remove the lockfile:
        #
        #   https://github.com/benediktschmitt/py-filelock/issues/31
        #   https://stackoverflow.com/questions/17708885/flock-removing-locked-file-without-race-condition
        fd = self._lock_file_fd
        self._lock_file_fd = None
        fcntl.flock(fd, fcntl.LOCK_UN)
        os.close(fd)
        return None

# Soft lock
# ~~~~~~~~~

class SoftFileLock(BaseFileLock):
    """
    Simply watches the existence of the lock file.
    """

    def _acquire(self):
        open_mode = os.O_WRONLY | os.O_CREAT | os.O_EXCL | os.O_TRUNC
        try:
            fd = os.open(self._lock_file, open_mode)
        except (IOError, OSError):
            pass
        else:
            self._lock_file_fd = fd
        return None

    def _release(self):
        os.close(self._lock_file_fd)
        self._lock_file_fd = None

        try:
            os.remove(self._lock_file)
        # The file is already deleted and that's what we want.
        except OSError:
            pass
        return None


# Platform filelock
# ~~~~~~~~~~~~~~~~~

#: Alias for the lock, which should be used for the current platform. On
#: Windows, this is an alias for :class:`WindowsFileLock`, on Unix for
#: :class:`UnixFileLock` and otherwise for :class:`SoftFileLock`.
FileLock = None

if msvcrt:
    FileLock = WindowsFileLock
elif fcntl:
    FileLock = UnixFileLock
else:
    FileLock = SoftFileLock

    if warnings is not None:
        warnings.warn("only soft file lock is available")
@ -0,0 +1 @@
pip
@ -0,0 +1,13 @@
Copyright 2017-2019 Jason R. Coombs, Barry Warsaw

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@ -0,0 +1,66 @@
Metadata-Version: 2.1
Name: importlib-metadata
Version: 1.7.0
Summary: Read metadata from Python packages
Home-page: http://importlib-metadata.readthedocs.io/
Author: Barry Warsaw
Author-email: barry@python.org
License: Apache Software License
Platform: UNKNOWN
Classifier: Development Status :: 3 - Alpha
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Topic :: Software Development :: Libraries
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 2
Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7
Requires-Dist: zipp (>=0.5)
Requires-Dist: pathlib2 ; python_version < "3"
Requires-Dist: contextlib2 ; python_version < "3"
Requires-Dist: configparser (>=3.5) ; python_version < "3"
Provides-Extra: docs
Requires-Dist: sphinx ; extra == 'docs'
Requires-Dist: rst.linker ; extra == 'docs'
Provides-Extra: testing
Requires-Dist: packaging ; extra == 'testing'
Requires-Dist: pep517 ; extra == 'testing'
Requires-Dist: importlib-resources (>=1.3) ; (python_version < "3.9") and extra == 'testing'

=========================
 ``importlib_metadata``
=========================

``importlib_metadata`` is a library to access the metadata for a Python
package. It is intended to be ported to Python 3.8.


Usage
=====

See the `online documentation <https://importlib_metadata.readthedocs.io/>`_
for usage details.
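
As a quick orientation, a minimal sketch (the ``wheel`` distribution name
below is only an example and is assumed to be installed)::

    from importlib_metadata import version, metadata, entry_points

    print(version("wheel"))              # version string, e.g. "0.33.4"
    print(metadata("wheel")["Summary"])  # a single metadata field
    print(sorted(entry_points()))        # names of entry point groups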

`Finder authors
<https://docs.python.org/3/reference/import.html#finders-and-loaders>`_ can
also add support for custom package installers. See the above documentation
for details; a small illustrative sketch follows.
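
As an illustration only (``VendoredFinder`` is a hypothetical name and the
body is a no-op placeholder), a finder participates by implementing
``find_distributions``::

    import sys
    from importlib_metadata import DistributionFinder

    class VendoredFinder(DistributionFinder):
        # A meta path finder that never finds modules ...
        def find_spec(self, *args, **kwargs):
            return None

        # ... but may report distributions for its own install scheme.
        def find_distributions(self, context=DistributionFinder.Context()):
            # Yield Distribution instances whose metadata matches
            # context.name (or everything, when context.name is None).
            return iter(())

    sys.meta_path.append(VendoredFinder())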

Caveats
=======

This project primarily supports third-party packages installed by PyPA
tools (or other conforming packages). It does not support:

- Packages in the stdlib.
- Packages installed without metadata.

Project details
===============

 * Project home: https://gitlab.com/python-devs/importlib_metadata
 * Report bugs at: https://gitlab.com/python-devs/importlib_metadata/issues
 * Code hosting: https://gitlab.com/python-devs/importlib_metadata.git
 * Documentation: http://importlib_metadata.readthedocs.io/

@ -0,0 +1,33 @@
importlib_metadata-1.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
importlib_metadata-1.7.0.dist-info/LICENSE,sha256=wNe6dAchmJ1VvVB8D9oTc-gHHadCuaSBAev36sYEM6U,571
importlib_metadata-1.7.0.dist-info/METADATA,sha256=AvM2AcUhNbF_2Yyo8ttyVBCh_qGbRHaRE3MVgrHYDVw,2144
importlib_metadata-1.7.0.dist-info/RECORD,,
importlib_metadata-1.7.0.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
importlib_metadata-1.7.0.dist-info/top_level.txt,sha256=CO3fD9yylANiXkrMo4qHLV_mqXL2sC5JFKgt1yWAT-A,19
importlib_metadata/__init__.py,sha256=phnrEcGP-8cF-_ZZ5peJL4cUVAANOK0CpSWC-0-IVAs,18961
importlib_metadata/__pycache__/__init__.cpython-36.pyc,,
importlib_metadata/__pycache__/_compat.cpython-36.pyc,,
importlib_metadata/_compat.py,sha256=DnM55BbJKFCcZmJOkArmyO76-0g7pA6HEfzSYWXN88k,4417
importlib_metadata/docs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_metadata/docs/__pycache__/__init__.cpython-36.pyc,,
importlib_metadata/docs/__pycache__/conf.cpython-36.pyc,,
importlib_metadata/docs/changelog.rst,sha256=6EZfl84T0SQHzAXNlTiTegG0cBTa9wiMt0od0ht2n_8,8739
importlib_metadata/docs/conf.py,sha256=m-b6Mju5gFkpSHh-lyJ4iwqf_8t4LjYYFRumtutQSZc,5578
importlib_metadata/docs/index.rst,sha256=rbXrDkLAKLIDccqME5u9CCMEfMKprqzQOkIOuwOnfz4,1907
importlib_metadata/docs/using.rst,sha256=k_L4Hwwsf10ap9xWejyC-_gLz_WtvRfDOzuJA3o7Zw0,9504
importlib_metadata/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_metadata/tests/__pycache__/__init__.cpython-36.pyc,,
importlib_metadata/tests/__pycache__/fixtures.cpython-36.pyc,,
importlib_metadata/tests/__pycache__/test_api.cpython-36.pyc,,
importlib_metadata/tests/__pycache__/test_integration.cpython-36.pyc,,
importlib_metadata/tests/__pycache__/test_main.cpython-36.pyc,,
importlib_metadata/tests/__pycache__/test_zip.cpython-36.pyc,,
importlib_metadata/tests/data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_metadata/tests/data/__pycache__/__init__.cpython-36.pyc,,
importlib_metadata/tests/data/example-21.12-py3-none-any.whl,sha256=I-kYufETid-tDYyR8f1OFJ3t5u_Io23k0cbQxJTUN4I,1455
importlib_metadata/tests/data/example-21.12-py3.6.egg,sha256=-EeugFAijkdUO9xyQHTZkQwZoFXK0_QxICBj6R5AAJo,1497
importlib_metadata/tests/fixtures.py,sha256=Ua_PqyqBhFqkkNGFsXtgMah6vXKQjeqKo1KhhzYdn-w,5752
importlib_metadata/tests/test_api.py,sha256=YMAGTsRENrtvpw2CSLmRndJMBeT4q_M0GSe-QsnnMZ4,5544
importlib_metadata/tests/test_integration.py,sha256=ykJpwjSkVwvWHG4gUw4RUrZzU_7JKX8vZyPf_kFIrLE,1579
importlib_metadata/tests/test_main.py,sha256=dcsDqyxTRtard2j5ysDDvVwfK6vvXdRtZCaQ0QljSR8,9026
importlib_metadata/tests/test_zip.py,sha256=lOCNPyfJSm9nz0-2RQndM7OQV-_gRjJzyRnvMqXqRSI,2675
@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.34.2)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any

@ -0,0 +1 @@
importlib_metadata
@ -0,0 +1,623 @@
from __future__ import unicode_literals, absolute_import

import io
import os
import re
import abc
import csv
import sys
import zipp
import operator
import functools
import itertools
import posixpath
import collections

from ._compat import (
    install,
    NullFinder,
    ConfigParser,
    suppress,
    map,
    FileNotFoundError,
    IsADirectoryError,
    NotADirectoryError,
    PermissionError,
    pathlib,
    ModuleNotFoundError,
    MetaPathFinder,
    email_message_from_string,
    PyPy_repr,
    unique_ordered,
    str,
    )
from importlib import import_module
from itertools import starmap


__metaclass__ = type


__all__ = [
    'Distribution',
    'DistributionFinder',
    'PackageNotFoundError',
    'distribution',
    'distributions',
    'entry_points',
    'files',
    'metadata',
    'requires',
    'version',
    ]


class PackageNotFoundError(ModuleNotFoundError):
    """The package was not found."""

    def __str__(self):
        tmpl = "No package metadata was found for {self.name}"
        return tmpl.format(**locals())

    @property
    def name(self):
        name, = self.args
        return name


class EntryPoint(
        PyPy_repr,
        collections.namedtuple('EntryPointBase', 'name value group')):
    """An entry point as defined by Python packaging conventions.

    See `the packaging docs on entry points
    <https://packaging.python.org/specifications/entry-points/>`_
    for more information.
    """

    pattern = re.compile(
        r'(?P<module>[\w.]+)\s*'
        r'(:\s*(?P<attr>[\w.]+))?\s*'
        r'(?P<extras>\[.*\])?\s*$'
        )
    """
    A regular expression describing the syntax for an entry point,
    which might look like:

        - module
        - package.module
        - package.module:attribute
        - package.module:object.attribute
        - package.module:attr [extra1, extra2]

    Other combinations are possible as well.

    The expression is lenient about whitespace around the ':',
    following the attr, and following any extras.
    """

    def load(self):
        """Load the entry point from its definition. If only a module
        is indicated by the value, return that module. Otherwise,
        return the named object.
        """
        match = self.pattern.match(self.value)
        module = import_module(match.group('module'))
        attrs = filter(None, (match.group('attr') or '').split('.'))
        return functools.reduce(getattr, attrs, module)

    @property
    def module(self):
        match = self.pattern.match(self.value)
        return match.group('module')

    @property
    def attr(self):
        match = self.pattern.match(self.value)
        return match.group('attr')

    @property
    def extras(self):
        match = self.pattern.match(self.value)
        return list(re.finditer(r'\w+', match.group('extras') or ''))

    @classmethod
    def _from_config(cls, config):
        return [
            cls(name, value, group)
            for group in config.sections()
            for name, value in config.items(group)
            ]

    @classmethod
    def _from_text(cls, text):
        config = ConfigParser(delimiters='=')
        # case sensitive: https://stackoverflow.com/q/1611799/812183
        config.optionxform = str
        try:
            config.read_string(text)
        except AttributeError:  # pragma: nocover
            # Python 2 has no read_string
            config.readfp(io.StringIO(text))
        return EntryPoint._from_config(config)

    def __iter__(self):
        """
        Supply iter so one may construct dicts of EntryPoints easily.
        """
        return iter((self.name, self))

    def __reduce__(self):
        return (
            self.__class__,
            (self.name, self.value, self.group),
            )


class PackagePath(pathlib.PurePosixPath):
    """A reference to a path in a package"""

    def read_text(self, encoding='utf-8'):
        with self.locate().open(encoding=encoding) as stream:
            return stream.read()

    def read_binary(self):
        with self.locate().open('rb') as stream:
            return stream.read()

    def locate(self):
        """Return a path-like object for this path"""
        return self.dist.locate_file(self)


class FileHash:
    def __init__(self, spec):
        self.mode, _, self.value = spec.partition('=')

    def __repr__(self):
        return '<FileHash mode: {} value: {}>'.format(self.mode, self.value)


class Distribution:
    """A Python distribution package."""

    @abc.abstractmethod
    def read_text(self, filename):
        """Attempt to load metadata file given by the name.

        :param filename: The name of the file in the distribution info.
        :return: The text if found, otherwise None.
        """

    @abc.abstractmethod
    def locate_file(self, path):
        """
        Given a path to a file in this distribution, return a path
        to it.
        """

    @classmethod
    def from_name(cls, name):
        """Return the Distribution for the given package name.

        :param name: The name of the distribution package to search for.
        :return: The Distribution instance (or subclass thereof) for the named
            package, if found.
        :raises PackageNotFoundError: When the named package's distribution
            metadata cannot be found.
        """
        for resolver in cls._discover_resolvers():
            dists = resolver(DistributionFinder.Context(name=name))
            dist = next(iter(dists), None)
            if dist is not None:
                return dist
        else:
            raise PackageNotFoundError(name)

    @classmethod
    def discover(cls, **kwargs):
        """Return an iterable of Distribution objects for all packages.

        Pass a ``context`` or pass keyword arguments for constructing
        a context.

        :context: A ``DistributionFinder.Context`` object.
        :return: Iterable of Distribution objects for all packages.
        """
        context = kwargs.pop('context', None)
        if context and kwargs:
            raise ValueError("cannot accept context and kwargs")
        context = context or DistributionFinder.Context(**kwargs)
        return itertools.chain.from_iterable(
            resolver(context)
            for resolver in cls._discover_resolvers()
            )

    @staticmethod
    def at(path):
        """Return a Distribution for the indicated metadata path

        :param path: a string or path-like object
        :return: a concrete Distribution instance for the path
        """
        return PathDistribution(pathlib.Path(path))

    @staticmethod
    def _discover_resolvers():
        """Search the meta_path for resolvers."""
        declared = (
            getattr(finder, 'find_distributions', None)
            for finder in sys.meta_path
            )
        return filter(None, declared)

    @classmethod
    def _local(cls, root='.'):
        from pep517 import build, meta
        system = build.compat_system(root)
        builder = functools.partial(
            meta.build,
            source_dir=root,
            system=system,
            )
        return PathDistribution(zipp.Path(meta.build_as_zip(builder)))

    @property
    def metadata(self):
        """Return the parsed metadata for this Distribution.

        The returned object will have keys that name the various bits of
        metadata. See PEP 566 for details.
        """
        text = (
            self.read_text('METADATA')
            or self.read_text('PKG-INFO')
            # This last clause is here to support old egg-info files. Its
            # effect is to just end up using the PathDistribution's self._path
            # (which points to the egg-info file) attribute unchanged.
            or self.read_text('')
            )
        return email_message_from_string(text)

    @property
    def version(self):
        """Return the 'Version' metadata for the distribution package."""
        return self.metadata['Version']

    @property
    def entry_points(self):
        return EntryPoint._from_text(self.read_text('entry_points.txt'))

    @property
    def files(self):
        """Files in this distribution.

        :return: List of PackagePath for this distribution or None

        Result is `None` if the metadata file that enumerates files
        (i.e. RECORD for dist-info or SOURCES.txt for egg-info) is
        missing.
        Result may be empty if the metadata exists but is empty.
        """
        file_lines = self._read_files_distinfo() or self._read_files_egginfo()

        def make_file(name, hash=None, size_str=None):
            result = PackagePath(name)
            result.hash = FileHash(hash) if hash else None
            result.size = int(size_str) if size_str else None
            result.dist = self
            return result

        return file_lines and list(starmap(make_file, csv.reader(file_lines)))

    def _read_files_distinfo(self):
        """
        Read the lines of RECORD
        """
        text = self.read_text('RECORD')
        return text and text.splitlines()

    def _read_files_egginfo(self):
        """
        SOURCES.txt might contain literal commas, so wrap each line
        in quotes.
        """
        text = self.read_text('SOURCES.txt')
        return text and map('"{}"'.format, text.splitlines())

    @property
    def requires(self):
        """Generated requirements specified for this Distribution"""
        reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs()
        return reqs and list(reqs)

    def _read_dist_info_reqs(self):
        return self.metadata.get_all('Requires-Dist')

    def _read_egg_info_reqs(self):
        source = self.read_text('requires.txt')
        return source and self._deps_from_requires_text(source)

    @classmethod
    def _deps_from_requires_text(cls, source):
        section_pairs = cls._read_sections(source.splitlines())
        sections = {
            section: list(map(operator.itemgetter('line'), results))
            for section, results in
            itertools.groupby(section_pairs, operator.itemgetter('section'))
            }
        return cls._convert_egg_info_reqs_to_simple_reqs(sections)

    @staticmethod
    def _read_sections(lines):
        section = None
        for line in filter(None, lines):
            section_match = re.match(r'\[(.*)\]$', line)
            if section_match:
                section = section_match.group(1)
                continue
            yield locals()

    @staticmethod
    def _convert_egg_info_reqs_to_simple_reqs(sections):
        """
        Historically, setuptools would solicit and store 'extra'
        requirements, including those with environment markers,
        in separate sections. More modern tools expect each
        dependency to be defined separately, with any relevant
        extras and environment markers attached directly to that
        requirement. This method converts the former to the
        latter. See _test_deps_from_requires_text for an example.
        """
        def make_condition(name):
            return name and 'extra == "{name}"'.format(name=name)

        def parse_condition(section):
            section = section or ''
            extra, sep, markers = section.partition(':')
            if extra and markers:
                markers = '({markers})'.format(markers=markers)
            conditions = list(filter(None, [markers, make_condition(extra)]))
            return '; ' + ' and '.join(conditions) if conditions else ''

        for section, deps in sections.items():
            for dep in deps:
                yield dep + parse_condition(section)


class DistributionFinder(MetaPathFinder):
    """
    A MetaPathFinder capable of discovering installed distributions.
    """

    class Context:
        """
        Keyword arguments presented by the caller to
        ``distributions()`` or ``Distribution.discover()``
        to narrow the scope of a search for distributions
        in all DistributionFinders.

        Each DistributionFinder may expect any parameters
        and should attempt to honor the canonical
        parameters defined below when appropriate.
        """

        name = None
        """
        Specific name for which a distribution finder should match.
        A name of ``None`` matches all distributions.
        """

        def __init__(self, **kwargs):
            vars(self).update(kwargs)

        @property
        def path(self):
            """
            The path that a distribution finder should search.

            Typically refers to Python package paths and defaults
            to ``sys.path``.
            """
            return vars(self).get('path', sys.path)

    @abc.abstractmethod
    def find_distributions(self, context=Context()):
        """
        Find distributions.

        Return an iterable of all Distribution instances capable of
        loading the metadata for packages matching the ``context``,
        a DistributionFinder.Context instance.
        """


class FastPath:
    """
    Micro-optimized class for searching a path for
    children.
    """

    def __init__(self, root):
        self.root = str(root)
        self.base = os.path.basename(self.root).lower()

    def joinpath(self, child):
        return pathlib.Path(self.root, child)

    def children(self):
        with suppress(Exception):
            return os.listdir(self.root or '')
        with suppress(Exception):
            return self.zip_children()
        return []

    def zip_children(self):
        zip_path = zipp.Path(self.root)
        names = zip_path.root.namelist()
        self.joinpath = zip_path.joinpath

        return unique_ordered(
            child.split(posixpath.sep, 1)[0]
            for child in names
            )

    def is_egg(self, search):
        base = self.base
        return (
            base == search.versionless_egg_name
            or base.startswith(search.prefix)
            and base.endswith('.egg'))

    def search(self, name):
        for child in self.children():
            n_low = child.lower()
            if (n_low in name.exact_matches
                    or n_low.startswith(name.prefix)
                    and n_low.endswith(name.suffixes)
                    # legacy case:
                    or self.is_egg(name) and n_low == 'egg-info'):
                yield self.joinpath(child)


class Prepared:
    """
    A prepared search for metadata on a possibly-named package.
    """
    normalized = ''
    prefix = ''
    suffixes = '.dist-info', '.egg-info'
    exact_matches = [''][:0]
    versionless_egg_name = ''

    def __init__(self, name):
        self.name = name
        if name is None:
            return
        self.normalized = name.lower().replace('-', '_')
        self.prefix = self.normalized + '-'
        self.exact_matches = [
            self.normalized + suffix for suffix in self.suffixes]
        self.versionless_egg_name = self.normalized + '.egg'


@install
class MetadataPathFinder(NullFinder, DistributionFinder):
    """A degenerate finder for distribution packages on the file system.

    This finder supplies only a find_distributions() method for versions
    of Python that do not have a PathFinder find_distributions().
    """

    def find_distributions(self, context=DistributionFinder.Context()):
        """
        Find distributions.

        Return an iterable of all Distribution instances capable of
        loading the metadata for packages matching ``context.name``
        (or all names if ``None`` indicated) along the paths in the list
        of directories ``context.path``.
        """
        found = self._search_paths(context.name, context.path)
        return map(PathDistribution, found)

    @classmethod
    def _search_paths(cls, name, paths):
        """Find metadata directories in paths heuristically."""
        return itertools.chain.from_iterable(
            path.search(Prepared(name))
            for path in map(FastPath, paths)
            )


class PathDistribution(Distribution):
    def __init__(self, path):
        """Construct a distribution from a path to the metadata directory.

        :param path: A pathlib.Path or similar object supporting
                     .joinpath(), __div__, .parent, and .read_text().
        """
        self._path = path

    def read_text(self, filename):
        with suppress(FileNotFoundError, IsADirectoryError, KeyError,
                      NotADirectoryError, PermissionError):
            return self._path.joinpath(filename).read_text(encoding='utf-8')
    read_text.__doc__ = Distribution.read_text.__doc__

    def locate_file(self, path):
        return self._path.parent / path


def distribution(distribution_name):
    """Get the ``Distribution`` instance for the named package.

    :param distribution_name: The name of the distribution package as a string.
    :return: A ``Distribution`` instance (or subclass thereof).
    """
    return Distribution.from_name(distribution_name)


def distributions(**kwargs):
    """Get all ``Distribution`` instances in the current environment.

    :return: An iterable of ``Distribution`` instances.
    """
    return Distribution.discover(**kwargs)


def metadata(distribution_name):
    """Get the metadata for the named package.

    :param distribution_name: The name of the distribution package to query.
    :return: An email.Message containing the parsed metadata.
    """
    return Distribution.from_name(distribution_name).metadata


def version(distribution_name):
    """Get the version string for the named package.

    :param distribution_name: The name of the distribution package to query.
    :return: The version string for the package as defined in the package's
        "Version" metadata key.
    """
    return distribution(distribution_name).version


def entry_points():
    """Return EntryPoint objects for all installed packages.

    :return: EntryPoint objects for all installed packages.
    """
    eps = itertools.chain.from_iterable(
        dist.entry_points for dist in distributions())
    by_group = operator.attrgetter('group')
    ordered = sorted(eps, key=by_group)
    grouped = itertools.groupby(ordered, by_group)
    return {
        group: tuple(eps)
        for group, eps in grouped
        }


def files(distribution_name):
    """Return a list of files for the named package.

    :param distribution_name: The name of the distribution package to query.
    :return: List of files composing the distribution.
    """
    return distribution(distribution_name).files


def requires(distribution_name):
    """
    Return a list of requirements for the named package.

    :return: An iterator of requirements, suitable for
        packaging.requirement.Requirement.
    """
    return distribution(distribution_name).requires


__version__ = version(__name__)
@ -0,0 +1,152 @@
from __future__ import absolute_import, unicode_literals

import io
import abc
import sys
import email


if sys.version_info > (3,):  # pragma: nocover
    import builtins
    from configparser import ConfigParser
    import contextlib
    FileNotFoundError = builtins.FileNotFoundError
    IsADirectoryError = builtins.IsADirectoryError
    NotADirectoryError = builtins.NotADirectoryError
    PermissionError = builtins.PermissionError
    map = builtins.map
    from itertools import filterfalse
else:  # pragma: nocover
    from backports.configparser import ConfigParser
    from itertools import imap as map  # type: ignore
    from itertools import ifilterfalse as filterfalse
    import contextlib2 as contextlib
    FileNotFoundError = IOError, OSError
    IsADirectoryError = IOError, OSError
    NotADirectoryError = IOError, OSError
    PermissionError = IOError, OSError

str = type('')

suppress = contextlib.suppress

if sys.version_info > (3, 5):  # pragma: nocover
    import pathlib
else:  # pragma: nocover
    import pathlib2 as pathlib

try:
    ModuleNotFoundError = builtins.FileNotFoundError
except (NameError, AttributeError):  # pragma: nocover
    ModuleNotFoundError = ImportError  # type: ignore


if sys.version_info >= (3,):  # pragma: nocover
    from importlib.abc import MetaPathFinder
else:  # pragma: nocover
    class MetaPathFinder(object):
        __metaclass__ = abc.ABCMeta


__metaclass__ = type
__all__ = [
    'install', 'NullFinder', 'MetaPathFinder', 'ModuleNotFoundError',
    'pathlib', 'ConfigParser', 'map', 'suppress', 'FileNotFoundError',
    'NotADirectoryError', 'email_message_from_string',
    ]


def install(cls):
    """
    Class decorator for installation on sys.meta_path.

    Adds the backport DistributionFinder to sys.meta_path and
    attempts to disable the finder functionality of the stdlib
    DistributionFinder.
    """
    sys.meta_path.append(cls())
    disable_stdlib_finder()
    return cls


def disable_stdlib_finder():
    """
    Give the backport primacy for discovering path-based distributions
    by monkey-patching the stdlib O_O.

    See #91 for more background for rationale on this sketchy
    behavior.
    """
    def matches(finder):
        return (
            getattr(finder, '__module__', None) == '_frozen_importlib_external'
            and hasattr(finder, 'find_distributions')
            )
    for finder in filter(matches, sys.meta_path):  # pragma: nocover
        del finder.find_distributions


class NullFinder:
    """
    A "Finder" (aka "MetaClassFinder") that never finds any modules,
    but may find distributions.
    """
    @staticmethod
    def find_spec(*args, **kwargs):
        return None

    # In Python 2, the import system requires finders
    # to have a find_module() method, but this usage
    # is deprecated in Python 3 in favor of find_spec().
    # For the purposes of this finder (i.e. being present
    # on sys.meta_path but having no other import
    # system functionality), the two methods are identical.
    find_module = find_spec


def py2_message_from_string(text):  # nocoverpy3
    # Work around https://bugs.python.org/issue25545 where
    # email.message_from_string cannot handle Unicode on Python 2.
    io_buffer = io.StringIO(text)
    return email.message_from_file(io_buffer)


email_message_from_string = (
    py2_message_from_string
    if sys.version_info < (3,) else
    email.message_from_string
    )


class PyPy_repr:
    """
    Override repr for EntryPoint objects on PyPy to avoid __iter__ access.
    Ref #97, #102.
    """
    affected = hasattr(sys, 'pypy_version_info')

    def __compat_repr__(self):  # pragma: nocover
        def make_param(name):
            value = getattr(self, name)
            return '{name}={value!r}'.format(**locals())
        params = ', '.join(map(make_param, self._fields))
        return 'EntryPoint({params})'.format(**locals())

    if affected:  # pragma: nocover
        __repr__ = __compat_repr__
    del affected


# from itertools recipes
def unique_everseen(iterable):  # pragma: nocover
    "List unique elements, preserving order. Remember all elements ever seen."
    seen = set()
    seen_add = seen.add

    for element in filterfalse(seen.__contains__, iterable):
        seen_add(element)
        yield element


unique_ordered = (
    unique_everseen if sys.version_info < (3, 7) else dict.fromkeys)
@ -0,0 +1,297 @@
=========================
 importlib_metadata NEWS
=========================

v1.7.0
======

* ``PackageNotFoundError`` now has a custom ``__str__``
  mentioning "package metadata" being missing to help
  guide users to the cause when the package is installed
  but no metadata is present. Closes #124.

v1.6.1
======

* Added ``Distribution._local()`` as a provisional
  demonstration of how to load metadata for a local
  package. Implicitly requires that
  `pep517 <https://pypi.org/project/pep517>`_ is
  installed. Ref #42.
* Ensure inputs to FastPath are Unicode. Closes #121.
* Tests now rely on ``importlib.resources.files`` (and
  backport) instead of the older ``path`` function.
* Support any iterable from ``find_distributions``.
  Closes #122.

v1.6.0
======

* Added ``module`` and ``attr`` attributes to ``EntryPoint``

v1.5.2
======

* Fix redundant entries from ``FastPath.zip_children``.
  Closes #117.

v1.5.1
======

* Improve reliability and consistency of compatibility
  imports for contextlib and pathlib when running tests.
  Closes #116.

v1.5.0
======

* Additional performance optimizations in FastPath now
  save an additional 20% on a typical call.
* Correct for issue where PyOxidizer finder has no
  ``__module__`` attribute. Closes #110.

v1.4.0
======

* Through careful optimization, ``distribution()`` is
  3-4x faster. Thanks to Antony Lee for the
  contribution. Closes #95.

* When searching through ``sys.path``, if any error
  occurs attempting to list a path entry, that entry
  is skipped, making the system much more lenient
  to errors. Closes #94.

v1.3.0
======

* Improve custom finders documentation. Closes #105.

v1.2.0
======

* Once again, drop support for Python 3.4. Ref #104.

v1.1.3
======

* Restored support for Python 3.4 due to improper version
  compatibility declarations in the v1.1.0 and v1.1.1
  releases. Closes #104.

v1.1.2
======

* Repaired project metadata to correctly declare the
  ``python_requires`` directive. Closes #103.

v1.1.1
======

* Fixed ``repr(EntryPoint)`` on PyPy 3 also. Closes #102.

v1.1.0
======

* Dropped support for Python 3.4.
* EntryPoints are now pickleable. Closes #96.
* Fixed ``repr(EntryPoint)`` on PyPy 2. Closes #97.

v1.0.0
======

* Project adopts semver for versioning.

* Removed compatibility shim introduced in 0.23.

* For better compatibility with the stdlib implementation and to
  avoid the same distributions being discovered by the stdlib and
  backport implementations, the backport now disables the
  stdlib DistributionFinder during initialization (import time).
  Closes #91 and closes #100.

0.23
====
* Added a compatibility shim to prevent failures on beta releases
  of Python before the signature changed to accept the
  "context" parameter on find_distributions. This workaround
  will have a limited lifespan, not to extend beyond release of
  Python 3.8 final.

0.22
====
* Renamed ``package`` parameter to ``distribution_name``
  as `recommended <https://bugs.python.org/issue34632#msg349423>`_
  in the following functions: ``distribution``, ``metadata``,
  ``version``, ``files``, and ``requires``. This
  backward-incompatible change is expected to have little impact
  as these functions are assumed to be primarily used with
  positional parameters.

0.21
====
* ``importlib.metadata`` now exposes the ``DistributionFinder``
  metaclass and references it in the docs for extending the
  search algorithm.
* Add ``Distribution.at`` for constructing a Distribution object
  from a known metadata directory on the file system. Closes #80.
* Distribution finders now receive a context object that
  supplies ``.path`` and ``.name`` properties. This change
  introduces a fundamental backward incompatibility for
  any projects implementing a ``find_distributions`` method
  on a ``MetaPathFinder``. This new layer of abstraction
  allows this context to be supplied directly or constructed
  on demand and opens the opportunity for a
  ``find_distributions`` method to solicit additional
  context from the caller. Closes #85.

0.20
====
* Clarify in the docs that calls to ``.files`` could return
  ``None`` when the metadata is not present. Closes #69.
* Return all requirements and not just the first for dist-info
  packages. Closes #67.

0.19
====
* Restrain over-eager egg metadata resolution.
* Add support for entry points with colons in the name. Closes #75.

0.18
====
* Parse entry points case sensitively. Closes #68
* Add a version constraint on the backport configparser package. Closes #66

0.17
====
* Fix a permission problem in the tests on Windows.

0.16
====
* Don't crash if there exists an EGG-INFO directory on sys.path.

0.15
====
* Fix documentation.

0.14
====
* Removed ``local_distribution`` function from the API.
  **This backward-incompatible change removes this
  behavior summarily**. Projects should remove their
  reliance on this behavior. A replacement behavior is
  under review in the `pep517 project
  <https://github.com/pypa/pep517>`_. Closes #42.

0.13
====
* Update docstrings to match PEP 8. Closes #63.
* Merged modules into one module. Closes #62.

0.12
====
* Add support for eggs. !65; Closes #19.
|
||||||
|
|
||||||
|
0.11
|
||||||
|
====
|
||||||
|
* Support generic zip files (not just wheels). Closes #59
|
||||||
|
* Support zip files with multiple distributions in them. Closes #60
|
||||||
|
* Fully expose the public API in ``importlib_metadata.__all__``.
|
||||||
|
|
||||||
|
0.10
|
||||||
|
====
|
||||||
|
* The ``Distribution`` ABC is now officially part of the public API.
|
||||||
|
Closes #37.
|
||||||
|
* Fixed support for older single file egg-info formats. Closes #43.
|
||||||
|
* Fixed a testing bug when ``$CWD`` has spaces in the path. Closes #50.
|
||||||
|
* Add Python 3.8 to the ``tox`` testing matrix.
|
||||||
|
|
||||||
|
0.9
|
||||||
|
===
|
||||||
|
* Fixed issue where entry points without an attribute would raise an
|
||||||
|
Exception. Closes #40.
|
||||||
|
* Removed unused ``name`` parameter from ``entry_points()``. Closes #44.
|
||||||
|
* ``DistributionFinder`` classes must now be instantiated before
|
||||||
|
being placed on ``sys.meta_path``.
|
||||||
|
|
||||||
|
0.8
|
||||||
|
===
|
||||||
|
* This library can now discover/enumerate all installed packages. **This
|
||||||
|
backward-incompatible change alters the protocol finders must
|
||||||
|
implement to support distribution package discovery.** Closes #24.
|
||||||
|
* The signature of ``find_distributions()`` on custom installer finders
|
||||||
|
should now accept two parameters, ``name`` and ``path`` and
|
||||||
|
these parameters must supply defaults.
|
||||||
|
* The ``entry_points()`` method no longer accepts a package name
|
||||||
|
but instead returns all entry points in a dictionary keyed by the
|
||||||
|
``EntryPoint.group``. The ``resolve`` method has been removed. Instead,
|
||||||
|
call ``EntryPoint.load()``, which has the same semantics as
|
||||||
|
``pkg_resources`` and ``entrypoints``. **This is a backward incompatible
|
||||||
|
change.**
|
||||||
|
* Metadata is now always returned as Unicode text regardless of
|
||||||
|
Python version. Closes #29.
|
||||||
|
* This library can now discover metadata for a 'local' package (found
|
||||||
|
in the current-working directory). Closes #27.
|
||||||
|
* Added ``files()`` function for resolving files from a distribution.
|
||||||
|
* Added a new ``requires()`` function, which returns the requirements
|
||||||
|
for a package suitable for parsing by
|
||||||
|
``packaging.requirements.Requirement``. Closes #18.
|
||||||
|
* The top-level ``read_text()`` function has been removed. Use
|
||||||
|
``PackagePath.read_text()`` on instances returned by the ``files()``
|
||||||
|
function. **This is a backward incompatible change.**
|
||||||
|
* Release dates are now automatically injected into the changelog
|
||||||
|
based on SCM tags.
|
||||||
|
|
||||||
|
0.7
|
||||||
|
===
|
||||||
|
* Fixed issue where packages with dashes in their names would
|
||||||
|
not be discovered. Closes #21.
|
||||||
|
* Distribution lookup is now case-insensitive. Closes #20.
|
||||||
|
* Wheel distributions can no longer be discovered by their module
|
||||||
|
name. Like Path distributions, they must be indicated by their
|
||||||
|
distribution package name.
|
||||||
|
|
||||||
|
0.6
|
||||||
|
===
|
||||||
|
* Removed ``importlib_metadata.distribution`` function. Now
|
||||||
|
the public interface is primarily the utility functions exposed
|
||||||
|
in ``importlib_metadata.__all__``. Closes #14.
|
||||||
|
* Added two new utility functions ``read_text`` and
|
||||||
|
``metadata``.
|
||||||
|
|
||||||
|
0.5
|
||||||
|
===
|
||||||
|
* Updated README and removed details about Distribution
|
||||||
|
class, now considered private. Closes #15.
|
||||||
|
* Added test suite support for Python 3.4+.
|
||||||
|
* Fixed SyntaxErrors on Python 3.4 and 3.5. !12
|
||||||
|
* Fixed errors on Windows joining Path elements. !15
|
||||||
|
|
||||||
|
0.4
|
||||||
|
===
|
||||||
|
* Housekeeping.
|
||||||
|
|
||||||
|
0.3
|
||||||
|
===
|
||||||
|
* Added usage documentation. Closes #8
|
||||||
|
* Add support for getting metadata from wheels on ``sys.path``. Closes #9
|
||||||
|
|
||||||
|
0.2
|
||||||
|
===
|
||||||
|
* Added ``importlib_metadata.entry_points()``. Closes #1
|
||||||
|
* Added ``importlib_metadata.resolve()``. Closes #12
|
||||||
|
* Add support for Python 2.7. Closes #4
|
||||||
|
|
||||||
|
0.1
|
||||||
|
===
|
||||||
|
* Initial release.
|
||||||
|
|
||||||
|
|
||||||
|
..
|
||||||
|
Local Variables:
|
||||||
|
mode: change-log-mode
|
||||||
|
indent-tabs-mode: nil
|
||||||
|
sentence-end-double-space: t
|
||||||
|
fill-column: 78
|
||||||
|
coding: utf-8
|
||||||
|
End:
|
@ -0,0 +1,185 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# importlib_metadata documentation build configuration file, created by
# sphinx-quickstart on Thu Nov 30 10:21:00 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))


# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'rst.linker',
    'sphinx.ext.autodoc',
    'sphinx.ext.coverage',
    'sphinx.ext.doctest',
    'sphinx.ext.intersphinx',
    'sphinx.ext.viewcode',
    ]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'importlib_metadata'
copyright = '2017-2019, Jason R. Coombs, Barry Warsaw'
author = 'Jason R. Coombs, Barry Warsaw'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False


# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'default'

# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
    '**': [
        'relations.html',  # needs 'show_related': True theme option to display
        'searchbox.html',
        ]
    }


# -- Options for HTMLHelp output ------------------------------------------

# Output file base name for HTML help builder.
htmlhelp_basename = 'importlib_metadatadoc'


# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',

    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
    }

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'importlib_metadata.tex',
     'importlib\\_metadata Documentation',
     'Brett Cannon, Barry Warsaw', 'manual'),
    ]


# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'importlib_metadata', 'importlib_metadata Documentation',
     [author], 1)
    ]


# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'importlib_metadata', 'importlib_metadata Documentation',
     author, 'importlib_metadata', 'One line description of project.',
     'Miscellaneous'),
    ]


# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
    'python': ('https://docs.python.org/3', None),
    'importlib_resources': (
        'https://importlib-resources.readthedocs.io/en/latest/', None
        ),
    }


# For rst.linker, inject release dates into changelog.rst
link_files = {
    'changelog.rst': dict(
        replace=[
            dict(
                pattern=r'^(?m)((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n',
                with_scm='{text}\n{rev[timestamp]:%Y-%m-%d}\n\n',
                ),
            ],
        ),
    }
@ -0,0 +1,50 @@
===============================
 Welcome to importlib_metadata
===============================

``importlib_metadata`` is a library which provides an API for accessing an
installed package's metadata (see :pep:`566`), such as its entry points or its top-level
name. This functionality intends to replace most uses of the ``pkg_resources``
`entry point API`_ and `metadata API`_. Along with :mod:`importlib.resources` in
Python 3.7 and newer (backported as :doc:`importlib_resources <importlib_resources:index>` for older
versions of Python), this can eliminate the need to use the older and less
efficient ``pkg_resources`` package.

``importlib_metadata`` is a backport of Python 3.8's standard library
:doc:`importlib.metadata <library/importlib.metadata>` module for Python 2.7, and 3.4 through 3.7. Users of
Python 3.8 and beyond are encouraged to use the standard library module.
When imported on Python 3.8 and later, ``importlib_metadata`` replaces the
DistributionFinder behavior from the stdlib, but leaves the API intact.
Developers looking for detailed API descriptions should refer to the Python
3.8 standard library documentation.

The documentation here includes a general :ref:`usage <using>` guide.


.. toctree::
   :maxdepth: 2
   :caption: Contents:

   using.rst
   changelog (links).rst


Project details
===============

 * Project home: https://gitlab.com/python-devs/importlib_metadata
 * Report bugs at: https://gitlab.com/python-devs/importlib_metadata/issues
 * Code hosting: https://gitlab.com/python-devs/importlib_metadata.git
 * Documentation: http://importlib_metadata.readthedocs.io/


Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`


.. _`entry point API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#entry-points
.. _`metadata API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#metadata-api
@ -0,0 +1,260 @@
.. _using:

=================================
 Using :mod:`!importlib_metadata`
=================================

``importlib_metadata`` is a library that provides access to installed
package metadata. Built in part on Python's import system, this library
intends to replace similar functionality in the `entry point
API`_ and `metadata API`_ of ``pkg_resources``. Along with
:mod:`importlib.resources` in Python 3.7
and newer (backported as :doc:`importlib_resources <importlib_resources:index>` for older versions of
Python), this can eliminate the need to use the older and less efficient
``pkg_resources`` package.

By "installed package" we generally mean a third-party package installed into
Python's ``site-packages`` directory via tools such as `pip
<https://pypi.org/project/pip/>`_. Specifically,
it means a package with either a discoverable ``dist-info`` or ``egg-info``
directory, and metadata defined by :pep:`566` or its older specifications.
By default, package metadata can live on the file system or in zip archives on
:data:`sys.path`. Through an extension mechanism, the metadata can live almost
anywhere.


Overview
========

Let's say you wanted to get the version string for a package you've installed
using ``pip``. We start by creating a virtual environment and installing
something into it::

    $ python3 -m venv example
    $ source example/bin/activate
    (example) $ pip install importlib_metadata
    (example) $ pip install wheel

You can get the version string for ``wheel`` by running the following::

    (example) $ python
    >>> from importlib_metadata import version
    >>> version('wheel')
    '0.32.3'

You can also get the set of entry points keyed by group, such as
``console_scripts``, ``distutils.commands`` and others. Each group contains a
sequence of :ref:`EntryPoint <entry-points>` objects.
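
As a quick check, a minimal sketch (the exact groups present depend on what
is installed in your environment)::

    >>> from importlib_metadata import entry_points
    >>> 'console_scripts' in entry_points()
    True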

You can get the :ref:`metadata for a distribution <metadata>`::

    >>> list(metadata('wheel'))
    ['Metadata-Version', 'Name', 'Version', 'Summary', 'Home-page', 'Author', 'Author-email', 'Maintainer', 'Maintainer-email', 'License', 'Project-URL', 'Project-URL', 'Project-URL', 'Keywords', 'Platform', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Requires-Python', 'Provides-Extra', 'Requires-Dist', 'Requires-Dist']

You can also get a :ref:`distribution's version number <version>`, list its
:ref:`constituent files <files>`, and get a list of the distribution's
:ref:`requirements`.


Functional API
==============

This package provides the following functionality via its public API.


.. _entry-points:

Entry points
------------

The ``entry_points()`` function returns a dictionary of all entry points,
keyed by group. Entry points are represented by ``EntryPoint`` instances;
each ``EntryPoint`` has ``.name``, ``.group``, and ``.value`` attributes and
a ``.load()`` method to resolve the value. There are also ``.module``,
``.attr``, and ``.extras`` attributes for getting the components of the
``.value`` attribute::

    >>> eps = entry_points()
    >>> list(eps)
    ['console_scripts', 'distutils.commands', 'distutils.setup_keywords', 'egg_info.writers', 'setuptools.installation']
    >>> scripts = eps['console_scripts']
    >>> wheel = [ep for ep in scripts if ep.name == 'wheel'][0]
    >>> wheel
    EntryPoint(name='wheel', value='wheel.cli:main', group='console_scripts')
    >>> wheel.module
    'wheel.cli'
    >>> wheel.attr
    'main'
    >>> wheel.extras
    []
    >>> main = wheel.load()
    >>> main
    <function main at 0x103528488>

The ``group`` and ``name`` are arbitrary values defined by the package author
and usually a client will wish to resolve all entry points for a particular
group. Read `the setuptools docs
<https://setuptools.readthedocs.io/en/latest/setuptools.html#dynamic-discovery-of-services-and-plugins>`_
for more information on entry points, their definition, and usage.


.. _metadata:

Distribution metadata
---------------------

Every distribution includes some metadata, which you can extract using the
``metadata()`` function::

    >>> wheel_metadata = metadata('wheel')

The keys of the returned data structure [#f1]_ name the metadata keywords, and
their values are returned unparsed from the distribution metadata::

    >>> wheel_metadata['Requires-Python']
    '>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*'


.. _version:

Distribution versions
---------------------

The ``version()`` function is the quickest way to get a distribution's version
number, as a string::

    >>> version('wheel')
    '0.32.3'


.. _files:

Distribution files
------------------

You can also get the full set of files contained within a distribution. The
``files()`` function takes a distribution package name and returns all of the
files installed by this distribution. Each file object returned is a
``PackagePath``, a :class:`pathlib.Path` derived object with additional ``dist``,
``size``, and ``hash`` properties as indicated by the metadata. For example::

    >>> util = [p for p in files('wheel') if 'util.py' in str(p)][0]
    >>> util
    PackagePath('wheel/util.py')
    >>> util.size
    859
    >>> util.dist
    <importlib_metadata._hooks.PathDistribution object at 0x101e0cef0>
    >>> util.hash
    <FileHash mode: sha256 value: bYkw5oMccfazVCoYQwKkkemoVyMAFoR34mmKBx8R1NI>

Once you have the file, you can also read its contents::

    >>> print(util.read_text())
    import base64
    import sys
    ...
    def as_bytes(s):
        if isinstance(s, text_type):
            return s.encode('utf-8')
        return s

In the case where the metadata file listing files
(RECORD or SOURCES.txt) is missing, ``files()`` will
return ``None``. The caller may wish to wrap calls to
``files()`` in `always_iterable
<https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.always_iterable>`_
or otherwise guard against this condition if the target
distribution is not known to have the metadata present.
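
A minimal sketch of such a guard (``example-pkg`` here is a hypothetical
package name)::

    from importlib_metadata import files

    # files() returns None when the RECORD/SOURCES.txt metadata is absent,
    # so normalize the result to an empty iterable before looping.
    for file in files('example-pkg') or []:
        print(file)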

.. _requirements:

Distribution requirements
-------------------------

To get the full set of requirements for a distribution, use the ``requires()``
function::

    >>> requires('wheel')
    ["pytest (>=3.0.0) ; extra == 'test'", "pytest-cov ; extra == 'test'"]


Distributions
=============

While the above API is the most common and convenient usage, you can get all
of that information from the ``Distribution`` class. A ``Distribution`` is an
abstract object that represents the metadata for a Python package. You can
get the ``Distribution`` instance::

    >>> from importlib_metadata import distribution
    >>> dist = distribution('wheel')

Thus, an alternative way to get the version number is through the
``Distribution`` instance::

    >>> dist.version
    '0.32.3'

There are all kinds of additional metadata available on the ``Distribution``
instance::

    >>> dist.metadata['Requires-Python']
    '>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*'
    >>> dist.metadata['License']
    'MIT'

The full set of available metadata is not described here. See :pep:`566`
for additional details.


Extending the search algorithm
==============================

Because package metadata is not available through :data:`sys.path` searches, or
package loaders directly, the metadata for a package is found through import
system `finders`_. To find a distribution package's metadata,
``importlib.metadata`` queries the list of :term:`meta path finders <meta path finder>` on
:data:`sys.meta_path`.

By default ``importlib_metadata`` installs a finder for distribution packages
found on the file system. This finder doesn't actually find any *packages*,
but it can find the packages' metadata.

The abstract class :py:class:`importlib.abc.MetaPathFinder` defines the
interface expected of finders by Python's import system.
``importlib_metadata`` extends this protocol by looking for an optional
``find_distributions`` callable on the finders from
:data:`sys.meta_path` and presents this extended interface as the
``DistributionFinder`` abstract base class, which defines this abstract
method::

    @abc.abstractmethod
    def find_distributions(context=DistributionFinder.Context()):
        """Return an iterable of all Distribution instances capable of
        loading the metadata for packages for the indicated ``context``.
        """

The ``DistributionFinder.Context`` object provides ``.path`` and ``.name``
properties indicating the path to search and name to match and may
supply other relevant context.

What this means in practice is that to support finding distribution package
metadata in locations other than the file system, subclass
``Distribution`` and implement the abstract methods. Then from
a custom finder, return instances of this derived ``Distribution`` in the
``find_distributions()`` method.
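
As an illustration only (every name here is hypothetical, not part of the
published API), a finder that serves metadata from an in-memory mapping
might look like this::

    import sys

    from importlib_metadata import Distribution

    class InMemoryDistribution(Distribution):
        """Serve metadata from a dict mapping file names to text."""

        def __init__(self, file_map):
            self.file_map = file_map

        def read_text(self, filename):
            # Return None for missing files, per the Distribution contract.
            return self.file_map.get(filename)

        def locate_file(self, path):
            raise NotImplementedError("in-memory distribution has no files")

    class InMemoryFinder:
        """A minimal finder exposing only ``find_distributions``."""

        def __init__(self, file_map):
            self.file_map = file_map

        def find_distributions(self, context):
            # A real finder would consult context.name and context.path
            # and yield only matching distributions.
            yield InMemoryDistribution(self.file_map)

    sys.meta_path.append(
        InMemoryFinder({'METADATA': 'Name: memory-pkg\nVersion: 1.0\n'}))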

.. _`entry point API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#entry-points
.. _`metadata API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#metadata-api
.. _`finders`: https://docs.python.org/3/reference/import.html#finders-and-loaders


.. rubric:: Footnotes

.. [#f1] Technically, the returned distribution metadata object is an
         :class:`email.message.EmailMessage`
         instance, but this is an implementation detail, and not part of the
         stable API. You should only use dictionary-like methods and syntax
         to access the metadata contents.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,232 @@
from __future__ import unicode_literals

import os
import sys
import shutil
import tempfile
import textwrap
import test.support

from .._compat import pathlib, contextlib


__metaclass__ = type


@contextlib.contextmanager
def tempdir():
    tmpdir = tempfile.mkdtemp()
    try:
        yield pathlib.Path(tmpdir)
    finally:
        shutil.rmtree(tmpdir)


@contextlib.contextmanager
def save_cwd():
    orig = os.getcwd()
    try:
        yield
    finally:
        os.chdir(orig)


@contextlib.contextmanager
def tempdir_as_cwd():
    with tempdir() as tmp:
        with save_cwd():
            os.chdir(str(tmp))
            yield tmp


@contextlib.contextmanager
def install_finder(finder):
    sys.meta_path.append(finder)
    try:
        yield
    finally:
        sys.meta_path.remove(finder)


class Fixtures:
    def setUp(self):
        self.fixtures = contextlib.ExitStack()
        self.addCleanup(self.fixtures.close)


class SiteDir(Fixtures):
    def setUp(self):
        super(SiteDir, self).setUp()
        self.site_dir = self.fixtures.enter_context(tempdir())


class OnSysPath(Fixtures):
    @staticmethod
    @contextlib.contextmanager
    def add_sys_path(dir):
        sys.path[:0] = [str(dir)]
        try:
            yield
        finally:
            sys.path.remove(str(dir))

    def setUp(self):
        super(OnSysPath, self).setUp()
        self.fixtures.enter_context(self.add_sys_path(self.site_dir))


class DistInfoPkg(OnSysPath, SiteDir):
    files = {
        "distinfo_pkg-1.0.0.dist-info": {
            "METADATA": """
                Name: distinfo-pkg
                Author: Steven Ma
                Version: 1.0.0
                Requires-Dist: wheel >= 1.0
                Requires-Dist: pytest; extra == 'test'
                """,
            "RECORD": "mod.py,sha256=abc,20\n",
            "entry_points.txt": """
                [entries]
                main = mod:main
                ns:sub = mod:main
                """
            },
        "mod.py": """
            def main():
                print("hello world")
            """,
        }

    def setUp(self):
        super(DistInfoPkg, self).setUp()
        build_files(DistInfoPkg.files, self.site_dir)


class DistInfoPkgOffPath(SiteDir):
    def setUp(self):
        super(DistInfoPkgOffPath, self).setUp()
        build_files(DistInfoPkg.files, self.site_dir)


class EggInfoPkg(OnSysPath, SiteDir):
    files = {
        "egginfo_pkg.egg-info": {
            "PKG-INFO": """
                Name: egginfo-pkg
                Author: Steven Ma
                License: Unknown
                Version: 1.0.0
                Classifier: Intended Audience :: Developers
                Classifier: Topic :: Software Development :: Libraries
                """,
            "SOURCES.txt": """
                mod.py
                egginfo_pkg.egg-info/top_level.txt
                """,
            "entry_points.txt": """
                [entries]
                main = mod:main
                """,
            "requires.txt": """
                wheel >= 1.0; python_version >= "2.7"
                [test]
                pytest
                """,
            "top_level.txt": "mod\n"
            },
        "mod.py": """
            def main():
                print("hello world")
            """,
        }

    def setUp(self):
        super(EggInfoPkg, self).setUp()
        build_files(EggInfoPkg.files, prefix=self.site_dir)


class EggInfoFile(OnSysPath, SiteDir):
    files = {
        "egginfo_file.egg-info": """
            Metadata-Version: 1.0
            Name: egginfo_file
            Version: 0.1
            Summary: An example package
            Home-page: www.example.com
            Author: Eric Haffa-Vee
            Author-email: eric@example.coms
            License: UNKNOWN
            Description: UNKNOWN
            Platform: UNKNOWN
            """,
        }

    def setUp(self):
        super(EggInfoFile, self).setUp()
        build_files(EggInfoFile.files, prefix=self.site_dir)


class LocalPackage:
    files = {
        "setup.py": """
            import setuptools
            setuptools.setup(name="local-pkg", version="2.0.1")
            """,
        }

    def setUp(self):
        self.fixtures = contextlib.ExitStack()
        self.addCleanup(self.fixtures.close)
        self.fixtures.enter_context(tempdir_as_cwd())
        build_files(self.files)


def build_files(file_defs, prefix=pathlib.Path()):
    """Build a set of files/directories, as described by the

    file_defs dictionary. Each key/value pair in the dictionary is
    interpreted as a filename/contents pair. If the contents value is a
    dictionary, a directory is created, and the dictionary interpreted
    as the files within it, recursively.

    For example:

    {"README.txt": "A README file",
     "foo": {
        "__init__.py": "",
        "bar": {
            "__init__.py": "",
        },
        "baz.py": "# Some code",
     }
    }
    """
    for name, contents in file_defs.items():
        full_name = prefix / name
        if isinstance(contents, dict):
            full_name.mkdir()
            build_files(contents, prefix=full_name)
        else:
            if isinstance(contents, bytes):
                with full_name.open('wb') as f:
                    f.write(contents)
            else:
                with full_name.open('w') as f:
                    f.write(DALS(contents))


class FileBuilder:
    def unicode_filename(self):
        return test.support.FS_NONASCII or \
            self.skip("File system does not support non-ascii.")


def DALS(str):
    "Dedent and left-strip"
    return textwrap.dedent(str).lstrip()


class NullFinder:
    def find_module(self, name):
        pass
@ -0,0 +1,176 @@
import re
import textwrap
import unittest

from . import fixtures
from .. import (
    Distribution, PackageNotFoundError, __version__, distribution,
    entry_points, files, metadata, requires, version,
    )

try:
    from collections.abc import Iterator
except ImportError:
    from collections import Iterator  # noqa: F401

try:
    from builtins import str as text
except ImportError:
    from __builtin__ import unicode as text


class APITests(
        fixtures.EggInfoPkg,
        fixtures.DistInfoPkg,
        fixtures.EggInfoFile,
        unittest.TestCase):

    version_pattern = r'\d+\.\d+(\.\d)?'

    def test_retrieves_version_of_self(self):
        pkg_version = version('egginfo-pkg')
        assert isinstance(pkg_version, text)
        assert re.match(self.version_pattern, pkg_version)

    def test_retrieves_version_of_distinfo_pkg(self):
        pkg_version = version('distinfo-pkg')
        assert isinstance(pkg_version, text)
        assert re.match(self.version_pattern, pkg_version)

    def test_for_name_does_not_exist(self):
        with self.assertRaises(PackageNotFoundError):
            distribution('does-not-exist')

    def test_for_top_level(self):
        self.assertEqual(
            distribution('egginfo-pkg').read_text('top_level.txt').strip(),
            'mod')

    def test_read_text(self):
        top_level = [
            path for path in files('egginfo-pkg')
            if path.name == 'top_level.txt'
            ][0]
        self.assertEqual(top_level.read_text(), 'mod\n')

    def test_entry_points(self):
        entries = dict(entry_points()['entries'])
        ep = entries['main']
        self.assertEqual(ep.value, 'mod:main')
        self.assertEqual(ep.extras, [])

    def test_metadata_for_this_package(self):
        md = metadata('egginfo-pkg')
        assert md['author'] == 'Steven Ma'
        assert md['LICENSE'] == 'Unknown'
        assert md['Name'] == 'egginfo-pkg'
        classifiers = md.get_all('Classifier')
        assert 'Topic :: Software Development :: Libraries' in classifiers

    def test_importlib_metadata_version(self):
        assert re.match(self.version_pattern, __version__)

    @staticmethod
    def _test_files(files):
        root = files[0].root
        for file in files:
            assert file.root == root
            assert not file.hash or file.hash.value
            assert not file.hash or file.hash.mode == 'sha256'
            assert not file.size or file.size >= 0
            assert file.locate().exists()
            assert isinstance(file.read_binary(), bytes)
            if file.name.endswith('.py'):
                file.read_text()

    def test_file_hash_repr(self):
        try:
            assertRegex = self.assertRegex
        except AttributeError:
            # Python 2
            assertRegex = self.assertRegexpMatches

        util = [
            p for p in files('distinfo-pkg')
            if p.name == 'mod.py'
            ][0]
        assertRegex(
            repr(util.hash),
            '<FileHash mode: sha256 value: .*>')

    def test_files_dist_info(self):
        self._test_files(files('distinfo-pkg'))

    def test_files_egg_info(self):
        self._test_files(files('egginfo-pkg'))

    def test_version_egg_info_file(self):
        self.assertEqual(version('egginfo-file'), '0.1')

    def test_requires_egg_info_file(self):
        requirements = requires('egginfo-file')
        self.assertIsNone(requirements)

    def test_requires_egg_info(self):
        deps = requires('egginfo-pkg')
        assert len(deps) == 2
        assert any(
            dep == 'wheel >= 1.0; python_version >= "2.7"'
            for dep in deps
            )

    def test_requires_dist_info(self):
        deps = requires('distinfo-pkg')
        assert len(deps) == 2
        assert all(deps)
        assert 'wheel >= 1.0' in deps
        assert "pytest; extra == 'test'" in deps

    def test_more_complex_deps_requires_text(self):
        requires = textwrap.dedent("""
            dep1
            dep2

            [:python_version < "3"]
            dep3

            [extra1]
            dep4

            [extra2:python_version < "3"]
            dep5
            """)
        deps = sorted(Distribution._deps_from_requires_text(requires))
        expected = [
            'dep1',
            'dep2',
            'dep3; python_version < "3"',
            'dep4; extra == "extra1"',
            'dep5; (python_version < "3") and extra == "extra2"',
            ]
        # It's important that the environment marker expression be
        # wrapped in parentheses to avoid the following 'and' binding more
        # tightly than some other part of the environment expression.

        assert deps == expected


class OffSysPathTests(fixtures.DistInfoPkgOffPath, unittest.TestCase):
    def test_find_distributions_specified_path(self):
        dists = Distribution.discover(path=[str(self.site_dir)])
        assert any(
            dist.metadata['Name'] == 'distinfo-pkg'
            for dist in dists
            )

    def test_distribution_at_pathlib(self):
        """Demonstrate how to load metadata direct from a directory.
        """
        dist_info_path = self.site_dir / 'distinfo_pkg-1.0.0.dist-info'
        dist = Distribution.at(dist_info_path)
        assert dist.version == '1.0.0'

    def test_distribution_at_str(self):
        dist_info_path = self.site_dir / 'distinfo_pkg-1.0.0.dist-info'
        dist = Distribution.at(str(dist_info_path))
        assert dist.version == '1.0.0'
@ -0,0 +1,54 @@
# coding: utf-8

from __future__ import unicode_literals

import unittest
import packaging.requirements
import packaging.version

from . import fixtures
from .. import (
    Distribution,
    _compat,
    version,
    )


class IntegrationTests(fixtures.DistInfoPkg, unittest.TestCase):

    def test_package_spec_installed(self):
        """
        Illustrate the recommended procedure to determine if
        a specified version of a package is installed.
        """
        def is_installed(package_spec):
            req = packaging.requirements.Requirement(package_spec)
            return version(req.name) in req.specifier

        assert is_installed('distinfo-pkg==1.0')
        assert is_installed('distinfo-pkg>=1.0,<2.0')
        assert not is_installed('distinfo-pkg<1.0')


class FinderTests(fixtures.Fixtures, unittest.TestCase):

    def test_finder_without_module(self):
        class ModuleFreeFinder(fixtures.NullFinder):
            """
            A finder without an __module__ attribute
            """
            def __getattribute__(self, name):
                if name == '__module__':
                    raise AttributeError(name)
                return super().__getattribute__(name)

        self.fixtures.enter_context(
            fixtures.install_finder(ModuleFreeFinder()))
        _compat.disable_stdlib_finder()


class LocalProjectTests(fixtures.LocalPackage, unittest.TestCase):
    def test_find_local(self):
        dist = Distribution._local()
        assert dist.metadata['Name'] == 'local-pkg'
        assert dist.version == '2.0.1'
@ -0,0 +1,80 @@
import sys
import unittest

from .. import (
    distribution, entry_points, files, PackageNotFoundError,
    version, distributions,
    )

try:
    from importlib import resources
    getattr(resources, 'files')
    getattr(resources, 'as_file')
except (ImportError, AttributeError):
    import importlib_resources as resources

try:
    from contextlib import ExitStack
except ImportError:
    from contextlib2 import ExitStack


class TestZip(unittest.TestCase):
    root = 'importlib_metadata.tests.data'

    def _fixture_on_path(self, filename):
        pkg_file = resources.files(self.root).joinpath(filename)
        file = self.resources.enter_context(resources.as_file(pkg_file))
        assert file.name.startswith('example-'), file.name
        sys.path.insert(0, str(file))
        self.resources.callback(sys.path.pop, 0)

    def setUp(self):
        # Find the path to the example-*.whl so we can add it to the front of
        # sys.path, where we'll then try to find the metadata thereof.
        self.resources = ExitStack()
        self.addCleanup(self.resources.close)
        self._fixture_on_path('example-21.12-py3-none-any.whl')

    def test_zip_version(self):
        self.assertEqual(version('example'), '21.12')

    def test_zip_version_does_not_match(self):
        with self.assertRaises(PackageNotFoundError):
            version('definitely-not-installed')

    def test_zip_entry_points(self):
        scripts = dict(entry_points()['console_scripts'])
        entry_point = scripts['example']
        self.assertEqual(entry_point.value, 'example:main')
        entry_point = scripts['Example']
        self.assertEqual(entry_point.value, 'example:main')

    def test_missing_metadata(self):
        self.assertIsNone(distribution('example').read_text('does not exist'))

    def test_case_insensitive(self):
        self.assertEqual(version('Example'), '21.12')

    def test_files(self):
        for file in files('example'):
            path = str(file.dist.locate_file(file))
            assert '.whl/' in path, path

    def test_one_distribution(self):
        dists = list(distributions(path=sys.path[:1]))
        assert len(dists) == 1


class TestEgg(TestZip):
    def setUp(self):
        # Find the path to the example-*.egg so we can add it to the front of
        # sys.path, where we'll then try to find the metadata thereof.
        self.resources = ExitStack()
        self.addCleanup(self.resources.close)
        self._fixture_on_path('example-21.12-py3.6.egg')

    def test_files(self):
        for file in files('example'):
            path = str(file.dist.locate_file(file))
            assert '.egg/' in path, path
@ -0,0 +1 @@
pip
@ -0,0 +1,13 @@
Copyright 2017-2019 Brett Cannon, Barry Warsaw

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@ -0,0 +1,53 @@
Metadata-Version: 2.1
Name: importlib-resources
Version: 3.0.0
Summary: Read resources from Python packages
Home-page: http://importlib-resources.readthedocs.io/
Author: Barry Warsaw
Author-email: barry@python.org
License: UNKNOWN
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Topic :: Software Development :: Libraries
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7
Requires-Dist: pathlib2 ; python_version < "3"
Requires-Dist: contextlib2 ; python_version < "3"
Requires-Dist: singledispatch ; python_version < "3.4"
Requires-Dist: typing ; python_version < "3.5"
Requires-Dist: zipp (>=0.4) ; python_version < "3.8"
Provides-Extra: docs
Requires-Dist: sphinx ; extra == 'docs'
Requires-Dist: rst.linker ; extra == 'docs'
Requires-Dist: jaraco.packaging ; extra == 'docs'

=========================
 ``importlib_resources``
=========================

``importlib_resources`` is a backport of Python standard library
`importlib.resources
<https://docs.python.org/3.9/library/importlib.html#module-importlib.resources>`_
module for Python 2.7, and 3.4 through 3.8. Users of Python 3.9 and beyond
should use the standard library module, since for these versions,
``importlib_resources`` just delegates to that module.

The key goal of this module is to replace parts of `pkg_resources
<https://setuptools.readthedocs.io/en/latest/pkg_resources.html>`_ with a
solution in Python's stdlib that relies on well-defined APIs. This makes
reading resources included in packages easier, with more stable and consistent
semantics.
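
A minimal usage sketch (the package and resource names are hypothetical)::

    import importlib_resources

    # Read a text resource bundled inside the (hypothetical) package "mypkg".
    data = importlib_resources.read_text('mypkg', 'data.txt')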

Project details
===============

* Project home: https://gitlab.com/python-devs/importlib_resources
* Report bugs at: https://gitlab.com/python-devs/importlib_resources/issues
* Code hosting: https://gitlab.com/python-devs/importlib_resources.git
* Documentation: https://importlib-resources.readthedocs.io/

@ -0,0 +1,66 @@
importlib_resources-3.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
importlib_resources-3.0.0.dist-info/LICENSE,sha256=uWRjFdYGataJX2ziXk048ItUglQmjng3GWBALaWA36U,568
importlib_resources-3.0.0.dist-info/METADATA,sha256=BIalQpWJG-Av5ZUNQXdFsv6M8s8EdYiymu6GaoEL1Rk,2100
importlib_resources-3.0.0.dist-info/RECORD,,
importlib_resources-3.0.0.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
importlib_resources-3.0.0.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20
importlib_resources/__init__.py,sha256=hswDmLAH0IUlLWwmdHXPN2mgus2bk5IwDP-BFzg7VKo,977
importlib_resources/__pycache__/__init__.cpython-36.pyc,,
importlib_resources/__pycache__/_common.cpython-36.pyc,,
importlib_resources/__pycache__/_compat.cpython-36.pyc,,
importlib_resources/__pycache__/_py2.cpython-36.pyc,,
importlib_resources/__pycache__/_py3.cpython-36.pyc,,
importlib_resources/__pycache__/abc.cpython-36.pyc,,
importlib_resources/__pycache__/readers.cpython-36.pyc,,
importlib_resources/__pycache__/trees.cpython-36.pyc,,
importlib_resources/_common.py,sha256=jXVqgKZ1bt8IbZiErvjIeb69BjqsMSCSt9AwV4bHnE8,3157
importlib_resources/_compat.py,sha256=5nvS1tAZIl_6VqrFSScNVW2wtBGaRXGxcpDXMskruoA,3233
importlib_resources/_py2.py,sha256=G9M5mv1ILl8NARGdNX0v9_F_Hb4HUKCS-FCNK63Ajvw,4146
importlib_resources/_py3.py,sha256=2wJYfjLG8nd9mT1HLBtX96m6zlu9-Tocte9wFl9q_bY,5474
importlib_resources/abc.py,sha256=6PX4Nprv39YnAht3NymhHIuSso0ocAKqDJZf-A6BgIw,3894
importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_resources/readers.py,sha256=S0DsGQB1li6w5USiZQtiy-5HXe4UAxt-zmKo8QlAxsI,1155
importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_resources/tests/__pycache__/__init__.cpython-36.pyc,,
importlib_resources/tests/__pycache__/test_files.cpython-36.pyc,,
importlib_resources/tests/__pycache__/test_open.cpython-36.pyc,,
importlib_resources/tests/__pycache__/test_path.cpython-36.pyc,,
importlib_resources/tests/__pycache__/test_read.cpython-36.pyc,,
importlib_resources/tests/__pycache__/test_resource.cpython-36.pyc,,
importlib_resources/tests/__pycache__/util.cpython-36.pyc,,
importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_resources/tests/data01/__pycache__/__init__.cpython-36.pyc,,
importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-36.pyc,,
importlib_resources/tests/data01/subdirectory/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_resources/tests/data02/__pycache__/__init__.cpython-36.pyc,,
importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_resources/tests/data02/one/__pycache__/__init__.cpython-36.pyc,,
importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13
importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_resources/tests/data02/two/__pycache__/__init__.cpython-36.pyc,,
importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13
importlib_resources/tests/data03/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_resources/tests/data03/__pycache__/__init__.cpython-36.pyc,,
importlib_resources/tests/data03/namespace/portion1/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_resources/tests/data03/namespace/portion1/__pycache__/__init__.cpython-36.pyc,,
importlib_resources/tests/data03/namespace/portion2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_resources/tests/data03/namespace/portion2/__pycache__/__init__.cpython-36.pyc,,
importlib_resources/tests/data03/namespace/resource1.txt,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_resources/tests/test_files.py,sha256=91rf4C74_aJsKNSt-a-03slVpY9QSAuCbogFWnsaPjE,1017
importlib_resources/tests/test_open.py,sha256=yDXmTGXQspByj6WU0prnoVwab1yWWEA3fwz_XIx7TQU,2288
importlib_resources/tests/test_path.py,sha256=GnUOu-338o9offnC8xwbXjH9JIQJpD7JujgQkGB106Q,1548
importlib_resources/tests/test_read.py,sha256=DpA7tzxSQlU0_YQuWibB3E5PDL9fQUdzeKoEUGnAx78,2046
importlib_resources/tests/test_resource.py,sha256=X77DzU2BRoM6d59iEh74zDHHw3pKOBGLCg3lP3dH4BI,6467
importlib_resources/tests/util.py,sha256=f0RZU-RkEkybJjXRd7C5HcWMsoLFRWJL4FIUF1CJ2wo,6980
importlib_resources/tests/zipdata01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_resources/tests/zipdata01/__pycache__/__init__.cpython-36.pyc,,
importlib_resources/tests/zipdata01/ziptestdata.zip,sha256=AYf51fj80OKCRis93v2DlZjt5rM-VQOPptSHJbFtkXw,1131
importlib_resources/tests/zipdata02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_resources/tests/zipdata02/__pycache__/__init__.cpython-36.pyc,,
importlib_resources/tests/zipdata02/ziptestdata.zip,sha256=e6HXvTEObXvJcNxyX5I8tu5M8_6mSN8ALahHfqE7ADA,698
importlib_resources/trees.py,sha256=U3FlQSI5--eF4AdzOjBvW4xnjL21OFX8ivk82Quwv_M,117
@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.34.2)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any
@ -0,0 +1 @@
importlib_resources
@ -0,0 +1,53 @@
"""Read resources contained within a package."""

import sys

from ._common import (
    as_file, files,
)

# For compatibility. Ref #88.
# Also requires hook-importlib_resources.py (Ref #101).
__import__('importlib_resources.trees')


__all__ = [
    'Package',
    'Resource',
    'ResourceReader',
    'as_file',
    'contents',
    'files',
    'is_resource',
    'open_binary',
    'open_text',
    'path',
    'read_binary',
    'read_text',
    ]


if sys.version_info >= (3,):
    from importlib_resources._py3 import (
        Package,
        Resource,
        contents,
        is_resource,
        open_binary,
        open_text,
        path,
        read_binary,
        read_text,
        )
    from importlib_resources.abc import ResourceReader
else:
    from importlib_resources._py2 import (
        contents,
        is_resource,
        open_binary,
        open_text,
        path,
        read_binary,
        read_text,
        )
    del __all__[:3]
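Since this vendored importlib_resources copy exposes the same public API on Python 2 and 3, a minimal usage sketch may help orient the reader; the package and file names ('mypkg', 'config.json') are hypothetical:

import importlib_resources as resources

# Read a data file bundled inside a package, whether the package lives
# on disk or inside a zip archive.
text = resources.read_text('mypkg', 'config.json')
raw = resources.read_binary('mypkg', 'config.json')

# Streaming access works the same way.
with resources.open_text('mypkg', 'config.json') as fp:
    first_line = fp.readline()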
@ -0,0 +1,121 @@
from __future__ import absolute_import

import os
import tempfile
import contextlib
import types
import importlib

from ._compat import (
    Path, FileNotFoundError,
    singledispatch, package_spec,
    )

if False:  # TYPE_CHECKING
    from typing import Union, Any, Optional
    from .abc import ResourceReader
    Package = Union[types.ModuleType, str]


def files(package):
    """
    Get a Traversable resource from a package
    """
    return from_package(get_package(package))


def normalize_path(path):
    # type: (Any) -> str
    """Normalize a path by ensuring it is a string.

    If the resulting string contains path separators, an exception is raised.
    """
    str_path = str(path)
    parent, file_name = os.path.split(str_path)
    if parent:
        raise ValueError('{!r} must be only a file name'.format(path))
    return file_name


def get_resource_reader(package):
    # type: (types.ModuleType) -> Optional[ResourceReader]
    """
    Return the package's loader if it's a ResourceReader.
    """
    # We can't use an issubclass() check here because apparently abc.ABC's
    # __subclasscheck__() hook wants to create a weak reference to the
    # object, but zipimport.zipimporter does not support weak references,
    # resulting in a TypeError. That seems terrible.
    spec = package.__spec__
    reader = getattr(spec.loader, 'get_resource_reader', None)
    if reader is None:
        return None
    return reader(spec.name)


def resolve(cand):
    # type: (Package) -> types.ModuleType
    return (
        cand if isinstance(cand, types.ModuleType)
        else importlib.import_module(cand)
        )


def get_package(package):
    # type: (Package) -> types.ModuleType
    """Take a package name or module object and return the module.

    Raise an exception if the resolved module is not a package.
    """
    resolved = resolve(package)
    if package_spec(resolved).submodule_search_locations is None:
        raise TypeError('{!r} is not a package'.format(package))
    return resolved


def from_package(package):
    """
    Return a Traversable object for the given package.

    """
    spec = package_spec(package)
    reader = spec.loader.get_resource_reader(spec.name)
    return reader.files()


@contextlib.contextmanager
def _tempfile(reader, suffix=''):
    # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try'
    # blocks due to the need to close the temporary file to work on Windows
    # properly.
    fd, raw_path = tempfile.mkstemp(suffix=suffix)
    try:
        os.write(fd, reader())
        os.close(fd)
        yield Path(raw_path)
    finally:
        try:
            os.remove(raw_path)
        except FileNotFoundError:
            pass


@singledispatch
@contextlib.contextmanager
def as_file(path):
    """
    Given a Traversable object, return that object as a
    path on the local file system in a context manager.
    """
    with _tempfile(path.read_bytes, suffix=path.name) as local:
        yield local


@as_file.register(Path)
@contextlib.contextmanager
def _(path):
    """
    Degenerate behavior for pathlib.Path objects.
    """
    yield path
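For reference, `as_file` above is what lets callers obtain a concrete filesystem path even when a resource lives inside a zip archive; the `Path`-registered overload short-circuits for resources that are already real files. A minimal usage sketch (the 'mypkg'/'data.bin' names are hypothetical):

import importlib_resources as resources

# files() returns a Traversable; as_file() materializes it on disk,
# extracting to a temporary file only when needed (e.g. zip imports).
source = resources.files('mypkg').joinpath('data.bin')
with resources.as_file(source) as real_path:
    print(real_path.read_bytes())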
@ -0,0 +1,127 @@
from __future__ import absolute_import

import sys

# flake8: noqa

if sys.version_info > (3, 5):
    from pathlib import Path, PurePath
else:
    from pathlib2 import Path, PurePath  # type: ignore


if sys.version_info > (3,):
    from contextlib import suppress
else:
    from contextlib2 import suppress  # type: ignore


try:
    from functools import singledispatch
except ImportError:
    from singledispatch import singledispatch  # type: ignore


try:
    from abc import ABC  # type: ignore
except ImportError:
    from abc import ABCMeta

    class ABC(object):  # type: ignore
        __metaclass__ = ABCMeta


try:
    FileNotFoundError = FileNotFoundError  # type: ignore
except NameError:
    FileNotFoundError = OSError  # type: ignore


try:
    from zipfile import Path as ZipPath  # type: ignore
except ImportError:
    from zipp import Path as ZipPath  # type: ignore


try:
    from typing import runtime_checkable  # type: ignore
except ImportError:
    def runtime_checkable(cls):  # type: ignore
        return cls


try:
    from typing import Protocol  # type: ignore
except ImportError:
    Protocol = ABC  # type: ignore


__metaclass__ = type


class PackageSpec:
    def __init__(self, **kwargs):
        vars(self).update(kwargs)


class TraversableResourcesAdapter:
    def __init__(self, spec):
        self.spec = spec
        self.loader = LoaderAdapter(spec)

    def __getattr__(self, name):
        return getattr(self.spec, name)


class LoaderAdapter:
    """
    Adapt loaders to provide TraversableResources and other
    compatibility.
    """
    def __init__(self, spec):
        self.spec = spec

    @property
    def path(self):
        # Python < 3
        return self.spec.origin

    def get_resource_reader(self, name):
        # Python < 3.9
        from . import readers

        def _zip_reader(spec):
            with suppress(AttributeError):
                return readers.ZipReader(spec.loader, spec.name)

        def _available_reader(spec):
            with suppress(AttributeError):
                return spec.loader.get_resource_reader(spec.name)

        def _native_reader(spec):
            reader = _available_reader(spec)
            return reader if hasattr(reader, 'files') else None

        return (
            # native reader if it supplies 'files'
            _native_reader(self.spec) or
            # local ZipReader if a zip module
            _zip_reader(self.spec) or
            # local FileReader
            readers.FileReader(self)
            )


def package_spec(package):
    """
    Construct a minimal package spec suitable for
    matching the interfaces this library relies upon
    in later Python versions.
    """
    spec = getattr(package, '__spec__', None) or \
        PackageSpec(
            origin=package.__file__,
            loader=getattr(package, '__loader__', None),
            name=package.__name__,
            submodule_search_locations=getattr(package, '__path__', None),
        )
    return TraversableResourcesAdapter(spec)
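`PackageSpec` above is just an attribute bag: `vars(self).update(kwargs)` turns every keyword argument into an instance attribute, which is all `package_spec()` needs to mimic a real module spec on Python 2. A quick illustration with hypothetical values:

from importlib_resources._compat import PackageSpec

spec = PackageSpec(name='mypkg', origin='/tmp/mypkg/__init__.py',
                   loader=None, submodule_search_locations=['/tmp/mypkg'])
print(spec.name)    # 'mypkg'
print(spec.origin)  # '/tmp/mypkg/__init__.py'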
@ -0,0 +1,107 @@
import os
import errno

from . import _common
from ._compat import FileNotFoundError
from io import BytesIO, TextIOWrapper, open as io_open


def open_binary(package, resource):
    """Return a file-like object opened for binary reading of the resource."""
    resource = _common.normalize_path(resource)
    package = _common.get_package(package)
    # Using pathlib doesn't work well here due to the lack of 'strict' argument
    # for pathlib.Path.resolve() prior to Python 3.6.
    package_path = os.path.dirname(package.__file__)
    relative_path = os.path.join(package_path, resource)
    full_path = os.path.abspath(relative_path)
    try:
        return io_open(full_path, 'rb')
    except IOError:
        # This might be a package in a zip file. zipimport provides a loader
        # with a functioning get_data() method, however we have to strip the
        # archive (i.e. the .zip file's name) off the front of the path. This
        # is because the zipimport loader in Python 2 doesn't actually follow
        # PEP 302. It should allow the full path, but actually requires that
        # the path be relative to the zip file.
        try:
            loader = package.__loader__
            full_path = relative_path[len(loader.archive)+1:]
            data = loader.get_data(full_path)
        except (IOError, AttributeError):
            package_name = package.__name__
            message = '{!r} resource not found in {!r}'.format(
                resource, package_name)
            raise FileNotFoundError(message)
        return BytesIO(data)


def open_text(package, resource, encoding='utf-8', errors='strict'):
    """Return a file-like object opened for text reading of the resource."""
    return TextIOWrapper(
        open_binary(package, resource), encoding=encoding, errors=errors)


def read_binary(package, resource):
    """Return the binary contents of the resource."""
    with open_binary(package, resource) as fp:
        return fp.read()


def read_text(package, resource, encoding='utf-8', errors='strict'):
    """Return the decoded string of the resource.

    The decoding-related arguments have the same semantics as those of
    bytes.decode().
    """
    with open_text(package, resource, encoding, errors) as fp:
        return fp.read()


def path(package, resource):
    """A context manager providing a file path object to the resource.

    If the resource does not already exist on its own on the file system,
    a temporary file will be created. If the file was created, the file
    will be deleted upon exiting the context manager (no exception is
    raised if the file was deleted prior to the context manager
    exiting).
    """
    path = _common.files(package).joinpath(_common.normalize_path(resource))
    if not path.is_file():
        raise FileNotFoundError(path)
    return _common.as_file(path)


def is_resource(package, name):
    """True if name is a resource inside package.

    Directories are *not* resources.
    """
    package = _common.get_package(package)
    _common.normalize_path(name)
    try:
        package_contents = set(contents(package))
    except OSError as error:
        if error.errno not in (errno.ENOENT, errno.ENOTDIR):
            # We won't hit this in the Python 2 tests, so it'll appear
            # uncovered. We could mock os.listdir() to return a non-ENOENT or
            # ENOTDIR, but then we'd have to depend on another external
            # library since Python 2 doesn't have unittest.mock. It's not
            # worth it.
            raise  # pragma: nocover
        return False
    if name not in package_contents:
        return False
    return (_common.from_package(package) / name).is_file()


def contents(package):
    """Return an iterable of entries in `package`.

    Note that not all entries are resources. Specifically, directories are
    not considered resources. Use `is_resource()` on each entry returned here
    to check if it is a resource or not.
    """
    package = _common.get_package(package)
    return list(item.name for item in _common.from_package(package).iterdir())
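Note how `open_text` above is just a decoding layer over `open_binary`: the binary stream is wrapped in an io.TextIOWrapper with the caller's encoding and error policy. The same composition works with any binary stream, as this standalone sketch shows:

import io

raw = io.BytesIO(b'Hello, UTF-8 world!\n')
# TextIOWrapper decodes the underlying bytes on the fly.
wrapped = io.TextIOWrapper(raw, encoding='utf-8', errors='strict')
print(wrapped.read())  # 'Hello, UTF-8 world!\n'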
@ -0,0 +1,150 @@
import os
import sys

from . import _common
from contextlib import contextmanager, suppress
from importlib.abc import ResourceLoader
from io import BytesIO, TextIOWrapper
from pathlib import Path
from types import ModuleType
from typing import Iterable, Iterator, Optional, Set, Union  # noqa: F401
from typing import cast
from typing.io import BinaryIO, TextIO

if False:  # TYPE_CHECKING
    from typing import ContextManager

Package = Union[ModuleType, str]
if sys.version_info >= (3, 6):
    Resource = Union[str, os.PathLike]  # pragma: <=35
else:
    Resource = str  # pragma: >=36


def open_binary(package: Package, resource: Resource) -> BinaryIO:
    """Return a file-like object opened for binary reading of the resource."""
    resource = _common.normalize_path(resource)
    package = _common.get_package(package)
    reader = _common.get_resource_reader(package)
    if reader is not None:
        return reader.open_resource(resource)
    # Using pathlib doesn't work well here due to the lack of 'strict'
    # argument for pathlib.Path.resolve() prior to Python 3.6.
    absolute_package_path = os.path.abspath(
        package.__spec__.origin or 'non-existent file')
    package_path = os.path.dirname(absolute_package_path)
    full_path = os.path.join(package_path, resource)
    try:
        return open(full_path, mode='rb')
    except OSError:
        # Just assume the loader is a resource loader; all the relevant
        # importlib.machinery loaders are and an AttributeError for
        # get_data() will make it clear what is needed from the loader.
        loader = cast(ResourceLoader, package.__spec__.loader)
        data = None
        if hasattr(package.__spec__.loader, 'get_data'):
            with suppress(OSError):
                data = loader.get_data(full_path)
        if data is None:
            package_name = package.__spec__.name
            message = '{!r} resource not found in {!r}'.format(
                resource, package_name)
            raise FileNotFoundError(message)
        return BytesIO(data)


def open_text(package: Package,
              resource: Resource,
              encoding: str = 'utf-8',
              errors: str = 'strict') -> TextIO:
    """Return a file-like object opened for text reading of the resource."""
    return TextIOWrapper(
        open_binary(package, resource), encoding=encoding, errors=errors)


def read_binary(package: Package, resource: Resource) -> bytes:
    """Return the binary contents of the resource."""
    with open_binary(package, resource) as fp:
        return fp.read()


def read_text(package: Package,
              resource: Resource,
              encoding: str = 'utf-8',
              errors: str = 'strict') -> str:
    """Return the decoded string of the resource.

    The decoding-related arguments have the same semantics as those of
    bytes.decode().
    """
    with open_text(package, resource, encoding, errors) as fp:
        return fp.read()


def path(
        package: Package, resource: Resource,
        ) -> 'ContextManager[Path]':
    """A context manager providing a file path object to the resource.

    If the resource does not already exist on its own on the file system,
    a temporary file will be created. If the file was created, the file
    will be deleted upon exiting the context manager (no exception is
    raised if the file was deleted prior to the context manager
    exiting).
    """
    reader = _common.get_resource_reader(_common.get_package(package))
    return (
        _path_from_reader(reader, resource)
        if reader else
        _common.as_file(
            _common.files(package).joinpath(_common.normalize_path(resource)))
        )


@contextmanager
def _path_from_reader(reader, resource):
    norm_resource = _common.normalize_path(resource)
    with suppress(FileNotFoundError):
        yield Path(reader.resource_path(norm_resource))
        return
    opener_reader = reader.open_resource(norm_resource)
    with _common._tempfile(opener_reader.read, suffix=norm_resource) as res:
        yield res


def is_resource(package: Package, name: str) -> bool:
    """True if `name` is a resource inside `package`.

    Directories are *not* resources.
    """
    package = _common.get_package(package)
    _common.normalize_path(name)
    reader = _common.get_resource_reader(package)
    if reader is not None:
        return reader.is_resource(name)
    package_contents = set(contents(package))
    if name not in package_contents:
        return False
    return (_common.from_package(package) / name).is_file()


def contents(package: Package) -> Iterable[str]:
    """Return an iterable of entries in `package`.

    Note that not all entries are resources. Specifically, directories are
    not considered resources. Use `is_resource()` on each entry returned here
    to check if it is a resource or not.
    """
    package = _common.get_package(package)
    reader = _common.get_resource_reader(package)
    if reader is not None:
        return reader.contents()
    # Is the package a namespace package? By definition, namespace packages
    # cannot have resources.
    namespace = (
        package.__spec__.origin is None or
        package.__spec__.origin == 'namespace'
        )
    if namespace or not package.__spec__.has_location:
        return ()
    return list(item.name for item in _common.from_package(package).iterdir())
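`path()` above returns a context manager precisely because the resource may have to be extracted to a temporary file (e.g. for zip imports) and cleaned up afterwards. Typical use therefore keeps all filesystem work inside the `with` block; a sketch with hypothetical names:

import importlib_resources as resources

with resources.path('mypkg', 'cert.pem') as pem_path:
    # pem_path is a pathlib.Path; it may point at a temp file that is
    # deleted when the block exits, so do not stash it for later use.
    data = pem_path.read_bytes()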
@ -0,0 +1,142 @@
from __future__ import absolute_import

import abc

from ._compat import ABC, FileNotFoundError, runtime_checkable, Protocol

# Use mypy's comment syntax for Python 2 compatibility
try:
    from typing import BinaryIO, Iterable, Text
except ImportError:
    pass


class ResourceReader(ABC):
    """Abstract base class for loaders to provide resource reading support."""

    @abc.abstractmethod
    def open_resource(self, resource):
        # type: (Text) -> BinaryIO
        """Return an opened, file-like object for binary reading.

        The 'resource' argument is expected to represent only a file name.
        If the resource cannot be found, FileNotFoundError is raised.
        """
        # This deliberately raises FileNotFoundError instead of
        # NotImplementedError so that if this method is accidentally called,
        # it'll still do the right thing.
        raise FileNotFoundError

    @abc.abstractmethod
    def resource_path(self, resource):
        # type: (Text) -> Text
        """Return the file system path to the specified resource.

        The 'resource' argument is expected to represent only a file name.
        If the resource does not exist on the file system, raise
        FileNotFoundError.
        """
        # This deliberately raises FileNotFoundError instead of
        # NotImplementedError so that if this method is accidentally called,
        # it'll still do the right thing.
        raise FileNotFoundError

    @abc.abstractmethod
    def is_resource(self, path):
        # type: (Text) -> bool
        """Return True if the named 'path' is a resource.

        Files are resources, directories are not.
        """
        raise FileNotFoundError

    @abc.abstractmethod
    def contents(self):
        # type: () -> Iterable[str]
        """Return an iterable of entries in `package`."""
        raise FileNotFoundError


@runtime_checkable
class Traversable(Protocol):
    """
    An object with a subset of pathlib.Path methods suitable for
    traversing directories and opening files.
    """

    @abc.abstractmethod
    def iterdir(self):
        """
        Yield Traversable objects in self
        """

    @abc.abstractmethod
    def read_bytes(self):
        """
        Read contents of self as bytes
        """

    @abc.abstractmethod
    def read_text(self, encoding=None):
        """
        Read contents of self as text
        """

    @abc.abstractmethod
    def is_dir(self):
        """
        Return True if self is a dir
        """

    @abc.abstractmethod
    def is_file(self):
        """
        Return True if self is a file
        """

    @abc.abstractmethod
    def joinpath(self, child):
        """
        Return Traversable child in self
        """

    @abc.abstractmethod
    def __truediv__(self, child):
        """
        Return Traversable child in self
        """

    @abc.abstractmethod
    def open(self, mode='r', *args, **kwargs):
        """
        mode may be 'r' or 'rb' to open as text or binary. Return a handle
        suitable for reading (same as pathlib.Path.open).

        When opening as text, accepts encoding parameters such as those
        accepted by io.TextIOWrapper.
        """

    @abc.abstractproperty
    def name(self):
        # type: () -> str
        """
        The base name of this object without any parent references.
        """


class TraversableResources(ResourceReader):
    @abc.abstractmethod
    def files(self):
        """Return a Traversable object for the loaded package."""

    def open_resource(self, resource):
        return self.files().joinpath(resource).open('rb')

    def resource_path(self, resource):
        raise FileNotFoundError(resource)

    def is_resource(self, path):
        return self.files().joinpath(path).is_file()

    def contents(self):
        return (item.name for item in self.files().iterdir())
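Because `Traversable` is declared as a `runtime_checkable` Protocol (where typing supports it), plain `isinstance` checks work against any object that structurally provides the listed methods, `pathlib.Path` included. A sketch, assuming a Python where typing.runtime_checkable exists (3.8+):

from pathlib import Path
from importlib_resources.abc import Traversable

# Protocol isinstance() only checks that the methods are present, and
# pathlib.Path implements the whole Traversable surface.
assert isinstance(Path('.'), Traversable)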
@ -0,0 +1,41 @@
from . import abc

from ._compat import Path, ZipPath


class FileReader(abc.TraversableResources):
    def __init__(self, loader):
        self.path = Path(loader.path).parent

    def resource_path(self, resource):
        """
        Return the file system path to prevent
        `resources.path()` from creating a temporary
        copy.
        """
        return str(self.path.joinpath(resource))

    def files(self):
        return self.path


class ZipReader(abc.TraversableResources):
    def __init__(self, loader, module):
        _, _, name = module.rpartition('.')
        prefix = loader.prefix.replace('\\', '/') + name + '/'
        self.path = ZipPath(loader.archive, prefix)

    def open_resource(self, resource):
        try:
            return super().open_resource(resource)
        except KeyError as exc:
            raise FileNotFoundError(exc.args[0])

    def is_resource(self, path):
        # workaround for `zipfile.Path.is_file` returning true
        # for non-existent paths.
        target = self.files().joinpath(path)
        return target.is_file() and target.exists()

    def files(self):
        return self.path
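Both readers just point `self.path` at a Traversable root and inherit everything else from `TraversableResources`. A hedged sketch of how a zipimport-backed package would end up with a ZipReader (the archive path '/tmp/bundle.zip' and package name 'mypkg' are hypothetical):

import zipimport
from importlib_resources import readers

# zipimporter exposes .archive and .prefix, which ZipReader uses to
# build a zipfile.Path rooted at mypkg/ inside the archive.
loader = zipimport.zipimporter('/tmp/bundle.zip')
reader = readers.ZipReader(loader, 'mypkg')
print(list(reader.contents()))  # entry names under mypkg/ in the zip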
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1 @@
Hello, UTF-8 world!
@ -0,0 +1 @@
one resource
@ -0,0 +1 @@
two resource
@ -0,0 +1,39 @@
import typing
import unittest

import importlib_resources as resources
from importlib_resources.abc import Traversable
from . import data01
from . import util


class FilesTests:
    def test_read_bytes(self):
        files = resources.files(self.data)
        actual = files.joinpath('utf-8.file').read_bytes()
        assert actual == b'Hello, UTF-8 world!\n'

    def test_read_text(self):
        files = resources.files(self.data)
        actual = files.joinpath('utf-8.file').read_text()
        assert actual == 'Hello, UTF-8 world!\n'

    @unittest.skipUnless(
        hasattr(typing, 'runtime_checkable'),
        "Only suitable when typing supports runtime_checkable",
        )
    def test_traversable(self):
        assert isinstance(resources.files(self.data), Traversable)


class OpenDiskTests(FilesTests, unittest.TestCase):
    def setUp(self):
        self.data = data01


class OpenZipTests(FilesTests, util.ZipSetup, unittest.TestCase):
    pass


if __name__ == '__main__':
    unittest.main()
@ -0,0 +1,73 @@
import unittest

import importlib_resources as resources
from . import data01
from . import util
from .._compat import FileNotFoundError


class CommonBinaryTests(util.CommonTests, unittest.TestCase):
    def execute(self, package, path):
        with resources.open_binary(package, path):
            pass


class CommonTextTests(util.CommonTests, unittest.TestCase):
    def execute(self, package, path):
        with resources.open_text(package, path):
            pass


class OpenTests:
    def test_open_binary(self):
        with resources.open_binary(self.data, 'utf-8.file') as fp:
            result = fp.read()
        self.assertEqual(result, b'Hello, UTF-8 world!\n')

    def test_open_text_default_encoding(self):
        with resources.open_text(self.data, 'utf-8.file') as fp:
            result = fp.read()
        self.assertEqual(result, 'Hello, UTF-8 world!\n')

    def test_open_text_given_encoding(self):
        with resources.open_text(
                self.data, 'utf-16.file', 'utf-16', 'strict') as fp:
            result = fp.read()
        self.assertEqual(result, 'Hello, UTF-16 world!\n')

    def test_open_text_with_errors(self):
        # Raises UnicodeError without the 'errors' argument.
        with resources.open_text(
                self.data, 'utf-16.file', 'utf-8', 'strict') as fp:
            self.assertRaises(UnicodeError, fp.read)
        with resources.open_text(
                self.data, 'utf-16.file', 'utf-8', 'ignore') as fp:
            result = fp.read()
        self.assertEqual(
            result,
            'H\x00e\x00l\x00l\x00o\x00,\x00 '
            '\x00U\x00T\x00F\x00-\x001\x006\x00 '
            '\x00w\x00o\x00r\x00l\x00d\x00!\x00\n\x00')

    def test_open_binary_FileNotFoundError(self):
        self.assertRaises(
            FileNotFoundError,
            resources.open_binary, self.data, 'does-not-exist')

    def test_open_text_FileNotFoundError(self):
        self.assertRaises(
            FileNotFoundError,
            resources.open_text, self.data, 'does-not-exist')


class OpenDiskTests(OpenTests, unittest.TestCase):
    def setUp(self):
        self.data = data01


class OpenZipTests(OpenTests, util.ZipSetup, unittest.TestCase):
    pass


if __name__ == '__main__':
    unittest.main()
@ -0,0 +1,51 @@
import unittest

import importlib_resources as resources
from . import data01
from . import util


class CommonTests(util.CommonTests, unittest.TestCase):

    def execute(self, package, path):
        with resources.path(package, path):
            pass


class PathTests:

    def test_reading(self):
        # Path should be readable.
        # Test also implicitly verifies the returned object is a pathlib.Path
        # instance.
        with resources.path(self.data, 'utf-8.file') as path:
            self.assertTrue(path.name.endswith("utf-8.file"), repr(path))
            # pathlib.Path.read_text() was introduced in Python 3.5.
            with path.open('r', encoding='utf-8') as file:
                text = file.read()
            self.assertEqual('Hello, UTF-8 world!\n', text)


class PathDiskTests(PathTests, unittest.TestCase):
    data = data01

    def test_natural_path(self):
        """
        Guarantee the internal implementation detail that
        file-system-backed resources do not get the tempdir
        treatment.
        """
        with resources.path(self.data, 'utf-8.file') as path:
            assert 'data' in str(path)


class PathZipTests(PathTests, util.ZipSetup, unittest.TestCase):
    def test_remove_in_context_manager(self):
        # It is not an error if the file that was temporarily stashed on the
        # file system is removed inside the `with` stanza.
        with resources.path(self.data, 'utf-8.file') as path:
            path.unlink()


if __name__ == '__main__':
    unittest.main()
@ -0,0 +1,63 @@
import unittest
import importlib_resources as resources

from . import data01
from . import util
from importlib import import_module


class CommonBinaryTests(util.CommonTests, unittest.TestCase):
    def execute(self, package, path):
        resources.read_binary(package, path)


class CommonTextTests(util.CommonTests, unittest.TestCase):
    def execute(self, package, path):
        resources.read_text(package, path)


class ReadTests:
    def test_read_binary(self):
        result = resources.read_binary(self.data, 'binary.file')
        self.assertEqual(result, b'\0\1\2\3')

    def test_read_text_default_encoding(self):
        result = resources.read_text(self.data, 'utf-8.file')
        self.assertEqual(result, 'Hello, UTF-8 world!\n')

    def test_read_text_given_encoding(self):
        result = resources.read_text(
            self.data, 'utf-16.file', encoding='utf-16')
        self.assertEqual(result, 'Hello, UTF-16 world!\n')

    def test_read_text_with_errors(self):
        # Raises UnicodeError without the 'errors' argument.
        self.assertRaises(
            UnicodeError, resources.read_text, self.data, 'utf-16.file')
        result = resources.read_text(self.data, 'utf-16.file', errors='ignore')
        self.assertEqual(
            result,
            'H\x00e\x00l\x00l\x00o\x00,\x00 '
            '\x00U\x00T\x00F\x00-\x001\x006\x00 '
            '\x00w\x00o\x00r\x00l\x00d\x00!\x00\n\x00')


class ReadDiskTests(ReadTests, unittest.TestCase):
    data = data01


class ReadZipTests(ReadTests, util.ZipSetup, unittest.TestCase):
    def test_read_submodule_resource(self):
        submodule = import_module('ziptestdata.subdirectory')
        result = resources.read_binary(
            submodule, 'binary.file')
        self.assertEqual(result, b'\0\1\2\3')

    def test_read_submodule_resource_by_name(self):
        result = resources.read_binary(
            'ziptestdata.subdirectory', 'binary.file')
        self.assertEqual(result, b'\0\1\2\3')


if __name__ == '__main__':
    unittest.main()
@ -0,0 +1,170 @@
import sys
import unittest
import importlib_resources as resources

from . import data01
from . import zipdata01, zipdata02
from . import util
from importlib import import_module


class ResourceTests:
    # Subclasses are expected to set the `data` attribute.

    def test_is_resource_good_path(self):
        self.assertTrue(resources.is_resource(self.data, 'binary.file'))

    def test_is_resource_missing(self):
        self.assertFalse(resources.is_resource(self.data, 'not-a-file'))

    def test_is_resource_subresource_directory(self):
        # Directories are not resources.
        self.assertFalse(resources.is_resource(self.data, 'subdirectory'))

    def test_contents(self):
        contents = set(resources.contents(self.data))
        # There may be cruft in the directory listing of the data directory.
        # Under Python 3 we could have a __pycache__ directory, and under
        # Python 2 we could have .pyc files. These are both artifacts of the
        # test suite importing these modules and writing these caches. They
        # aren't germane to this test, so just filter them out.
        contents.discard('__pycache__')
        contents.discard('__init__.pyc')
        contents.discard('__init__.pyo')
        self.assertEqual(contents, {
            '__init__.py',
            'subdirectory',
            'utf-8.file',
            'binary.file',
            'utf-16.file',
            })


class ResourceDiskTests(ResourceTests, unittest.TestCase):
    def setUp(self):
        self.data = data01


class ResourceZipTests(ResourceTests, util.ZipSetup, unittest.TestCase):
    pass


@unittest.skipIf(sys.version_info < (3,), 'No ResourceReader in Python 2')
class ResourceLoaderTests(unittest.TestCase):
    def test_resource_contents(self):
        package = util.create_package(
            file=data01, path=data01.__file__, contents=['A', 'B', 'C'])
        self.assertEqual(
            set(resources.contents(package)),
            {'A', 'B', 'C'})

    def test_resource_is_resource(self):
        package = util.create_package(
            file=data01, path=data01.__file__,
            contents=['A', 'B', 'C', 'D/E', 'D/F'])
        self.assertTrue(resources.is_resource(package, 'B'))

    def test_resource_directory_is_not_resource(self):
        package = util.create_package(
            file=data01, path=data01.__file__,
            contents=['A', 'B', 'C', 'D/E', 'D/F'])
        self.assertFalse(resources.is_resource(package, 'D'))

    def test_resource_missing_is_not_resource(self):
        package = util.create_package(
            file=data01, path=data01.__file__,
            contents=['A', 'B', 'C', 'D/E', 'D/F'])
        self.assertFalse(resources.is_resource(package, 'Z'))


class ResourceCornerCaseTests(unittest.TestCase):
    def test_package_has_no_reader_fallback(self):
        # Test odd ball packages which:
        # 1. Do not have a ResourceReader as a loader
        # 2. Are not on the file system
        # 3. Are not in a zip file
        module = util.create_package(
            file=data01, path=data01.__file__, contents=['A', 'B', 'C'])
        # Give the module a dummy loader.
        module.__loader__ = object()
        # Give the module a dummy origin.
        module.__file__ = '/path/which/shall/not/be/named'
        if sys.version_info >= (3,):
            module.__spec__.loader = module.__loader__
            module.__spec__.origin = module.__file__
        self.assertFalse(resources.is_resource(module, 'A'))


class ResourceFromZipsTest01(util.ZipSetupBase, unittest.TestCase):
    ZIP_MODULE = zipdata01  # type: ignore

    def test_is_submodule_resource(self):
        submodule = import_module('ziptestdata.subdirectory')
        self.assertTrue(
            resources.is_resource(submodule, 'binary.file'))

    def test_read_submodule_resource_by_name(self):
        self.assertTrue(
            resources.is_resource('ziptestdata.subdirectory', 'binary.file'))

    def test_submodule_contents(self):
        submodule = import_module('ziptestdata.subdirectory')
        self.assertEqual(
            set(resources.contents(submodule)),
            {'__init__.py', 'binary.file'})

    def test_submodule_contents_by_name(self):
        self.assertEqual(
            set(resources.contents('ziptestdata.subdirectory')),
            {'__init__.py', 'binary.file'})


class ResourceFromZipsTest02(util.ZipSetupBase, unittest.TestCase):
    ZIP_MODULE = zipdata02  # type: ignore

    def test_unrelated_contents(self):
        # https://gitlab.com/python-devs/importlib_resources/issues/44
        #
        # Here we have a zip file with two unrelated subpackages. The bug
        # reports that getting the contents of a resource returns unrelated
        # files.
        self.assertEqual(
            set(resources.contents('ziptestdata.one')),
            {'__init__.py', 'resource1.txt'})
        self.assertEqual(
            set(resources.contents('ziptestdata.two')),
            {'__init__.py', 'resource2.txt'})


@unittest.skipIf(sys.version_info < (3,), 'No namespace packages in Python 2')
class NamespaceTest(unittest.TestCase):
    def test_namespaces_cannot_have_resources(self):
        contents = resources.contents(
            'importlib_resources.tests.data03.namespace')
        self.assertFalse(list(contents))
        # Even though there is a file in the namespace directory, it is not
        # considered a resource, since namespace packages can't have them.
        self.assertFalse(resources.is_resource(
            'importlib_resources.tests.data03.namespace',
            'resource1.txt'))
        # We should get an exception if we try to read it or open it.
        self.assertRaises(
            FileNotFoundError,
            resources.open_text,
            'importlib_resources.tests.data03.namespace', 'resource1.txt')
        self.assertRaises(
            FileNotFoundError,
            resources.open_binary,
            'importlib_resources.tests.data03.namespace', 'resource1.txt')
        self.assertRaises(
            FileNotFoundError,
            resources.read_text,
            'importlib_resources.tests.data03.namespace', 'resource1.txt')
        self.assertRaises(
            FileNotFoundError,
            resources.read_binary,
            'importlib_resources.tests.data03.namespace', 'resource1.txt')


if __name__ == '__main__':
    unittest.main()
@ -0,0 +1,213 @@
import abc
import importlib
import io
import sys
import types
import unittest

from . import data01
from . import zipdata01
from .._compat import ABC, Path, PurePath, FileNotFoundError
from ..abc import ResourceReader

try:
    from test.support import modules_setup, modules_cleanup
except ImportError:
    # Python 2.7.
    def modules_setup():
        return sys.modules.copy(),

    def modules_cleanup(oldmodules):
        # Encoders/decoders are registered permanently within the internal
        # codec cache. If we destroy the corresponding modules their
        # globals will be set to None which will trip up the cached functions.
        encodings = [(k, v) for k, v in sys.modules.items()
                     if k.startswith('encodings.')]
        sys.modules.clear()
        sys.modules.update(encodings)
        # XXX: This kind of problem can affect more than just encodings. In
        # particular extension modules (such as _ssl) don't cope with reloading
        # properly. Really, test modules should be cleaning out the test
        # specific modules they know they added (ala test_runpy) rather than
        # relying on this function (as test_importhooks and test_pkg do
        # currently). Implicitly imported *real* modules should be left alone
        # (see issue 10556).
        sys.modules.update(oldmodules)


try:
    from importlib.machinery import ModuleSpec
except ImportError:
    ModuleSpec = None  # type: ignore


def create_package(file, path, is_package=True, contents=()):
    class Reader(ResourceReader):
        def get_resource_reader(self, package):
            return self

        def open_resource(self, path):
            self._path = path
            if isinstance(file, Exception):
                raise file
            else:
                return file

        def resource_path(self, path_):
            self._path = path_
            if isinstance(path, Exception):
                raise path
            else:
                return path

        def is_resource(self, path_):
            self._path = path_
            if isinstance(path, Exception):
                raise path
            for entry in contents:
                parts = entry.split('/')
                if len(parts) == 1 and parts[0] == path_:
                    return True
            return False

        def contents(self):
            if isinstance(path, Exception):
                raise path
            # There's no yield from in baseball, er, Python 2.
            for entry in contents:
                yield entry

    name = 'testingpackage'
    # Unfortunately importlib.util.module_from_spec() was not introduced until
    # Python 3.5.
    module = types.ModuleType(name)
    if ModuleSpec is None:
        # Python 2.
        module.__name__ = name
        module.__file__ = 'does-not-exist'
        if is_package:
            module.__path__ = []
    else:
        # Python 3.
        loader = Reader()
        spec = ModuleSpec(
            name, loader,
            origin='does-not-exist',
            is_package=is_package)
        module.__spec__ = spec
        module.__loader__ = loader
    return module


class CommonTests(ABC):

    @abc.abstractmethod
    def execute(self, package, path):
        raise NotImplementedError

    def test_package_name(self):
        # Passing in the package name should succeed.
        self.execute(data01.__name__, 'utf-8.file')

    def test_package_object(self):
        # Passing in the package itself should succeed.
        self.execute(data01, 'utf-8.file')

    def test_string_path(self):
        # Passing in a string for the path should succeed.
        path = 'utf-8.file'
        self.execute(data01, path)

    @unittest.skipIf(sys.version_info < (3, 6), 'requires os.PathLike support')
    def test_pathlib_path(self):
        # Passing in a pathlib.PurePath object for the path should succeed.
        path = PurePath('utf-8.file')
        self.execute(data01, path)

    def test_absolute_path(self):
        # An absolute path is a ValueError.
        path = Path(__file__)
        full_path = path.parent/'utf-8.file'
        with self.assertRaises(ValueError):
            self.execute(data01, full_path)

    def test_relative_path(self):
        # A relative path is a ValueError.
        with self.assertRaises(ValueError):
            self.execute(data01, '../data01/utf-8.file')

    def test_importing_module_as_side_effect(self):
        # The anchor package can already be imported.
        del sys.modules[data01.__name__]
        self.execute(data01.__name__, 'utf-8.file')

    def test_non_package_by_name(self):
        # The anchor package cannot be a module.
        with self.assertRaises(TypeError):
            self.execute(__name__, 'utf-8.file')

    def test_non_package_by_package(self):
        # The anchor package cannot be a module.
        with self.assertRaises(TypeError):
            module = sys.modules['importlib_resources.tests.util']
            self.execute(module, 'utf-8.file')

    @unittest.skipIf(sys.version_info < (3,), 'No ResourceReader in Python 2')
    def test_resource_opener(self):
        bytes_data = io.BytesIO(b'Hello, world!')
        package = create_package(file=bytes_data, path=FileNotFoundError())
        self.execute(package, 'utf-8.file')
        self.assertEqual(package.__loader__._path, 'utf-8.file')

    @unittest.skipIf(sys.version_info < (3,), 'No ResourceReader in Python 2')
    def test_resource_path(self):
        bytes_data = io.BytesIO(b'Hello, world!')
        path = __file__
        package = create_package(file=bytes_data, path=path)
        self.execute(package, 'utf-8.file')
        self.assertEqual(package.__loader__._path, 'utf-8.file')

    def test_useless_loader(self):
        package = create_package(file=FileNotFoundError(),
                                 path=FileNotFoundError())
        with self.assertRaises(FileNotFoundError):
            self.execute(package, 'utf-8.file')


class ZipSetupBase:
    ZIP_MODULE = None

    @classmethod
    def setUpClass(cls):
        data_path = Path(cls.ZIP_MODULE.__file__)
        data_dir = data_path.parent
        cls._zip_path = str(data_dir / 'ziptestdata.zip')
        sys.path.append(cls._zip_path)
        cls.data = importlib.import_module('ziptestdata')

    @classmethod
    def tearDownClass(cls):
        try:
            sys.path.remove(cls._zip_path)
        except ValueError:
            pass

        try:
            del sys.path_importer_cache[cls._zip_path]
            del sys.modules[cls.data.__name__]
        except KeyError:
            pass

        try:
            del cls.data
            del cls._zip_path
        except AttributeError:
            pass

    def setUp(self):
        modules = modules_setup()
        self.addCleanup(modules_cleanup, *modules)


class ZipSetup(ZipSetupBase):
    ZIP_MODULE = zipdata01  # type: ignore
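`create_package` above builds a synthetic module whose loader doubles as its own ResourceReader, so resource behavior can be scripted per test. A hedged sketch of driving it directly (the file payload and entry names are made up):

import io
from importlib_resources.tests.util import create_package

pkg = create_package(file=io.BytesIO(b'payload'), path='does-not-exist',
                     contents=['a.txt', 'b.txt'])
# On Python 3 the synthetic loader reports exactly the entries we gave it.
print(list(pkg.__loader__.contents()))  # ['a.txt', 'b.txt']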
Some files were not shown because too many files have changed in this diff.