Installing wheels

pull/2/head
sgoudham 5 years ago
parent b805c2ab83
commit 4d6168b350

@@ -1,8 +0,0 @@
README.md
setup.py
discord/ext/menus/__init__.py
discord_ext_menus.egg-info/PKG-INFO
discord_ext_menus.egg-info/SOURCES.txt
discord_ext_menus.egg-info/dependency_links.txt
discord_ext_menus.egg-info/requires.txt
discord_ext_menus.egg-info/top_level.txt

@@ -1,7 +0,0 @@
..\discord\ext\menus\__init__.py
..\discord\ext\menus\__pycache__\__init__.cpython-36.pyc
PKG-INFO
SOURCES.txt
dependency_links.txt
requires.txt
top_level.txt

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2015-2019 Danny Y. (Rapptz)
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

@@ -1,10 +1,14 @@
Metadata-Version: 1.2
Metadata-Version: 2.1
Name: discord-ext-menus
Version: 1.0.0a22+gcc108be
Summary: An extension module to make reaction based menus with discord.py
Home-page: https://github.com/Rapptz/discord-ext-menus
Author: Rapptz
License: MIT
Description: UNKNOWN
Platform: UNKNOWN
Requires-Python: >=3.5.3
Requires-Dist: discord.py (>=1.2.5)
UNKNOWN

@@ -0,0 +1,9 @@
discord/ext/menus/__init__.py,sha256=V8ojHgCI1oH5fWiJF7ZbrOg95MbgH3PIbC8eK33uwVc,41686
discord/ext/menus/__pycache__/__init__.cpython-36.pyc,,
discord_ext_menus-1.0.0a22+gcc108be.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
discord_ext_menus-1.0.0a22+gcc108be.dist-info/LICENSE,sha256=7BAmT4qZKQ2E3NZi8tfNGXQx6QSIJ-vbjhDH853-9Fc,1110
discord_ext_menus-1.0.0a22+gcc108be.dist-info/METADATA,sha256=xr4yYqrPVCPgZ07VrN7oW5hHYdiMqI1wMPUGfJuOdHY,320
discord_ext_menus-1.0.0a22+gcc108be.dist-info/RECORD,,
discord_ext_menus-1.0.0a22+gcc108be.dist-info/WHEEL,sha256=YUYzQ6UQdoqxXjimOitTqynltBCkwY6qlTfTh2IzqQU,97
discord_ext_menus-1.0.0a22+gcc108be.dist-info/direct_url.json,sha256=Ltfp2x8uwSMUplapYkfSOJi86JJkX5gXUSMBuPxbqsM,139
discord_ext_menus-1.0.0a22+gcc108be.dist-info/top_level.txt,sha256=fJkrNbR-_8ubMBUcDEJBcfkpECrvSEmMrNKgvLlQFoM,8

@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.34.2)
Root-Is-Purelib: true
Tag: py3-none-any

@@ -0,0 +1 @@
{"url": "https://github.com/Rapptz/discord-ext-menus", "vcs_info": {"commit_id": "cc108bed812d0e481a628ca573c2eeeca9226b42", "vcs": "git"}}

@@ -0,0 +1,22 @@
"wheel" copyright (c) 2012-2014 Daniel Holth <dholth@fastmail.fm> and
contributors.
The MIT License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.

@@ -0,0 +1,66 @@
Metadata-Version: 2.1
Name: wheel
Version: 0.34.2
Summary: A built-package format for Python
Home-page: https://github.com/pypa/wheel
Author: Daniel Holth
Author-email: dholth@fastmail.fm
Maintainer: Alex Grönholm
Maintainer-email: alex.gronholm@nextday.fi
License: MIT
Project-URL: Documentation, https://wheel.readthedocs.io/
Project-URL: Changelog, https://wheel.readthedocs.io/en/stable/news.html
Project-URL: Issue Tracker, https://github.com/pypa/wheel/issues
Keywords: wheel,packaging
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: Topic :: System :: Archiving :: Packaging
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*
Provides-Extra: test
Requires-Dist: pytest (>=3.0.0) ; extra == 'test'
Requires-Dist: pytest-cov ; extra == 'test'
wheel
=====
This library is the reference implementation of the Python wheel packaging
standard, as defined in `PEP 427`_.
It has two different roles:
#. A setuptools_ extension for building wheels that provides the
``bdist_wheel`` setuptools command
#. A command line tool for working with wheel files
It should be noted that wheel is **not** intended to be used as a library, and
as such there is no stable, public API.
.. _PEP 427: https://www.python.org/dev/peps/pep-0427/
.. _setuptools: https://pypi.org/project/setuptools/
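Building a wheel via the ``bdist_wheel`` command mentioned above needs nothing
more than a ``setup.py``; the sketch below is illustrative only (the project
name is made up and the invocation reflects typical usage, nothing prescribed
here)::
    from setuptools import setup
    setup(
        name="example_pkg",
        version="0.1.0",
        py_modules=["example"],
    )
Running ``python setup.py bdist_wheel`` from the project root then writes
something like ``example_pkg-0.1.0-py3-none-any.whl`` into ``dist/``.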
Documentation
-------------
The documentation_ can be found on Read The Docs.
.. _documentation: https://wheel.readthedocs.io/
Code of Conduct
---------------
Everyone interacting in the wheel project's codebases, issue trackers, chat
rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_.
.. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/

@@ -0,0 +1,38 @@
../../Scripts/wheel.exe,sha256=GRuTq5C6yxuiPV2_3k1o9oJ62z2c5d9g9CNMvKft1iA,106360
wheel-0.34.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
wheel-0.34.2.dist-info/LICENSE.txt,sha256=zKniDGrx_Pv2lAjzd3aShsvuvN7TNhAMm0o_NfvmNeQ,1125
wheel-0.34.2.dist-info/METADATA,sha256=uizJLqT4WTszpXZTkDAoUn364lfXXMz-ha1TLIuQQbU,2219
wheel-0.34.2.dist-info/RECORD,,
wheel-0.34.2.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
wheel-0.34.2.dist-info/entry_points.txt,sha256=N8HbYFST3yrNQYeB2wXWBEPUhFsEtKNRPaCFGJPyqyc,108
wheel-0.34.2.dist-info/top_level.txt,sha256=HxSBIbgEstMPe4eFawhA66Mq-QYHMopXVoAncfjb_1c,6
wheel/__init__.py,sha256=HnvQS9U0JqVi8rcO-9DbcHRmVtnblu7VwdzoKbMiZDQ,23
wheel/__main__.py,sha256=lF-YLO4hdQmoWuh4eWZd8YL1U95RSdm76sNLBXa0vjE,417
wheel/__pycache__/__init__.cpython-36.pyc,,
wheel/__pycache__/__main__.cpython-36.pyc,,
wheel/__pycache__/_version.cpython-36.pyc,,
wheel/__pycache__/bdist_wheel.cpython-36.pyc,,
wheel/__pycache__/macosx_libfile.cpython-36.pyc,,
wheel/__pycache__/metadata.cpython-36.pyc,,
wheel/__pycache__/pep425tags.cpython-36.pyc,,
wheel/__pycache__/pkginfo.cpython-36.pyc,,
wheel/__pycache__/util.cpython-36.pyc,,
wheel/__pycache__/wheelfile.cpython-36.pyc,,
wheel/_version.py,sha256=KiDEywHVTrXNxe6ojGe-7gEm6gUYge5r_fThX1den7Y,133
wheel/bdist_wheel.py,sha256=fgVFnJ2cC3MkU5Wy0nNLCzbJoGorSzSrzM9PFq4HWj0,15840
wheel/cli/__init__.py,sha256=GWSoGUpRabTf8bk3FsNTPrc5Fsr8YOv2dX55iY2W7eY,2572
wheel/cli/__pycache__/__init__.cpython-36.pyc,,
wheel/cli/__pycache__/convert.cpython-36.pyc,,
wheel/cli/__pycache__/install.cpython-36.pyc,,
wheel/cli/__pycache__/pack.cpython-36.pyc,,
wheel/cli/__pycache__/unpack.cpython-36.pyc,,
wheel/cli/convert.py,sha256=7F4vj23A2OghDDWn9gX2V-_TeXMza1a5nIejmFGEUJM,9498
wheel/cli/install.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
wheel/cli/pack.py,sha256=S-J1iIy1GPDTTDdn-_SwxGa7N729h4iZNI11EDFCqfA,3208
wheel/cli/unpack.py,sha256=0VWzT7U_xyenTPwEVavxqvdee93GPvAFHnR3Uu91aRc,673
wheel/macosx_libfile.py,sha256=NhNz1C3zF_78iMUd4ij0le1jVJNnz93tCAICSKXgYvg,11858
wheel/metadata.py,sha256=siS-hs_DTT0ScpbzloYbQSGYXUDC_Tim10ixcYWSM-4,4517
wheel/pep425tags.py,sha256=tuXvpyhYJHDcsXHgSRonny1X3kG3TSn7CN9nL9tZUMA,9071
wheel/pkginfo.py,sha256=GR76kupQzn1x9sKDaXuE6B6FsZ4OkfRtG7pndlXPvQ4,1257
wheel/util.py,sha256=mnNZkJCi9DHLI_q4lTudoD0mW97h_AoAWl7prNPLXJc,938
wheel/wheelfile.py,sha256=WsWfD-OBgHxxeF7SxRi3OX6l_qxtP06b2ZK0NVhT_n0,7298

@@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.34.2)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any

@@ -0,0 +1,6 @@
[console_scripts]
wheel = wheel.cli:main
[distutils.commands]
bdist_wheel = wheel.bdist_wheel:bdist_wheel

@@ -0,0 +1,19 @@
"""
Wheel command line tool (enable python -m wheel syntax)
"""
import sys
def main(): # needed for console script
if __package__ == '':
# To be able to run 'python wheel-0.9.whl/wheel':
import os.path
path = os.path.dirname(os.path.dirname(__file__))
sys.path[0:0] = [path]
import wheel.cli
sys.exit(wheel.cli.main())
if __name__ == "__main__":
sys.exit(main())
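# Illustration (not part of this commit): together with the console_scripts
# entry point shown earlier, this module gives two equivalent ways to reach
# the CLI, plus a programmatic sketch (assuming wheel is importable):
#   python -m wheel version        # via this __main__ module
#   wheel version                  # via the 'wheel = wheel.cli:main' entry point
# import sys
# from wheel.cli import main
# sys.argv = ["wheel", "version"]  # hypothetical arguments; main() parses sys.argv
# sys.exit(main())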

@@ -0,0 +1,4 @@
# coding: utf-8
# file generated by setuptools_scm
# don't change, don't track in version control
version = '0.33.6.post32+gd3d7a43'

@@ -0,0 +1,403 @@
"""
Create a wheel (.whl) distribution.
A wheel is a built archive format.
"""
import os
import shutil
import stat
import sys
import re
from collections import OrderedDict
from email.generator import Generator
from distutils.core import Command
from distutils.sysconfig import get_python_version
from distutils import log as logger
from glob import iglob
from shutil import rmtree
from warnings import warn
from zipfile import ZIP_DEFLATED, ZIP_STORED
import pkg_resources
from .pep425tags import get_abbr_impl, get_impl_ver, get_abi_tag, get_platform
from .pkginfo import write_pkg_info
from .metadata import pkginfo_to_metadata
from .wheelfile import WheelFile
from . import pep425tags
from . import __version__ as wheel_version
safe_name = pkg_resources.safe_name
safe_version = pkg_resources.safe_version
PY_LIMITED_API_PATTERN = r'cp3\d'
def safer_name(name):
return safe_name(name).replace('-', '_')
def safer_version(version):
return safe_version(version).replace('-', '_')
def remove_readonly(func, path, excinfo):
print(str(excinfo[1]))
os.chmod(path, stat.S_IWRITE)
func(path)
class bdist_wheel(Command):
description = 'create a wheel distribution'
supported_compressions = OrderedDict([
('stored', ZIP_STORED),
('deflated', ZIP_DEFLATED)
])
user_options = [('bdist-dir=', 'b',
"temporary directory for creating the distribution"),
('plat-name=', 'p',
"platform name to embed in generated filenames "
"(default: %s)" % get_platform(None)),
('keep-temp', 'k',
"keep the pseudo-installation tree around after " +
"creating the distribution archive"),
('dist-dir=', 'd',
"directory to put final built distributions in"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
('relative', None,
"build the archive using relative paths "
"(default: false)"),
('owner=', 'u',
"Owner name used when creating a tar file"
" [default: current user]"),
('group=', 'g',
"Group name used when creating a tar file"
" [default: current group]"),
('universal', None,
"make a universal wheel"
" (default: false)"),
('compression=', None,
"zipfile compression (one of: {})"
" (default: 'deflated')"
.format(', '.join(supported_compressions))),
('python-tag=', None,
"Python implementation compatibility tag"
" (default: py%s)" % get_impl_ver()[0]),
('build-number=', None,
"Build number for this particular version. "
"As specified in PEP-0427, this must start with a digit. "
"[default: None]"),
('py-limited-api=', None,
"Python tag (cp32|cp33|cpNN) for abi3 wheel tag"
" (default: false)"),
]
boolean_options = ['keep-temp', 'skip-build', 'relative', 'universal']
def initialize_options(self):
self.bdist_dir = None
self.data_dir = None
self.plat_name = None
self.plat_tag = None
self.format = 'zip'
self.keep_temp = False
self.dist_dir = None
self.egginfo_dir = None
self.root_is_pure = None
self.skip_build = None
self.relative = False
self.owner = None
self.group = None
self.universal = False
self.compression = 'deflated'
self.python_tag = 'py' + get_impl_ver()[0]
self.build_number = None
self.py_limited_api = False
self.plat_name_supplied = False
def finalize_options(self):
if self.bdist_dir is None:
bdist_base = self.get_finalized_command('bdist').bdist_base
self.bdist_dir = os.path.join(bdist_base, 'wheel')
self.data_dir = self.wheel_dist_name + '.data'
self.plat_name_supplied = self.plat_name is not None
try:
self.compression = self.supported_compressions[self.compression]
except KeyError:
raise ValueError('Unsupported compression: {}'.format(self.compression))
need_options = ('dist_dir', 'plat_name', 'skip_build')
self.set_undefined_options('bdist',
*zip(need_options, need_options))
self.root_is_pure = not (self.distribution.has_ext_modules()
or self.distribution.has_c_libraries())
if self.py_limited_api and not re.match(PY_LIMITED_API_PATTERN, self.py_limited_api):
raise ValueError("py-limited-api must match '%s'" % PY_LIMITED_API_PATTERN)
# Support legacy [wheel] section for setting universal
wheel = self.distribution.get_option_dict('wheel')
if 'universal' in wheel:
# please don't define this in your global configs
logger.warn('The [wheel] section is deprecated. Use [bdist_wheel] instead.')
val = wheel['universal'][1].strip()
if val.lower() in ('1', 'true', 'yes'):
self.universal = True
if self.build_number is not None and not self.build_number[:1].isdigit():
raise ValueError("Build tag (build-number) must start with a digit.")
@property
def wheel_dist_name(self):
"""Return distribution full name with - replaced with _"""
components = (safer_name(self.distribution.get_name()),
safer_version(self.distribution.get_version()))
if self.build_number:
components += (self.build_number,)
return '-'.join(components)
def get_tag(self):
# bdist sets self.plat_name if unset, we should only use it for purepy
# wheels if the user supplied it.
if self.plat_name_supplied:
plat_name = self.plat_name
elif self.root_is_pure:
plat_name = 'any'
else:
# macosx platform names contain the system version, so they need special handling
if self.plat_name and not self.plat_name.startswith("macosx"):
plat_name = self.plat_name
else:
plat_name = get_platform(self.bdist_dir)
if plat_name in ('linux-x86_64', 'linux_x86_64') and sys.maxsize == 2147483647:
plat_name = 'linux_i686'
plat_name = plat_name.replace('-', '_').replace('.', '_')
if self.root_is_pure:
if self.universal:
impl = 'py2.py3'
else:
impl = self.python_tag
tag = (impl, 'none', plat_name)
else:
impl_name = get_abbr_impl()
impl_ver = get_impl_ver()
impl = impl_name + impl_ver
# We don't work on CPython 3.1, 3.0.
if self.py_limited_api and (impl_name + impl_ver).startswith('cp3'):
impl = self.py_limited_api
abi_tag = 'abi3'
else:
abi_tag = str(get_abi_tag()).lower()
tag = (impl, abi_tag, plat_name)
supported_tags = pep425tags.get_supported(
self.bdist_dir,
supplied_platform=plat_name if self.plat_name_supplied else None)
# XXX switch to this alternate implementation for non-pure:
if not self.py_limited_api:
assert tag == supported_tags[0], "%s != %s" % (tag, supported_tags[0])
assert tag in supported_tags, "would build wheel with unsupported tag {}".format(tag)
return tag
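# Worked example (an illustration, not part of this commit): for a pure-Python,
# non-universal project built under Python 3, root_is_pure is True, so
# get_tag() returns ('py3', 'none', 'any') and run() names the archive e.g.
# 'example_pkg-0.1.0-py3-none-any.whl' ('example_pkg' is a made-up name).
# With --universal the first component becomes 'py2.py3' instead.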
def run(self):
build_scripts = self.reinitialize_command('build_scripts')
build_scripts.executable = 'python'
build_scripts.force = True
build_ext = self.reinitialize_command('build_ext')
build_ext.inplace = False
if not self.skip_build:
self.run_command('build')
install = self.reinitialize_command('install',
reinit_subcommands=True)
install.root = self.bdist_dir
install.compile = False
install.skip_build = self.skip_build
install.warn_dir = False
# A wheel without setuptools scripts is more cross-platform.
# Use the (undocumented) `no_ep` option to setuptools'
# install_scripts command to avoid creating entry point scripts.
install_scripts = self.reinitialize_command('install_scripts')
install_scripts.no_ep = True
# Use a custom scheme for the archive, because we have to decide
# at installation time which scheme to use.
for key in ('headers', 'scripts', 'data', 'purelib', 'platlib'):
setattr(install,
'install_' + key,
os.path.join(self.data_dir, key))
basedir_observed = ''
if os.name == 'nt':
# win32 barfs if any of these are ''; could be '.'?
# (distutils.command.install:change_roots bug)
basedir_observed = os.path.normpath(os.path.join(self.data_dir, '..'))
self.install_libbase = self.install_lib = basedir_observed
setattr(install,
'install_purelib' if self.root_is_pure else 'install_platlib',
basedir_observed)
logger.info("installing to %s", self.bdist_dir)
self.run_command('install')
impl_tag, abi_tag, plat_tag = self.get_tag()
archive_basename = "{}-{}-{}-{}".format(self.wheel_dist_name, impl_tag, abi_tag, plat_tag)
if not self.relative:
archive_root = self.bdist_dir
else:
archive_root = os.path.join(
self.bdist_dir,
self._ensure_relative(install.install_base))
self.set_undefined_options('install_egg_info', ('target', 'egginfo_dir'))
distinfo_dirname = '{}-{}.dist-info'.format(
safer_name(self.distribution.get_name()),
safer_version(self.distribution.get_version()))
distinfo_dir = os.path.join(self.bdist_dir, distinfo_dirname)
self.egg2dist(self.egginfo_dir, distinfo_dir)
self.write_wheelfile(distinfo_dir)
# Make the archive
if not os.path.exists(self.dist_dir):
os.makedirs(self.dist_dir)
wheel_path = os.path.join(self.dist_dir, archive_basename + '.whl')
with WheelFile(wheel_path, 'w', self.compression) as wf:
wf.write_files(archive_root)
# Add to 'Distribution.dist_files' so that the "upload" command works
getattr(self.distribution, 'dist_files', []).append(
('bdist_wheel', get_python_version(), wheel_path))
if not self.keep_temp:
logger.info('removing %s', self.bdist_dir)
if not self.dry_run:
rmtree(self.bdist_dir, onerror=remove_readonly)
def write_wheelfile(self, wheelfile_base, generator='bdist_wheel (' + wheel_version + ')'):
from email.message import Message
msg = Message()
msg['Wheel-Version'] = '1.0' # of the spec
msg['Generator'] = generator
msg['Root-Is-Purelib'] = str(self.root_is_pure).lower()
if self.build_number is not None:
msg['Build'] = self.build_number
# Doesn't work for bdist_wininst
impl_tag, abi_tag, plat_tag = self.get_tag()
for impl in impl_tag.split('.'):
for abi in abi_tag.split('.'):
for plat in plat_tag.split('.'):
msg['Tag'] = '-'.join((impl, abi, plat))
wheelfile_path = os.path.join(wheelfile_base, 'WHEEL')
logger.info('creating %s', wheelfile_path)
with open(wheelfile_path, 'w') as f:
Generator(f, maxheaderlen=0).flatten(msg)
def _ensure_relative(self, path):
# copied from dir_util, deleted
drive, path = os.path.splitdrive(path)
if path[0:1] == os.sep:
path = drive + path[1:]
return path
@property
def license_paths(self):
metadata = self.distribution.get_option_dict('metadata')
files = set()
patterns = sorted({
option for option in metadata.get('license_files', ('', ''))[1].split()
})
if 'license_file' in metadata:
warn('The "license_file" option is deprecated. Use "license_files" instead.',
DeprecationWarning)
files.add(metadata['license_file'][1])
if 'license_file' not in metadata and 'license_files' not in metadata:
patterns = ('LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*')
for pattern in patterns:
for path in iglob(pattern):
if path not in files and os.path.isfile(path):
logger.info('adding license file "%s" (matched pattern "%s")', path, pattern)
files.add(path)
return files
def egg2dist(self, egginfo_path, distinfo_path):
"""Convert an .egg-info directory into a .dist-info directory"""
def adios(p):
"""Appropriately delete directory, file or link."""
if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p):
shutil.rmtree(p)
elif os.path.exists(p):
os.unlink(p)
adios(distinfo_path)
if not os.path.exists(egginfo_path):
# There is no egg-info. This is probably because the egg-info
# file/directory is not named matching the distribution name used
# to name the archive file. Check for this case and report
# accordingly.
import glob
pat = os.path.join(os.path.dirname(egginfo_path), '*.egg-info')
possible = glob.glob(pat)
err = "Egg metadata expected at %s but not found" % (egginfo_path,)
if possible:
alt = os.path.basename(possible[0])
err += " (%s found - possible misnamed archive file?)" % (alt,)
raise ValueError(err)
if os.path.isfile(egginfo_path):
# .egg-info is a single file
pkginfo_path = egginfo_path
pkg_info = pkginfo_to_metadata(egginfo_path, egginfo_path)
os.mkdir(distinfo_path)
else:
# .egg-info is a directory
pkginfo_path = os.path.join(egginfo_path, 'PKG-INFO')
pkg_info = pkginfo_to_metadata(egginfo_path, pkginfo_path)
# ignore common egg metadata that is useless to wheel
shutil.copytree(egginfo_path, distinfo_path,
ignore=lambda x, y: {'PKG-INFO', 'requires.txt', 'SOURCES.txt',
'not-zip-safe'}
)
# delete dependency_links if it is only whitespace
dependency_links_path = os.path.join(distinfo_path, 'dependency_links.txt')
with open(dependency_links_path, 'r') as dependency_links_file:
dependency_links = dependency_links_file.read().strip()
if not dependency_links:
adios(dependency_links_path)
write_pkg_info(os.path.join(distinfo_path, 'METADATA'), pkg_info)
for license_path in self.license_paths:
filename = os.path.basename(license_path)
shutil.copy(license_path, os.path.join(distinfo_path, filename))
adios(egginfo_path)

@@ -0,0 +1,88 @@
"""
Wheel command-line utility.
"""
from __future__ import print_function
import argparse
import os
import sys
def require_pkgresources(name):
try:
import pkg_resources # noqa: F401
except ImportError:
raise RuntimeError("'{0}' needs pkg_resources (part of setuptools).".format(name))
class WheelError(Exception):
pass
def unpack_f(args):
from .unpack import unpack
unpack(args.wheelfile, args.dest)
def pack_f(args):
from .pack import pack
pack(args.directory, args.dest_dir, args.build_number)
def convert_f(args):
from .convert import convert
convert(args.files, args.dest_dir, args.verbose)
def version_f(args):
from .. import __version__
print("wheel %s" % __version__)
def parser():
p = argparse.ArgumentParser()
s = p.add_subparsers(help="commands")
unpack_parser = s.add_parser('unpack', help='Unpack wheel')
unpack_parser.add_argument('--dest', '-d', help='Destination directory',
default='.')
unpack_parser.add_argument('wheelfile', help='Wheel file')
unpack_parser.set_defaults(func=unpack_f)
repack_parser = s.add_parser('pack', help='Repack wheel')
repack_parser.add_argument('directory', help='Root directory of the unpacked wheel')
repack_parser.add_argument('--dest-dir', '-d', default=os.path.curdir,
help="Directory to store the wheel (default %(default)s)")
repack_parser.add_argument('--build-number', help="Build tag to use in the wheel name")
repack_parser.set_defaults(func=pack_f)
convert_parser = s.add_parser('convert', help='Convert egg or wininst to wheel')
convert_parser.add_argument('files', nargs='*', help='Files to convert')
convert_parser.add_argument('--dest-dir', '-d', default=os.path.curdir,
help="Directory to store wheels (default %(default)s)")
convert_parser.add_argument('--verbose', '-v', action='store_true')
convert_parser.set_defaults(func=convert_f)
version_parser = s.add_parser('version', help='Print version and exit')
version_parser.set_defaults(func=version_f)
help_parser = s.add_parser('help', help='Show this help')
help_parser.set_defaults(func=lambda args: p.print_help())
return p
def main():
p = parser()
args = p.parse_args()
if not hasattr(args, 'func'):
p.print_help()
else:
try:
args.func(args)
return 0
except WheelError as e:
print(e, file=sys.stderr)
return 1

@@ -0,0 +1,269 @@
import os.path
import re
import shutil
import sys
import tempfile
import zipfile
from distutils import dist
from glob import iglob
from ..bdist_wheel import bdist_wheel
from ..wheelfile import WheelFile
from . import WheelError, require_pkgresources
egg_info_re = re.compile(r'''
(?P<name>.+?)-(?P<ver>.+?)
(-(?P<pyver>py\d\.\d+)
(-(?P<arch>.+?))?
)?.egg$''', re.VERBOSE)
class _bdist_wheel_tag(bdist_wheel):
# allow the client to override the default generated wheel tag
# The default bdist_wheel implementation uses python and abi tags
# of the running python process. This is not suitable for
# generating/repackaging prebuilt binaries.
full_tag_supplied = False
full_tag = None # None or a (pytag, soabitag, plattag) triple
def get_tag(self):
if self.full_tag_supplied and self.full_tag is not None:
return self.full_tag
else:
return bdist_wheel.get_tag(self)
def egg2wheel(egg_path, dest_dir):
filename = os.path.basename(egg_path)
match = egg_info_re.match(filename)
if not match:
raise WheelError('Invalid egg file name: {}'.format(filename))
egg_info = match.groupdict()
dir = tempfile.mkdtemp(suffix="_e2w")
if os.path.isfile(egg_path):
# assume we have a bdist_egg otherwise
with zipfile.ZipFile(egg_path) as egg:
egg.extractall(dir)
else:
# support buildout-style installed eggs directories
for pth in os.listdir(egg_path):
src = os.path.join(egg_path, pth)
if os.path.isfile(src):
shutil.copy2(src, dir)
else:
shutil.copytree(src, os.path.join(dir, pth))
pyver = egg_info['pyver']
if pyver:
pyver = egg_info['pyver'] = pyver.replace('.', '')
arch = (egg_info['arch'] or 'any').replace('.', '_').replace('-', '_')
# assume all binary eggs are for CPython
abi = 'cp' + pyver[2:] if arch != 'any' else 'none'
root_is_purelib = egg_info['arch'] is None
if root_is_purelib:
bw = bdist_wheel(dist.Distribution())
else:
bw = _bdist_wheel_tag(dist.Distribution())
bw.root_is_pure = root_is_purelib
bw.python_tag = pyver
bw.plat_name_supplied = True
bw.plat_name = egg_info['arch'] or 'any'
if not root_is_purelib:
bw.full_tag_supplied = True
bw.full_tag = (pyver, abi, arch)
dist_info_dir = os.path.join(dir, '{name}-{ver}.dist-info'.format(**egg_info))
bw.egg2dist(os.path.join(dir, 'EGG-INFO'), dist_info_dir)
bw.write_wheelfile(dist_info_dir, generator='egg2wheel')
wheel_name = '{name}-{ver}-{pyver}-{}-{}.whl'.format(abi, arch, **egg_info)
with WheelFile(os.path.join(dest_dir, wheel_name), 'w') as wf:
wf.write_files(dir)
shutil.rmtree(dir)
def parse_wininst_info(wininfo_name, egginfo_name):
"""Extract metadata from filenames.
Extracts the 4 metadata items needed (name, version, pyversion, arch) from
the installer filename and the name of the egg-info directory embedded in
the zipfile (if any).
The egginfo filename has the format::
name-ver(-pyver)(-arch).egg-info
The installer filename has the format::
name-ver.arch(-pyver).exe
Some things to note:
1. The installer filename is not definitive. An installer can be renamed
and work perfectly well as an installer. So more reliable data should
be used whenever possible.
2. The egg-info data should be preferred for the name and version, because
these come straight from the distutils metadata, and are mandatory.
3. The pyver from the egg-info data should be ignored, as it is
constructed from the version of Python used to build the installer,
which is irrelevant - the installer filename is correct here (even to
the point that when it's not there, any version is implied).
4. The architecture must be taken from the installer filename, as it is
not included in the egg-info data.
5. Architecture-neutral installers still have an architecture because the
installer format itself (being executable) is architecture-specific. We
should therefore ignore the architecture if the content is pure-python.
"""
egginfo = None
if egginfo_name:
egginfo = egg_info_re.search(egginfo_name)
if not egginfo:
raise ValueError("Egg info filename %s is not valid" % (egginfo_name,))
# Parse the wininst filename
# 1. Distribution name (up to the first '-')
w_name, sep, rest = wininfo_name.partition('-')
if not sep:
raise ValueError("Installer filename %s is not valid" % (wininfo_name,))
# Strip '.exe'
rest = rest[:-4]
# 2. Python version (from the last '-', must start with 'py')
rest2, sep, w_pyver = rest.rpartition('-')
if sep and w_pyver.startswith('py'):
rest = rest2
w_pyver = w_pyver.replace('.', '')
else:
# Not version specific - use py2.py3. While it is possible that
# pure-Python code is not compatible with both Python 2 and 3, there
# is no way of knowing from the wininst format, so we assume the best
# here (the user can always manually rename the wheel to be more
# restrictive if needed).
w_pyver = 'py2.py3'
# 3. Version and architecture
w_ver, sep, w_arch = rest.rpartition('.')
if not sep:
raise ValueError("Installer filename %s is not valid" % (wininfo_name,))
if egginfo:
w_name = egginfo.group('name')
w_ver = egginfo.group('ver')
return {'name': w_name, 'ver': w_ver, 'arch': w_arch, 'pyver': w_pyver}
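# Worked example (hypothetical filename, not taken from this commit):
#   parse_wininst_info('example_pkg-0.1.0.win32-py2.7.exe', None)
# splits on the first '-' for the name, strips '.exe', peels the trailing
# '-py2.7' (normalised to 'py27'), then splits the remainder on its last '.'
# into version and architecture, returning
#   {'name': 'example_pkg', 'ver': '0.1.0', 'arch': 'win32', 'pyver': 'py27'}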
def wininst2wheel(path, dest_dir):
with zipfile.ZipFile(path) as bdw:
# Search for egg-info in the archive
egginfo_name = None
for filename in bdw.namelist():
if '.egg-info' in filename:
egginfo_name = filename
break
info = parse_wininst_info(os.path.basename(path), egginfo_name)
root_is_purelib = True
for zipinfo in bdw.infolist():
if zipinfo.filename.startswith('PLATLIB'):
root_is_purelib = False
break
if root_is_purelib:
paths = {'purelib': ''}
else:
paths = {'platlib': ''}
dist_info = "%(name)s-%(ver)s" % info
datadir = "%s.data/" % dist_info
# rewrite paths to trick ZipFile into extracting an egg
# XXX grab wininst .ini - between .exe, padding, and first zip file.
members = []
egginfo_name = ''
for zipinfo in bdw.infolist():
key, basename = zipinfo.filename.split('/', 1)
key = key.lower()
basepath = paths.get(key, None)
if basepath is None:
basepath = datadir + key.lower() + '/'
oldname = zipinfo.filename
newname = basepath + basename
zipinfo.filename = newname
del bdw.NameToInfo[oldname]
bdw.NameToInfo[newname] = zipinfo
# Collect member names, but omit '' (from an entry like "PLATLIB/")
if newname:
members.append(newname)
# Remember egg-info name for the egg2dist call below
if not egginfo_name:
if newname.endswith('.egg-info'):
egginfo_name = newname
elif '.egg-info/' in newname:
egginfo_name, sep, _ = newname.rpartition('/')
dir = tempfile.mkdtemp(suffix="_b2w")
bdw.extractall(dir, members)
# egg2wheel
abi = 'none'
pyver = info['pyver']
arch = (info['arch'] or 'any').replace('.', '_').replace('-', '_')
# Wininst installers always have arch even if they are not
# architecture-specific (because the format itself is).
# So, assume the content is architecture-neutral if root is purelib.
if root_is_purelib:
arch = 'any'
# If the installer is architecture-specific, it's almost certainly also
# CPython-specific.
if arch != 'any':
pyver = pyver.replace('py', 'cp')
wheel_name = '-'.join((dist_info, pyver, abi, arch))
if root_is_purelib:
bw = bdist_wheel(dist.Distribution())
else:
bw = _bdist_wheel_tag(dist.Distribution())
bw.root_is_pure = root_is_purelib
bw.python_tag = pyver
bw.plat_name_supplied = True
bw.plat_name = info['arch'] or 'any'
if not root_is_purelib:
bw.full_tag_supplied = True
bw.full_tag = (pyver, abi, arch)
dist_info_dir = os.path.join(dir, '%s.dist-info' % dist_info)
bw.egg2dist(os.path.join(dir, egginfo_name), dist_info_dir)
bw.write_wheelfile(dist_info_dir, generator='wininst2wheel')
wheel_path = os.path.join(dest_dir, wheel_name)
with WheelFile(wheel_path, 'w') as wf:
wf.write_files(dir)
shutil.rmtree(dir)
def convert(files, dest_dir, verbose):
# Only support wheel convert if pkg_resources is present
require_pkgresources('wheel convert')
for pat in files:
for installer in iglob(pat):
if os.path.splitext(installer)[1] == '.egg':
conv = egg2wheel
else:
conv = wininst2wheel
if verbose:
print("{}... ".format(installer))
sys.stdout.flush()
conv(installer, dest_dir)
if verbose:
print("OK")

@@ -0,0 +1,79 @@
from __future__ import print_function
import os.path
import re
import sys
from wheel.cli import WheelError
from wheel.wheelfile import WheelFile
DIST_INFO_RE = re.compile(r"^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))\.dist-info$")
BUILD_NUM_RE = re.compile(br'Build: (\d\w*)$')
def pack(directory, dest_dir, build_number):
"""Repack a previously unpacked wheel directory into a new wheel file.
The .dist-info/WHEEL file must contain one or more tags so that the target
wheel file name can be determined.
:param directory: The unpacked wheel directory
:param dest_dir: Destination directory (defaults to the current directory)
"""
# Find the .dist-info directory
dist_info_dirs = [fn for fn in os.listdir(directory)
if os.path.isdir(os.path.join(directory, fn)) and DIST_INFO_RE.match(fn)]
if len(dist_info_dirs) > 1:
raise WheelError('Multiple .dist-info directories found in {}'.format(directory))
elif not dist_info_dirs:
raise WheelError('No .dist-info directories found in {}'.format(directory))
# Determine the target wheel filename
dist_info_dir = dist_info_dirs[0]
name_version = DIST_INFO_RE.match(dist_info_dir).group('namever')
# Read the tags and the existing build number from .dist-info/WHEEL
existing_build_number = None
wheel_file_path = os.path.join(directory, dist_info_dir, 'WHEEL')
with open(wheel_file_path) as f:
tags = []
for line in f:
if line.startswith('Tag: '):
tags.append(line.split(' ')[1].rstrip())
elif line.startswith('Build: '):
existing_build_number = line.split(' ')[1].rstrip()
if not tags:
raise WheelError('No tags present in {}/WHEEL; cannot determine target wheel filename'
.format(dist_info_dir))
# Set the wheel file name and add/replace/remove the Build tag in .dist-info/WHEEL
build_number = build_number if build_number is not None else existing_build_number
if build_number is not None:
if build_number:
name_version += '-' + build_number
if build_number != existing_build_number:
replacement = ('Build: %s\r\n' % build_number).encode('ascii') if build_number else b''
with open(wheel_file_path, 'rb+') as f:
wheel_file_content = f.read()
if not BUILD_NUM_RE.subn(replacement, wheel_file_content)[1]:
wheel_file_content += replacement
f.truncate()
f.write(wheel_file_content)
# Reassemble the tags for the wheel file
impls = sorted({tag.split('-')[0] for tag in tags})
abivers = sorted({tag.split('-')[1] for tag in tags})
platforms = sorted({tag.split('-')[2] for tag in tags})
tagline = '-'.join(['.'.join(impls), '.'.join(abivers), '.'.join(platforms)])
# Repack the wheel
wheel_path = os.path.join(dest_dir, '{}-{}.whl'.format(name_version, tagline))
with WheelFile(wheel_path, 'w') as wf:
print("Repacking wheel as {}...".format(wheel_path), end='')
sys.stdout.flush()
wf.write_files(directory)
print('OK')

@@ -0,0 +1,25 @@
from __future__ import print_function
import os.path
import sys
from ..wheelfile import WheelFile
def unpack(path, dest='.'):
"""Unpack a wheel.
Wheel content will be unpacked to {dest}/{name}-{ver}, where {name}
is the package name and {ver} its version.
:param path: The path to the wheel.
:param dest: Destination directory (defaults to the current directory).
"""
with WheelFile(path) as wf:
namever = wf.parsed_filename.group('namever')
destination = os.path.join(dest, namever)
print("Unpacking to: {}...".format(destination), end='')
sys.stdout.flush()
wf.extractall(destination)
print('OK')
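# Usage sketch (hypothetical paths, not part of this commit): unpack a wheel,
# tweak its contents, then rebuild it with pack() from pack.py above:
#   unpack('dist/example_pkg-0.1.0-py3-none-any.whl', dest='build')
#     -> extracts to build/example_pkg-0.1.0
#   pack('build/example_pkg-0.1.0', 'dist', None)
#     -> recreates dist/example_pkg-0.1.0-py3-none-any.whl from the tags
#        recorded in its .dist-info/WHEEL file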

@@ -0,0 +1,341 @@
"""
This module contains functions that analyse dynamic library headers to
extract system information. Currently it only handles macOS.
A library file on macOS starts with either a Mach-O or a FAT header.
The two are distinguished by the first 32 bits, the so-called magic number.
Constants with the _MAGIC suffix hold the native byte order value; the
_CIGAM suffix denotes the reversed byte order.
Both header types come in 32-bit and 64-bit variants.
A FAT header indicates that the file bundles several builds of the library
(typically for different architectures) and records where each embedded
Mach-O header starts. Each section that begins with a Mach-O header holds
one build, so a file that starts directly with a Mach-O header contains
only one build.
A Mach-O header is followed by load-command sections, each of which starts
with two fields:
cmd - magic number identifying this command
cmdsize - total size occupied by this section.
Only the LC_VERSION_MIN_MACOSX command (macOS 10.13 and earlier) and the
LC_BUILD_VERSION command (macOS 10.14 and newer) are of interest here,
because they contain the minimal supported system version.
Important remarks:
- For FAT files this implementation reports the maximum version found.
It does not check whether a slice is 32- or 64-bit and does not compare it
with the package currently being built, so it may report a higher version
than is actually needed.
- All structure signatures are taken from the macOS header files.
- The binary format should be more stable than `otool` output; if Apple
changes it, both approaches would need updating.
"""
import ctypes
import sys
"""here the needed const and struct from mach-o header files"""
FAT_MAGIC = 0xcafebabe
FAT_CIGAM = 0xbebafeca
FAT_MAGIC_64 = 0xcafebabf
FAT_CIGAM_64 = 0xbfbafeca
MH_MAGIC = 0xfeedface
MH_CIGAM = 0xcefaedfe
MH_MAGIC_64 = 0xfeedfacf
MH_CIGAM_64 = 0xcffaedfe
LC_VERSION_MIN_MACOSX = 0x24
LC_BUILD_VERSION = 0x32
mach_header_fields = [
("magic", ctypes.c_uint32), ("cputype", ctypes.c_int),
("cpusubtype", ctypes.c_int), ("filetype", ctypes.c_uint32),
("ncmds", ctypes.c_uint32), ("sizeofcmds", ctypes.c_uint32),
("flags", ctypes.c_uint32)
]
"""
struct mach_header {
uint32_t magic; /* mach magic number identifier */
cpu_type_t cputype; /* cpu specifier */
cpu_subtype_t cpusubtype; /* machine specifier */
uint32_t filetype; /* type of file */
uint32_t ncmds; /* number of load commands */
uint32_t sizeofcmds; /* the size of all the load commands */
uint32_t flags; /* flags */
};
typedef integer_t cpu_type_t;
typedef integer_t cpu_subtype_t;
"""
mach_header_fields_64 = mach_header_fields + [("reserved", ctypes.c_uint32)]
"""
struct mach_header_64 {
uint32_t magic; /* mach magic number identifier */
cpu_type_t cputype; /* cpu specifier */
cpu_subtype_t cpusubtype; /* machine specifier */
uint32_t filetype; /* type of file */
uint32_t ncmds; /* number of load commands */
uint32_t sizeofcmds; /* the size of all the load commands */
uint32_t flags; /* flags */
uint32_t reserved; /* reserved */
};
"""
fat_header_fields = [("magic", ctypes.c_uint32), ("nfat_arch", ctypes.c_uint32)]
"""
struct fat_header {
uint32_t magic; /* FAT_MAGIC or FAT_MAGIC_64 */
uint32_t nfat_arch; /* number of structs that follow */
};
"""
fat_arch_fields = [
("cputype", ctypes.c_int), ("cpusubtype", ctypes.c_int),
("offset", ctypes.c_uint32), ("size", ctypes.c_uint32),
("align", ctypes.c_uint32)
]
"""
struct fat_arch {
cpu_type_t cputype; /* cpu specifier (int) */
cpu_subtype_t cpusubtype; /* machine specifier (int) */
uint32_t offset; /* file offset to this object file */
uint32_t size; /* size of this object file */
uint32_t align; /* alignment as a power of 2 */
};
"""
fat_arch_64_fields = [
("cputype", ctypes.c_int), ("cpusubtype", ctypes.c_int),
("offset", ctypes.c_uint64), ("size", ctypes.c_uint64),
("align", ctypes.c_uint32), ("reserved", ctypes.c_uint32)
]
"""
struct fat_arch_64 {
cpu_type_t cputype; /* cpu specifier (int) */
cpu_subtype_t cpusubtype; /* machine specifier (int) */
uint64_t offset; /* file offset to this object file */
uint64_t size; /* size of this object file */
uint32_t align; /* alignment as a power of 2 */
uint32_t reserved; /* reserved */
};
"""
segment_base_fields = [("cmd", ctypes.c_uint32), ("cmdsize", ctypes.c_uint32)]
"""base for reading segment info"""
segment_command_fields = [
("cmd", ctypes.c_uint32), ("cmdsize", ctypes.c_uint32),
("segname", ctypes.c_char * 16), ("vmaddr", ctypes.c_uint32),
("vmsize", ctypes.c_uint32), ("fileoff", ctypes.c_uint32),
("filesize", ctypes.c_uint32), ("maxprot", ctypes.c_int),
("initprot", ctypes.c_int), ("nsects", ctypes.c_uint32),
("flags", ctypes.c_uint32),
]
"""
struct segment_command { /* for 32-bit architectures */
uint32_t cmd; /* LC_SEGMENT */
uint32_t cmdsize; /* includes sizeof section structs */
char segname[16]; /* segment name */
uint32_t vmaddr; /* memory address of this segment */
uint32_t vmsize; /* memory size of this segment */
uint32_t fileoff; /* file offset of this segment */
uint32_t filesize; /* amount to map from the file */
vm_prot_t maxprot; /* maximum VM protection */
vm_prot_t initprot; /* initial VM protection */
uint32_t nsects; /* number of sections in segment */
uint32_t flags; /* flags */
};
typedef int vm_prot_t;
"""
segment_command_fields_64 = [
("cmd", ctypes.c_uint32), ("cmdsize", ctypes.c_uint32),
("segname", ctypes.c_char * 16), ("vmaddr", ctypes.c_uint64),
("vmsize", ctypes.c_uint64), ("fileoff", ctypes.c_uint64),
("filesize", ctypes.c_uint64), ("maxprot", ctypes.c_int),
("initprot", ctypes.c_int), ("nsects", ctypes.c_uint32),
("flags", ctypes.c_uint32),
]
"""
struct segment_command_64 { /* for 64-bit architectures */
uint32_t cmd; /* LC_SEGMENT_64 */
uint32_t cmdsize; /* includes sizeof section_64 structs */
char segname[16]; /* segment name */
uint64_t vmaddr; /* memory address of this segment */
uint64_t vmsize; /* memory size of this segment */
uint64_t fileoff; /* file offset of this segment */
uint64_t filesize; /* amount to map from the file */
vm_prot_t maxprot; /* maximum VM protection */
vm_prot_t initprot; /* initial VM protection */
uint32_t nsects; /* number of sections in segment */
uint32_t flags; /* flags */
};
"""
version_min_command_fields = segment_base_fields + \
[("version", ctypes.c_uint32), ("sdk", ctypes.c_uint32)]
"""
struct version_min_command {
uint32_t cmd; /* LC_VERSION_MIN_MACOSX or
LC_VERSION_MIN_IPHONEOS or
LC_VERSION_MIN_WATCHOS or
LC_VERSION_MIN_TVOS */
uint32_t cmdsize; /* sizeof(struct min_version_command) */
uint32_t version; /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
uint32_t sdk; /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
};
"""
build_version_command_fields = segment_base_fields + \
[("platform", ctypes.c_uint32), ("minos", ctypes.c_uint32),
("sdk", ctypes.c_uint32), ("ntools", ctypes.c_uint32)]
"""
struct build_version_command {
uint32_t cmd; /* LC_BUILD_VERSION */
uint32_t cmdsize; /* sizeof(struct build_version_command) plus */
/* ntools * sizeof(struct build_tool_version) */
uint32_t platform; /* platform */
uint32_t minos; /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
uint32_t sdk; /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
uint32_t ntools; /* number of tool entries following this */
};
"""
def swap32(x):
return (((x << 24) & 0xFF000000) |
((x << 8) & 0x00FF0000) |
((x >> 8) & 0x0000FF00) |
((x >> 24) & 0x000000FF))
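# Worked example (illustration only): swapping a byte-reversed magic number
# recovers its native-order counterpart:
#   swap32(MH_CIGAM) == MH_MAGIC      # 0xcefaedfe -> 0xfeedface
#   swap32(FAT_CIGAM) == FAT_MAGIC    # 0xbebafeca -> 0xcafebabe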
def get_base_class_and_magic_number(lib_file, seek=None):
if seek is None:
seek = lib_file.tell()
else:
lib_file.seek(seek)
magic_number = ctypes.c_uint32.from_buffer_copy(
lib_file.read(ctypes.sizeof(ctypes.c_uint32))).value
# Handle wrong byte order
if magic_number in [FAT_CIGAM, FAT_CIGAM_64, MH_CIGAM, MH_CIGAM_64]:
if sys.byteorder == "little":
BaseClass = ctypes.BigEndianStructure
else:
BaseClass = ctypes.LittleEndianStructure
magic_number = swap32(magic_number)
else:
BaseClass = ctypes.Structure
lib_file.seek(seek)
return BaseClass, magic_number
def read_data(struct_class, lib_file):
return struct_class.from_buffer_copy(lib_file.read(
ctypes.sizeof(struct_class)))
def extract_macosx_min_system_version(path_to_lib):
with open(path_to_lib, "rb") as lib_file:
BaseClass, magic_number = get_base_class_and_magic_number(lib_file, 0)
if magic_number not in [FAT_MAGIC, FAT_MAGIC_64, MH_MAGIC, MH_MAGIC_64]:
return
if magic_number in [FAT_MAGIC, FAT_CIGAM_64]:
class FatHeader(BaseClass):
_fields_ = fat_header_fields
fat_header = read_data(FatHeader, lib_file)
if magic_number == FAT_MAGIC:
class FatArch(BaseClass):
_fields_ = fat_arch_fields
else:
class FatArch(BaseClass):
_fields_ = fat_arch_64_fields
fat_arch_list = [read_data(FatArch, lib_file) for _ in range(fat_header.nfat_arch)]
versions_list = []
for el in fat_arch_list:
try:
version = read_mach_header(lib_file, el.offset)
if version is not None:
versions_list.append(version)
except ValueError:
pass
if len(versions_list) > 0:
return max(versions_list)
else:
return None
else:
try:
return read_mach_header(lib_file, 0)
except ValueError:
"""when some error during read library files"""
return None
def read_mach_header(lib_file, seek=None):
"""
This function parses a Mach-O header and extracts
information about the minimal system version.
:param lib_file: reference to the opened library file, with its file pointer at the header
"""
if seek is not None:
lib_file.seek(seek)
base_class, magic_number = get_base_class_and_magic_number(lib_file)
arch = "32" if magic_number == MH_MAGIC else "64"
class SegmentBase(base_class):
_fields_ = segment_base_fields
if arch == "32":
class MachHeader(base_class):
_fields_ = mach_header_fields
else:
class MachHeader(base_class):
_fields_ = mach_header_fields_64
mach_header = read_data(MachHeader, lib_file)
for _i in range(mach_header.ncmds):
pos = lib_file.tell()
segment_base = read_data(SegmentBase, lib_file)
lib_file.seek(pos)
if segment_base.cmd == LC_VERSION_MIN_MACOSX:
class VersionMinCommand(base_class):
_fields_ = version_min_command_fields
version_info = read_data(VersionMinCommand, lib_file)
return parse_version(version_info.version)
elif segment_base.cmd == LC_BUILD_VERSION:
class VersionBuild(base_class):
_fields_ = build_version_command_fields
version_info = read_data(VersionBuild, lib_file)
return parse_version(version_info.minos)
else:
lib_file.seek(pos + segment_base.cmdsize)
continue
def parse_version(version):
x = (version & 0xffff0000) >> 16
y = (version & 0x0000ff00) >> 8
z = (version & 0x000000ff)
return x, y, z
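# Worked example (illustration only): versions are packed into nibbles as
# xxxx.yy.zz, so macOS 10.14.1 is encoded as 0x000A0E01 and decodes as:
#   x = (0x000A0E01 & 0xffff0000) >> 16  ->  0x000A  ->  10
#   y = (0x000A0E01 & 0x0000ff00) >> 8   ->  0x0E    ->  14
#   z =  0x000A0E01 & 0x000000ff         ->  0x01    ->  1
# so parse_version(0x000A0E01) == (10, 14, 1)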

@@ -0,0 +1,138 @@
"""
Tools for converting old- to new-style metadata.
"""
import os.path
import re
import textwrap
import pkg_resources
from .pkginfo import read_pkg_info
# Support markers syntax with the extra at the end only
EXTRA_RE = re.compile(
r"""^(?P<package>.*?)(;\s*(?P<condition>.*?)(extra == '(?P<extra>.*?)')?)$""")
def requires_to_requires_dist(requirement):
"""Return the version specifier for a requirement in PEP 345/566 fashion."""
if getattr(requirement, 'url', None):
return " @ " + requirement.url
requires_dist = []
for op, ver in requirement.specs:
requires_dist.append(op + ver)
if not requires_dist:
return ''
return " (%s)" % ','.join(sorted(requires_dist))
def convert_requirements(requirements):
"""Yield Requires-Dist: strings for parsed requirements strings."""
for req in requirements:
parsed_requirement = pkg_resources.Requirement.parse(req)
spec = requires_to_requires_dist(parsed_requirement)
extras = ",".join(sorted(parsed_requirement.extras))
if extras:
extras = "[%s]" % extras
yield (parsed_requirement.project_name + extras + spec)
def generate_requirements(extras_require):
"""
Convert requirements from a setup()-style dictionary to ('Requires-Dist', 'requirement')
and ('Provides-Extra', 'extra') tuples.
extras_require is a dictionary of {extra: [requirements]} as passed to setup(),
using the empty extra {'': [requirements]} to hold install_requires.
"""
for extra, depends in extras_require.items():
condition = ''
extra = extra or ''
if ':' in extra: # setuptools extra:condition syntax
extra, condition = extra.split(':', 1)
extra = pkg_resources.safe_extra(extra)
if extra:
yield 'Provides-Extra', extra
if condition:
condition = "(" + condition + ") and "
condition += "extra == '%s'" % extra
if condition:
condition = ' ; ' + condition
for new_req in convert_requirements(depends):
yield 'Requires-Dist', new_req + condition
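# Worked example (made-up requirements; a sketch assuming pkg_resources parses
# them in the usual way):
#   generate_requirements({'': ['requests >= 2.0'],
#                          'test:python_version<"3"': ['mock']})
# yields tuples that pkginfo_to_metadata below writes out as METADATA headers:
#   Requires-Dist: requests (>=2.0)
#   Provides-Extra: test
#   Requires-Dist: mock ; (python_version<"3") and extra == 'test'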
def pkginfo_to_metadata(egg_info_path, pkginfo_path):
"""
Convert .egg-info directory with PKG-INFO to the Metadata 2.1 format
"""
pkg_info = read_pkg_info(pkginfo_path)
pkg_info.replace_header('Metadata-Version', '2.1')
# Those will be regenerated from `requires.txt`.
del pkg_info['Provides-Extra']
del pkg_info['Requires-Dist']
requires_path = os.path.join(egg_info_path, 'requires.txt')
if os.path.exists(requires_path):
with open(requires_path) as requires_file:
requires = requires_file.read()
parsed_requirements = sorted(pkg_resources.split_sections(requires),
key=lambda x: x[0] or '')
for extra, reqs in parsed_requirements:
for key, value in generate_requirements({extra: reqs}):
if (key, value) not in pkg_info.items():
pkg_info[key] = value
description = pkg_info['Description']
if description:
pkg_info.set_payload(dedent_description(pkg_info))
del pkg_info['Description']
return pkg_info
def pkginfo_unicode(pkg_info, field):
"""Hack to coax Unicode out of an email Message() - Python 3.3+"""
text = pkg_info[field]
field = field.lower()
if not isinstance(text, str):
for item in pkg_info.raw_items():
if item[0].lower() == field:
text = item[1].encode('ascii', 'surrogateescape') \
.decode('utf-8')
break
return text
def dedent_description(pkg_info):
"""
Dedent and convert pkg_info['Description'] to Unicode.
"""
description = pkg_info['Description']
# Python 3 Unicode handling, sorta.
surrogates = False
if not isinstance(description, str):
surrogates = True
description = pkginfo_unicode(pkg_info, 'Description')
description_lines = description.splitlines()
description_dedent = '\n'.join(
# if the first line of long_description is blank,
# the first line here will be indented.
(description_lines[0].lstrip(),
textwrap.dedent('\n'.join(description_lines[1:])),
'\n'))
if surrogates:
description_dedent = description_dedent \
.encode("utf8") \
.decode("ascii", "surrogateescape")
return description_dedent

@@ -0,0 +1,261 @@
"""Generate and work with PEP 425 Compatibility Tags."""
import distutils.util
import platform
import sys
import os
import sysconfig
import warnings
from .macosx_libfile import extract_macosx_min_system_version
try:
from importlib.machinery import all_suffixes as get_all_suffixes
except ImportError:
from imp import get_suffixes
def get_all_suffixes():
return [suffix[0] for suffix in get_suffixes()]
def get_config_var(var):
try:
return sysconfig.get_config_var(var)
except IOError as e: # pip Issue #1074
warnings.warn("{0}".format(e), RuntimeWarning)
return None
def get_abbr_impl():
"""Return abbreviated implementation name."""
impl = platform.python_implementation()
if impl == 'PyPy':
return 'pp'
elif impl == 'Jython':
return 'jy'
elif impl == 'IronPython':
return 'ip'
elif impl == 'CPython':
return 'cp'
raise LookupError('Unknown Python implementation: ' + impl)
def get_impl_ver():
"""Return implementation version."""
impl_ver = get_config_var("py_version_nodot")
if not impl_ver:
impl_ver = ''.join(map(str, get_impl_version_info()))
return impl_ver
def get_impl_version_info():
"""Return sys.version_info-like tuple for use in decrementing the minor
version."""
return sys.version_info[0], sys.version_info[1]
def get_flag(var, fallback, expected=True, warn=True):
"""Use a fallback method for determining SOABI flags if the needed config
var is unset or unavailable."""
val = get_config_var(var)
if val is None:
if warn:
warnings.warn("Config variable '{0}' is unset, Python ABI tag may "
"be incorrect".format(var), RuntimeWarning, 2)
return fallback()
return val == expected
def get_abi_tag():
"""Return the ABI tag based on SOABI (if available) or emulate SOABI
(CPython 2, PyPy)."""
soabi = get_config_var('SOABI')
impl = get_abbr_impl()
if not soabi and impl in ('cp', 'pp') and hasattr(sys, 'maxunicode'):
d = ''
m = ''
u = ''
if get_flag('Py_DEBUG',
lambda: hasattr(sys, 'gettotalrefcount'),
warn=(impl == 'cp')):
d = 'd'
if get_flag('WITH_PYMALLOC',
lambda: impl == 'cp',
warn=(impl == 'cp' and
sys.version_info < (3, 8))) \
and sys.version_info < (3, 8):
m = 'm'
if get_flag('Py_UNICODE_SIZE',
lambda: sys.maxunicode == 0x10ffff,
expected=4,
warn=(impl == 'cp' and
sys.version_info < (3, 3))) \
and sys.version_info < (3, 3):
u = 'u'
abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
elif soabi and soabi.startswith('cpython-'):
abi = 'cp' + soabi.split('-')[1]
elif soabi:
abi = soabi.replace('.', '_').replace('-', '_')
else:
abi = None
return abi
def calculate_macosx_platform_tag(archive_root, platform_tag):
"""
Calculate the proper macOS platform tag based on the files included in the wheel.
Example platform tag: `macosx-10.14-x86_64`
"""
prefix, base_version, suffix = platform_tag.split('-')
base_version = tuple([int(x) for x in base_version.split(".")])
if len(base_version) >= 2:
base_version = base_version[0:2]
assert len(base_version) == 2
if "MACOSX_DEPLOYMENT_TARGET" in os.environ:
deploy_target = tuple([int(x) for x in os.environ[
"MACOSX_DEPLOYMENT_TARGET"].split(".")])
if len(deploy_target) >= 2:
deploy_target = deploy_target[0:2]
if deploy_target < base_version:
sys.stderr.write(
"[WARNING] MACOSX_DEPLOYMENT_TARGET is set to a lower value ({}) than the "
"version on which the Python interpreter was compiled ({}), and will be "
"ignored.\n".format('.'.join(str(x) for x in deploy_target),
'.'.join(str(x) for x in base_version))
)
else:
base_version = deploy_target
assert len(base_version) == 2
start_version = base_version
versions_dict = {}
for (dirpath, dirnames, filenames) in os.walk(archive_root):
for filename in filenames:
if filename.endswith('.dylib') or filename.endswith('.so'):
lib_path = os.path.join(dirpath, filename)
min_ver = extract_macosx_min_system_version(lib_path)
if min_ver is not None:
versions_dict[lib_path] = min_ver[0:2]
if len(versions_dict) > 0:
base_version = max(base_version, max(versions_dict.values()))
# the macosx platform tag does not support minor bugfix releases
fin_base_version = "_".join([str(x) for x in base_version])
if start_version < base_version:
problematic_files = [k for k, v in versions_dict.items() if v > start_version]
problematic_files = "\n".join(problematic_files)
if len(problematic_files) == 1:
files_form = "this file"
else:
files_form = "these files"
error_message = \
"[WARNING] This wheel needs a higher macOS version than {} " \
"To silence this warning, set MACOSX_DEPLOYMENT_TARGET to at least " +\
fin_base_version + " or recreate " + files_form + " with lower " \
"MACOSX_DEPLOYMENT_TARGET: \n" + problematic_files
if "MACOSX_DEPLOYMENT_TARGET" in os.environ:
error_message = error_message.format("is set in MACOSX_DEPLOYMENT_TARGET variable.")
else:
error_message = error_message.format(
"the version your Python interpreter is compiled against.")
sys.stderr.write(error_message)
platform_tag = prefix + "_" + fin_base_version + "_" + suffix
return platform_tag
def get_platform(archive_root):
"""Return our platform name 'win32', 'linux_x86_64'"""
# XXX remove distutils dependency
result = distutils.util.get_platform()
if result.startswith("macosx") and archive_root is not None:
result = calculate_macosx_platform_tag(archive_root, result)
result = result.replace('.', '_').replace('-', '_')
if result == "linux_x86_64" and sys.maxsize == 2147483647:
# pip pull request #3497
result = "linux_i686"
return result
def get_supported(archive_root, versions=None, supplied_platform=None):
"""Return a list of supported tags for each version specified in
`versions`.
:param versions: a list of string versions, of the form ["33", "32"],
or None. The first version will be assumed to support our ABI.
"""
supported = []
# Versions must be given with respect to the preference
if versions is None:
versions = []
version_info = get_impl_version_info()
major = version_info[:-1]
# Support all previous minor Python versions.
for minor in range(version_info[-1], -1, -1):
versions.append(''.join(map(str, major + (minor,))))
impl = get_abbr_impl()
abis = []
abi = get_abi_tag()
if abi:
abis[0:0] = [abi]
abi3s = set()
for suffix in get_all_suffixes():
if suffix.startswith('.abi'):
abi3s.add(suffix.split('.', 2)[1])
abis.extend(sorted(list(abi3s)))
abis.append('none')
platforms = []
if supplied_platform:
platforms.append(supplied_platform)
platforms.append(get_platform(archive_root))
# Current version, current API (built specifically for our Python):
for abi in abis:
for arch in platforms:
supported.append(('%s%s' % (impl, versions[0]), abi, arch))
# abi3 modules compatible with older version of Python
for version in versions[1:]:
# abi3 was introduced in Python 3.2
if version in ('31', '30'):
break
for abi in abi3s: # empty set if not Python 3
for arch in platforms:
supported.append(("%s%s" % (impl, version), abi, arch))
# No abi / arch, but requires our implementation:
for i, version in enumerate(versions):
supported.append(('%s%s' % (impl, version), 'none', 'any'))
if i == 0:
# Tagged specifically as being cross-version compatible
# (with just the major version specified)
supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))
# Major Python version + platform; e.g. binaries not using the Python API
for arch in platforms:
supported.append(('py%s' % (versions[0][0]), 'none', arch))
# No abi / arch, generic Python
for i, version in enumerate(versions):
supported.append(('py%s' % (version,), 'none', 'any'))
if i == 0:
supported.append(('py%s' % (version[0]), 'none', 'any'))
return supported
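To make the resulting ordering concrete, here is a minimal, self-contained sketch (not part of this diff) of the expansion get_supported performs; the impl/abi/platform values are hard-coded assumptions standing in for get_abbr_impl(), get_abi_tag() and get_platform() on a CPython 3.6 / 64-bit Linux interpreter, and the bare cross-version "cp3" tag is omitted for brevity:

# Illustrative sketch only (not the module's code): helper results are
# hard-coded assumptions for a CPython 3.6 interpreter on 64-bit Linux.
impl, abi, plat = "cp", "cp36m", "linux_x86_64"
versions = ["36", "35", "34", "33", "32"]              # newest (ours) first

supported = []
# Most specific: built for exactly this interpreter and platform.
supported.append((impl + versions[0], abi, plat))
# Stable-ABI (abi3) wheels built against any older Python >= 3.2.
supported.extend((impl + v, "abi3", plat) for v in versions[1:])
# Pure-Python wheels tagged for this implementation only.
supported.extend((impl + v, "none", "any") for v in versions)
# Least specific: platform-bound, then fully generic "py3*" tags.
supported.append(("py3", "none", plat))
supported.extend(("py" + v, "none", "any") for v in versions)

print(supported[0])     # ('cp36', 'cp36m', 'linux_x86_64')
print(supported[-1])    # ('py32', 'none', 'any')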

@ -0,0 +1,43 @@
"""Tools for reading and writing PKG-INFO / METADATA without caring
about the encoding."""
from email.parser import Parser
try:
unicode
_PY3 = False
except NameError:
_PY3 = True
if not _PY3:
from email.generator import Generator
def read_pkg_info_bytes(bytestr):
return Parser().parsestr(bytestr)
def read_pkg_info(path):
with open(path, "r") as headers:
message = Parser().parse(headers)
return message
def write_pkg_info(path, message):
with open(path, 'w') as metadata:
Generator(metadata, mangle_from_=False, maxheaderlen=0).flatten(message)
else:
from email.generator import BytesGenerator
def read_pkg_info_bytes(bytestr):
headers = bytestr.decode(encoding="ascii", errors="surrogateescape")
message = Parser().parsestr(headers)
return message
def read_pkg_info(path):
with open(path, "r",
encoding="ascii",
errors="surrogateescape") as headers:
message = Parser().parse(headers)
return message
def write_pkg_info(path, message):
with open(path, "wb") as out:
BytesGenerator(out, mangle_from_=False, maxheaderlen=0).flatten(message)
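As a quick usage sketch (assuming Python 3, and a hypothetical METADATA file written to the current directory), the helpers above round-trip metadata through the email machinery like this:

# Usage sketch only; "METADATA" and the field values are hypothetical.
from email.parser import Parser
from email.generator import BytesGenerator

raw = "Metadata-Version: 2.1\nName: example-pkg\nVersion: 1.0.0\n"
message = Parser().parsestr(raw)
print(message["Name"])                      # example-pkg

message.replace_header("Version", "1.0.1")
with open("METADATA", "wb") as out:
    # byte-oriented output, mirroring write_pkg_info above
    BytesGenerator(out, mangle_from_=False, maxheaderlen=0).flatten(message)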

@ -0,0 +1,46 @@
import base64
import io
import sys
if sys.version_info[0] < 3:
text_type = unicode # noqa: F821
StringIO = io.BytesIO
def native(s, encoding='utf-8'):
if isinstance(s, unicode): # noqa: F821
return s.encode(encoding)
return s
else:
text_type = str
StringIO = io.StringIO
def native(s, encoding='utf-8'):
if isinstance(s, bytes):
return s.decode(encoding)
return s
def urlsafe_b64encode(data):
"""urlsafe_b64encode without padding"""
return base64.urlsafe_b64encode(data).rstrip(b'=')
def urlsafe_b64decode(data):
"""urlsafe_b64decode without padding"""
pad = b'=' * (4 - (len(data) & 3))
return base64.urlsafe_b64decode(data + pad)
def as_unicode(s):
if isinstance(s, bytes):
return s.decode('utf-8')
return s
def as_bytes(s):
if isinstance(s, text_type):
return s.encode('utf-8')
return s
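A short round-trip, assuming the wheel package is installed so these helpers are importable as wheel.util, shows why the '=' padding is stripped on encode and restored before decode; this is the digest encoding that RECORD entries use:

# Round-trip sketch; assumes wheel.util is importable (i.e. wheel is installed).
from wheel.util import urlsafe_b64decode, urlsafe_b64encode

digest = b"\x00\x01\x02\x03"
encoded = urlsafe_b64encode(digest)
print(encoded)                               # b'AAECAw' -- no '=' padding
assert urlsafe_b64decode(encoded) == digest  # padding restored before decode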

@ -0,0 +1,169 @@
from __future__ import print_function
import csv
import hashlib
import os.path
import re
import stat
import time
from collections import OrderedDict
from distutils import log as logger
from zipfile import ZIP_DEFLATED, ZipInfo, ZipFile
from wheel.cli import WheelError
from wheel.util import urlsafe_b64decode, as_unicode, native, urlsafe_b64encode, as_bytes, StringIO
# Non-greedy matching of an optional build number may be too clever (more
# invalid wheel filenames will match). Separate regex for .dist-info?
WHEEL_INFO_RE = re.compile(
r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.+?))(-(?P<build>\d[^-]*))?
-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)\.whl$""",
re.VERBOSE)
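# Example (added comment): "pip-20.0.2-py2.py3-none-any.whl" parses as
# name='pip', ver='20.0.2', build=None, pyver='py2.py3', abi='none',
# plat='any'; a filename that does not match makes WheelFile.__init__ below
# raise WheelError.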
def get_zipinfo_datetime(timestamp=None):
# Some applications need reproducible .whl files, but they can't do this without forcing
# the timestamp of the individual ZipInfo objects. See issue #143.
timestamp = int(os.environ.get('SOURCE_DATE_EPOCH', timestamp or time.time()))
return time.gmtime(timestamp)[0:6]
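# Example (added comment): exporting SOURCE_DATE_EPOCH=315532800 pins every
# archive member written below to (1980, 1, 1, 0, 0, 0), which is what lets
# repeated builds of the same tree produce byte-identical wheels.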
class WheelFile(ZipFile):
"""A ZipFile derivative class that also reads SHA-256 hashes from
.dist-info/RECORD and checks any read files against those.
"""
_default_algorithm = hashlib.sha256
def __init__(self, file, mode='r', compression=ZIP_DEFLATED):
basename = os.path.basename(file)
self.parsed_filename = WHEEL_INFO_RE.match(basename)
if not basename.endswith('.whl') or self.parsed_filename is None:
raise WheelError("Bad wheel filename {!r}".format(basename))
ZipFile.__init__(self, file, mode, compression=compression, allowZip64=True)
self.dist_info_path = '{}.dist-info'.format(self.parsed_filename.group('namever'))
self.record_path = self.dist_info_path + '/RECORD'
self._file_hashes = OrderedDict()
self._file_sizes = {}
if mode == 'r':
# Ignore RECORD and any embedded wheel signatures
self._file_hashes[self.record_path] = None, None
self._file_hashes[self.record_path + '.jws'] = None, None
self._file_hashes[self.record_path + '.p7s'] = None, None
# Fill in the expected hashes by reading them from RECORD
try:
record = self.open(self.record_path)
except KeyError:
raise WheelError('Missing {} file'.format(self.record_path))
with record:
for line in record:
line = line.decode('utf-8')
path, hash_sum, size = line.rsplit(u',', 2)
if hash_sum:
algorithm, hash_sum = hash_sum.split(u'=')
try:
hashlib.new(algorithm)
except ValueError:
raise WheelError('Unsupported hash algorithm: {}'.format(algorithm))
if algorithm.lower() in {'md5', 'sha1'}:
raise WheelError(
'Weak hash algorithm ({}) is not permitted by PEP 427'
.format(algorithm))
self._file_hashes[path] = (
algorithm, urlsafe_b64decode(hash_sum.encode('ascii')))
def open(self, name_or_info, mode="r", pwd=None):
def _update_crc(newdata, eof=None):
if eof is None:
eof = ef._eof
update_crc_orig(newdata)
else: # Python 2
update_crc_orig(newdata, eof)
running_hash.update(newdata)
if eof and running_hash.digest() != expected_hash:
raise WheelError("Hash mismatch for file '{}'".format(native(ef_name)))
ef = ZipFile.open(self, name_or_info, mode, pwd)
ef_name = as_unicode(name_or_info.filename if isinstance(name_or_info, ZipInfo)
else name_or_info)
if mode == 'r' and not ef_name.endswith('/'):
if ef_name not in self._file_hashes:
raise WheelError("No hash found for file '{}'".format(native(ef_name)))
algorithm, expected_hash = self._file_hashes[ef_name]
if expected_hash is not None:
# Monkey patch the _update_crc method to also check for the hash from RECORD
running_hash = hashlib.new(algorithm)
update_crc_orig, ef._update_crc = ef._update_crc, _update_crc
return ef
def write_files(self, base_dir):
logger.info("creating '%s' and adding '%s' to it", self.filename, base_dir)
deferred = []
for root, dirnames, filenames in os.walk(base_dir):
# Sort the directory names so that `os.walk` will walk them in a
# defined order on the next iteration.
dirnames.sort()
for name in sorted(filenames):
path = os.path.normpath(os.path.join(root, name))
if os.path.isfile(path):
arcname = os.path.relpath(path, base_dir).replace(os.path.sep, '/')
if arcname == self.record_path:
pass
elif root.endswith('.dist-info'):
deferred.append((path, arcname))
else:
self.write(path, arcname)
deferred.sort()
for path, arcname in deferred:
self.write(path, arcname)
def write(self, filename, arcname=None, compress_type=None):
with open(filename, 'rb') as f:
st = os.fstat(f.fileno())
data = f.read()
zinfo = ZipInfo(arcname or filename, date_time=get_zipinfo_datetime(st.st_mtime))
zinfo.external_attr = (stat.S_IMODE(st.st_mode) | stat.S_IFMT(st.st_mode)) << 16
zinfo.compress_type = compress_type or self.compression
self.writestr(zinfo, data, compress_type)
def writestr(self, zinfo_or_arcname, bytes, compress_type=None):
ZipFile.writestr(self, zinfo_or_arcname, bytes, compress_type)
fname = (zinfo_or_arcname.filename if isinstance(zinfo_or_arcname, ZipInfo)
else zinfo_or_arcname)
logger.info("adding '%s'", fname)
if fname != self.record_path:
hash_ = self._default_algorithm(bytes)
self._file_hashes[fname] = hash_.name, native(urlsafe_b64encode(hash_.digest()))
self._file_sizes[fname] = len(bytes)
def close(self):
# Write RECORD
if self.fp is not None and self.mode == 'w' and self._file_hashes:
data = StringIO()
writer = csv.writer(data, delimiter=',', quotechar='"', lineterminator='\n')
writer.writerows((
(
fname,
algorithm + "=" + hash_,
self._file_sizes[fname]
)
for fname, (algorithm, hash_) in self._file_hashes.items()
))
writer.writerow((format(self.record_path), "", ""))
zinfo = ZipInfo(native(self.record_path), date_time=get_zipinfo_datetime())
zinfo.compress_type = self.compression
zinfo.external_attr = 0o664 << 16
self.writestr(zinfo, as_bytes(data.getvalue()))
ZipFile.close(self)
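Finally, a hedged usage sketch (the wheel filename is hypothetical, not one taken from this repository): opening a wheel in read mode loads the hashes from RECORD, and reading any member re-verifies it against those hashes on the fly.

# Usage sketch; "example_pkg-1.0.0-py3-none-any.whl" is a hypothetical wheel.
from wheel.wheelfile import WheelFile

with WheelFile("example_pkg-1.0.0-py3-none-any.whl") as wf:
    for name in wf.namelist():
        with wf.open(name) as member:
            member.read()            # raises WheelError on a hash mismatch
        print("verified", name)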

Binary file not shown.