Code import
This commit is contained in:
2
venv/lib/python2.7/site-packages/wheel/__init__.py
Normal file
2
venv/lib/python2.7/site-packages/wheel/__init__.py
Normal file
@@ -0,0 +1,2 @@
|
||||
# __variables__ with double-quoted values will be available in setup.py:
|
||||
__version__ = "0.29.0"
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/__init__.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/__init__.pyc
Normal file
Binary file not shown.
17
venv/lib/python2.7/site-packages/wheel/__main__.py
Normal file
17
venv/lib/python2.7/site-packages/wheel/__main__.py
Normal file
@@ -0,0 +1,17 @@
|
||||
"""
|
||||
Wheel command line tool (enable python -m wheel syntax)
|
||||
"""
|
||||
|
||||
import sys
|
||||
|
||||
def main():  # needed for console script
    """Entry point: fix sys.path when run from inside a .whl, then dispatch.

    Delegates to ``wheel.tool.main()`` and exits with its return code.
    """
    if __package__ == '':
        # To be able to run 'python wheel-0.9.whl/wheel': the archive
        # itself must be importable, so prepend its parent directory.
        import os.path
        archive_parent = os.path.dirname(os.path.dirname(__file__))
        sys.path.insert(0, archive_parent)
    import wheel.tool
    sys.exit(wheel.tool.main())


if __name__ == "__main__":
    sys.exit(main())
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/__main__.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/__main__.pyc
Normal file
Binary file not shown.
80
venv/lib/python2.7/site-packages/wheel/archive.py
Normal file
80
venv/lib/python2.7/site-packages/wheel/archive.py
Normal file
@@ -0,0 +1,80 @@
|
||||
"""
|
||||
Archive tools for wheel.
|
||||
"""
|
||||
|
||||
import os
|
||||
import time
|
||||
import logging
|
||||
import os.path
|
||||
import zipfile
|
||||
|
||||
log = logging.getLogger("wheel")
|
||||
|
||||
|
||||
def archive_wheelfile(base_name, base_dir):
    """Archive all files under `base_dir` in a whl file and name it like
    `base_name`.

    Temporarily switches the working directory to ``base_dir`` so archive
    members get paths relative to it; the previous cwd is always restored.
    """
    saved_cwd = os.path.abspath(os.curdir)
    target = os.path.abspath(base_name)
    try:
        os.chdir(base_dir)
        return make_wheelfile_inner(target)
    finally:
        os.chdir(saved_cwd)
|
||||
|
||||
|
||||
def make_wheelfile_inner(base_name, base_dir='.'):
|
||||
"""Create a whl file from all the files under 'base_dir'.
|
||||
|
||||
Places .dist-info at the end of the archive."""
|
||||
|
||||
zip_filename = base_name + ".whl"
|
||||
|
||||
log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
|
||||
|
||||
# Some applications need reproducible .whl files, but they can't do this
|
||||
# without forcing the timestamp of the individual ZipInfo objects. See
|
||||
# issue #143.
|
||||
timestamp = os.environ.get('SOURCE_DATE_EPOCH')
|
||||
if timestamp is None:
|
||||
date_time = None
|
||||
else:
|
||||
date_time = time.gmtime(int(timestamp))[0:6]
|
||||
|
||||
# XXX support bz2, xz when available
|
||||
zip = zipfile.ZipFile(open(zip_filename, "wb+"), "w",
|
||||
compression=zipfile.ZIP_DEFLATED)
|
||||
|
||||
score = {'WHEEL': 1, 'METADATA': 2, 'RECORD': 3}
|
||||
deferred = []
|
||||
|
||||
def writefile(path, date_time):
|
||||
st = os.stat(path)
|
||||
if date_time is None:
|
||||
mtime = time.gmtime(st.st_mtime)
|
||||
date_time = mtime[0:6]
|
||||
zinfo = zipfile.ZipInfo(path, date_time)
|
||||
zinfo.external_attr = st.st_mode << 16
|
||||
zinfo.compress_type = zipfile.ZIP_DEFLATED
|
||||
with open(path, 'rb') as fp:
|
||||
zip.writestr(zinfo, fp.read())
|
||||
log.info("adding '%s'" % path)
|
||||
|
||||
for dirpath, dirnames, filenames in os.walk(base_dir):
|
||||
for name in filenames:
|
||||
path = os.path.normpath(os.path.join(dirpath, name))
|
||||
|
||||
if os.path.isfile(path):
|
||||
if dirpath.endswith('.dist-info'):
|
||||
deferred.append((score.get(name, 0), path))
|
||||
else:
|
||||
writefile(path, date_time)
|
||||
|
||||
deferred.sort()
|
||||
for score, path in deferred:
|
||||
writefile(path, date_time)
|
||||
|
||||
zip.close()
|
||||
|
||||
return zip_filename
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/archive.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/archive.pyc
Normal file
Binary file not shown.
453
venv/lib/python2.7/site-packages/wheel/bdist_wheel.py
Normal file
453
venv/lib/python2.7/site-packages/wheel/bdist_wheel.py
Normal file
@@ -0,0 +1,453 @@
|
||||
"""
|
||||
Create a wheel (.whl) distribution.
|
||||
|
||||
A wheel is a built archive format.
|
||||
"""
|
||||
|
||||
import csv
|
||||
import hashlib
|
||||
import os
|
||||
import subprocess
|
||||
import warnings
|
||||
import shutil
|
||||
import json
|
||||
import wheel
|
||||
|
||||
try:
|
||||
import sysconfig
|
||||
except ImportError: # pragma nocover
|
||||
# Python < 2.7
|
||||
import distutils.sysconfig as sysconfig
|
||||
|
||||
import pkg_resources
|
||||
|
||||
safe_name = pkg_resources.safe_name
|
||||
safe_version = pkg_resources.safe_version
|
||||
|
||||
from shutil import rmtree
|
||||
from email.generator import Generator
|
||||
|
||||
from distutils.util import get_platform
|
||||
from distutils.core import Command
|
||||
from distutils.sysconfig import get_python_version
|
||||
|
||||
from distutils import log as logger
|
||||
|
||||
from .pep425tags import get_abbr_impl, get_impl_ver, get_abi_tag
|
||||
from .util import native, open_for_csv
|
||||
from .archive import archive_wheelfile
|
||||
from .pkginfo import read_pkg_info, write_pkg_info
|
||||
from .metadata import pkginfo_to_dict
|
||||
from . import pep425tags, metadata
|
||||
|
||||
def safer_name(name):
|
||||
return safe_name(name).replace('-', '_')
|
||||
|
||||
def safer_version(version):
|
||||
return safe_version(version).replace('-', '_')
|
||||
|
||||
class bdist_wheel(Command):
    """distutils Command that creates a wheel (.whl) built distribution.

    Runs the regular build/install machinery into a temporary tree that
    follows the wheel layout, converts the egg-info metadata to
    .dist-info, writes the WHEEL, METADATA/metadata.json and RECORD
    files, and zips the result into the dist directory.
    """

    description = 'create a wheel distribution'

    user_options = [('bdist-dir=', 'b',
                     "temporary directory for creating the distribution"),
                    ('plat-name=', 'p',
                     "platform name to embed in generated filenames "
                     "(default: %s)" % get_platform()),
                    ('keep-temp', 'k',
                     "keep the pseudo-installation tree around after " +
                     "creating the distribution archive"),
                    ('dist-dir=', 'd',
                     "directory to put final built distributions in"),
                    ('skip-build', None,
                     "skip rebuilding everything (for testing/debugging)"),
                    ('relative', None,
                     "build the archive using relative paths"
                     "(default: false)"),
                    ('owner=', 'u',
                     "Owner name used when creating a tar file"
                     " [default: current user]"),
                    ('group=', 'g',
                     "Group name used when creating a tar file"
                     " [default: current group]"),
                    ('universal', None,
                     "make a universal wheel"
                     " (default: false)"),
                    ('python-tag=', None,
                     "Python implementation compatibility tag"
                     " (default: py%s)" % get_impl_ver()[0]),
                    ]

    boolean_options = ['keep-temp', 'skip-build', 'relative', 'universal']

    def initialize_options(self):
        """Set every option to its pre-finalization default."""
        self.bdist_dir = None
        self.data_dir = None
        self.plat_name = None
        self.plat_tag = None
        self.format = 'zip'
        self.keep_temp = False
        self.dist_dir = None
        self.distinfo_dir = None
        self.egginfo_dir = None
        self.root_is_pure = None
        self.skip_build = None
        self.relative = False
        self.owner = None
        self.group = None
        self.universal = False
        self.python_tag = 'py' + get_impl_ver()[0]
        self.plat_name_supplied = False

    def finalize_options(self):
        """Resolve defaults, inheriting unset options from 'bdist'."""
        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'wheel')

        self.data_dir = self.wheel_dist_name + '.data'
        # Remember whether the user chose a platform explicitly, before
        # 'bdist' fills in a default below (get_tag needs to know).
        self.plat_name_supplied = self.plat_name is not None

        need_options = ('dist_dir', 'plat_name', 'skip_build')

        self.set_undefined_options('bdist',
                                   *zip(need_options, need_options))

        self.root_is_pure = not (self.distribution.has_ext_modules()
                                 or self.distribution.has_c_libraries())

        # Support legacy [wheel] section for setting universal
        wheel = self.distribution.get_option_dict('wheel')
        if 'universal' in wheel:
            # please don't define this in your global configs
            val = wheel['universal'][1].strip()
            if val.lower() in ('1', 'true', 'yes'):
                self.universal = True

    @property
    def wheel_dist_name(self):
        """Return distribution full name with - replaced with _"""
        return '-'.join((safer_name(self.distribution.get_name()),
                         safer_version(self.distribution.get_version())))

    def get_tag(self):
        """Return the (impl, abi, plat) PEP 425 compatibility tag triple."""
        # bdist sets self.plat_name if unset, we should only use it for purepy
        # wheels if the user supplied it.
        if self.plat_name_supplied:
            plat_name = self.plat_name
        elif self.root_is_pure:
            plat_name = 'any'
        else:
            plat_name = self.plat_name or get_platform()
        plat_name = plat_name.replace('-', '_').replace('.', '_')

        if self.root_is_pure:
            if self.universal:
                impl = 'py2.py3'
            else:
                impl = self.python_tag
            tag = (impl, 'none', plat_name)
        else:
            impl_name = get_abbr_impl()
            impl_ver = get_impl_ver()
            # PEP 3149
            abi_tag = str(get_abi_tag()).lower()
            tag = (impl_name + impl_ver, abi_tag, plat_name)
            supported_tags = pep425tags.get_supported(
                supplied_platform=plat_name if self.plat_name_supplied else None)
            # XXX switch to this alternate implementation for non-pure:
            assert tag == supported_tags[0]
        return tag

    def get_archive_basename(self):
        """Return archive name without extension"""
        impl_tag, abi_tag, plat_tag = self.get_tag()
        archive_basename = "%s-%s-%s-%s" % (
            self.wheel_dist_name,
            impl_tag,
            abi_tag,
            plat_tag)
        return archive_basename

    def run(self):
        """Build, pseudo-install, convert metadata, and zip up the wheel."""
        build_scripts = self.reinitialize_command('build_scripts')
        # Keep scripts cross-platform: leave the shebang as plain 'python'.
        build_scripts.executable = 'python'

        if not self.skip_build:
            self.run_command('build')

        install = self.reinitialize_command('install',
                                            reinit_subcommands=True)
        install.root = self.bdist_dir
        install.compile = False
        install.skip_build = self.skip_build
        install.warn_dir = False

        # A wheel without setuptools scripts is more cross-platform.
        # Use the (undocumented) `no_ep` option to setuptools'
        # install_scripts command to avoid creating entry point scripts.
        install_scripts = self.reinitialize_command('install_scripts')
        install_scripts.no_ep = True

        # Use a custom scheme for the archive, because we have to decide
        # at installation time which scheme to use.
        for key in ('headers', 'scripts', 'data', 'purelib', 'platlib'):
            setattr(install,
                    'install_' + key,
                    os.path.join(self.data_dir, key))

        basedir_observed = ''

        if os.name == 'nt':
            # win32 barfs if any of these are ''; could be '.'?
            # (distutils.command.install:change_roots bug)
            basedir_observed = os.path.normpath(os.path.join(self.data_dir, '..'))
            self.install_libbase = self.install_lib = basedir_observed

        setattr(install,
                'install_purelib' if self.root_is_pure else 'install_platlib',
                basedir_observed)

        logger.info("installing to %s", self.bdist_dir)

        self.run_command('install')

        archive_basename = self.get_archive_basename()

        pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
        if not self.relative:
            archive_root = self.bdist_dir
        else:
            archive_root = os.path.join(
                self.bdist_dir,
                self._ensure_relative(install.install_base))

        self.set_undefined_options(
            'install_egg_info', ('target', 'egginfo_dir'))
        self.distinfo_dir = os.path.join(self.bdist_dir,
                                         '%s.dist-info' % self.wheel_dist_name)
        self.egg2dist(self.egginfo_dir,
                      self.distinfo_dir)

        self.write_wheelfile(self.distinfo_dir)

        self.write_record(self.bdist_dir, self.distinfo_dir)

        # Make the archive
        if not os.path.exists(self.dist_dir):
            os.makedirs(self.dist_dir)
        wheel_name = archive_wheelfile(pseudoinstall_root, archive_root)

        # Sign the archive
        if 'WHEEL_TOOL' in os.environ:
            subprocess.call([os.environ['WHEEL_TOOL'], 'sign', wheel_name])

        # Add to 'Distribution.dist_files' so that the "upload" command works
        getattr(self.distribution, 'dist_files', []).append(
            ('bdist_wheel', get_python_version(), wheel_name))

        if not self.keep_temp:
            if self.dry_run:
                logger.info('removing %s', self.bdist_dir)
            else:
                rmtree(self.bdist_dir)

    def write_wheelfile(self, wheelfile_base, generator='bdist_wheel (' + wheel.__version__ + ')'):
        """Write the WHEEL metadata file into directory `wheelfile_base`."""
        from email.message import Message
        msg = Message()
        msg['Wheel-Version'] = '1.0'  # of the spec
        msg['Generator'] = generator
        msg['Root-Is-Purelib'] = str(self.root_is_pure).lower()

        # Doesn't work for bdist_wininst
        impl_tag, abi_tag, plat_tag = self.get_tag()
        # Message.__setitem__ appends, so compound (dotted) tags expand
        # into one 'Tag' header per combination.
        for impl in impl_tag.split('.'):
            for abi in abi_tag.split('.'):
                for plat in plat_tag.split('.'):
                    msg['Tag'] = '-'.join((impl, abi, plat))

        wheelfile_path = os.path.join(wheelfile_base, 'WHEEL')
        logger.info('creating %s', wheelfile_path)
        with open(wheelfile_path, 'w') as f:
            Generator(f, maxheaderlen=0).flatten(msg)

    def _ensure_relative(self, path):
        """Strip a leading separator so `path` can be joined under a root."""
        # copied from dir_util, deleted
        drive, path = os.path.splitdrive(path)
        if path[0:1] == os.sep:
            path = drive + path[1:]
        return path

    def _pkginfo_to_metadata(self, egg_info_path, pkginfo_path):
        # Thin indirection point so subclasses can override the conversion.
        return metadata.pkginfo_to_metadata(egg_info_path, pkginfo_path)

    def license_file(self):
        """Return license filename from a license-file key in setup.cfg, or None."""
        metadata = self.distribution.get_option_dict('metadata')
        # FIX: idiomatic membership test ('x not in d', not 'not x in d').
        if 'license_file' not in metadata:
            return None
        return metadata['license_file'][1]

    def setupcfg_requirements(self):
        """Generate requirements from setup.cfg as
        ('Requires-Dist', 'requirement; qualifier') tuples. From a metadata
        section in setup.cfg:

        [metadata]
        provides-extra = extra1
            extra2
        requires-dist = requirement; qualifier
            another; qualifier2
            unqualified

        Yields

        ('Provides-Extra', 'extra1'),
        ('Provides-Extra', 'extra2'),
        ('Requires-Dist', 'requirement; qualifier'),
        ('Requires-Dist', 'another; qualifier2'),
        ('Requires-Dist', 'unqualified')
        """
        metadata = self.distribution.get_option_dict('metadata')

        # our .ini parser folds - to _ in key names:
        for key, title in (('provides_extra', 'Provides-Extra'),
                           ('requires_dist', 'Requires-Dist')):
            if key not in metadata:
                continue
            field = metadata[key]
            for line in field[1].splitlines():
                line = line.strip()
                if not line:
                    continue
                yield (title, line)

    def add_requirements(self, metadata_path):
        """Add additional requirements from setup.cfg to file metadata_path"""
        additional = list(self.setupcfg_requirements())
        if not additional:
            return
        pkg_info = read_pkg_info(metadata_path)
        if 'Provides-Extra' in pkg_info or 'Requires-Dist' in pkg_info:
            warnings.warn('setup.cfg requirements overwrite values from setup.py')
            del pkg_info['Provides-Extra']
            del pkg_info['Requires-Dist']
        for k, v in additional:
            pkg_info[k] = v
        write_pkg_info(metadata_path, pkg_info)

    def egg2dist(self, egginfo_path, distinfo_path):
        """Convert an .egg-info directory into a .dist-info directory"""
        def adios(p):
            """Appropriately delete directory, file or link."""
            if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p):
                shutil.rmtree(p)
            elif os.path.exists(p):
                os.unlink(p)

        adios(distinfo_path)

        if not os.path.exists(egginfo_path):
            # There is no egg-info. This is probably because the egg-info
            # file/directory is not named matching the distribution name used
            # to name the archive file. Check for this case and report
            # accordingly.
            import glob
            pat = os.path.join(os.path.dirname(egginfo_path), '*.egg-info')
            possible = glob.glob(pat)
            err = "Egg metadata expected at %s but not found" % (egginfo_path,)
            if possible:
                alt = os.path.basename(possible[0])
                err += " (%s found - possible misnamed archive file?)" % (alt,)

            raise ValueError(err)

        if os.path.isfile(egginfo_path):
            # .egg-info is a single file: it IS the PKG-INFO.
            pkg_info = self._pkginfo_to_metadata(egginfo_path, egginfo_path)
            os.mkdir(distinfo_path)
        else:
            # .egg-info is a directory
            pkginfo_path = os.path.join(egginfo_path, 'PKG-INFO')
            pkg_info = self._pkginfo_to_metadata(egginfo_path, pkginfo_path)

            # ignore common egg metadata that is useless to wheel
            shutil.copytree(egginfo_path, distinfo_path,
                            ignore=lambda x, y: set(('PKG-INFO',
                                                     'requires.txt',
                                                     'SOURCES.txt',
                                                     'not-zip-safe',)))

            # delete dependency_links if it is only whitespace
            dependency_links_path = os.path.join(distinfo_path, 'dependency_links.txt')
            with open(dependency_links_path, 'r') as dependency_links_file:
                dependency_links = dependency_links_file.read().strip()
            if not dependency_links:
                adios(dependency_links_path)

        write_pkg_info(os.path.join(distinfo_path, 'METADATA'), pkg_info)

        # XXX deprecated. Still useful for current distribute/setuptools.
        metadata_path = os.path.join(distinfo_path, 'METADATA')
        self.add_requirements(metadata_path)

        # XXX intentionally a different path than the PEP.
        metadata_json_path = os.path.join(distinfo_path, 'metadata.json')
        pymeta = pkginfo_to_dict(metadata_path,
                                 distribution=self.distribution)

        if 'description' in pymeta:
            # The long description is split out into its own document.
            description_filename = 'DESCRIPTION.rst'
            description_text = pymeta.pop('description')
            description_path = os.path.join(distinfo_path,
                                            description_filename)
            with open(description_path, "wb") as description_file:
                description_file.write(description_text.encode('utf-8'))
            pymeta['extensions']['python.details']['document_names']['description'] = description_filename

        # XXX heuristically copy any LICENSE/LICENSE.txt?
        # FIX: renamed local so it no longer shadows the builtin 'license'.
        license_path = self.license_file()
        if license_path:
            license_filename = 'LICENSE.txt'
            shutil.copy(license_path, os.path.join(self.distinfo_dir, license_filename))
            pymeta['extensions']['python.details']['document_names']['license'] = license_filename

        with open(metadata_json_path, "w") as metadata_json:
            json.dump(pymeta, metadata_json, sort_keys=True)

        adios(egginfo_path)

    def write_record(self, bdist_dir, distinfo_dir):
        """Write the RECORD manifest: (archive path, sha256, size) per file."""
        from wheel.util import urlsafe_b64encode

        record_path = os.path.join(distinfo_dir, 'RECORD')
        record_relpath = os.path.relpath(record_path, bdist_dir)

        def walk():
            # Sorted traversal so RECORD contents are deterministic.
            for dir, dirs, files in os.walk(bdist_dir):
                dirs.sort()
                for f in sorted(files):
                    yield os.path.join(dir, f)

        def skip(path):
            """Wheel hashes every possible file."""
            return (path == record_relpath)

        with open_for_csv(record_path, 'w+') as record_file:
            writer = csv.writer(record_file)
            for path in walk():
                relpath = os.path.relpath(path, bdist_dir)
                if skip(relpath):
                    # RECORD cannot contain a hash of itself.
                    file_hash = ''
                    size = ''
                else:
                    with open(path, 'rb') as f:
                        data = f.read()
                    digest = hashlib.sha256(data).digest()
                    # FIX: renamed from 'hash' (shadowed the builtin).
                    file_hash = 'sha256=' + native(urlsafe_b64encode(digest))
                    size = len(data)
                # RECORD always uses forward slashes, on every platform.
                # FIX: renamed from 'record_path', which confusingly
                # rebound the outer variable holding the RECORD location.
                arcname = os.path.relpath(
                    path, bdist_dir).replace(os.path.sep, '/')
                writer.writerow((arcname, file_hash, size))
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/bdist_wheel.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/bdist_wheel.pyc
Normal file
Binary file not shown.
19
venv/lib/python2.7/site-packages/wheel/decorator.py
Normal file
19
venv/lib/python2.7/site-packages/wheel/decorator.py
Normal file
@@ -0,0 +1,19 @@
|
||||
# from Pyramid
|
||||
|
||||
|
||||
class reify(object):
    """Non-data descriptor decorator that caches a method's result.

    The first attribute access runs the wrapped method and stores its
    return value in the instance ``__dict__`` under the method's name;
    because this is a non-data descriptor, every later lookup finds the
    cached value directly and the decorator is effectively replaced by
    an instance variable.
    """

    def __init__(self, wrapped):
        self.wrapped = wrapped
        self.__doc__ = wrapped.__doc__

    def __get__(self, inst, objtype=None):
        # Class-level access returns the descriptor itself.
        if inst is None:
            return self
        value = self.wrapped(inst)
        setattr(inst, self.wrapped.__name__, value)
        return value
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/decorator.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/decorator.pyc
Normal file
Binary file not shown.
73
venv/lib/python2.7/site-packages/wheel/egg2wheel.py
Normal file
73
venv/lib/python2.7/site-packages/wheel/egg2wheel.py
Normal file
@@ -0,0 +1,73 @@
|
||||
#!/usr/bin/env python
|
||||
import os.path
|
||||
import re
|
||||
import sys
|
||||
import tempfile
|
||||
import zipfile
|
||||
import wheel.bdist_wheel
|
||||
import shutil
|
||||
import distutils.dist
|
||||
from distutils.archive_util import make_archive
|
||||
from argparse import ArgumentParser
|
||||
from glob import iglob
|
||||
|
||||
egg_info_re = re.compile(r'''(?P<name>.+?)-(?P<ver>.+?)
|
||||
(-(?P<pyver>.+?))?(-(?P<arch>.+?))?.egg''', re.VERBOSE)
|
||||
|
||||
def egg2wheel(egg_path, dest_dir):
    """Convert one .egg (zip file or installed directory) to a .whl.

    Name/version/pyver/arch are parsed from the egg filename, the
    contents are unpacked into a scratch directory, EGG-INFO is converted
    to .dist-info, and the tree is zipped into `dest_dir`.

    :param egg_path: path to a bdist_egg zip or a buildout-style egg dir
    :param dest_dir: directory the .whl is written into
    """
    egg_info = egg_info_re.match(os.path.basename(egg_path)).groupdict()
    # FIX: renamed from 'dir' (shadowed the builtin) and wrapped in
    # try/finally so the scratch directory is removed even on error.
    tmpdir = tempfile.mkdtemp(suffix="_e2w")
    try:
        if os.path.isfile(egg_path):
            # assume we have a bdist_egg otherwise
            egg = zipfile.ZipFile(egg_path)
            egg.extractall(tmpdir)
        else:
            # support buildout-style installed eggs directories
            for pth in os.listdir(egg_path):
                src = os.path.join(egg_path, pth)
                if os.path.isfile(src):
                    shutil.copy2(src, tmpdir)
                else:
                    shutil.copytree(src, os.path.join(tmpdir, pth))

        dist_info = "%s-%s" % (egg_info['name'], egg_info['ver'])
        abi = 'none'
        pyver = egg_info['pyver'].replace('.', '')
        arch = (egg_info['arch'] or 'any').replace('.', '_').replace('-', '_')
        if arch != 'any':
            # assume all binary eggs are for CPython
            pyver = 'cp' + pyver[2:]
        wheel_name = '-'.join((
            dist_info,
            pyver,
            abi,
            arch
        ))
        bw = wheel.bdist_wheel.bdist_wheel(distutils.dist.Distribution())
        # BUG FIX: write_wheelfile() reads self.root_is_pure; the original
        # set the nonexistent attribute 'root_is_purelib', so the WHEEL
        # file's Root-Is-Purelib header was always emitted as 'none'.
        bw.root_is_pure = egg_info['arch'] is None
        dist_info_dir = os.path.join(tmpdir, '%s.dist-info' % dist_info)
        bw.egg2dist(os.path.join(tmpdir, 'EGG-INFO'),
                    dist_info_dir)
        bw.write_wheelfile(dist_info_dir, generator='egg2wheel')
        bw.write_record(tmpdir, dist_info_dir)
        filename = make_archive(os.path.join(dest_dir, wheel_name), 'zip',
                                root_dir=tmpdir)
        os.rename(filename, filename[:-3] + 'whl')
    finally:
        shutil.rmtree(tmpdir)
|
||||
|
||||
def main():
    """CLI entry point: convert every egg matching the given glob patterns."""
    parser = ArgumentParser()
    parser.add_argument('eggs', nargs='*', help="Eggs to convert")
    parser.add_argument('--dest-dir', '-d', default=os.path.curdir,
                        help="Directory to store wheels (default %(default)s)")
    parser.add_argument('--verbose', '-v', action='store_true')
    args = parser.parse_args()
    # Each positional argument is a glob pattern; expand lazily.
    matches = (egg for pattern in args.eggs for egg in iglob(pattern))
    for egg in matches:
        if args.verbose:
            sys.stdout.write("{0}... ".format(egg))
        egg2wheel(egg, args.dest_dir)
        if args.verbose:
            sys.stdout.write("OK\n")


if __name__ == "__main__":
    main()
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/egg2wheel.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/egg2wheel.pyc
Normal file
Binary file not shown.
87
venv/lib/python2.7/site-packages/wheel/eggnames.txt
Normal file
87
venv/lib/python2.7/site-packages/wheel/eggnames.txt
Normal file
@@ -0,0 +1,87 @@
|
||||
vcard-0.7.8-py2.7.egg
|
||||
qtalchemy-0.7.1-py2.7.egg
|
||||
AMQPDeliver-0.1-py2.7.egg
|
||||
infi.registry-0.1.1-py2.7.egg
|
||||
infi.instruct-0.5.5-py2.7.egg
|
||||
infi.devicemanager-0.1.2-py2.7.egg
|
||||
TracTixSummary-1.0-py2.7.egg
|
||||
ToscaWidgets-0.9.12-py2.7.egg
|
||||
archipel_agent_iphone_notification-0.5.0beta-py2.7.egg
|
||||
archipel_agent_action_scheduler-0.5.0beta-py2.7.egg
|
||||
ao.social-1.0.2-py2.7.egg
|
||||
apgl-0.7-py2.7.egg
|
||||
satchmo_payment_payworld-0.1.1-py2.7.egg
|
||||
snmpsim-0.1.3-py2.7.egg
|
||||
sshim-0.2-py2.7.egg
|
||||
shove-0.3.4-py2.7.egg
|
||||
simpleavro-0.3.0-py2.7.egg
|
||||
wkhtmltopdf-0.2-py2.7.egg
|
||||
wokkel-0.7.0-py2.7.egg
|
||||
jmbo_social-0.0.6-py2.7.egg
|
||||
jmbo_post-0.0.6-py2.7.egg
|
||||
jcrack-0.0.2-py2.7.egg
|
||||
riak-1.4.0-py2.7.egg
|
||||
restclient-0.10.2-py2.7.egg
|
||||
Sutekh-0.8.1-py2.7.egg
|
||||
trayify-0.0.1-py2.7.egg
|
||||
tweepy-1.9-py2.7.egg
|
||||
topzootools-0.2.1-py2.7.egg
|
||||
haystack-0.16-py2.7.egg
|
||||
zope.interface-4.0.1-py2.7-win32.egg
|
||||
neuroshare-0.8.5-py2.7-macosx-10.7-intel.egg
|
||||
ndg_httpsclient-0.2.0-py2.7.egg
|
||||
libtele-0.3-py2.7.egg
|
||||
litex.cxpool-1.0.2-py2.7.egg
|
||||
obspy.iris-0.5.1-py2.7.egg
|
||||
obspy.mseed-0.6.1-py2.7-win32.egg
|
||||
obspy.core-0.6.2-py2.7.egg
|
||||
CorePost-0.0.3-py2.7.egg
|
||||
fnordstalk-0.0.3-py2.7.egg
|
||||
Persistence-2.13.2-py2.7-win32.egg
|
||||
Pydap-3.1.RC1-py2.7.egg
|
||||
PyExecJS-1.0.4-py2.7.egg
|
||||
Wally-0.7.2-py2.7.egg
|
||||
ExtensionClass-4.0a1-py2.7-win32.egg
|
||||
Feedjack-0.9.16-py2.7.egg
|
||||
Mars24-0.3.9-py2.7.egg
|
||||
HalWeb-0.6.0-py2.7.egg
|
||||
DARE-0.7.140-py2.7.egg
|
||||
macholib-1.3-py2.7.egg
|
||||
marrow.wsgi.egress.compression-1.1-py2.7.egg
|
||||
mcs-0.3.7-py2.7.egg
|
||||
Kook-0.6.0-py2.7.egg
|
||||
er-0.1-py2.7.egg
|
||||
evasion_director-1.1.4-py2.7.egg
|
||||
djquery-0.1a-py2.7.egg
|
||||
django_factory-0.7-py2.7.egg
|
||||
django_gizmo-0.0.3-py2.7.egg
|
||||
django_category-0.1-py2.7.egg
|
||||
dbwrap-0.3.2-py2.7.egg
|
||||
django_supergeneric-1.0-py2.7.egg
|
||||
django_dynamo-0.25-py2.7.egg
|
||||
django_acollabauth-0.1-py2.7.egg
|
||||
django_qrlink-0.1.0-py2.7.egg
|
||||
django_addons-0.6.6-py2.7.egg
|
||||
cover_grabber-1.1.2-py2.7.egg
|
||||
chem-1.1-py2.7.egg
|
||||
crud-0.1-py2.7.egg
|
||||
bongo-0.1-py2.7.egg
|
||||
bytecodehacks-April2000-py2.7.egg
|
||||
greenlet-0.3.4-py2.7-win32.egg
|
||||
ginvoke-0.3.1-py2.7.egg
|
||||
pyobjc_framework_ScriptingBridge-2.3-py2.7.egg
|
||||
pecan-0.2.0a-py2.7.egg
|
||||
pyress-0.2.0-py2.7.egg
|
||||
pyobjc_framework_PubSub-2.3-py2.7.egg
|
||||
pyobjc_framework_ExceptionHandling-2.3-py2.7.egg
|
||||
pywps-trunk-py2.7.egg
|
||||
pyobjc_framework_CFNetwork-2.3-py2.7-macosx-10.6-fat.egg
|
||||
py.saunter-0.40-py2.7.egg
|
||||
pyfnordmetric-0.0.1-py2.7.egg
|
||||
pyws-1.1.1-py2.7.egg
|
||||
prestapyt-0.4.0-py2.7.egg
|
||||
passlib-1.5.3-py2.7.egg
|
||||
pyga-2.1-py2.7.egg
|
||||
pygithub3-0.3-py2.7.egg
|
||||
pyobjc_framework_OpenDirectory-2.3-py2.7.egg
|
||||
yaposib-0.2.75-py2.7-linux-x86_64.egg
|
||||
480
venv/lib/python2.7/site-packages/wheel/install.py
Normal file
480
venv/lib/python2.7/site-packages/wheel/install.py
Normal file
@@ -0,0 +1,480 @@
|
||||
"""
|
||||
Operations on existing wheel files, including basic installation.
|
||||
"""
|
||||
# XXX see patched pip to install
|
||||
|
||||
import sys
|
||||
import warnings
|
||||
import os.path
|
||||
import re
|
||||
import zipfile
|
||||
import hashlib
|
||||
import csv
|
||||
|
||||
import shutil
|
||||
|
||||
try:
|
||||
_big_number = sys.maxsize
|
||||
except NameError:
|
||||
_big_number = sys.maxint
|
||||
|
||||
from wheel.decorator import reify
|
||||
from wheel.util import (urlsafe_b64encode, from_json, urlsafe_b64decode,
|
||||
native, binary, HashingFile)
|
||||
from wheel import signatures
|
||||
from wheel.pkginfo import read_pkg_info_bytes
|
||||
from wheel.util import open_for_csv
|
||||
|
||||
from .pep425tags import get_supported
|
||||
from .paths import get_install_paths
|
||||
|
||||
# The next major version after this version of the 'wheel' tool:
|
||||
VERSION_TOO_HIGH = (1, 0)
|
||||
|
||||
# Non-greedy matching of an optional build number may be too clever (more
|
||||
# invalid wheel filenames will match). Separate regex for .dist-info?
|
||||
WHEEL_INFO_RE = re.compile(
|
||||
r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>\d.+?))?)
|
||||
((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
|
||||
\.whl|\.dist-info)$""",
|
||||
re.VERBOSE).match
|
||||
|
||||
def parse_version(version):
    """Use parse_version from pkg_resources or distutils as available.

    The chosen implementation is memoized by rebinding this module-level
    name, so later calls dispatch straight to it.
    """
    global parse_version
    try:
        from pkg_resources import parse_version as _impl
    except ImportError:
        from distutils.version import LooseVersion as _impl
    parse_version = _impl
    return _impl(version)
|
||||
|
||||
class BadWheelFile(ValueError):
    """Raised when a wheel file or its filename is malformed."""
|
||||
|
||||
|
||||
class WheelFile(object):
|
||||
"""Parse wheel-specific attributes from a wheel (.whl) file and offer
|
||||
basic installation and verification support.
|
||||
|
||||
WheelFile can be used to simply parse a wheel filename by avoiding the
|
||||
methods that require the actual file contents."""
|
||||
|
||||
WHEEL_INFO = "WHEEL"
|
||||
RECORD = "RECORD"
|
||||
|
||||
def __init__(self,
|
||||
filename,
|
||||
fp=None,
|
||||
append=False,
|
||||
context=get_supported):
|
||||
"""
|
||||
:param fp: A seekable file-like object or None to open(filename).
|
||||
:param append: Open archive in append mode.
|
||||
:param context: Function returning list of supported tags. Wheels
|
||||
must have the same context to be sortable.
|
||||
"""
|
||||
self.filename = filename
|
||||
self.fp = fp
|
||||
self.append = append
|
||||
self.context = context
|
||||
basename = os.path.basename(filename)
|
||||
self.parsed_filename = WHEEL_INFO_RE(basename)
|
||||
if not basename.endswith('.whl') or self.parsed_filename is None:
|
||||
raise BadWheelFile("Bad filename '%s'" % filename)
|
||||
|
||||
def __repr__(self):
|
||||
return self.filename
|
||||
|
||||
@property
|
||||
def distinfo_name(self):
|
||||
return "%s.dist-info" % self.parsed_filename.group('namever')
|
||||
|
||||
@property
|
||||
def datadir_name(self):
|
||||
return "%s.data" % self.parsed_filename.group('namever')
|
||||
|
||||
@property
|
||||
def record_name(self):
|
||||
return "%s/%s" % (self.distinfo_name, self.RECORD)
|
||||
|
||||
@property
|
||||
def wheelinfo_name(self):
|
||||
return "%s/%s" % (self.distinfo_name, self.WHEEL_INFO)
|
||||
|
||||
@property
|
||||
def tags(self):
|
||||
"""A wheel file is compatible with the Cartesian product of the
|
||||
period-delimited tags in its filename.
|
||||
To choose a wheel file among several candidates having the same
|
||||
distribution version 'ver', an installer ranks each triple of
|
||||
(pyver, abi, plat) that its Python installation can run, sorting
|
||||
the wheels by the best-ranked tag it supports and then by their
|
||||
arity which is just len(list(compatibility_tags)).
|
||||
"""
|
||||
tags = self.parsed_filename.groupdict()
|
||||
for pyver in tags['pyver'].split('.'):
|
||||
for abi in tags['abi'].split('.'):
|
||||
for plat in tags['plat'].split('.'):
|
||||
yield (pyver, abi, plat)
|
||||
|
||||
compatibility_tags = tags
|
||||
|
||||
@property
def arity(self):
    """The number of compatibility tags the wheel declares."""
    return sum(1 for _ in self.compatibility_tags)
|
||||
|
||||
@property
def rank(self):
    """
    Lowest index of any of this wheel's tags in self.context(), and the
    arity e.g. (0, 1)
    """
    supported = self.context()
    return self.compatibility_rank(supported)
|
||||
|
||||
@property
def compatible(self):
    """True when at least one of the wheel's tags is supported."""
    best_index, _ = self.rank
    return best_index != _big_number  # bad API!
|
||||
|
||||
# deprecated:
|
||||
def compatibility_rank(self, supported):
    """Rank the wheel against the supported tags. Smaller ranks are more
    compatible!

    :param supported: A list of compatibility tags that the current
        Python implementation can run.
    :return: ``(best_index, arity)``; ``(_big_number, 0)`` when none of
        this wheel's tags appear in `supported`.
    """
    preferences = []
    for tag in self.compatibility_tags:
        try:
            preferences.append(supported.index(tag))
        except ValueError:
            # Tag not present in the supported list; ignore it.
            pass
    # Truthiness instead of the original non-idiomatic `if len(preferences):`
    if preferences:
        return (min(preferences), self.arity)
    return (_big_number, 0)
|
||||
|
||||
# deprecated
def supports_current_python(self, x):
    """Deprecated compatibility shim; `x` must be this wheel's context."""
    assert x == self.context, 'context mismatch'
    return self.compatible
|
||||
|
||||
# Comparability.
# Wheels are equal if they refer to the same file.
# If two wheels are not equal, compare based on (in this order):
#   1. Name
#   2. Version
#   3. Compatibility rank
#   4. Filename (as a tiebreaker)
@property
def _sort_key(self):
    """Tuple used by the rich-comparison methods."""
    name = self.parsed_filename.group('name')
    version = parse_version(self.parsed_filename.group('ver'))
    # Negate the rank components: smaller ranks mean *more* compatible,
    # so they must sort later (i.e. "greater") under ascending order.
    inverted_rank = tuple(-x for x in self.rank)
    return (name, version, inverted_rank, self.filename)
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.filename == other.filename
|
||||
|
||||
def __ne__(self, other):
|
||||
return self.filename != other.filename
|
||||
|
||||
def __lt__(self, other):
|
||||
if self.context != other.context:
|
||||
raise TypeError("{0}.context != {1}.context".format(self, other))
|
||||
|
||||
return self._sort_key < other._sort_key
|
||||
|
||||
# XXX prune
|
||||
|
||||
sn = self.parsed_filename.group('name')
|
||||
on = other.parsed_filename.group('name')
|
||||
if sn != on:
|
||||
return sn < on
|
||||
sv = parse_version(self.parsed_filename.group('ver'))
|
||||
ov = parse_version(other.parsed_filename.group('ver'))
|
||||
if sv != ov:
|
||||
return sv < ov
|
||||
# Compatibility
|
||||
if self.context != other.context:
|
||||
raise TypeError("{0}.context != {1}.context".format(self, other))
|
||||
sc = self.rank
|
||||
oc = other.rank
|
||||
if sc != None and oc != None and sc != oc:
|
||||
# Smaller compatibility ranks are "better" than larger ones,
|
||||
# so we have to reverse the sense of the comparison here!
|
||||
return sc > oc
|
||||
elif sc == None and oc != None:
|
||||
return False
|
||||
return self.filename < other.filename
|
||||
|
||||
def __gt__(self, other):
|
||||
return other < self
|
||||
|
||||
def __le__(self, other):
|
||||
return self == other or self < other
|
||||
|
||||
def __ge__(self, other):
|
||||
return self == other or other < self
|
||||
|
||||
#
|
||||
# Methods using the file's contents:
|
||||
#
|
||||
|
||||
@reify
def zipfile(self):
    """Lazily open (and, unless appending, verify) the underlying archive."""
    mode = "a" if self.append else "r"
    archive = VerifyingZipFile(self.fp if self.fp else self.filename, mode)
    if not self.append:
        # Read-only archives get their RECORD hashes registered up front.
        self.verify(archive)
    return archive
|
||||
|
||||
@reify
def parsed_wheel_info(self):
    """Parse wheel metadata (the .data/WHEEL file)"""
    raw_wheel_metadata = self.zipfile.read(self.wheelinfo_name)
    return read_pkg_info_bytes(raw_wheel_metadata)
|
||||
|
||||
def check_version(self):
    """Raise ValueError when the archive declares a Wheel-Version this
    implementation does not support."""
    version = self.parsed_wheel_info['Wheel-Version']
    parsed = tuple(int(part) for part in version.split('.'))
    if parsed >= VERSION_TOO_HIGH:
        raise ValueError("Wheel version is too high")
|
||||
|
||||
@reify
def install_paths(self):
    """
    Consult distutils to get the install paths for our dist. A dict with
    ('purelib', 'platlib', 'headers', 'scripts', 'data').

    We use the name from our filename as the dist name, which means headers
    could be installed in the wrong place if the filesystem-escaped name
    is different than the Name. Who cares?
    """
    return get_install_paths(self.parsed_filename.group('name'))
|
||||
|
||||
def install(self, force=False, overrides=None):
    """
    Install the wheel into site-packages.

    :param force: Overwrite existing files instead of raising ValueError.
    :param overrides: Optional mapping overriding the distutils install
        paths ('purelib', 'platlib', 'headers', 'scripts', 'data').
    """
    # Avoid the mutable-default-argument pitfall (was `overrides={}`);
    # callers passing nothing still get an empty mapping.
    if overrides is None:
        overrides = {}

    # Utility to get the target directory for a particular key
    def get_path(key):
        return overrides.get(key) or self.install_paths[key]

    # The base target location is either purelib or platlib
    if self.parsed_wheel_info['Root-Is-Purelib'] == 'true':
        root = get_path('purelib')
    else:
        root = get_path('platlib')

    # Parse all the names in the archive
    name_trans = {}
    for info in self.zipfile.infolist():
        name = info.filename
        # Zip files can contain entries representing directories.
        # These end in a '/'.
        # We ignore these, as we create directories on demand.
        if name.endswith('/'):
            continue

        # Pathnames in a zipfile namelist are always /-separated.
        # In theory, paths could start with ./ or have other oddities
        # but this won't happen in practical cases of well-formed wheels.
        # We'll cover the simple case of an initial './' as it's both easy
        # to do and more common than most other oddities.
        if name.startswith('./'):
            name = name[2:]

        # Split off the base directory to identify files that are to be
        # installed in non-root locations
        basedir, sep, filename = name.partition('/')
        if sep and basedir == self.datadir_name:
            # Data file. Target destination is elsewhere
            key, sep, filename = filename.partition('/')
            if not sep:
                raise ValueError("Invalid filename in wheel: {0}".format(name))
            target = get_path(key)
        else:
            # Normal file. Target destination is root
            key = ''
            target = root
            filename = name

        # Map the actual filename from the zipfile to its intended target
        # directory and the pathname relative to that directory.
        dest = os.path.normpath(os.path.join(target, filename))
        name_trans[info] = (key, target, filename, dest)

    # We're now ready to start processing the actual install. The process
    # is as follows:
    #     1. Prechecks - is the wheel valid, is its declared architecture
    #        OK, etc. [[Responsibility of the caller]]
    #     2. Overwrite check - do any of the files to be installed already
    #        exist?
    #     3. Actual install - put the files in their target locations.
    #     4. Update RECORD - write a suitably modified RECORD file to
    #        reflect the actual installed paths.

    if not force:
        for info, v in name_trans.items():
            k = info.filename
            key, target, filename, dest = v
            if os.path.exists(dest):
                raise ValueError("Wheel file {0} would overwrite {1}. Use force if this is intended".format(k, dest))

    # Get the name of our executable, for use when replacing script
    # wrapper hashbang lines.
    # We encode it using getfilesystemencoding, as that is "the name of
    # the encoding used to convert Unicode filenames into system file
    # names".
    exename = sys.executable.encode(sys.getfilesystemencoding())
    record_data = []
    record_name = self.distinfo_name + '/RECORD'
    for info, (key, target, filename, dest) in name_trans.items():
        name = info.filename
        # Skip the RECORD file. Checked BEFORE opening the member so the
        # handle is never leaked (the original opened first, then skipped).
        if name == record_name:
            continue
        source = self.zipfile.open(info)
        ddir = os.path.dirname(dest)
        if not os.path.isdir(ddir):
            os.makedirs(ddir)
        destination = HashingFile(open(dest, 'wb'))
        if key == 'scripts':
            # Rewrite the generic '#!python' hashbang to this interpreter.
            hashbang = source.readline()
            if hashbang.startswith(b'#!python'):
                hashbang = b'#!' + exename + binary(os.linesep)
            destination.write(hashbang)
        shutil.copyfileobj(source, destination)
        reldest = os.path.relpath(dest, root)
        # BUGFIX: str.replace returns a new string; the original discarded
        # the result, leaving os.sep in RECORD paths on Windows.
        reldest = reldest.replace(os.sep, '/')
        record_data.append((reldest, destination.digest(), destination.length))
        destination.close()
        source.close()
        # preserve attributes (especially +x bit for scripts)
        attrs = info.external_attr >> 16
        if attrs:  # tends to be 0 if Windows.
            os.chmod(dest, attrs)

    record_name = os.path.join(root, self.record_name)
    writer = csv.writer(open_for_csv(record_name, 'w+'))
    for reldest, digest, length in sorted(record_data):
        writer.writerow((reldest, digest, length))
    writer.writerow((self.record_name, '', ''))
|
||||
|
||||
def verify(self, zipfile=None):
    """Configure the VerifyingZipFile `zipfile` by verifying its signature
    and setting expected hashes for every hash in RECORD.
    Caller must complete the verification process by completely reading
    every file in the archive (e.g. with extractall)."""
    sig = None
    if zipfile is None:
        zipfile = self.zipfile
    zipfile.strict = True

    record_name = '/'.join((self.distinfo_name, 'RECORD'))
    sig_name = '/'.join((self.distinfo_name, 'RECORD.jws'))
    # tolerate s/mime signatures:
    smime_sig_name = '/'.join((self.distinfo_name, 'RECORD.p7s'))
    # RECORD and its signature files cannot carry their own hashes, so
    # register them as "don't care" before strict mode can reject them.
    zipfile.set_expected_hash(record_name, None)
    zipfile.set_expected_hash(sig_name, None)
    zipfile.set_expected_hash(smime_sig_name, None)
    record = zipfile.read(record_name)

    record_digest = urlsafe_b64encode(hashlib.sha256(record).digest())
    try:
        sig = from_json(native(zipfile.read(sig_name)))
    except KeyError:  # no signature
        pass
    if sig:
        # The JWS payload must claim exactly the RECORD hash computed above.
        headers, payload = signatures.verify(sig)
        if payload['hash'] != "sha256=" + native(record_digest):
            msg = "RECORD.sig claimed RECORD hash {0} != computed hash {1}."
            raise BadWheelFile(msg.format(payload['hash'],
                                          native(record_digest)))

    # Register each RECORD row's sha256 as the expected hash for that member.
    reader = csv.reader((native(r) for r in record.splitlines()))

    for row in reader:
        filename = row[0]
        hash = row[1]
        if not hash:
            # Only RECORD and its signature may legitimately lack a hash.
            if filename not in (record_name, sig_name):
                sys.stderr.write("%s has no hash!\n" % filename)
            continue
        algo, data = row[1].split('=', 1)
        assert algo == "sha256", "Unsupported hash algorithm"
        zipfile.set_expected_hash(filename, urlsafe_b64decode(binary(data)))
|
||||
|
||||
|
||||
class VerifyingZipFile(zipfile.ZipFile):
    """ZipFile that can assert that each of its extracted contents matches
    an expected sha256 hash. Note that each file must be completely read in
    order for its hash to be checked."""

    def __init__(self, file, mode="r",
                 compression=zipfile.ZIP_STORED,
                 allowZip64=False):
        zipfile.ZipFile.__init__(self, file, mode, compression, allowZip64)

        # When strict, members without a registered expected hash are refused.
        self.strict = False
        # Maps member name -> expected digest bytes (None means "don't care").
        self._expected_hashes = {}
        self._hash_algorithm = hashlib.sha256

    def set_expected_hash(self, name, hash):
        """
        :param name: name of zip entry
        :param hash: bytes of hash (or None for "don't care")
        """
        self._expected_hashes[name] = hash

    def open(self, name_or_info, mode="r", pwd=None):
        """Return file-like object for 'name'."""
        # A non-monkey-patched version would contain most of zipfile.py
        ef = zipfile.ZipFile.open(self, name_or_info, mode, pwd)
        if isinstance(name_or_info, zipfile.ZipInfo):
            name = name_or_info.filename
        else:
            name = name_or_info
        # `is not None` instead of the original non-idiomatic `!= None`.
        if (name in self._expected_hashes
                and self._expected_hashes[name] is not None):
            expected_hash = self._expected_hashes[name]
            try:
                _update_crc_orig = ef._update_crc
            except AttributeError:
                warnings.warn('Need ZipExtFile._update_crc to implement '
                              'file hash verification (in Python >= 2.7)')
                return ef
            running_hash = self._hash_algorithm()
            if hasattr(ef, '_eof'):  # py33
                # Piggy-back on the CRC callback so the hash is fed every
                # chunk and checked exactly once, at end of stream.
                def _update_crc(data):
                    _update_crc_orig(data)
                    running_hash.update(data)
                    if ef._eof and running_hash.digest() != expected_hash:
                        raise BadWheelFile("Bad hash for file %r" % ef.name)
            else:
                def _update_crc(data, eof=None):
                    _update_crc_orig(data, eof=eof)
                    running_hash.update(data)
                    if eof and running_hash.digest() != expected_hash:
                        raise BadWheelFile("Bad hash for file %r" % ef.name)
            ef._update_crc = _update_crc
        elif self.strict and name not in self._expected_hashes:
            raise BadWheelFile("No expected hash for file %r" % ef.name)
        return ef

    def pop(self):
        """Truncate the last file off this zipfile.
        Assumes infolist() is in the same order as the files (true for
        ordinary zip files created by Python)"""
        if not self.fp:
            raise RuntimeError(
                "Attempt to pop from ZIP archive that was already closed")
        last = self.infolist().pop()
        del self.NameToInfo[last.filename]
        # Rewind to where the popped member's local header began and cut
        # the archive off there.
        self.fp.seek(last.header_offset, os.SEEK_SET)
        self.fp.truncate()
        self._didModify = True
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/install.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/install.pyc
Normal file
Binary file not shown.
317
venv/lib/python2.7/site-packages/wheel/metadata.py
Normal file
317
venv/lib/python2.7/site-packages/wheel/metadata.py
Normal file
@@ -0,0 +1,317 @@
|
||||
"""
|
||||
Tools for converting old- to new-style metadata.
|
||||
"""
|
||||
|
||||
from collections import namedtuple
|
||||
from .pkginfo import read_pkg_info
|
||||
from .util import OrderedDefaultDict
|
||||
try:
|
||||
from collections import OrderedDict
|
||||
except ImportError:
|
||||
OrderedDict = dict
|
||||
|
||||
import re
|
||||
import os.path
|
||||
import textwrap
|
||||
import pkg_resources
|
||||
import email.parser
|
||||
import wheel
|
||||
|
||||
METADATA_VERSION = "2.0"

# PKG-INFO fields that may repeat, mapped to their plural metadata keys.
PLURAL_FIELDS = { "classifier" : "classifiers",
                  "provides_dist" : "provides",
                  "provides_extra" : "extras" }

SKIP_FIELDS = set()

# (field-name mapping, role) pairs used to fold author/maintainer fields
# into "contacts" entries.
CONTACT_FIELDS = (({"email":"author_email", "name": "author"},
                   "author"),
                  ({"email":"maintainer_email", "name": "maintainer"},
                   "maintainer"))

# commonly filled out as "UNKNOWN" by distutils:
UNKNOWN_FIELDS = set(("author", "author_email", "platform", "home_page",
                      "license"))

# Wheel itself is probably the only program that uses non-extras markers
# in METADATA/PKG-INFO. Support its syntax with the extra at the end only.
# Raw string (was a plain triple-quoted string): '\s' is an invalid string
# escape and raises SyntaxWarning/SyntaxError on modern Python.
EXTRA_RE = re.compile(r"""^(?P<package>.*?)(;\s*(?P<condition>.*?)(extra == '(?P<extra>.*?)')?)$""")
# Splits keywords on control characters, spaces, and commas (\0 through ',').
KEYWORDS_RE = re.compile("[\0-,]+")

# Grouping key for requirements: (environment condition, extra name).
MayRequiresKey = namedtuple('MayRequiresKey', ('condition', 'extra'))
|
||||
|
||||
def unique(iterable):
    """
    Yield unique values in iterable, preserving order.
    """
    seen = set()
    for value in iterable:
        # `value not in seen` instead of the non-idiomatic `not value in seen`
        if value not in seen:
            seen.add(value)
            yield value
|
||||
|
||||
|
||||
def handle_requires(metadata, pkg_info, key):
    """
    Place the runtime requirements from pkg_info into metadata.

    Requirements are grouped by their (environment condition, extra) pair;
    each group becomes one entry in metadata['run_requires'].
    """
    may_requires = OrderedDefaultDict(list)
    for value in sorted(pkg_info.get_all(key)):
        extra_match = EXTRA_RE.search(value)
        if extra_match:
            groupdict = extra_match.groupdict()
            condition = groupdict['condition']
            extra = groupdict['extra']
            package = groupdict['package']
            if condition.endswith(' and '):
                # Drop the trailing " and " left over once the extra marker
                # has been split off the condition.
                condition = condition[:-5]
        else:
            condition, extra = None, None
            package = value
        # NOTE: rebinds the `key` parameter as the grouping key from here on.
        key = MayRequiresKey(condition, extra)
        may_requires[key].append(package)

    if may_requires:
        metadata['run_requires'] = []
        def sort_key(item):
            # Both condition and extra could be None, which can't be compared
            # against strings in Python 3.
            key, value = item
            if key.condition is None:
                return ''
            return key.condition
        for key, value in sorted(may_requires.items(), key=sort_key):
            may_requirement = OrderedDict((('requires', value),))
            if key.extra:
                may_requirement['extra'] = key.extra
            if key.condition:
                may_requirement['environment'] = key.condition
            metadata['run_requires'].append(may_requirement)

        # Record every extra name seen among the grouping keys.
        if not 'extras' in metadata:
            metadata['extras'] = []
        metadata['extras'].extend([key.extra for key in may_requires.keys() if key.extra])
|
||||
|
||||
|
||||
def pkginfo_to_dict(path, distribution=None):
    """
    Convert PKG-INFO to a prototype Metadata 2.0 (PEP 426) dict.

    The description is included under the key ['description'] rather than
    being written to a separate file.

    path: path to PKG-INFO file
    distribution: optional distutils Distribution()
    """

    # Three-level nested ordered dict so extension keys can be assigned
    # without pre-creating intermediate levels.
    metadata = OrderedDefaultDict(lambda: OrderedDefaultDict(lambda: OrderedDefaultDict(OrderedDict)))
    metadata["generator"] = "bdist_wheel (" + wheel.__version__ + ")"
    try:
        # `unicode` only exists on Python 2; NameError selects the py3 path.
        unicode
        pkg_info = read_pkg_info(path)
    except NameError:
        pkg_info = email.parser.Parser().parsestr(open(path, 'rb').read().decode('utf-8'))
    description = None

    if pkg_info['Summary']:
        metadata['summary'] = pkginfo_unicode(pkg_info, 'Summary')
        del pkg_info['Summary']

    if pkg_info['Description']:
        description = dedent_description(pkg_info)
        del pkg_info['Description']
    else:
        # No Description header; fall back to the message body.
        payload = pkg_info.get_payload()
        if isinstance(payload, bytes):
            # Avoid a Python 2 Unicode error.
            # We still suffer ? glyphs on Python 3.
            payload = payload.decode('utf-8')
        if payload:
            description = payload

    if description:
        pkg_info['description'] = description

    # Fold every remaining header into the metadata dict, dispatching on
    # the lower-cased, underscored field name.
    for key in sorted(unique(k.lower() for k in pkg_info.keys())):
        low_key = key.replace('-', '_')

        if low_key in SKIP_FIELDS:
            continue

        if low_key in UNKNOWN_FIELDS and pkg_info.get(key) == 'UNKNOWN':
            continue

        if low_key in sorted(PLURAL_FIELDS):
            metadata[PLURAL_FIELDS[low_key]] = pkg_info.get_all(key)

        elif low_key == "requires_dist":
            handle_requires(metadata, pkg_info, key)

        elif low_key == 'provides_extra':
            if not 'extras' in metadata:
                metadata['extras'] = []
            metadata['extras'].extend(pkg_info.get_all(key))

        elif low_key == 'home_page':
            metadata['extensions']['python.details']['project_urls'] = {'Home':pkg_info[key]}

        elif low_key == 'keywords':
            metadata['keywords'] = KEYWORDS_RE.split(pkg_info[key])

        else:
            metadata[low_key] = pkg_info[key]

    metadata['metadata_version'] = METADATA_VERSION

    if 'extras' in metadata:
        metadata['extras'] = sorted(set(metadata['extras']))

    # include more information if distribution is available
    if distribution:
        for requires, attr in (('test_requires', 'tests_require'),):
            try:
                requirements = getattr(distribution, attr)
                if isinstance(requirements, list):
                    new_requirements = sorted(convert_requirements(requirements))
                    metadata[requires] = [{'requires':new_requirements}]
            except AttributeError:
                pass

    # handle contacts: pop author/maintainer fields into contact entries.
    contacts = []
    for contact_type, role in CONTACT_FIELDS:
        contact = OrderedDict()
        for key in sorted(contact_type):
            if contact_type[key] in metadata:
                contact[key] = metadata.pop(contact_type[key])
        if contact:
            contact['role'] = role
            contacts.append(contact)
    if contacts:
        metadata['extensions']['python.details']['contacts'] = contacts

    # convert entry points to exports
    try:
        with open(os.path.join(os.path.dirname(path), "entry_points.txt"), "r") as ep_file:
            ep_map = pkg_resources.EntryPoint.parse_map(ep_file.read())
        exports = OrderedDict()
        for group, items in sorted(ep_map.items()):
            exports[group] = OrderedDict()
            for item in sorted(map(str, items.values())):
                name, export = item.split(' = ', 1)
                exports[group][name] = export
        if exports:
            metadata['extensions']['python.exports'] = exports
    except IOError:
        # entry_points.txt is optional.
        pass

    # copy console_scripts entry points to commands
    if 'python.exports' in metadata['extensions']:
        for (ep_script, wrap_script) in (('console_scripts', 'wrap_console'),
                                         ('gui_scripts', 'wrap_gui')):
            if ep_script in metadata['extensions']['python.exports']:
                metadata['extensions']['python.commands'][wrap_script] = \
                    metadata['extensions']['python.exports'][ep_script]

    return metadata
|
||||
|
||||
def requires_to_requires_dist(requirement):
    """Compose the version predicates for requirement in PEP 345 fashion."""
    predicates = [op + ver for op, ver in requirement.specs]
    if not predicates:
        return ''
    return " (%s)" % ','.join(predicates)
|
||||
|
||||
def convert_requirements(requirements):
    """Yield Requires-Dist: strings for parsed requirements strings."""
    for req in requirements:
        parsed = pkg_resources.Requirement.parse(req)
        version_spec = requires_to_requires_dist(parsed)
        extras = ",".join(parsed.extras)
        if extras:
            extras = "[%s]" % extras
        yield parsed.project_name + extras + version_spec
|
||||
|
||||
def pkginfo_to_metadata(egg_info_path, pkginfo_path):
    """
    Convert .egg-info directory with PKG-INFO to the Metadata 1.3 aka
    old-draft Metadata 2.0 format.

    Returns the mutated email Message; requires.txt sections become
    Requires-Dist / Provides-Extra headers.
    """
    pkg_info = read_pkg_info(pkginfo_path)
    pkg_info.replace_header('Metadata-Version', '2.0')
    requires_path = os.path.join(egg_info_path, 'requires.txt')
    if os.path.exists(requires_path):
        requires = open(requires_path).read()
        # Sections are sorted by extra name (None sorts first as '').
        for extra, reqs in sorted(pkg_resources.split_sections(requires),
                                  key=lambda x: x[0] or ''):
            condition = ''
            if extra and ':' in extra:  # setuptools extra:condition syntax
                extra, condition = extra.split(':', 1)
            if extra:
                pkg_info['Provides-Extra'] = extra
                if condition:
                    condition += " and "
                condition += 'extra == %s' % repr(extra)
            if condition:
                condition = '; ' + condition
            for new_req in sorted(convert_requirements(reqs)):
                pkg_info['Requires-Dist'] = new_req + condition

    # Move the Description header into the message payload.
    description = pkg_info['Description']
    if description:
        pkg_info.set_payload(dedent_description(pkg_info))
        del pkg_info['Description']

    return pkg_info
|
||||
|
||||
|
||||
def pkginfo_unicode(pkg_info, field):
    """Hack to coax Unicode out of an email Message() - Python 3.3+"""
    value = pkg_info[field]
    if isinstance(value, str):
        return value
    if not hasattr(pkg_info, 'raw_items'):  # Python 3.2
        return str(value)
    wanted = field.lower()
    for name, raw in pkg_info.raw_items():
        if name.lower() == wanted:
            # Round-trip the surrogate-escaped header bytes through UTF-8.
            value = raw.encode('ascii', 'surrogateescape').decode('utf-8')
            break
    return value
|
||||
|
||||
|
||||
def dedent_description(pkg_info):
    """
    Dedent and convert pkg_info['Description'] to Unicode.
    """
    description = pkg_info['Description']

    # Python 3 Unicode handling, sorta.
    surrogates = not isinstance(description, str)
    if surrogates:
        description = pkginfo_unicode(pkg_info, 'Description')

    lines = description.splitlines()
    # The first line may carry the header's own indentation, so it is
    # lstripped separately; the remaining lines are dedented together.
    body = textwrap.dedent('\n'.join(lines[1:]))
    result = '\n'.join((lines[0].lstrip(), body, '\n'))

    if surrogates:
        result = result.encode("utf8").decode("ascii", "surrogateescape")

    return result
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Manual smoke test: dump the converted metadata for a PKG-INFO path.
    import sys
    import pprint
    pprint.pprint(pkginfo_to_dict(sys.argv[1]))
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/metadata.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/metadata.pyc
Normal file
Binary file not shown.
41
venv/lib/python2.7/site-packages/wheel/paths.py
Normal file
41
venv/lib/python2.7/site-packages/wheel/paths.py
Normal file
@@ -0,0 +1,41 @@
|
||||
"""
|
||||
Installation paths.
|
||||
|
||||
Map the .data/ subdirectory names to install paths.
|
||||
"""
|
||||
|
||||
import os.path
|
||||
import sys
|
||||
import distutils.dist as dist
|
||||
import distutils.command.install as install
|
||||
|
||||
def get_install_command(name):
    """Return a finalized distutils ``install`` command for dist `name`."""
    # late binding due to potential monkeypatching
    distribution = dist.Distribution({'name': name})
    command = install.install(distribution)
    command.finalize_options()
    return command
|
||||
|
||||
def get_install_paths(name):
    """
    Return the (distutils) install paths for the named dist.

    A dict with ('purelib', 'platlib', 'headers', 'scripts', 'data') keys.
    """
    paths = {}

    i = get_install_command(name)

    for key in install.SCHEME_KEYS:
        paths[key] = getattr(i, 'install_' + key)

    # pip uses a similar path as an alternative to the system's (read-only)
    # include directory:
    if hasattr(sys, 'real_prefix'):  # virtualenv
        # BUGFIX: was 'python' + sys.version[:3], which truncates "3.10"
        # to "3.1"; build the X.Y component from version_info instead.
        paths['headers'] = os.path.join(sys.prefix,
                                        'include',
                                        'site',
                                        'python%d.%d' % sys.version_info[:2],
                                        name)

    return paths
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/paths.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/paths.pyc
Normal file
Binary file not shown.
169
venv/lib/python2.7/site-packages/wheel/pep425tags.py
Normal file
169
venv/lib/python2.7/site-packages/wheel/pep425tags.py
Normal file
@@ -0,0 +1,169 @@
|
||||
"""Generate and work with PEP 425 Compatibility Tags."""
|
||||
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
try:
|
||||
import sysconfig
|
||||
except ImportError: # pragma nocover
|
||||
# Python < 2.7
|
||||
import distutils.sysconfig as sysconfig
|
||||
import distutils.util
|
||||
|
||||
|
||||
def get_config_var(var):
    """Return sysconfig.get_config_var(var), warning and returning None
    when the lookup itself fails."""
    try:
        value = sysconfig.get_config_var(var)
    except IOError as e:  # pip Issue #1074
        warnings.warn("{0}".format(e), RuntimeWarning)
        return None
    return value
|
||||
|
||||
|
||||
def get_abbr_impl():
    """Return abbreviated implementation name."""
    if hasattr(sys, 'pypy_version_info'):
        return 'pp'
    if sys.platform.startswith('java'):
        return 'jy'
    if sys.platform == 'cli':
        return 'ip'
    return 'cp'
|
||||
|
||||
|
||||
def get_impl_ver():
    """Return implementation version."""
    impl_ver = get_config_var("py_version_nodot")
    # PyPy always derives its version from version_info; CPython only
    # falls back when the config var is missing.
    if impl_ver and get_abbr_impl() != 'pp':
        return impl_ver
    return ''.join(map(str, get_impl_version_info()))
|
||||
|
||||
|
||||
def get_impl_version_info():
    """Return sys.version_info-like tuple for use in decrementing the minor
    version."""
    if get_abbr_impl() != 'pp':
        return sys.version_info[0], sys.version_info[1]
    # as per https://github.com/pypa/pip/issues/2882
    return (sys.version_info[0], sys.pypy_version_info.major,
            sys.pypy_version_info.minor)
|
||||
|
||||
|
||||
def get_flag(var, fallback, expected=True, warn=True):
    """Use a fallback method for determining SOABI flags if the needed config
    var is unset or unavailable."""
    val = get_config_var(var)
    if val is not None:
        return val == expected
    if warn:
        warnings.warn("Config variable '{0}' is unset, Python ABI tag may "
                      "be incorrect".format(var), RuntimeWarning, 2)
    return fallback()
|
||||
|
||||
|
||||
def get_abi_tag():
    """Return the ABI tag based on SOABI (if available) or emulate SOABI
    (CPython 2, PyPy)."""
    soabi = get_config_var('SOABI')
    impl = get_abbr_impl()
    if not soabi and impl in ('cp', 'pp') and hasattr(sys, 'maxunicode'):
        # No SOABI config var: reconstruct the debug/pymalloc/wide-unicode
        # flag suffixes from other config vars, e.g. 'cp27dmu'.
        d = ''
        m = ''
        u = ''
        if get_flag('Py_DEBUG',
                    lambda: hasattr(sys, 'gettotalrefcount'),
                    warn=(impl == 'cp')):
            d = 'd'
        if get_flag('WITH_PYMALLOC',
                    lambda: impl == 'cp',
                    warn=(impl == 'cp')):
            m = 'm'
        # The 'u' (wide unicode) flag only exists before Python 3.3.
        if get_flag('Py_UNICODE_SIZE',
                    lambda: sys.maxunicode == 0x10ffff,
                    expected=4,
                    warn=(impl == 'cp' and
                          sys.version_info < (3, 3))) \
                and sys.version_info < (3, 3):
            u = 'u'
        abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
    elif soabi and soabi.startswith('cpython-'):
        # e.g. SOABI 'cpython-35m-x86_64-linux-gnu' -> 'cp35m'
        abi = 'cp' + soabi.split('-')[1]
    elif soabi:
        # Other implementations: sanitize the SOABI string into a tag.
        abi = soabi.replace('.', '_').replace('-', '_')
    else:
        abi = None
    return abi
|
||||
|
||||
|
||||
def get_platform():
    """Return our platform name 'win32', 'linux_x86_64'"""
    # XXX remove distutils dependency
    raw = distutils.util.get_platform()
    return raw.replace('.', '_').replace('-', '_')
|
||||
|
||||
|
||||
def get_supported(versions=None, supplied_platform=None):
    """Return a list of supported tags for each version specified in
    `versions`.

    :param versions: a list of string versions, of the form ["33", "32"],
        or None. The first version will be assumed to support our ABI.
    :param supplied_platform: optional extra platform tag, ranked ahead of
        the detected platform.
    """
    supported = []

    # Versions must be given with respect to the preference
    if versions is None:
        versions = []
        version_info = get_impl_version_info()
        major = version_info[:-1]
        # Support all previous minor Python versions.
        for minor in range(version_info[-1], -1, -1):
            versions.append(''.join(map(str, major + (minor,))))

    impl = get_abbr_impl()

    abis = []

    abi = get_abi_tag()
    if abi:
        # Our own ABI ranks first.
        abis[0:0] = [abi]

    # Collect stable-ABI suffixes (e.g. 'abi3') from the import machinery.
    abi3s = set()
    import imp
    for suffix in imp.get_suffixes():
        if suffix[0].startswith('.abi'):
            abi3s.add(suffix[0].split('.', 2)[1])

    abis.extend(sorted(list(abi3s)))

    abis.append('none')

    platforms = []
    if supplied_platform:
        platforms.append(supplied_platform)
    platforms.append(get_platform())

    # Current version, current API (built specifically for our Python):
    for abi in abis:
        for arch in platforms:
            supported.append(('%s%s' % (impl, versions[0]), abi, arch))

    # No abi / arch, but requires our implementation:
    for i, version in enumerate(versions):
        supported.append(('%s%s' % (impl, version), 'none', 'any'))
        if i == 0:
            # Tagged specifically as being cross-version compatible
            # (with just the major version specified)
            supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))

    # Major Python version + platform; e.g. binaries not using the Python API
    # NOTE(review): `arch` leaks out of the loop above; this works because
    # `platforms` is never empty, but it is fragile.
    supported.append(('py%s' % (versions[0][0]), 'none', arch))

    # No abi / arch, generic Python
    for i, version in enumerate(versions):
        supported.append(('py%s' % (version,), 'none', 'any'))
        if i == 0:
            supported.append(('py%s' % (version[0]), 'none', 'any'))

    return supported
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/pep425tags.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/pep425tags.pyc
Normal file
Binary file not shown.
44
venv/lib/python2.7/site-packages/wheel/pkginfo.py
Normal file
44
venv/lib/python2.7/site-packages/wheel/pkginfo.py
Normal file
@@ -0,0 +1,44 @@
|
||||
"""Tools for reading and writing PKG-INFO / METADATA without caring
|
||||
about the encoding."""
|
||||
|
||||
from email.parser import Parser
|
||||
|
||||
try:
|
||||
unicode
|
||||
_PY3 = False
|
||||
except NameError:
|
||||
_PY3 = True
|
||||
|
||||
if not _PY3:
|
||||
from email.generator import Generator
|
||||
|
||||
def read_pkg_info_bytes(bytestr):
|
||||
return Parser().parsestr(bytestr)
|
||||
|
||||
def read_pkg_info(path):
|
||||
with open(path, "r") as headers:
|
||||
message = Parser().parse(headers)
|
||||
return message
|
||||
|
||||
def write_pkg_info(path, message):
|
||||
with open(path, 'w') as metadata:
|
||||
Generator(metadata, maxheaderlen=0).flatten(message)
|
||||
|
||||
else:
|
||||
from email.generator import BytesGenerator
|
||||
def read_pkg_info_bytes(bytestr):
|
||||
headers = bytestr.decode(encoding="ascii", errors="surrogateescape")
|
||||
message = Parser().parsestr(headers)
|
||||
return message
|
||||
|
||||
def read_pkg_info(path):
|
||||
with open(path, "r",
|
||||
encoding="ascii",
|
||||
errors="surrogateescape") as headers:
|
||||
message = Parser().parse(headers)
|
||||
return message
|
||||
|
||||
def write_pkg_info(path, message):
|
||||
with open(path, "wb") as out:
|
||||
BytesGenerator(out, maxheaderlen=0).flatten(message)
|
||||
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/pkginfo.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/pkginfo.pyc
Normal file
Binary file not shown.
106
venv/lib/python2.7/site-packages/wheel/signatures/__init__.py
Normal file
106
venv/lib/python2.7/site-packages/wheel/signatures/__init__.py
Normal file
@@ -0,0 +1,106 @@
|
||||
"""
|
||||
Create and verify jws-js format Ed25519 signatures.
|
||||
"""
|
||||
|
||||
__all__ = [ 'sign', 'verify' ]
|
||||
|
||||
import json
|
||||
from ..util import urlsafe_b64decode, urlsafe_b64encode, native, binary
|
||||
|
||||
ed25519ll = None
|
||||
|
||||
ALG = "Ed25519"
|
||||
|
||||
def get_ed25519ll():
    """Lazy import-and-test of ed25519 module.

    Prefers the compiled third-party ``ed25519ll`` package and falls
    back to the bundled pure-Python implementation.  The chosen module
    is cached in the module-level global and returned.
    """
    global ed25519ll

    if not ed25519ll:
        # Because of the `global` declaration above, either import
        # rebinds the module-level name, caching the result.
        try:
            import ed25519ll # fast (thousands / s)
        except (ImportError, OSError): # pragma nocover
            from . import ed25519py as ed25519ll # pure Python (hundreds / s)
        # Sanity-check the implementation once at first use.
        test()

    return ed25519ll
|
||||
|
||||
def sign(payload, keypair):
    """Return a JWS-JS format signature given a JSON-serializable payload and
    an Ed25519 keypair."""
    get_ed25519ll()

    # Protected header: algorithm name plus the public (verifying) key.
    protected = {
        "alg": ALG,
        "jwk": {
            "kty": ALG,  # alg -> kty in jwk-08.
            "vk": native(urlsafe_b64encode(keypair.vk)),
        },
    }

    header_b64 = urlsafe_b64encode(binary(json.dumps(protected, sort_keys=True)))
    payload_b64 = urlsafe_b64encode(binary(json.dumps(payload, sort_keys=True)))

    # Sign "<b64 header>.<b64 payload>"; crypto_sign prepends the
    # signature to a copy of the message, so slice the signature off.
    signing_input = b".".join((header_b64, payload_b64))
    signed = ed25519ll.crypto_sign(signing_input, keypair.sk)
    sig_b64 = urlsafe_b64encode(signed[:ed25519ll.SIGNATUREBYTES])

    return {
        "recipients": [
            {"header": native(header_b64),
             "signature": native(sig_b64)},
        ],
        "payload": native(payload_b64),
    }
|
||||
|
||||
def assertTrue(condition, message=""):
|
||||
if not condition:
|
||||
raise ValueError(message)
|
||||
|
||||
def verify(jwsjs):
    """Return (decoded headers, payload) if all signatures in jwsjs are
    consistent, else raise ValueError.

    Caller must decide whether the keys are actually trusted.

    NOTE(review): if ``recipients`` is empty, ``verified_payload`` below
    is never bound and this raises NameError rather than ValueError —
    confirm callers always pass at least one recipient.
    """
    get_ed25519ll()
    # XXX forbid duplicate keys in JSON input using object_pairs_hook (2.7+)
    recipients = jwsjs["recipients"]
    encoded_payload = binary(jwsjs["payload"])
    headers = []
    for recipient in recipients:
        assertTrue(len(recipient) == 2, "Unknown recipient key {0}".format(recipient))
        h = binary(recipient["header"])
        s = binary(recipient["signature"])
        header = json.loads(native(urlsafe_b64decode(h)))
        assertTrue(header["alg"] == ALG,
                   "Unexpected algorithm {0}".format(header["alg"]))
        if "alg" in header["jwk"] and not "kty" in header["jwk"]:
            header["jwk"]["kty"] = header["jwk"]["alg"] # b/w for JWK < -08
        assertTrue(header["jwk"]["kty"] == ALG, # true for Ed25519
                   "Unexpected key type {0}".format(header["jwk"]["kty"]))
        # Verifying key comes from the (signed) header itself; trust is
        # the caller's problem, per the docstring.
        vk = urlsafe_b64decode(binary(header["jwk"]["vk"]))
        secured_input = b".".join((h, encoded_payload))
        sig = urlsafe_b64decode(s)
        # crypto_sign_open expects signature||message and raises on
        # verification failure, returning the message on success.
        sig_msg = sig+secured_input
        verified_input = native(ed25519ll.crypto_sign_open(sig_msg, vk))
        verified_header, verified_payload = verified_input.split('.')
        verified_header = binary(verified_header)
        decoded_header = native(urlsafe_b64decode(verified_header))
        headers.append(json.loads(decoded_header))

    verified_payload = binary(verified_payload)

    # only return header, payload that have passed through the crypto library.
    payload = json.loads(native(urlsafe_b64decode(verified_payload)))

    return headers, payload
|
||||
|
||||
def test():
    """Self-test run at first use of the crypto backend.

    Signs a payload, verifies it, then corrupts the payload and checks
    that verification fails with ValueError.
    """
    kp = ed25519ll.crypto_sign_keypair()
    payload = {'test': 'onstartup'}
    # Round-trip through JSON to mimic an on-disk signature.
    jwsjs = json.loads(json.dumps(sign(payload, kp)))
    verify(jwsjs)
    # Tamper with the payload; verify() must now reject it.
    jwsjs['payload'] += 'x'
    try:
        verify(jwsjs)
    except ValueError:
        pass
    else: # pragma no cover
        raise RuntimeError("No error from bad wheel.signatures payload.")
|
||||
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/signatures/__init__.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/signatures/__init__.pyc
Normal file
Binary file not shown.
270
venv/lib/python2.7/site-packages/wheel/signatures/djbec.py
Normal file
270
venv/lib/python2.7/site-packages/wheel/signatures/djbec.py
Normal file
@@ -0,0 +1,270 @@
|
||||
# Ed25519 digital signatures
|
||||
# Based on http://ed25519.cr.yp.to/python/ed25519.py
|
||||
# See also http://ed25519.cr.yp.to/software.html
|
||||
# Adapted by Ron Garret
|
||||
# Sped up considerably using coordinate transforms found on:
|
||||
# http://www.hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html
|
||||
# Specifically add-2008-hwcd-4 and dbl-2008-hwcd
|
||||
|
||||
try: # pragma nocover
    # Python 2: `unicode` exists.  Indexing a byte string yields 1-char
    # strings, so the helpers go through chr()/ord().
    unicode
    PY3 = False
    def asbytes(b):
        """Convert array of integers to byte string"""
        return ''.join(chr(x) for x in b)
    def joinbytes(b):
        """Convert array of bytes to byte string"""
        return ''.join(b)
    def bit(h, i):
        """Return i'th bit of bytestring h"""
        return (ord(h[i//8]) >> (i%8)) & 1

except NameError: # pragma nocover
    # Python 3: indexing bytes yields ints, so bytes() works directly.
    PY3 = True
    asbytes = bytes
    joinbytes = bytes
    def bit(h, i):
        # Return the i'th bit of bytes object h (little-endian bit order).
        return (h[i//8] >> (i%8)) & 1
|
||||
|
||||
import hashlib

# Ed25519 domain parameters.
b = 256                                           # bit length of encodings
q = 2**255 - 19                                   # field prime
l = 2**252 + 27742317777372353535851937790883648493  # group order
|
||||
|
||||
def H(m):
    """Return the SHA-512 digest of *m* as raw bytes."""
    digest = hashlib.sha512(m)
    return digest.digest()
|
||||
|
||||
def expmod(b, e, m):
    """Return b**e mod m for e >= 0.

    Delegates to the built-in three-argument pow(), an iterative C
    implementation of modular exponentiation.  Same results as the
    original recursive square-and-multiply, but without the recursion
    (which risked hitting the interpreter recursion limit for the
    ~255-bit exponents used throughout this module) and much faster.
    """
    if e == 0:
        # Preserve original behavior: returns 1 without reducing mod m.
        return 1
    return pow(b, e, m)
|
||||
|
||||
# Can probably get some extra speedup here by replacing this with
|
||||
# an extended-euclidean, but performance seems OK without that
|
||||
def inv(x):
    # Multiplicative inverse mod q via Fermat's little theorem:
    # x**(q-2) == x**-1 (mod q) for prime q.
    return expmod(x, q-2, q)

# Edwards curve parameter: -121665/121666 mod q.
d = -121665 * inv(121666)
# sqrt(-1) mod q, used by xrecover() to fix up square roots.
I = expmod(2,(q-1)//4,q)
|
||||
|
||||
def xrecover(y):
    """Recover the (even) x coordinate from y on the Edwards curve.

    Solves x^2 = (y^2 - 1)/(d*y^2 + 1) mod q, multiplying by I when the
    first candidate root is wrong, and canonicalizes to the even root.
    """
    xx = (y*y-1) * inv(d*y*y+1)
    # Candidate square root; valid because q == 5 (mod 8).
    x = expmod(xx,(q+3)//8,q)
    if (x*x - xx) % q != 0: x = (x*I) % q
    if x % 2 != 0: x = q-x
    return x

# Standard Ed25519 base point: y = 4/5, x recovered from the curve.
By = 4 * inv(5)
Bx = xrecover(By)
B = [Bx % q,By % q]
|
||||
|
||||
#def edwards(P,Q):
|
||||
# x1 = P[0]
|
||||
# y1 = P[1]
|
||||
# x2 = Q[0]
|
||||
# y2 = Q[1]
|
||||
# x3 = (x1*y2+x2*y1) * inv(1+d*x1*x2*y1*y2)
|
||||
# y3 = (y1*y2+x1*x2) * inv(1-d*x1*x2*y1*y2)
|
||||
# return (x3 % q,y3 % q)
|
||||
|
||||
#def scalarmult(P,e):
|
||||
# if e == 0: return [0,1]
|
||||
# Q = scalarmult(P,e/2)
|
||||
# Q = edwards(Q,Q)
|
||||
# if e & 1: Q = edwards(Q,P)
|
||||
# return Q
|
||||
|
||||
# Faster (!) version based on:
|
||||
# http://www.hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html
|
||||
|
||||
def xpt_add(pt1, pt2):
    """Add two points in extended twisted-Edwards coordinates (X, Y, Z, T).

    Formulas follow add-2008-hwcd-4 from the EFD (see module header).
    Locals A/B/H intentionally shadow the module-level names of the
    same letters; they are only the formula's temporaries.
    """
    (X1, Y1, Z1, T1) = pt1
    (X2, Y2, Z2, T2) = pt2
    A = ((Y1-X1)*(Y2+X2)) % q
    B = ((Y1+X1)*(Y2-X2)) % q
    C = (Z1*2*T2) % q
    D = (T1*2*Z2) % q
    E = (D+C) % q
    F = (B-A) % q
    G = (B+A) % q
    H = (D-C) % q
    X3 = (E*F) % q
    Y3 = (G*H) % q
    Z3 = (F*G) % q
    T3 = (E*H) % q
    return (X3, Y3, Z3, T3)

def xpt_double (pt):
    """Double a point in extended coordinates (dbl-2008-hwcd formulas).

    The T component of the input is not needed and is ignored.
    """
    (X1, Y1, Z1, _) = pt
    A = (X1*X1)
    B = (Y1*Y1)
    C = (2*Z1*Z1)
    D = (-A) % q
    J = (X1+Y1) % q
    E = (J*J-A-B) % q
    G = (D+B) % q
    F = (G-C) % q
    H = (D-B) % q
    X3 = (E*F) % q
    Y3 = (G*H) % q
    Z3 = (F*G) % q
    T3 = (E*H) % q
    return (X3, Y3, Z3, T3)

def pt_xform (pt):
    """Lift an affine point (x, y) to extended coordinates (x, y, 1, x*y)."""
    (x, y) = pt
    return (x, y, 1, (x*y)%q)

def pt_unxform (pt):
    """Project extended coordinates back to an affine point (X/Z, Y/Z)."""
    (x, y, z, _) = pt
    return ((x*inv(z))%q, (y*inv(z))%q)

def xpt_mult (pt, n):
    """Scalar multiple n*pt in extended coordinates (recursive double-and-add).

    For n == 0 the identity point (0, 1) is returned.
    """
    if n==0: return pt_xform((0,1))
    _ = xpt_double(xpt_mult(pt, n>>1))
    return xpt_add(_, pt) if n&1 else _

def scalarmult(pt, e):
    """Scalar multiple e*pt for affine pt, computed via extended coordinates."""
    return pt_unxform(xpt_mult(pt_xform(pt), e))
|
||||
|
||||
def encodeint(y):
    """Encode integer y as b//8 (32) little-endian bytes."""
    bits = [(y >> i) & 1 for i in range(b)]
    e = [(sum([bits[i * 8 + j] << j for j in range(8)]))
                                    for i in range(b//8)]
    return asbytes(e)

def encodepoint(P):
    """Encode point P: little-endian y with the parity of x in the top bit."""
    x = P[0]
    y = P[1]
    bits = [(y >> i) & 1 for i in range(b - 1)] + [x & 1]
    e = [(sum([bits[i * 8 + j] << j for j in range(8)]))
                                    for i in range(b//8)]
    return asbytes(e)

def publickey(sk):
    """Derive the encoded public key from 32-byte secret key sk."""
    h = H(sk)
    # Clamp the hash into a valid scalar (clear low 3 bits, set bit b-2).
    a = 2**(b-2) + sum(2**i * bit(h,i) for i in range(3,b-2))
    A = scalarmult(B,a)
    return encodepoint(A)

def Hint(m):
    """Hash m and interpret the 64-byte digest as a little-endian integer."""
    h = H(m)
    return sum(2**i * bit(h,i) for i in range(2*b))

def signature(m,sk,pk):
    """Sign message m with secret key sk / encoded public key pk (64 bytes)."""
    h = H(sk)
    # Same clamped scalar as in publickey().
    a = 2**(b-2) + sum(2**i * bit(h,i) for i in range(3,b-2))
    # Deterministic nonce: hash of the second half of H(sk) and the message.
    inter = joinbytes([h[i] for i in range(b//8,b//4)])
    r = Hint(inter + m)
    R = scalarmult(B,r)
    S = (r + Hint(encodepoint(R) + pk + m) * a) % l
    return encodepoint(R) + encodeint(S)
|
||||
|
||||
def isoncurve(P):
    # Twisted Edwards curve equation: -x^2 + y^2 = 1 + d*x^2*y^2 (mod q).
    # Parameter P is a point [x, y]; it shadows the module-level prime P,
    # which is not used here.
    x = P[0]
    y = P[1]
    return (-x*x + y*y - 1 - d*x*x*y*y) % q == 0

def decodeint(s):
    """Decode b//8 little-endian bytes into an integer."""
    return sum(2**i * bit(s,i) for i in range(0,b))

def decodepoint(s):
    """Decode an encoded point, recovering x from y and the sign bit."""
    y = sum(2**i * bit(s,i) for i in range(0,b-1))
    x = xrecover(y)
    if x & 1 != bit(s,b-1): x = q-x
    P = [x,y]
    if not isoncurve(P): raise Exception("decoding point that is not on curve")
    return P

def checkvalid(s, m, pk):
    """Verify 64-byte signature s over message m with encoded public key pk.

    Checks the Ed25519 equation S*B == R + H(R || pk || m)*A.
    """
    if len(s) != b//4: raise Exception("signature length is wrong")
    if len(pk) != b//8: raise Exception("public-key length is wrong")
    R = decodepoint(s[0:b//8])
    A = decodepoint(pk)
    S = decodeint(s[b//8:b//4])
    h = Hint(encodepoint(R) + pk + m)
    v1 = scalarmult(B,S)
    # v2 = edwards(R,scalarmult(A,h))
    v2 = pt_unxform(xpt_add(pt_xform(R), pt_xform(scalarmult(A, h))))
    return v1==v2
|
||||
|
||||
##########################################################
|
||||
#
|
||||
# Curve25519 reference implementation by Matthew Dempsky, from:
|
||||
# http://cr.yp.to/highspeed/naclcrypto-20090310.pdf
|
||||
|
||||
# P = 2 ** 255 - 19
|
||||
# Curve25519 (Montgomery form) parameters: same prime field as Ed25519.
P = q
A = 486662
|
||||
|
||||
#def expmod(b, e, m):
|
||||
# if e == 0: return 1
|
||||
# t = expmod(b, e / 2, m) ** 2 % m
|
||||
# if e & 1: t = (t * b) % m
|
||||
# return t
|
||||
|
||||
# def inv(x): return expmod(x, P - 2, P)
|
||||
|
||||
def add(n, m, d):
    """Differential addition on the Montgomery ladder.

    n, m are x/z pairs for points N and M; d is the x/z pair of N-M.
    Parameter d intentionally shadows the module-level Edwards constant,
    which is not used here.
    """
    (xn, zn) = n
    (xm, zm) = m
    (xd, zd) = d
    x = 4 * (xm * xn - zm * zn) ** 2 * zd
    z = 4 * (xm * zn - zm * xn) ** 2 * xd
    return (x % P, z % P)

def double(n):
    """Point doubling on the Montgomery curve, in x/z coordinates."""
    (xn, zn) = n
    x = (xn ** 2 - zn ** 2) ** 2
    z = 4 * xn * zn * (xn ** 2 + A * xn * zn + zn ** 2)
    return (x % P, z % P)

def curve25519(n, base=9):
    """Scalar multiplication: x coordinate of n * (base, ...) on Curve25519."""
    one = (base,1)
    two = double(one)
    # f(m) evaluates to a tuple
    # containing the mth multiple and the
    # (m+1)th multiple of base.
    def f(m):
        if m == 1: return (one, two)
        (pm, pm1) = f(m // 2)
        if (m & 1):
            return (add(pm, pm1, one), double(pm1))
        return (double(pm), add(pm, pm1, one))
    ((x,z), _) = f(n)
    return (x * inv(z)) % P
|
||||
|
||||
import random
|
||||
|
||||
def genkey(n=0):
    """Return a clamped Curve25519 secret scalar (random if n is 0).

    NOTE(review): uses the non-cryptographic `random` module for key
    generation — acceptable only for the self-tests below; real keys
    should come from os.urandom (as ed25519py does).
    """
    n = n or random.randint(0,P)
    # Standard Curve25519 clamping: clear the low 3 bits, clear bit 255,
    # set bit 254.
    n &= ~7
    n &= ~(128 << 8 * 31)
    n |= 64 << 8 * 31
    return n
|
||||
|
||||
#def str2int(s):
|
||||
# return int(hexlify(s), 16)
|
||||
# # return sum(ord(s[i]) << (8 * i) for i in range(32))
|
||||
#
|
||||
#def int2str(n):
|
||||
# return unhexlify("%x" % n)
|
||||
# # return ''.join([chr((n >> (8 * i)) & 255) for i in range(32)])
|
||||
|
||||
#################################################
|
||||
|
||||
def dsa_test():
    """Sign a random message with a random key and verify it; return bool."""
    import os
    msg = str(random.randint(q,q+q)).encode('utf-8')
    sk = os.urandom(32)
    pk = publickey(sk)
    sig = signature(msg, sk, pk)
    return checkvalid(sig, msg, pk)

def dh_test():
    """Check the Diffie-Hellman property of curve25519; return bool."""
    sk1 = genkey()
    sk2 = genkey()
    return curve25519(sk1, curve25519(sk2)) == curve25519(sk2, curve25519(sk1))
|
||||
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/signatures/djbec.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/signatures/djbec.pyc
Normal file
Binary file not shown.
@@ -0,0 +1,52 @@
|
||||
# -*- coding: utf-8 -*-
"""NaCl-style Ed25519 API on top of the pure-Python djbec primitives."""

import warnings
import os

from collections import namedtuple
from . import djbec

__all__ = ['crypto_sign', 'crypto_sign_open', 'crypto_sign_keypair', 'Keypair',
           'PUBLICKEYBYTES', 'SECRETKEYBYTES', 'SIGNATUREBYTES']

# Sizes in bytes, matching the NaCl crypto_sign API.
PUBLICKEYBYTES=32
SECRETKEYBYTES=64
SIGNATUREBYTES=64

Keypair = namedtuple('Keypair', ('vk', 'sk')) # verifying key, secret key

def crypto_sign_keypair(seed=None):
    """Return (verifying, secret) key from a given seed, or os.urandom(32).

    The 64-byte secret key is seed || verifying-key, as in NaCl.
    Passing an explicit seed is discouraged (warns) since callers
    should let the library pick randomness.
    """
    if seed is None:
        seed = os.urandom(PUBLICKEYBYTES)
    else:
        warnings.warn("ed25519ll should choose random seed.",
                      RuntimeWarning)
    if len(seed) != 32:
        raise ValueError("seed must be 32 random bytes or None.")
    skbytes = seed
    vkbytes = djbec.publickey(skbytes)
    return Keypair(vkbytes, skbytes+vkbytes)


def crypto_sign(msg, sk):
    """Return signature+message given message and secret key.

    The signature is the first SIGNATUREBYTES bytes of the return value.
    A copy of msg is in the remainder.

    :param msg: message bytes to sign.
    :param sk: 64-byte secret key (seed || verifying key).
    :raises ValueError: if sk has the wrong length.
    """
    if len(sk) != SECRETKEYBYTES:
        raise ValueError("Bad signing key length %d" % len(sk))
    vkbytes = sk[PUBLICKEYBYTES:]
    skbytes = sk[:PUBLICKEYBYTES]
    sig = djbec.signature(msg, skbytes, vkbytes)
    return sig + msg


def crypto_sign_open(signed, vk):
    """Return message given signature+message and the verifying key.

    :raises ValueError: on bad key length or failed verification.
    """
    if len(vk) != PUBLICKEYBYTES:
        raise ValueError("Bad verifying key length %d" % len(vk))
    rc = djbec.checkvalid(signed[:SIGNATUREBYTES], signed[SIGNATUREBYTES:], vk)
    if not rc:
        raise ValueError("rc != True", rc)
    return signed[SIGNATUREBYTES:]
|
||||
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/signatures/ed25519py.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/signatures/ed25519py.pyc
Normal file
Binary file not shown.
99
venv/lib/python2.7/site-packages/wheel/signatures/keys.py
Normal file
99
venv/lib/python2.7/site-packages/wheel/signatures/keys.py
Normal file
@@ -0,0 +1,99 @@
|
||||
"""Store and retrieve wheel signing / verifying keys.
|
||||
|
||||
Given a scope (a package name, + meaning "all packages", or - meaning
|
||||
"no packages"), return a list of verifying keys that are trusted for that
|
||||
scope.
|
||||
|
||||
Given a package name, return a list of (scope, key) suggested keys to sign
|
||||
that package (only the verifying keys; the private signing key is stored
|
||||
elsewhere).
|
||||
|
||||
Keys here are represented as urlsafe_b64encoded strings with no padding.
|
||||
|
||||
Tentative command line interface:
|
||||
|
||||
# list trusts
|
||||
wheel trust
|
||||
# trust a particular key for all
|
||||
wheel trust + key
|
||||
# trust key for beaglevote
|
||||
wheel trust beaglevote key
|
||||
# stop trusting a key for all
|
||||
wheel untrust + key
|
||||
|
||||
# generate a key pair
|
||||
wheel keygen
|
||||
|
||||
# import a signing key from a file
|
||||
wheel import keyfile
|
||||
|
||||
# export a signing key
|
||||
wheel export key
|
||||
"""
|
||||
|
||||
import json
|
||||
import os.path
|
||||
from wheel.util import native, load_config_paths, save_config_path
|
||||
|
||||
class WheelKeys(object):
    """JSON-backed store of wheel signing / verifying keys.

    ``self.data`` holds two lists, 'signers' and 'verifiers', of
    ``{'scope': ..., 'vk': ...}`` entries, where scope is a package
    name, '+' (all packages) or '-' (no packages) and vk is a
    urlsafe-b64-encoded verifying key.  All mutators return ``self``
    so calls can be chained (fluent style).
    """
    SCHEMA = 1
    CONFIG_NAME = 'wheel.json'

    def __init__(self):
        self.data = {'signers': [], 'verifiers': []}

    def load(self):
        """Load the first wheel.json found on the XDG config path.

        Fills in missing 'signers'/'verifiers' lists and validates the
        schema version, raising ValueError on a mismatch.
        """
        # XXX JSON is not a great database
        for path in load_config_paths('wheel'):
            conf = os.path.join(native(path), self.CONFIG_NAME)
            if os.path.exists(conf):
                with open(conf, 'r') as infile:
                    self.data = json.load(infile)
                    for x in ('signers', 'verifiers'):
                        if x not in self.data:
                            self.data[x] = []
                    if 'schema' not in self.data:
                        self.data['schema'] = self.SCHEMA
                    elif self.data['schema'] != self.SCHEMA:
                        raise ValueError(
                            "Bad wheel.json version {0}, expected {1}".format(
                                self.data['schema'], self.SCHEMA))
                break
        return self

    def save(self):
        """Write the current data to wheel.json in the XDG config dir."""
        # Try not to call this a very long time after load()
        path = save_config_path('wheel')
        conf = os.path.join(native(path), self.CONFIG_NAME)
        with open(conf, 'w+') as out:
            json.dump(self.data, out, indent=2)
        return self

    def trust(self, scope, vk):
        """Start trusting a particular key for given scope."""
        self.data['verifiers'].append({'scope': scope, 'vk': vk})
        return self

    def untrust(self, scope, vk):
        """Stop trusting a particular key for given scope."""
        self.data['verifiers'].remove({'scope': scope, 'vk': vk})
        return self

    def trusted(self, scope=None):
        """Return list of [(scope, trusted key), ...] for given scope.

        Matches the exact scope plus the '+' (everything) scope, most
        specific first.
        """
        trust = [(x['scope'], x['vk']) for x in self.data['verifiers']
                 if x['scope'] in (scope, '+')]
        trust.sort(key=lambda x: x[0])
        trust.reverse()
        return trust

    def signers(self, scope):
        """Return list of signing key(s) suggested for given scope."""
        sign = [(x['scope'], x['vk']) for x in self.data['signers']
                if x['scope'] in (scope, '+')]
        sign.sort(key=lambda x: x[0])
        sign.reverse()
        return sign

    def add_signer(self, scope, vk):
        """Remember verifying key vk as being valid for signing in scope."""
        self.data['signers'].append({'scope': scope, 'vk': vk})
        # Fix: return self for chaining, consistent with trust()/untrust()
        # and the other mutators (previously returned None).
        return self
|
||||
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/signatures/keys.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/signatures/keys.pyc
Normal file
Binary file not shown.
1
venv/lib/python2.7/site-packages/wheel/test/__init__.py
Normal file
1
venv/lib/python2.7/site-packages/wheel/test/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
#
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/test/__init__.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/test/__init__.pyc
Normal file
Binary file not shown.
@@ -0,0 +1,2 @@
|
||||
def main():
    """Console-script entry point for the complex-dist test fixture; a no-op."""
    return None
|
||||
Binary file not shown.
@@ -0,0 +1,30 @@
|
||||
from setuptools import setup

# Py2/Py3 shim: setup() metadata wants text.  On Py2 the source literal's
# \N{...} escapes must be decoded; on Py3 the literal is already text.
try:
    unicode
    def u8(s):
        return s.decode('unicode-escape')
except NameError:
    def u8(s):
        return s

# Test fixture: a distribution exercising many metadata fields at once
# (extras, tests_require, multiple console scripts).
setup(name='complex-dist',
      version='0.1',
      description=u8('Another testing distribution \N{SNOWMAN}'),
      long_description=u8('Another testing distribution \N{SNOWMAN}'),
      author="Illustrious Author",
      author_email="illustrious@example.org",
      url="http://example.org/exemplary",
      packages=['complexdist'],
      setup_requires=["wheel", "setuptools"],
      install_requires=["quux", "splort"],
      extras_require={'simple':['simple.dist']},
      tests_require=["foo", "bar>=10.0.0"],
      entry_points={
          'console_scripts': [
              'complex-dist=complexdist:main',
              'complex-dist2=complexdist:main',
          ],
      },
      )
|
||||
|
||||
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,16 @@
|
||||
from setuptools import setup

# Py2/Py3 shim: decode \N{...} escapes on Py2, then encode to UTF-8 bytes
# in both cases.
try:
    unicode
    def u8(s):
        return s.decode('unicode-escape').encode('utf-8')
except NameError:
    def u8(s):
        return s.encode('utf-8')

# Test fixture: a distribution that installs C headers.
setup(name='headers.dist',
      version='0.1',
      description=u8('A distribution with headers'),
      headers=['header.h']
      )
|
||||
|
||||
Binary file not shown.
362
venv/lib/python2.7/site-packages/wheel/test/pydist-schema.json
Normal file
362
venv/lib/python2.7/site-packages/wheel/test/pydist-schema.json
Normal file
@@ -0,0 +1,362 @@
|
||||
{
|
||||
"id": "http://www.python.org/dev/peps/pep-0426/",
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"title": "Metadata for Python Software Packages 2.0",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"metadata_version": {
|
||||
"description": "Version of the file format",
|
||||
"type": "string",
|
||||
"pattern": "^(\\d+(\\.\\d+)*)$"
|
||||
},
|
||||
"generator": {
|
||||
"description": "Name and version of the program that produced this file.",
|
||||
"type": "string",
|
||||
"pattern": "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])( \\(.*\\))?$"
|
||||
},
|
||||
"name": {
|
||||
"description": "The name of the distribution.",
|
||||
"type": "string",
|
||||
"$ref": "#/definitions/distribution_name"
|
||||
},
|
||||
"version": {
|
||||
"description": "The distribution's public version identifier",
|
||||
"type": "string",
|
||||
"pattern": "^(\\d+(\\.\\d+)*)((a|b|c|rc)(\\d+))?(\\.(post)(\\d+))?(\\.(dev)(\\d+))?$"
|
||||
},
|
||||
"source_label": {
|
||||
"description": "A constrained identifying text string",
|
||||
"type": "string",
|
||||
"pattern": "^[0-9a-z_.-+]+$"
|
||||
},
|
||||
"source_url": {
|
||||
"description": "A string containing a full URL where the source for this specific version of the distribution can be downloaded.",
|
||||
"type": "string",
|
||||
"format": "uri"
|
||||
},
|
||||
"summary": {
|
||||
"description": "A one-line summary of what the distribution does.",
|
||||
"type": "string"
|
||||
},
|
||||
"extras": {
|
||||
"description": "A list of optional sets of dependencies that may be used to define conditional dependencies in \"may_require\" and similar fields.",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"$ref": "#/definitions/extra_name"
|
||||
}
|
||||
},
|
||||
"meta_requires": {
|
||||
"description": "A list of subdistributions made available through this metadistribution.",
|
||||
"type": "array",
|
||||
"$ref": "#/definitions/dependencies"
|
||||
},
|
||||
"run_requires": {
|
||||
"description": "A list of other distributions needed to run this distribution.",
|
||||
"type": "array",
|
||||
"$ref": "#/definitions/dependencies"
|
||||
},
|
||||
"test_requires": {
|
||||
"description": "A list of other distributions needed when this distribution is tested.",
|
||||
"type": "array",
|
||||
"$ref": "#/definitions/dependencies"
|
||||
},
|
||||
"build_requires": {
|
||||
"description": "A list of other distributions needed when this distribution is built.",
|
||||
"type": "array",
|
||||
"$ref": "#/definitions/dependencies"
|
||||
},
|
||||
"dev_requires": {
|
||||
"description": "A list of other distributions needed when this distribution is developed.",
|
||||
"type": "array",
|
||||
"$ref": "#/definitions/dependencies"
|
||||
},
|
||||
"provides": {
|
||||
"description": "A list of strings naming additional dependency requirements that are satisfied by installing this distribution. These strings must be of the form Name or Name (Version)",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"$ref": "#/definitions/provides_declaration"
|
||||
}
|
||||
},
|
||||
"modules": {
|
||||
"description": "A list of modules and/or packages available for import after installing this distribution.",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"$ref": "#/definitions/qualified_name"
|
||||
}
|
||||
},
|
||||
"namespaces": {
|
||||
"description": "A list of namespace packages this distribution contributes to",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"$ref": "#/definitions/qualified_name"
|
||||
}
|
||||
},
|
||||
"obsoleted_by": {
|
||||
"description": "A string that indicates that this project is no longer being developed. The named project provides a substitute or replacement.",
|
||||
"type": "string",
|
||||
"$ref": "#/definitions/requirement"
|
||||
},
|
||||
"supports_environments": {
|
||||
"description": "A list of strings specifying the environments that the distribution explicitly supports.",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"$ref": "#/definitions/environment_marker"
|
||||
}
|
||||
},
|
||||
"install_hooks": {
|
||||
"description": "The install_hooks field is used to define various operations that may be invoked on a distribution in a platform independent manner.",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"postinstall": {
|
||||
"type": "string",
|
||||
"$ref": "#/definitions/export_specifier"
|
||||
},
|
||||
"preuninstall": {
|
||||
"type": "string",
|
||||
"$ref": "#/definitions/export_specifier"
|
||||
}
|
||||
}
|
||||
},
|
||||
"extensions": {
|
||||
"description": "Extensions to the metadata may be present in a mapping under the 'extensions' key.",
|
||||
"type": "object",
|
||||
"$ref": "#/definitions/extensions"
|
||||
}
|
||||
},
|
||||
|
||||
"required": ["metadata_version", "name", "version", "summary"],
|
||||
"additionalProperties": false,
|
||||
|
||||
"definitions": {
|
||||
"contact": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"email": {
|
||||
"type": "string"
|
||||
},
|
||||
"url": {
|
||||
"type": "string"
|
||||
},
|
||||
"role": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": ["name"],
|
||||
"additionalProperties": false
|
||||
},
|
||||
"dependencies": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"$ref": "#/definitions/dependency"
|
||||
}
|
||||
},
|
||||
"dependency": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"extra": {
|
||||
"type": "string",
|
||||
"$ref": "#/definitions/extra_name"
|
||||
},
|
||||
"environment": {
|
||||
"type": "string",
|
||||
"$ref": "#/definitions/environment_marker"
|
||||
},
|
||||
"requires": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"$ref": "#/definitions/requirement"
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": ["requires"],
|
||||
"additionalProperties": false
|
||||
},
|
||||
"extensions": {
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^[A-Za-z][0-9A-Za-z_]*([.][0-9A-Za-z_]*)*$": {}
|
||||
},
|
||||
"properties": {
|
||||
"python.details" : {
|
||||
"description": "More information regarding the distribution.",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"document_names": {
|
||||
"description": "Names of supporting metadata documents",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"description": {
|
||||
"type": "string",
|
||||
"$ref": "#/definitions/document_name"
|
||||
},
|
||||
"changelog": {
|
||||
"type": "string",
|
||||
"$ref": "#/definitions/document_name"
|
||||
},
|
||||
"license": {
|
||||
"type": "string",
|
||||
"$ref": "#/definitions/document_name"
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"keywords": {
|
||||
"description": "A list of additional keywords to be used to assist searching for the distribution in a larger catalog.",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"license": {
|
||||
"description": "A string indicating the license covering the distribution.",
|
||||
"type": "string"
|
||||
},
|
||||
"classifiers": {
|
||||
"description": "A list of strings, with each giving a single classification value for the distribution.",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"python.project" : {
|
||||
"description": "More information regarding the creation and maintenance of the distribution.",
|
||||
"$ref": "#/definitions/project_or_integrator"
|
||||
},
|
||||
"python.integrator" : {
|
||||
"description": "More information regarding the downstream redistributor of the distribution.",
|
||||
"$ref": "#/definitions/project_or_integrator"
|
||||
},
|
||||
"python.commands" : {
|
||||
"description": "Command line interfaces provided by this distribution",
|
||||
"type": "object",
|
||||
"$ref": "#/definitions/commands"
|
||||
},
|
||||
"python.exports" : {
|
||||
"description": "Other exported interfaces provided by this distribution",
|
||||
"type": "object",
|
||||
"$ref": "#/definitions/exports"
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"commands": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"wrap_console": {
|
||||
"type": "object",
|
||||
"$ref": "#/definitions/command_map"
|
||||
},
|
||||
"wrap_gui": {
|
||||
"type": "object",
|
||||
"$ref": "#/definitions/command_map"
|
||||
},
|
||||
"prebuilt": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"$ref": "#/definitions/relative_path"
|
||||
}
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"exports": {
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^[A-Za-z][0-9A-Za-z_]*([.][0-9A-Za-z_]*)*$": {
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
".": {
|
||||
"type": "string",
|
||||
"$ref": "#/definitions/export_specifier"
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"command_map": {
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?$": {
|
||||
"type": "string",
|
||||
"$ref": "#/definitions/export_specifier"
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"project_or_integrator" : {
|
||||
"type": "object",
|
||||
"properties" : {
|
||||
"contacts": {
|
||||
"description": "A list of contributor entries giving the recommended contact points for getting more information about the project.",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"$ref": "#/definitions/contact"
|
||||
}
|
||||
},
|
||||
"contributors": {
|
||||
"description": "A list of contributor entries for other contributors not already listed as current project points of contact.",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"$ref": "#/definitions/contact"
|
||||
}
|
||||
},
|
||||
"project_urls": {
|
||||
"description": "A mapping of arbitrary text labels to additional URLs relevant to the project.",
|
||||
"type": "object"
|
||||
}
|
||||
}
|
||||
},
|
||||
"distribution_name": {
|
||||
"type": "string",
|
||||
"pattern": "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?$"
|
||||
},
|
||||
"requirement": {
|
||||
"type": "string"
|
||||
},
|
||||
"provides_declaration": {
|
||||
"type": "string"
|
||||
},
|
||||
"environment_marker": {
|
||||
"type": "string"
|
||||
},
|
||||
"document_name": {
|
||||
"type": "string"
|
||||
},
|
||||
"extra_name" : {
|
||||
"type": "string",
|
||||
"pattern": "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?$"
|
||||
},
|
||||
"relative_path" : {
|
||||
"type": "string"
|
||||
},
|
||||
"export_specifier": {
|
||||
"type": "string",
|
||||
"pattern": "^([A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_][A-Za-z_0-9]*)*)(:[A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_][A-Za-z_0-9]*)*)?(\\[[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?\\])?$"
|
||||
},
|
||||
"qualified_name" : {
|
||||
"type": "string",
|
||||
"pattern": "^[A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_][A-Za-z_0-9]*)*$"
|
||||
},
|
||||
"prefixed_name" : {
|
||||
"type": "string",
|
||||
"pattern": "^[A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_0-9]*)*$"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,17 @@
|
||||
from setuptools import setup
|
||||
|
||||
try:
    unicode  # noqa: F821 -- the name only exists on Python 2
except NameError:
    # Python 3: the source string is already text.
    def u8(s):
        """Encode a text string to UTF-8 bytes."""
        return s.encode('utf-8')
else:
    # Python 2: interpret escape sequences, then re-encode as UTF-8.
    def u8(s):
        """Decode escapes in a byte string and re-encode it as UTF-8."""
        return s.decode('unicode-escape').encode('utf-8')
|
||||
|
||||
# Minimal distribution used by the wheel test suite; the non-ASCII
# description (via u8 above) exercises metadata encoding, and the extra
# exercises extras_require handling.
setup(name='simple.dist',
      version='0.1',
      description=u8('A testing distribution \N{SNOWMAN}'),
      packages=['simpledist'],
      extras_require={'voting': ['beaglevote']},
      )
|
||||
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
176
venv/lib/python2.7/site-packages/wheel/test/test_basic.py
Normal file
176
venv/lib/python2.7/site-packages/wheel/test/test_basic.py
Normal file
@@ -0,0 +1,176 @@
|
||||
"""
|
||||
Basic wheel tests.
|
||||
"""
|
||||
|
||||
import os
|
||||
import pkg_resources
|
||||
import json
|
||||
import sys
|
||||
|
||||
from pkg_resources import resource_filename
|
||||
|
||||
import wheel.util
|
||||
import wheel.tool
|
||||
|
||||
from wheel import egg2wheel
|
||||
from wheel.install import WheelFile
|
||||
from zipfile import ZipFile
|
||||
from shutil import rmtree
|
||||
|
||||
# Fixture distributions shipped alongside these tests.
test_distributions = ("complex-dist", "simple.dist", "headers.dist")


def teardown_module():
    """Delete eggs/wheels created by tests."""
    base = pkg_resources.resource_filename('wheel.test', '')
    for dist in test_distributions:
        for leftover in ('build', 'dist'):
            target = os.path.join(base, dist, leftover)
            try:
                rmtree(target)
            except OSError:
                pass  # nothing was built for this distribution
|
||||
|
||||
def setup_module():
    """Build a wheel and an egg for every test distribution."""
    for builder in (build_wheel, build_egg):
        builder()
|
||||
|
||||
def build_wheel():
    """Build wheels from the test distributions.

    Runs each distribution's setup.py with a 'bdist_wheel' argv from inside
    the distribution directory, restoring the cwd afterwards.
    """
    for dist in test_distributions:
        pwd = os.path.abspath(os.curdir)
        distdir = pkg_resources.resource_filename('wheel.test', dist)
        os.chdir(distdir)
        try:
            sys.argv = ['', 'bdist_wheel']
            # Read setup.py with an explicit close -- the original
            # open('setup.py').read() leaked the file handle until GC.
            with open('setup.py') as setup_file:
                source = setup_file.read()
            exec(compile(source, 'setup.py', 'exec'))
        finally:
            os.chdir(pwd)
|
||||
|
||||
def build_egg():
    """Build eggs from the test distributions.

    Runs each distribution's setup.py with a 'bdist_egg' argv from inside
    the distribution directory, restoring the cwd afterwards.
    """
    for dist in test_distributions:
        pwd = os.path.abspath(os.curdir)
        distdir = pkg_resources.resource_filename('wheel.test', dist)
        os.chdir(distdir)
        try:
            sys.argv = ['', 'bdist_egg']
            # Read setup.py with an explicit close -- the original
            # open('setup.py').read() leaked the file handle until GC.
            with open('setup.py') as setup_file:
                source = setup_file.read()
            exec(compile(source, 'setup.py', 'exec'))
        finally:
            os.chdir(pwd)
|
||||
|
||||
def test_findable():
    """pkg_resources must know about the installed 'wheel' distribution."""
    installed = pkg_resources.working_set.by_key['wheel']
    assert installed.version
|
||||
|
||||
def test_egg_re():
    """egg_info_re must match every non-blank line of eggnames.txt."""
    # 'with' closes the fixture file deterministically (the original leaked
    # the handle until GC).
    with open(pkg_resources.resource_filename('wheel', 'eggnames.txt')) as egg_names:
        for line in egg_names:
            line = line.strip()
            if not line:
                continue
            assert egg2wheel.egg_info_re.match(line), line
|
||||
|
||||
def test_compatibility_tags():
    """Compatibility tags must be parsed out of a multi-tag wheel filename."""
    wf = WheelFile("package-1.0.0-cp32.cp33-noabi-noarch.whl")
    expected = [('cp32', 'noabi', 'noarch'), ('cp33', 'noabi', 'noarch')]
    assert list(wf.compatibility_tags) == expected
    assert wf.arity == 2

    # A build-number segment must land in the 'build' group.
    build_wf = WheelFile("package-1.0.0-1st-cp33-noabi-noarch.whl")
    parsed = build_wf.parsed_filename.groupdict()
    assert parsed['build'] == '1st', parsed
|
||||
|
||||
def test_convert_egg():
    """egg->wheel conversion via wheel.tool.convert on every built egg."""
    base = pkg_resources.resource_filename('wheel.test', '')
    for dist in test_distributions:
        distdir = os.path.join(base, dist, 'dist')
        eggs = [name for name in os.listdir(distdir) if name.endswith('.egg')]
        wheel.tool.convert(eggs, distdir, verbose=False)
|
||||
|
||||
def test_unpack():
    """
    Make sure 'wheel unpack' works.

    This also verifies the integrity of our testing wheel files.
    """
    for dist in test_distributions:
        distdir = pkg_resources.resource_filename(
            'wheel.test', os.path.join(dist, 'dist'))
        for name in os.listdir(distdir):
            if name.endswith('.whl'):
                wheel.tool.unpack(os.path.join(distdir, name), distdir)
|
||||
|
||||
def test_no_scripts():
    """Entry point scripts must not be generated into the wheel."""
    basedir = pkg_resources.resource_filename('wheel.test', "complex-dist")
    for dirname, _subdirs, filenames in os.walk(basedir):
        for filename in filenames:
            if not filename.endswith('.whl'):
                continue
            whl = ZipFile(os.path.join(dirname, filename))
            for entry in whl.infolist():
                assert '.data/scripts/' not in entry.filename
|
||||
|
||||
def test_pydist():
    """metadata.json in each built wheel must validate against our schema."""
    # XXX this test may need manual cleanup of older wheels

    import jsonschema

    def open_json(filename):
        # 'with' closes the handle deterministically (the original
        # open(...).read() leaked it until GC).
        with open(filename, 'rb') as json_file:
            return json.loads(json_file.read().decode('utf-8'))

    pymeta_schema = open_json(resource_filename('wheel.test',
                                                'pydist-schema.json'))
    valid = 0
    for dist in ("simple.dist", "complex-dist"):
        basedir = pkg_resources.resource_filename('wheel.test', dist)
        for (dirname, subdirs, filenames) in os.walk(basedir):
            for filename in filenames:
                if filename.endswith('.whl'):
                    whl = ZipFile(os.path.join(dirname, filename))
                    for entry in whl.infolist():
                        if entry.filename.endswith('/metadata.json'):
                            pymeta = json.loads(whl.read(entry).decode('utf-8'))
                            jsonschema.validate(pymeta, pymeta_schema)
                            valid += 1
    assert valid > 0, "No metadata.json found"
|
||||
|
||||
def test_util():
    """urlsafe b64 round trip; the encoder must strip '=' padding."""
    for length in range(10):
        payload = b'*' * length
        encoded = wheel.util.urlsafe_b64encode(payload)
        assert not encoded.endswith(b'=')
        assert wheel.util.urlsafe_b64decode(encoded) == payload
|
||||
|
||||
|
||||
def test_pick_best():
    """Test the wheel ranking algorithm.

    For each supported-tags list, the wheel whose tags match the first
    (best) supported tag must rank highest under max().
    """
    cand_tags = [('py27', 'noabi', 'noarch'), ('py26', 'noabi', 'noarch'),
                 ('cp27', 'noabi', 'linux_i686'),
                 ('cp26', 'noabi', 'linux_i686'),
                 ('cp27', 'noabi', 'linux_x86_64'),
                 ('cp26', 'noabi', 'linux_x86_64')]
    cand_wheels = [WheelFile('testpkg-1.0-%s-%s-%s.whl' % t)
                   for t in cand_tags]

    supported = [('cp27', 'noabi', 'linux_i686'), ('py27', 'noabi', 'noarch')]
    supported2 = [('cp27', 'noabi', 'linux_i686'), ('py27', 'noabi', 'noarch'),
                  ('cp26', 'noabi', 'linux_i686'), ('py26', 'noabi', 'noarch')]
    supported3 = [('cp26', 'noabi', 'linux_i686'), ('py26', 'noabi', 'noarch'),
                  ('cp27', 'noabi', 'linux_i686'), ('py27', 'noabi', 'noarch')]

    for supp in (supported, supported2, supported3):
        context = lambda: list(supp)
        # 'candidate', not 'wheel': the original loop variable shadowed the
        # imported 'wheel' package for the rest of the module's execution.
        for candidate in cand_wheels:
            candidate.context = context
        best = max(cand_wheels)
        assert list(best.tags)[0] == supp[0]
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/test/test_basic.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/test/test_basic.pyc
Normal file
Binary file not shown.
55
venv/lib/python2.7/site-packages/wheel/test/test_install.py
Normal file
55
venv/lib/python2.7/site-packages/wheel/test/test_install.py
Normal file
@@ -0,0 +1,55 @@
|
||||
# Test wheel.
|
||||
# The file has the following contents:
|
||||
# hello.pyd
|
||||
# hello/hello.py
|
||||
# hello/__init__.py
|
||||
# test-1.0.data/data/hello.dat
|
||||
# test-1.0.data/headers/hello.dat
|
||||
# test-1.0.data/scripts/hello.sh
|
||||
# test-1.0.dist-info/WHEEL
|
||||
# test-1.0.dist-info/METADATA
|
||||
# test-1.0.dist-info/RECORD
|
||||
# The root is PLATLIB
|
||||
# So, some in PLATLIB, and one in each of DATA, HEADERS and SCRIPTS.
|
||||
|
||||
import wheel.tool
|
||||
import wheel.pep425tags
|
||||
from wheel.install import WheelFile
|
||||
from tempfile import mkdtemp
|
||||
import shutil
|
||||
import os
|
||||
|
||||
# Location of the pre-built testing wheel that sits next to this module.
THISDIR = os.path.dirname(__file__)
TESTWHEEL = os.path.join(THISDIR, 'test-1.0-py2.py3-none-win32.whl')


def check(*path):
    """Return whether the path assembled from the given segments exists."""
    candidate = os.path.join(*path)
    return os.path.exists(candidate)
||||
|
||||
def test_install():
    """Install TESTWHEEL into per-category override dirs and verify that
    every file landed in the expected location."""
    tempdir = mkdtemp()

    def get_supported():
        return list(wheel.pep425tags.get_supported()) + [('py3', 'none', 'win32')]

    whl = WheelFile(TESTWHEEL, context=get_supported)
    assert whl.supports_current_python(get_supported)
    try:
        locs = {}
        for key in ('purelib', 'platlib', 'scripts', 'headers', 'data'):
            locs[key] = os.path.join(tempdir, key)
            os.mkdir(locs[key])
        whl.install(overrides=locs)
        # The wheel's root is PLATLIB, so purelib must stay empty.
        assert not os.listdir(locs['purelib'])
        expected = (
            ('platlib', ('hello.pyd',)),
            ('platlib', ('hello', 'hello.py')),
            ('platlib', ('hello', '__init__.py')),
            ('data', ('hello.dat',)),
            ('headers', ('hello.dat',)),
            ('scripts', ('hello.sh',)),
            ('platlib', ('test-1.0.dist-info', 'RECORD')),
        )
        for category, parts in expected:
            assert check(locs[category], *parts)
    finally:
        shutil.rmtree(tempdir)
|
||||
|
||||
def test_install_tool():
    """Exercise the command-line install path (forced dry run)."""
    wheel.tool.install([TESTWHEEL], force=True, dry_run=True)
|
||||
|
||||
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/test/test_install.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/test/test_install.pyc
Normal file
Binary file not shown.
98
venv/lib/python2.7/site-packages/wheel/test/test_keys.py
Normal file
98
venv/lib/python2.7/site-packages/wheel/test/test_keys.py
Normal file
@@ -0,0 +1,98 @@
|
||||
import tempfile
|
||||
import os.path
|
||||
import unittest
|
||||
import json
|
||||
|
||||
from wheel.signatures import keys
|
||||
|
||||
wheel_json = """
|
||||
{
|
||||
"verifiers": [
|
||||
{
|
||||
"scope": "+",
|
||||
"vk": "bp-bjK2fFgtA-8DhKKAAPm9-eAZcX_u03oBv2RlKOBc"
|
||||
},
|
||||
{
|
||||
"scope": "+",
|
||||
"vk": "KAHZBfyqFW3OcFDbLSG4nPCjXxUPy72phP9I4Rn9MAo"
|
||||
},
|
||||
{
|
||||
"scope": "+",
|
||||
"vk": "tmAYCrSfj8gtJ10v3VkvW7jOndKmQIYE12hgnFu3cvk"
|
||||
}
|
||||
],
|
||||
"signers": [
|
||||
{
|
||||
"scope": "+",
|
||||
"vk": "tmAYCrSfj8gtJ10v3VkvW7jOndKmQIYE12hgnFu3cvk"
|
||||
},
|
||||
{
|
||||
"scope": "+",
|
||||
"vk": "KAHZBfyqFW3OcFDbLSG4nPCjXxUPy72phP9I4Rn9MAo"
|
||||
}
|
||||
],
|
||||
"schema": 1
|
||||
}
|
||||
"""
|
||||
|
||||
class TestWheelKeys(unittest.TestCase):
    """Exercise the WheelKeys trust database against a temporary config file."""

    def setUp(self):
        # Reserve a unique .json path; the file itself is (re)created later
        # by save(), so close the handle immediately.
        self.config = tempfile.NamedTemporaryFile(suffix='.json')
        self.config.close()

        self.config_path, self.config_filename = os.path.split(self.config.name)
        def load(*args):
            return [self.config_path]
        def save(*args):
            return self.config_path
        # Monkeypatch the config-path helpers so WheelKeys reads and writes
        # our temporary location.  NOTE(review): these are module-level
        # patches and are never undone in tearDown -- confirm that no other
        # test depends on the real functions afterwards.
        keys.load_config_paths = load
        keys.save_config_path = save
        self.wk = keys.WheelKeys()
        self.wk.CONFIG_NAME = self.config_filename

    def tearDown(self):
        # Remove the config file written by save() during the tests.
        os.unlink(self.config.name)

    def test_load_save(self):
        # Signers and trusted keys must survive a save()/load() round trip.
        self.wk.data = json.loads(wheel_json)

        self.wk.add_signer('+', '67890')
        self.wk.add_signer('scope', 'abcdefg')

        self.wk.trust('epocs', 'gfedcba')
        self.wk.trust('+', '12345')

        self.wk.save()

        del self.wk.data
        self.wk.load()

        # Scoped entries are expected before the '+' wildcard entries.
        signers = self.wk.signers('scope')
        self.assertTrue(signers[0] == ('scope', 'abcdefg'), self.wk.data['signers'])
        self.assertTrue(signers[1][0] == '+', self.wk.data['signers'])

        trusted = self.wk.trusted('epocs')
        self.assertTrue(trusted[0] == ('epocs', 'gfedcba'))
        self.assertTrue(trusted[1][0] == '+')

        # Untrusting must remove exactly that (scope, key) pair.
        self.wk.untrust('epocs', 'gfedcba')
        trusted = self.wk.trusted('epocs')
        self.assertTrue(('epocs', 'gfedcba') not in trusted)

    def test_load_save_incomplete(self):
        # A newer (unknown) schema version must refuse to load ...
        self.wk.data = json.loads(wheel_json)
        del self.wk.data['signers']
        self.wk.data['schema'] = self.wk.SCHEMA+1
        self.wk.save()
        try:
            self.wk.load()
        except ValueError:
            pass
        else:
            raise Exception("Expected ValueError")

        # ... while a missing schema key is tolerated.
        del self.wk.data['schema']
        self.wk.save()
        self.wk.load()
|
||||
|
||||
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/test/test_keys.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/test/test_keys.pyc
Normal file
Binary file not shown.
@@ -0,0 +1,6 @@
|
||||
import wheel.paths
|
||||
from distutils.command.install import SCHEME_KEYS
|
||||
|
||||
def test_path():
    """get_install_paths must return one entry per distutils scheme key."""
    paths = wheel.paths.get_install_paths('wheel')
    assert len(paths) == len(SCHEME_KEYS)
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/test/test_paths.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/test/test_paths.pyc
Normal file
Binary file not shown.
43
venv/lib/python2.7/site-packages/wheel/test/test_ranking.py
Normal file
43
venv/lib/python2.7/site-packages/wheel/test/test_ranking.py
Normal file
@@ -0,0 +1,43 @@
|
||||
import unittest
|
||||
|
||||
from wheel.pep425tags import get_supported
|
||||
from wheel.install import WheelFile
|
||||
|
||||
# Filename template for synthesizing wheel names in the tests below.
WHEELPAT = "%(name)s-%(ver)s-%(pyver)s-%(abi)s-%(arch)s.whl"


def make_wheel(name, ver, pyver, abi, arch):
    """Build a WheelFile from the individual wheel-filename components."""
    filename = WHEELPAT % dict(
        name=name, ver=ver, pyver=pyver, abi=abi, arch=arch)
    return WheelFile(filename)
|
||||
|
||||
# This relies on the fact that generate_supported will always return the
# exact pyver, abi, and architecture for its first (best) match.
sup = get_supported()
pyver, abi, arch = sup[0]
genver = 'py' + pyver[2:]  # e.g. 'cp27' -> 'py27'
majver = genver[:3]        # e.g. 'py2'

# Candidate (name, version, pyver, abi, arch) tuples, ordered from worst
# match to best match for the running interpreter.
COMBINATIONS = (
    ('bar', '0.9', 'py2.py3', 'none', 'any'),
    ('bar', '0.9', majver, 'none', 'any'),
    ('bar', '0.9', genver, 'none', 'any'),
    ('bar', '0.9', pyver, abi, arch),
    ('bar', '1.3.2', majver, 'none', 'any'),
    ('bar', '3.1', genver, 'none', 'any'),
    ('bar', '3.1', pyver, abi, arch),
    ('foo', '1.0', majver, 'none', 'any'),
    ('foo', '1.1', pyver, abi, arch),
    ('foo', '2.1', majver + '0', 'none', 'any'),
    # This will not be compatible for Python x.0. Beware when we hit Python
    # 4.0, and don't test with 3.0!!!
    ('foo', '2.1', majver + '1', 'none', 'any'),
    ('foo', '2.1', pyver, 'none', 'any'),
    ('foo', '2.1', pyver, abi, arch),
)

WHEELS = [make_wheel(*args) for args in COMBINATIONS]
|
||||
|
||||
class TestRanking(unittest.TestCase):
    """WHEELS is ordered worst-to-best; ranking must respect that order."""

    def test_comparison(self):
        # range(len(WHEELS)), not range(len(WHEELS)-1): the original upper
        # bound skipped the last (best) wheel, so it was never compared
        # against any other candidate.
        for i in range(len(WHEELS)):
            for j in range(i):
                self.assertTrue(WHEELS[j] < WHEELS[i])
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/test/test_ranking.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/test/test_ranking.pyc
Normal file
Binary file not shown.
@@ -0,0 +1,47 @@
|
||||
from wheel import signatures
|
||||
from wheel.signatures import djbec, ed25519py
|
||||
from wheel.util import binary
|
||||
|
||||
def _expect_value_error(func, *args):
    """Fail loudly unless func(*args) raises ValueError."""
    try:
        func(*args)
    except ValueError:
        pass
    else:
        raise Exception("Expected ValueError")


def test_getlib():
    """The ed25519 backend must be resolvable."""
    signatures.get_ed25519ll()


def test_djbec():
    """Run the reference implementation's built-in self tests."""
    djbec.dsa_test()
    djbec.dh_test()


def test_ed25519py():
    """Sign/verify round trip plus argument-length validation."""
    kp0 = ed25519py.crypto_sign_keypair(binary(' ' * 32))
    kp = ed25519py.crypto_sign_keypair()

    signed = ed25519py.crypto_sign(binary('test'), kp.sk)

    ed25519py.crypto_sign_open(signed, kp.vk)

    # A signature must not verify under an unrelated key.
    _expect_value_error(ed25519py.crypto_sign_open, signed, kp0.vk)
    # The keypair seed must be exactly 32 bytes.
    _expect_value_error(ed25519py.crypto_sign_keypair, binary(' ' * 33))
    # Truncated secret/verify keys must be rejected.
    _expect_value_error(ed25519py.crypto_sign, binary(''), binary(' ') * 31)
    _expect_value_error(ed25519py.crypto_sign_open, binary(''), binary(' ') * 31)
|
||||
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/test/test_signatures.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/test/test_signatures.pyc
Normal file
Binary file not shown.
176
venv/lib/python2.7/site-packages/wheel/test/test_tagopt.py
Normal file
176
venv/lib/python2.7/site-packages/wheel/test/test_tagopt.py
Normal file
@@ -0,0 +1,176 @@
|
||||
"""
|
||||
Tests for the bdist_wheel tag options (--python-tag, --universal, and
|
||||
--plat-name)
|
||||
"""
|
||||
|
||||
import sys
|
||||
import shutil
|
||||
import pytest
|
||||
import py.path
|
||||
import tempfile
|
||||
import subprocess
|
||||
|
||||
# setup.py template written into each temporary test package; {ext_modules}
# is filled with EXT_MODULES (below) or with an empty string.
SETUP_PY = """\
from setuptools import setup, Extension

setup(
    name="Test",
    version="1.0",
    author_email="author@example.com",
    py_modules=["test"],
    {ext_modules}
)
"""

# Inserted into SETUP_PY for the C-extension variant of the package.
EXT_MODULES = "ext_modules=[Extension('_test', sources=['test.c'])],"
|
||||
|
||||
@pytest.fixture
def temp_pkg(request, ext=False):
    """Create a throwaway package dir holding test.py and a generated setup.py.

    With ext=True a C source file is added and setup.py declares an
    Extension for it.
    """
    tempdir = tempfile.mkdtemp()
    request.addfinalizer(lambda: shutil.rmtree(tempdir))
    root = py.path.local(tempdir)
    root.join('test.py').write('print("Hello, world")')
    if ext:
        root.join('test.c').write('#include <stdio.h>')
        modules = EXT_MODULES
    else:
        modules = ''
    root.join('setup.py').write(SETUP_PY.format(ext_modules=modules))
    return root
|
||||
|
||||
@pytest.fixture
def temp_ext_pkg(request):
    # Variant of temp_pkg whose package also contains a C extension.
    # NOTE(review): this calls the fixture function directly, which relies
    # on old pytest behaviour -- confirm before upgrading pytest.
    return temp_pkg(request, ext=True)
|
||||
|
||||
def _run_bdist_wheel(pkg, *extra_args):
    """Run 'setup.py bdist_wheel <extra_args>' inside the package dir."""
    subprocess.check_call(
        [sys.executable, 'setup.py', 'bdist_wheel'] + list(extra_args),
        cwd=str(pkg))


def _sole_wheel(pkg):
    """Assert pkg/dist exists and holds exactly one file; return it."""
    dist_dir = pkg.join('dist')
    assert dist_dir.check(dir=1)
    wheels = dist_dir.listdir()
    assert len(wheels) == 1
    return wheels[0]


def test_default_tag(temp_pkg):
    """Without options the tag is the interpreter's major version."""
    _run_bdist_wheel(temp_pkg)
    built = _sole_wheel(temp_pkg)
    assert built.basename == 'Test-1.0-py%s-none-any.whl' % (sys.version[0],)
    assert built.ext == '.whl'


def test_explicit_tag(temp_pkg):
    """--python-tag overrides the default tag."""
    _run_bdist_wheel(temp_pkg, '--python-tag=py32')
    built = _sole_wheel(temp_pkg)
    assert built.basename.startswith('Test-1.0-py32-')
    assert built.ext == '.whl'


def test_universal_tag(temp_pkg):
    """--universal produces a py2.py3 tag."""
    _run_bdist_wheel(temp_pkg, '--universal')
    built = _sole_wheel(temp_pkg)
    assert built.basename.startswith('Test-1.0-py2.py3-')
    assert built.ext == '.whl'


def test_universal_beats_explicit_tag(temp_pkg):
    """--universal wins over an explicit --python-tag."""
    _run_bdist_wheel(temp_pkg, '--universal', '--python-tag=py32')
    built = _sole_wheel(temp_pkg)
    assert built.basename.startswith('Test-1.0-py2.py3-')
    assert built.ext == '.whl'


def test_universal_in_setup_cfg(temp_pkg):
    """universal=1 in [bdist_wheel] of setup.cfg also yields py2.py3."""
    temp_pkg.join('setup.cfg').write('[bdist_wheel]\nuniversal=1')
    _run_bdist_wheel(temp_pkg)
    built = _sole_wheel(temp_pkg)
    assert built.basename.startswith('Test-1.0-py2.py3-')
    assert built.ext == '.whl'


def test_pythontag_in_setup_cfg(temp_pkg):
    """python_tag in [bdist_wheel] of setup.cfg overrides the default."""
    temp_pkg.join('setup.cfg').write('[bdist_wheel]\npython_tag=py32')
    _run_bdist_wheel(temp_pkg)
    built = _sole_wheel(temp_pkg)
    assert built.basename.startswith('Test-1.0-py32-')
    assert built.ext == '.whl'


def test_legacy_wheel_section_in_setup_cfg(temp_pkg):
    """The legacy [wheel] section is still honoured."""
    temp_pkg.join('setup.cfg').write('[wheel]\nuniversal=1')
    _run_bdist_wheel(temp_pkg)
    built = _sole_wheel(temp_pkg)
    assert built.basename.startswith('Test-1.0-py2.py3-')
    assert built.ext == '.whl'


def test_plat_name_purepy(temp_pkg):
    """--plat-name is applied (dots normalized to underscores)."""
    _run_bdist_wheel(temp_pkg, '--plat-name=testplat.pure')
    built = _sole_wheel(temp_pkg)
    assert built.basename.endswith('-testplat_pure.whl')
    assert built.ext == '.whl'


def test_plat_name_ext(temp_ext_pkg):
    """--plat-name is applied to a wheel containing a C extension."""
    try:
        _run_bdist_wheel(temp_ext_pkg, '--plat-name=testplat.arch')
    except subprocess.CalledProcessError:
        pytest.skip("Cannot compile C Extensions")
    built = _sole_wheel(temp_ext_pkg)
    assert built.basename.endswith('-testplat_arch.whl')
    assert built.ext == '.whl'


def test_plat_name_purepy_in_setupcfg(temp_pkg):
    """plat_name from setup.cfg is applied to a pure wheel."""
    temp_pkg.join('setup.cfg').write('[bdist_wheel]\nplat_name=testplat.pure')
    _run_bdist_wheel(temp_pkg)
    built = _sole_wheel(temp_pkg)
    assert built.basename.endswith('-testplat_pure.whl')
    assert built.ext == '.whl'


def test_plat_name_ext_in_setupcfg(temp_ext_pkg):
    """plat_name from setup.cfg is applied to an extension wheel."""
    temp_ext_pkg.join('setup.cfg').write('[bdist_wheel]\nplat_name=testplat.arch')
    try:
        _run_bdist_wheel(temp_ext_pkg)
    except subprocess.CalledProcessError:
        pytest.skip("Cannot compile C Extensions")
    built = _sole_wheel(temp_ext_pkg)
    assert built.basename.endswith('-testplat_arch.whl')
    assert built.ext == '.whl'
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/test/test_tagopt.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/test/test_tagopt.pyc
Normal file
Binary file not shown.
25
venv/lib/python2.7/site-packages/wheel/test/test_tool.py
Normal file
25
venv/lib/python2.7/site-packages/wheel/test/test_tool.py
Normal file
@@ -0,0 +1,25 @@
|
||||
from .. import tool
|
||||
|
||||
def test_keygen():
    """Run tool.keygen against an in-memory keyring and a no-op key store."""
    def get_keyring():
        # Wrap the real WheelKeys/keyring pair so keygen touches neither
        # the filesystem nor the user's actual keyring.
        WheelKeys, keyring = tool.get_keyring()

        class WheelKeysTest(WheelKeys):
            def save(self):
                # Skip writing the trust database to disk.
                pass

        class keyringTest:
            @classmethod
            def get_keyring(cls):
                class keyringTest2:
                    # Single in-memory password slot shared by set/get.
                    pw = None
                    def set_password(self, a, b, c):
                        self.pw = c
                    def get_password(self, a, b):
                        return self.pw

                return keyringTest2()

        return WheelKeysTest, keyringTest

    tool.keygen(get_keyring=get_keyring)
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/test/test_tool.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/test/test_tool.pyc
Normal file
Binary file not shown.
142
venv/lib/python2.7/site-packages/wheel/test/test_wheelfile.py
Normal file
142
venv/lib/python2.7/site-packages/wheel/test/test_wheelfile.py
Normal file
@@ -0,0 +1,142 @@
|
||||
import os
|
||||
import wheel.install
|
||||
import wheel.archive
|
||||
import hashlib
|
||||
try:
|
||||
from StringIO import StringIO
|
||||
except ImportError:
|
||||
from io import BytesIO as StringIO
|
||||
import codecs
|
||||
import zipfile
|
||||
import pytest
|
||||
import shutil
|
||||
import tempfile
|
||||
from contextlib import contextmanager
|
||||
|
||||
@contextmanager
def environ(key, value):
    """Temporarily set os.environ[key] to value, restoring (or removing)
    the variable on exit."""
    saved = os.environ.get(key)
    try:
        os.environ[key] = value
        yield
    finally:
        if saved is not None:
            os.environ[key] = saved
        else:
            del os.environ[key]
|
||||
|
||||
@contextmanager
def temporary_directory():
    """Yield a fresh directory and remove it on exit.

    (tempfile.TemporaryDirectory does not exist on Python 2.)
    """
    path = tempfile.mkdtemp()
    try:
        yield path
    finally:
        shutil.rmtree(path)
|
||||
|
||||
@contextmanager
def readable_zipfile(path):
    """Yield a read-only ZipFile and close it on exit.

    (zipfile.ZipFile is not a context manager on Python 2.)
    """
    archive = zipfile.ZipFile(path, 'r')
    try:
        yield archive
    finally:
        archive.close()
|
||||
|
||||
|
||||
def test_verifying_zipfile():
    """VerifyingZipFile must reject reads whose content hash mismatches."""
    if not hasattr(zipfile.ZipExtFile, '_update_crc'):
        pytest.skip('No ZIP verification. Missing ZipExtFile._update_crc.')

    # Build an in-memory three-member archive to read back.
    sio = StringIO()
    zf = zipfile.ZipFile(sio, 'w')
    zf.writestr("one", b"first file")
    zf.writestr("two", b"second file")
    zf.writestr("three", b"third file")
    zf.close()

    # In default mode, VerifyingZipFile checks the hash of any read file
    # mentioned with set_expected_hash().  Files not mentioned with
    # set_expected_hash() are not checked.
    vzf = wheel.install.VerifyingZipFile(sio, 'r')
    vzf.set_expected_hash("one", hashlib.sha256(b"first file").digest())
    vzf.set_expected_hash("three", "blurble")  # deliberately wrong hash
    vzf.open("one").read()   # matching hash -> succeeds
    vzf.open("two").read()   # unregistered -> unchecked
    try:
        vzf.open("three").read()  # wrong hash -> must raise
    except wheel.install.BadWheelFile:
        pass
    else:
        raise Exception("expected exception 'BadWheelFile()'")

    # In strict mode, VerifyingZipFile requires every read file to be
    # mentioned with set_expected_hash().
    vzf.strict = True
    try:
        vzf.open("two").read()
    except wheel.install.BadWheelFile:
        pass
    else:
        raise Exception("expected exception 'BadWheelFile()'")

    # Registering an explicit None re-admits the member in strict mode.
    vzf.set_expected_hash("two", None)
    vzf.open("two").read()
|
||||
|
||||
def test_pop_zipfile():
    """pop() removes the last member; on a closed archive it must raise."""
    buf = StringIO()

    archive = wheel.install.VerifyingZipFile(buf, 'w')
    archive.writestr("one", b"first file")
    archive.writestr("two", b"second file")
    archive.close()

    try:
        archive.pop()
    except RuntimeError:
        pass  # already closed
    else:
        raise Exception("expected RuntimeError")

    archive = wheel.install.VerifyingZipFile(buf, 'a')
    archive.pop()
    archive.close()

    archive = wheel.install.VerifyingZipFile(buf, 'r')
    assert len(archive.infolist()) == 1
|
||||
|
||||
def test_zipfile_timestamp():
    """SOURCE_DATE_EPOCH must clamp the archive member timestamps (#143)."""
    with temporary_directory() as tempdir:
        for name in ('one', 'two', 'three'):
            member_path = os.path.join(tempdir, name)
            with codecs.open(member_path, 'w', encoding='utf-8') as fp:
                fp.write(name + '\n')
        zip_base_name = os.path.join(tempdir, 'dummy')
        # 315576060 is 1980-01-01, the earliest date a zip entry can hold.
        with environ('SOURCE_DATE_EPOCH', '315576060'):
            built = wheel.archive.make_wheelfile_inner(
                zip_base_name, tempdir)
        with readable_zipfile(built) as zf:
            for info in zf.infolist():
                assert info.date_time[:3] == (1980, 1, 1)
|
||||
|
||||
def test_zipfile_attributes():
    """writestr() members must carry the on-disk mode and use deflate."""
    with temporary_directory() as tempdir:
        members = (('foo', 0o644), ('bar', 0o755))
        for name, mode in members:
            target = os.path.join(tempdir, name)
            with codecs.open(target, 'w', encoding='utf-8') as fp:
                fp.write(name + '\n')
            os.chmod(target, mode)
        built = wheel.archive.make_wheelfile_inner(
            os.path.join(tempdir, 'dummy'), tempdir)
        with readable_zipfile(built) as zf:
            for name, mode in members:
                info = zf.getinfo(os.path.join(tempdir, name))
                # Regular-file bit plus the chmod mode, shifted into the
                # external_attr high word.
                assert info.external_attr == (mode | 0o100000) << 16
                assert info.compress_type == zipfile.ZIP_DEFLATED
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/test/test_wheelfile.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/test/test_wheelfile.pyc
Normal file
Binary file not shown.
360
venv/lib/python2.7/site-packages/wheel/tool/__init__.py
Normal file
360
venv/lib/python2.7/site-packages/wheel/tool/__init__.py
Normal file
@@ -0,0 +1,360 @@
|
||||
"""
|
||||
Wheel command-line utility.
|
||||
"""
|
||||
|
||||
import os
|
||||
import hashlib
|
||||
import sys
|
||||
import json
|
||||
import wheel.paths
|
||||
|
||||
from glob import iglob
|
||||
from .. import signatures
|
||||
from ..util import (urlsafe_b64decode, urlsafe_b64encode, native, binary,
|
||||
matches_requirement)
|
||||
from ..install import WheelFile
|
||||
|
||||
def require_pkgresources(name):
    """Raise RuntimeError naming *name* if pkg_resources is unavailable."""
    try:
        import pkg_resources  # noqa: F401 -- availability check only
    except ImportError:
        raise RuntimeError("'{0}' needs pkg_resources (part of setuptools).".format(name))
|
||||
|
||||
import argparse
|
||||
|
||||
# Base exception for command-line tool failures in this module.
class WheelError(Exception): pass
|
||||
|
||||
# For testability
|
||||
def get_keyring():
|
||||
try:
|
||||
from ..signatures import keys
|
||||
import keyring
|
||||
assert keyring.get_keyring().priority
|
||||
except (ImportError, AssertionError):
|
||||
raise WheelError("Install wheel[signatures] (requires keyring, keyrings.alt, pyxdg) for signatures.")
|
||||
return keys.WheelKeys, keyring
|
||||
|
||||
def keygen(get_keyring=get_keyring):
    """Generate a public/private key pair.

    The verifying key is trusted for every scope ('+') in the WheelKeys
    config; the signing key is stored in the system keyring.  Raises
    WheelError if the keyring cannot return the key it just stored.
    """
    WheelKeys, keyring = get_keyring()

    ed25519ll = signatures.get_ed25519ll()

    wk = WheelKeys().load()

    keypair = ed25519ll.crypto_sign_keypair()
    # Keys are handled as urlsafe-base64 text from here on.
    vk = native(urlsafe_b64encode(keypair.vk))
    sk = native(urlsafe_b64encode(keypair.sk))
    kr = keyring.get_keyring()
    kr.set_password("wheel", vk, sk)
    sys.stdout.write("Created Ed25519 keypair with vk={0}\n".format(vk))
    sys.stdout.write("in {0!r}\n".format(kr))

    # Read the secret back to verify the keyring round-trips.
    sk2 = kr.get_password('wheel', vk)
    if sk2 != sk:
        raise WheelError("Keyring is broken. Could not retrieve secret key.")

    sys.stdout.write("Trusting {0} to sign and verify all packages.\n".format(vk))
    wk.add_signer('+', vk)
    wk.trust('+', vk)
    wk.save()
||||
|
||||
def sign(wheelfile, replace=False, get_keyring=get_keyring):
    """Sign a wheel by appending a RECORD.jws signature to its archive.

    :param wheelfile: Path to the wheel to sign.
    :param replace: NOTE(review): accepted but never used in this body.
    :param get_keyring: Factory returning (WheelKeys, keyring); injectable
        for tests.
    :raises WheelError: If the wheel already contains a signature.
    """
    WheelKeys, keyring = get_keyring()

    ed25519ll = signatures.get_ed25519ll()

    # Open in append mode so the signature entry can be added in place.
    wf = WheelFile(wheelfile, append=True)
    wk = WheelKeys().load()

    name = wf.parsed_filename.group('name')
    # Use the first signer configured for this package name.
    sign_with = wk.signers(name)[0]
    sys.stdout.write("Signing {0} with {1}\n".format(name, sign_with[1]))

    # sign_with is a (scope, verifying-key) pair; fetch the matching secret
    # key from the system keyring.
    vk = sign_with[1]
    kr = keyring.get_keyring()
    sk = kr.get_password('wheel', vk)
    keypair = ed25519ll.Keypair(urlsafe_b64decode(binary(vk)),
                                urlsafe_b64decode(binary(sk)))

    record_name = wf.distinfo_name + '/RECORD'
    sig_name = wf.distinfo_name + '/RECORD.jws'
    if sig_name in wf.zipfile.namelist():
        raise WheelError("Wheel is already signed.")
    record_data = wf.zipfile.read(record_name)
    # The JWS payload carries only a hash of RECORD, not RECORD itself.
    payload = {"hash":"sha256=" + native(urlsafe_b64encode(hashlib.sha256(record_data).digest()))}
    sig = signatures.sign(payload, keypair)
    wf.zipfile.writestr(sig_name, json.dumps(sig, sort_keys=True))
    wf.zipfile.close()
|
||||
|
||||
def unsign(wheelfile):
    """
    Remove RECORD.jws from a wheel by truncating the zip file.

    RECORD.jws must be at the end of the archive. The zip file must be an
    ordinary archive, with the compressed files and the directory in the same
    order, and without any non-zip content after the truncation point.

    :raises WheelError: If the last archive member is not RECORD.jws.
    """
    # Local import avoids a circular dependency at module import time.
    import wheel.install
    # "a" (append) mode lets pop() truncate and rewrite the archive in place.
    vzf = wheel.install.VerifyingZipFile(wheelfile, "a")
    info = vzf.infolist()
    if not (len(info) and info[-1].filename.endswith('/RECORD.jws')):
        raise WheelError("RECORD.jws not found at end of archive.")
    vzf.pop()
    vzf.close()
|
||||
|
||||
def verify(wheelfile):
    """Verify a wheel.

    Only the signature's internal consistency is checked and the verified
    payload printed; wheel's own unpack/install commands are what check the
    manifest against the actual file contents.

    :param wheelfile: Path to the wheel to verify.
    """
    wf = WheelFile(wheelfile)
    jws_name = wf.distinfo_name + '/RECORD.jws'
    raw_sig = wf.zipfile.open(jws_name).read()
    outcome = signatures.verify(json.loads(native(raw_sig)))
    # Status goes to stderr so stdout carries only the JSON result.
    sys.stderr.write("Signatures are internally consistent.\n")
    sys.stdout.write(json.dumps(outcome, indent=2))
    sys.stdout.write('\n')
|
||||
|
||||
def unpack(wheelfile, dest='.'):
    """Extract a wheel's contents to a directory.

    The archive is extracted into ``{dest}/{name}-{ver}``, derived from the
    wheel's own filename.

    :param wheelfile: The path to the wheel.
    :param dest: Destination directory (default to current directory).
    """
    wf = WheelFile(wheelfile)
    target = os.path.join(dest, wf.parsed_filename.group('namever'))
    sys.stderr.write("Unpacking to: %s\n" % (target))
    wf.zipfile.extractall(target)
    wf.zipfile.close()
|
||||
|
||||
def install(requirements, requirements_file=None,
            wheel_dirs=None, force=False, list_files=False,
            dry_run=False):
    """Install wheels.

    :param requirements: A list of requirements or wheel files to install.
    :param requirements_file: A file containing requirements to install.
    :param wheel_dirs: A list of directories to search for wheels.
    :param force: Install a wheel file even if it is not compatible.
    :param list_files: Only list the files to install, don't install them.
    :param dry_run: Do everything but the actual install.
    :raises WheelError: For a missing/incompatible wheel file or an
        unsatisfiable requirement.
    """

    # If no wheel directories specified, use the WHEELPATH environment
    # variable, or the current directory if that is not set.
    if not wheel_dirs:
        wheelpath = os.getenv("WHEELPATH")
        if wheelpath:
            wheel_dirs = wheelpath.split(os.pathsep)
        else:
            wheel_dirs = [ os.path.curdir ]

    # Get a list of all valid wheels in wheel_dirs
    all_wheels = []
    for d in wheel_dirs:
        for w in os.listdir(d):
            if w.endswith('.whl'):
                wf = WheelFile(os.path.join(d, w))
                if wf.compatible:
                    all_wheels.append(wf)

    # If there is a requirements file, add it to the list of requirements
    if requirements_file:
        # If the file doesn't exist, search for it in wheel_dirs
        # This allows standard requirements files to be stored with the
        # wheels.
        if not os.path.exists(requirements_file):
            for d in wheel_dirs:
                name = os.path.join(d, requirements_file)
                if os.path.exists(name):
                    requirements_file = name
                    break

        # NOTE(review): lines are taken verbatim (trailing newline included);
        # no filtering of comments or blank lines happens here.
        with open(requirements_file) as fd:
            requirements.extend(fd)

    to_install = []
    for req in requirements:
        if req.endswith('.whl'):
            # Explicitly specified wheel filename
            if os.path.exists(req):
                wf = WheelFile(req)
                if wf.compatible or force:
                    to_install.append(wf)
                else:
                    msg = ("{0} is not compatible with this Python. "
                           "--force to install anyway.".format(req))
                    raise WheelError(msg)
            else:
                # We could search on wheel_dirs, but it's probably OK to
                # assume the user has made an error.
                raise WheelError("No such wheel file: {}".format(req))
            continue

        # We have a requirement spec
        # If we don't have pkg_resources, this will raise an exception
        matches = matches_requirement(req, all_wheels)
        if not matches:
            raise WheelError("No match for requirement {}".format(req))
        # max() picks the highest-sorting (newest) matching wheel.
        to_install.append(max(matches))

    # We now have a list of wheels to install
    if list_files:
        sys.stdout.write("Installing:\n")

    if dry_run:
        return

    for wf in to_install:
        if list_files:
            sys.stdout.write("    {0}\n".format(wf.filename))
            continue
        wf.install(force=force)
        wf.zipfile.close()
|
||||
|
||||
def install_scripts(distributions):
    """
    Regenerate the entry_points console_scripts for the named distribution.

    :param distributions: Iterable of installed distribution names.
    :raises RuntimeError: If setuptools is not available.
    """
    try:
        from setuptools.command import easy_install
        import pkg_resources
    except ImportError:
        raise RuntimeError("'wheel install_scripts' needs setuptools.")

    for dist in distributions:
        pkg_resources_dist = pkg_resources.get_distribution(dist)
        install = wheel.paths.get_install_command(dist)
        # Drive setuptools' script writer through a dummy easy_install command.
        command = easy_install.easy_install(install.distribution)
        command.args = ['wheel']  # dummy argument
        command.finalize_options()
        command.install_egg_scripts(pkg_resources_dist)
|
||||
|
||||
def convert(installers, dest_dir, verbose):
    """Convert egg or wininst installers matching the given globs to wheels.

    :param installers: Glob patterns naming ``.egg`` or wininst ``.exe`` files.
    :param dest_dir: Directory that receives the generated wheels.
    :param verbose: When true, report per-installer progress on stdout.
    """
    require_pkgresources('wheel convert')

    # Only support wheel convert if pkg_resources is present (checked above).
    from ..wininst2wheel import bdist_wininst2wheel
    from ..egg2wheel import egg2wheel

    for pattern in installers:
        for installer in iglob(pattern):
            is_egg = os.path.splitext(installer)[1] == '.egg'
            converter = egg2wheel if is_egg else bdist_wininst2wheel
            if verbose:
                sys.stdout.write("{0}... ".format(installer))
                sys.stdout.flush()
            converter(installer, dest_dir)
            if verbose:
                sys.stdout.write("OK\n")
|
||||
|
||||
def parser():
    """Build the argparse command-line parser for the wheel tool.

    Each subcommand stores its handler callable in the ``func`` default,
    which main() later invokes with the parsed namespace.
    """
    p = argparse.ArgumentParser()
    s = p.add_subparsers(help="commands")

    def keygen_f(args):
        keygen()
    keygen_parser = s.add_parser('keygen', help='Generate signing key')
    keygen_parser.set_defaults(func=keygen_f)

    def sign_f(args):
        sign(args.wheelfile)
    sign_parser = s.add_parser('sign', help='Sign wheel')
    sign_parser.add_argument('wheelfile', help='Wheel file')
    sign_parser.set_defaults(func=sign_f)

    def unsign_f(args):
        unsign(args.wheelfile)
    unsign_parser = s.add_parser('unsign', help=unsign.__doc__)
    unsign_parser.add_argument('wheelfile', help='Wheel file')
    unsign_parser.set_defaults(func=unsign_f)

    def verify_f(args):
        verify(args.wheelfile)
    verify_parser = s.add_parser('verify', help=verify.__doc__)
    verify_parser.add_argument('wheelfile', help='Wheel file')
    verify_parser.set_defaults(func=verify_f)

    def unpack_f(args):
        unpack(args.wheelfile, args.dest)
    unpack_parser = s.add_parser('unpack', help='Unpack wheel')
    unpack_parser.add_argument('--dest', '-d', help='Destination directory',
                               default='.')
    unpack_parser.add_argument('wheelfile', help='Wheel file')
    unpack_parser.set_defaults(func=unpack_f)

    def install_f(args):
        # NOTE(review): install()'s dry_run parameter is not exposed on the
        # CLI; its default (False) is always used.
        install(args.requirements, args.requirements_file,
                args.wheel_dirs, args.force, args.list_files)
    install_parser = s.add_parser('install', help='Install wheels')
    install_parser.add_argument('requirements', nargs='*',
                                help='Requirements to install.')
    install_parser.add_argument('--force', default=False,
                                action='store_true',
                                help='Install incompatible wheel files.')
    install_parser.add_argument('--wheel-dir', '-d', action='append',
                                dest='wheel_dirs',
                                help='Directories containing wheels.')
    install_parser.add_argument('--requirements-file', '-r',
                                help="A file containing requirements to "
                                "install.")
    install_parser.add_argument('--list', '-l', default=False,
                                dest='list_files',
                                action='store_true',
                                help="List wheels which would be installed, "
                                "but don't actually install anything.")
    install_parser.set_defaults(func=install_f)

    def install_scripts_f(args):
        install_scripts(args.distributions)
    install_scripts_parser = s.add_parser('install-scripts', help='Install console_scripts')
    install_scripts_parser.add_argument('distributions', nargs='*',
                                        help='Regenerate console_scripts for these distributions')
    install_scripts_parser.set_defaults(func=install_scripts_f)

    def convert_f(args):
        convert(args.installers, args.dest_dir, args.verbose)
    convert_parser = s.add_parser('convert', help='Convert egg or wininst to wheel')
    convert_parser.add_argument('installers', nargs='*', help='Installers to convert')
    convert_parser.add_argument('--dest-dir', '-d', default=os.path.curdir,
                                help="Directory to store wheels (default %(default)s)")
    convert_parser.add_argument('--verbose', '-v', action='store_true')
    convert_parser.set_defaults(func=convert_f)

    def version_f(args):
        from .. import __version__
        sys.stdout.write("wheel %s\n" % __version__)
    version_parser = s.add_parser('version', help='Print version and exit')
    version_parser.set_defaults(func=version_f)

    def help_f(args):
        p.print_help()
    help_parser = s.add_parser('help', help='Show this help')
    help_parser.set_defaults(func=help_f)

    return p
|
||||
|
||||
def main():
    """Command-line entry point.

    :returns: Process exit status (0 on success, 1 on a WheelError),
        or None when no subcommand was given and help was printed.
    """
    p = parser()
    args = p.parse_args()
    if not hasattr(args, 'func'):
        # On Python >= 3.3 subparsers are optional, so parse_args can
        # succeed with no subcommand; show help instead of crashing.
        p.print_help()
    else:
        try:
            args.func(args)
            return 0
        except WheelError as e:
            # Fix: BaseException.message was removed in Python 3, so the
            # old "e.message" crashed there; str(e) is equivalent on both
            # Python 2 (single-arg exceptions) and Python 3.
            sys.stderr.write(str(e) + "\n")
            return 1
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/tool/__init__.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/tool/__init__.pyc
Normal file
Binary file not shown.
167
venv/lib/python2.7/site-packages/wheel/util.py
Normal file
167
venv/lib/python2.7/site-packages/wheel/util.py
Normal file
@@ -0,0 +1,167 @@
|
||||
"""Utility functions."""
|
||||
|
||||
import sys
|
||||
import os
|
||||
import base64
|
||||
import json
|
||||
import hashlib
|
||||
try:
|
||||
from collections import OrderedDict
|
||||
except ImportError:
|
||||
OrderedDict = dict
|
||||
|
||||
__all__ = ['urlsafe_b64encode', 'urlsafe_b64decode', 'utf8',
|
||||
'to_json', 'from_json', 'matches_requirement']
|
||||
|
||||
def urlsafe_b64encode(data):
    """URL-safe base64 encoding of *data*, with trailing '=' padding removed."""
    return base64.urlsafe_b64encode(data).rstrip(b'=')
|
||||
|
||||
|
||||
def urlsafe_b64decode(data):
    """Decode URL-safe base64 *data* that may lack its '=' padding."""
    padding = b'=' * (4 - (len(data) & 3))
    return base64.urlsafe_b64decode(data + padding)
|
||||
|
||||
|
||||
def to_json(o):
    """Serialize *o* as JSON text with deterministically sorted keys."""
    return json.dumps(o, sort_keys=True)
|
||||
|
||||
|
||||
def from_json(j):
    """Parse the JSON text *j* back into Python objects."""
    return json.loads(j)
|
||||
|
||||
def open_for_csv(name, mode):
    """Open *name* suitably for the csv module on both Python 2 and 3.

    Python 2's csv wants binary mode; Python 3's wants text mode with
    newline translation disabled.
    """
    if sys.version_info[0] < 3:
        return open(name, mode + 'b')
    return open(name, mode, newline='')
|
||||
|
||||
try:
    # Python 2: unicode exists; only unicode objects need encoding.
    unicode

    def utf8(data):
        '''Utf-8 encode data.'''
        return data.encode('utf-8') if isinstance(data, unicode) else data
except NameError:
    # Python 3: str is the text type.
    def utf8(data):
        '''Utf-8 encode data.'''
        return data.encode('utf-8') if isinstance(data, str) else data
|
||||
|
||||
|
||||
try:
    # For encoding ascii back and forth between bytestrings, as is repeatedly
    # necessary in JSON-based crypto under Python 3
    unicode

    def native(s):
        """Return *s* unchanged (Python 2: str is already a bytestring)."""
        return s

    def binary(s):
        """Return *s* as a bytestring, ASCII-encoding unicode input."""
        if isinstance(s, unicode):
            return s.encode('ascii')
        return s
except NameError:
    def native(s):
        """Return *s* as a native str, ASCII-decoding bytes input."""
        if isinstance(s, bytes):
            return s.decode('ascii')
        return s

    def binary(s):
        """Return *s* as bytes, ASCII-encoding str input.

        Fix: the original fell off the end and returned None when given
        bytes; bytes now pass through unchanged, matching the Python 2
        branch's behavior.
        """
        if isinstance(s, str):
            return s.encode('ascii')
        return s
|
||||
|
||||
class HashingFile(object):
    """Write-through file wrapper that hashes and counts everything written.

    Each write() updates a running hash (default sha256) and a byte count
    before forwarding the data to the wrapped file object.
    """

    def __init__(self, fd, hashtype='sha256'):
        self.fd = fd
        self.hashtype = hashtype
        self.hash = hashlib.new(hashtype)
        self.length = 0

    def write(self, data):
        self.hash.update(data)
        self.length += len(data)
        self.fd.write(data)

    def close(self):
        self.fd.close()

    def digest(self):
        # Historical RECORD format: bare hex for md5, otherwise
        # "<hashtype>=<urlsafe-b64 digest>".
        if self.hashtype == 'md5':
            return self.hash.hexdigest()
        return self.hashtype + '=' + native(urlsafe_b64encode(self.hash.digest()))
|
||||
|
||||
class OrderedDefaultDict(OrderedDict):
    """OrderedDict with defaultdict-style auto-creation of missing values.

    The optional first positional argument is the default factory; any
    remaining arguments are forwarded to OrderedDict.
    """

    def __init__(self, *args, **kwargs):
        if args:
            factory = args[0]
            if not (factory is None or callable(factory)):
                raise TypeError('first argument must be callable or None')
            args = args[1:]
        else:
            factory = None
        self.default_factory = factory
        super(OrderedDefaultDict, self).__init__(*args, **kwargs)

    def __missing__(self, key):
        # Mirror collections.defaultdict: create, store, and return.
        if self.default_factory is None:
            raise KeyError(key)
        value = self[key] = self.default_factory()
        return value
|
||||
|
||||
if sys.platform == 'win32':
    import ctypes.wintypes
    # CSIDL_APPDATA for reference - not used here for compatibility with
    # dirspec, which uses LOCAL_APPDATA and COMMON_APPDATA in that order
    csidl = dict(CSIDL_APPDATA=26, CSIDL_LOCAL_APPDATA=28,
                 CSIDL_COMMON_APPDATA=35)

    def get_path(name):
        """Resolve a CSIDL constant name to its current folder path."""
        SHGFP_TYPE_CURRENT = 0
        buf = ctypes.create_unicode_buffer(ctypes.wintypes.MAX_PATH)
        ctypes.windll.shell32.SHGetFolderPathW(0, csidl[name], 0, SHGFP_TYPE_CURRENT, buf)
        return buf.value

    def save_config_path(*resource):
        """Windows analogue of xdg's save_config_path: ensure and return
        the per-user config directory for *resource*."""
        appdata = get_path("CSIDL_LOCAL_APPDATA")
        path = os.path.join(appdata, *resource)
        if not os.path.isdir(path):
            os.makedirs(path)
        return path

    def load_config_paths(*resource):
        """Yield existing config dirs for *resource*, per-user first."""
        ids = ["CSIDL_LOCAL_APPDATA", "CSIDL_COMMON_APPDATA"]
        for id in ids:
            base = get_path(id)
            path = os.path.join(base, *resource)
            if os.path.exists(path):
                yield path
else:
    # Non-Windows: delegate to pyxdg. Imported lazily inside each function
    # so merely importing this module does not require xdg to be installed.
    def save_config_path(*resource):
        import xdg.BaseDirectory
        return xdg.BaseDirectory.save_config_path(*resource)

    def load_config_paths(*resource):
        import xdg.BaseDirectory
        return xdg.BaseDirectory.load_config_paths(*resource)
|
||||
|
||||
def matches_requirement(req, wheels):
    """List of wheels matching a requirement.

    :param req: The requirement to satisfy
    :param wheels: List of wheels to search.
    :raises RuntimeError: If pkg_resources is not importable.
    """
    try:
        from pkg_resources import Distribution, Requirement
    except ImportError:
        raise RuntimeError("Cannot use requirements without pkg_resources")

    requirement = Requirement.parse(req)

    # A wheel matches when a Distribution built from its parsed filename
    # satisfies the requirement's name and version specifiers.
    return [wf for wf in wheels
            if Distribution(project_name=wf.parsed_filename.group("name"),
                            version=wf.parsed_filename.group("ver"))
            in requirement]
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/util.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/util.pyc
Normal file
Binary file not shown.
187
venv/lib/python2.7/site-packages/wheel/wininst2wheel.py
Normal file
187
venv/lib/python2.7/site-packages/wheel/wininst2wheel.py
Normal file
@@ -0,0 +1,187 @@
|
||||
#!/usr/bin/env python
|
||||
import os.path
|
||||
import re
|
||||
import sys
|
||||
import tempfile
|
||||
import zipfile
|
||||
import wheel.bdist_wheel
|
||||
import distutils.dist
|
||||
from distutils.archive_util import make_archive
|
||||
from shutil import rmtree
|
||||
from wheel.archive import archive_wheelfile
|
||||
from argparse import ArgumentParser
|
||||
from glob import iglob
|
||||
|
||||
# Matches "name-ver(-pyver)(-arch).egg-info" path components.
egg_info_re = re.compile(r'''(^|/)(?P<name>[^/]+?)-(?P<ver>.+?)
    (-(?P<pyver>.+?))?(-(?P<arch>.+?))?.egg-info(/|$)''', re.VERBOSE)


def parse_info(wininfo_name, egginfo_name):
    """Extract metadata from filenames.

    Derives the four items needed (name, version, pyversion, arch) from the
    wininst installer filename plus the name of any embedded egg-info entry.

    The egg-info name looks like ``name-ver(-pyver)(-arch).egg-info``; the
    installer name looks like ``name-ver.arch(-pyver).exe``.

    Notes: the installer filename is not definitive (installers can be
    renamed), so the egg-info data is preferred for name and version; the
    egg-info pyver reflects only the build interpreter and is ignored in
    favor of the installer filename's tag; the architecture comes only from
    the installer filename, and architecture-neutral content is detected
    elsewhere (wininst installers always carry an arch because the format
    itself is executable).

    :returns: dict with keys ``name``, ``ver``, ``arch``, ``pyver``.
    :raises ValueError: On a malformed installer or egg-info filename.
    """
    egginfo = None
    if egginfo_name:
        egginfo = egg_info_re.search(egginfo_name)
        if not egginfo:
            raise ValueError("Egg info filename %s is not valid" %
                             (egginfo_name,))

    # 1. Distribution name: everything before the first '-'.
    w_name, sep, rest = wininfo_name.partition('-')
    if not sep:
        raise ValueError("Installer filename %s is not valid" %
                         (wininfo_name,))

    # Drop the trailing '.exe'.
    rest = rest[:-4]

    # 2. Python version tag, if present, trails after the last '-' and
    # starts with 'py'; dots are stripped ('py2.6' -> 'py26').
    remainder, sep, w_pyver = rest.rpartition('-')
    if sep and w_pyver.startswith('py'):
        rest = remainder
        w_pyver = w_pyver.replace('.', '')
    else:
        # Not version specific - use py2.py3. The wininst format cannot
        # say whether pure-Python content supports both majors, so assume
        # the best; the user can rename the wheel to narrow it.
        w_pyver = 'py2.py3'

    # 3. Version and architecture split on the last '.'.
    w_ver, sep, w_arch = rest.rpartition('.')
    if not sep:
        raise ValueError("Installer filename %s is not valid" %
                         (wininfo_name,))

    # Prefer the authoritative distutils metadata for name and version.
    if egginfo:
        w_name = egginfo.group('name')
        w_ver = egginfo.group('ver')

    return dict(name=w_name, ver=w_ver, arch=w_arch, pyver=w_pyver)
|
||||
|
||||
def bdist_wininst2wheel(path, dest_dir=os.path.curdir):
    """Convert a bdist_wininst .exe installer into a wheel in *dest_dir*.

    The wininst .exe is readable as a zip archive whose top-level folders
    (PURELIB/PLATLIB/SCRIPTS/...) are remapped onto wheel layout before
    extraction, then repacked via the bdist_wheel machinery.

    :param path: Path to the wininst installer.
    :param dest_dir: Directory that receives the generated wheel.
    """
    bdw = zipfile.ZipFile(path)

    # Search for egg-info in the archive
    egginfo_name = None
    for filename in bdw.namelist():
        if '.egg-info' in filename:
            egginfo_name = filename
            break

    info = parse_info(os.path.basename(path), egginfo_name)

    # Pure unless any content lives under PLATLIB.
    root_is_purelib = True
    for zipinfo in bdw.infolist():
        if zipinfo.filename.startswith('PLATLIB'):
            root_is_purelib = False
            break
    if root_is_purelib:
        paths = {'purelib': ''}
    else:
        paths = {'platlib': ''}

    dist_info = "%(name)s-%(ver)s" % info
    datadir = "%s.data/" % dist_info

    # rewrite paths to trick ZipFile into extracting an egg
    # XXX grab wininst .ini - between .exe, padding, and first zip file.
    members = []
    egginfo_name = ''
    for zipinfo in bdw.infolist():
        # Top-level key (purelib/platlib/scripts/...) selects the target root;
        # anything unrecognized goes under the wheel's .data/ directory.
        key, basename = zipinfo.filename.split('/', 1)
        key = key.lower()
        basepath = paths.get(key, None)
        if basepath is None:
            basepath = datadir + key.lower() + '/'
        oldname = zipinfo.filename
        newname = basepath + basename
        zipinfo.filename = newname
        # Patch the ZipFile's name index in place so extractall sees the
        # remapped names.
        del bdw.NameToInfo[oldname]
        bdw.NameToInfo[newname] = zipinfo
        # Collect member names, but omit '' (from an entry like "PLATLIB/")
        if newname:
            members.append(newname)
        # Remember egg-info name for the egg2dist call below
        if not egginfo_name:
            if newname.endswith('.egg-info'):
                egginfo_name = newname
            elif '.egg-info/' in newname:
                egginfo_name, sep, _ = newname.rpartition('/')
    dir = tempfile.mkdtemp(suffix="_b2w")
    bdw.extractall(dir, members)

    # egg2wheel
    abi = 'none'
    pyver = info['pyver']
    arch = (info['arch'] or 'any').replace('.', '_').replace('-', '_')
    # Wininst installers always have arch even if they are not
    # architecture-specific (because the format itself is).
    # So, assume the content is architecture-neutral if root is purelib.
    if root_is_purelib:
        arch = 'any'
    # If the installer is architecture-specific, it's almost certainly also
    # CPython-specific.
    if arch != 'any':
        pyver = pyver.replace('py', 'cp')
    wheel_name = '-'.join((
        dist_info,
        pyver,
        abi,
        arch
    ))
    bw = wheel.bdist_wheel.bdist_wheel(distutils.dist.Distribution())
    bw.root_is_purelib = root_is_purelib
    dist_info_dir = os.path.join(dir, '%s.dist-info' % dist_info)
    bw.egg2dist(os.path.join(dir, egginfo_name), dist_info_dir)
    bw.write_wheelfile(dist_info_dir, generator='wininst2wheel')
    bw.write_record(dir, dist_info_dir)

    archive_wheelfile(os.path.join(dest_dir, wheel_name), dir)
    rmtree(dir)
|
||||
|
||||
def main():
    """Command-line entry point: convert wininst installers to wheels.

    Accepts glob patterns naming installers, an optional destination
    directory, and a verbosity flag.
    """
    parser = ArgumentParser()
    parser.add_argument('installers', nargs='*', help="Installers to convert")
    parser.add_argument('--dest-dir', '-d', default=os.path.curdir,
                        help="Directory to store wheels (default %(default)s)")
    parser.add_argument('--verbose', '-v', action='store_true')
    args = parser.parse_args()
    for pattern in args.installers:
        for installer in iglob(pattern):
            if args.verbose:
                sys.stdout.write("{0}... ".format(installer))
            bdist_wininst2wheel(installer, args.dest_dir)
            if args.verbose:
                sys.stdout.write("OK\n")

if __name__ == "__main__":
    main()
|
||||
BIN
venv/lib/python2.7/site-packages/wheel/wininst2wheel.pyc
Normal file
BIN
venv/lib/python2.7/site-packages/wheel/wininst2wheel.pyc
Normal file
Binary file not shown.
Reference in New Issue
Block a user