Mercurial > repos > shellac > guppy_basecaller
comparison env/lib/python3.7/site-packages/distlib/wheel.py @ 0:26e78fe6e8c4 draft
"planemo upload commit c699937486c35866861690329de38ec1a5d9f783"
| author | shellac |
|---|---|
| date | Sat, 02 May 2020 07:14:21 -0400 |
| parents | |
| children |
comparison
equal
deleted
inserted
replaced
| -1:000000000000 | 0:26e78fe6e8c4 |
|---|---|
| 1 # -*- coding: utf-8 -*- | |
| 2 # | |
| 3 # Copyright (C) 2013-2017 Vinay Sajip. | |
| 4 # Licensed to the Python Software Foundation under a contributor agreement. | |
| 5 # See LICENSE.txt and CONTRIBUTORS.txt. | |
| 6 # | |
| 7 from __future__ import unicode_literals | |
| 8 | |
| 9 import base64 | |
| 10 import codecs | |
| 11 import datetime | |
| 12 import distutils.util | |
| 13 from email import message_from_file | |
| 14 import hashlib | |
| 15 import imp | |
| 16 import json | |
| 17 import logging | |
| 18 import os | |
| 19 import posixpath | |
| 20 import re | |
| 21 import shutil | |
| 22 import sys | |
| 23 import tempfile | |
| 24 import zipfile | |
| 25 | |
| 26 from . import __version__, DistlibException | |
| 27 from .compat import sysconfig, ZipFile, fsdecode, text_type, filter | |
| 28 from .database import InstalledDistribution | |
| 29 from .metadata import Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME | |
| 30 from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache, | |
| 31 cached_property, get_cache_base, read_exports, tempdir) | |
| 32 from .version import NormalizedVersion, UnsupportedVersionError | |
| 33 | |
logger = logging.getLogger(__name__)

# Module-level Cache for extracted extension modules; created lazily by
# Wheel._get_dylib_cache().
cache = None    # created when needed

# Two-letter implementation prefix used in compatibility tags (PEP 425):
# 'pp' (PyPy), 'jy' (Jython), 'ip' (IronPython), 'cp' (CPython/default).
if hasattr(sys, 'pypy_version_info'):  # pragma: no cover
    IMP_PREFIX = 'pp'
elif sys.platform.startswith('java'):  # pragma: no cover
    IMP_PREFIX = 'jy'
elif sys.platform == 'cli':  # pragma: no cover
    IMP_PREFIX = 'ip'
else:
    IMP_PREFIX = 'cp'

# Version suffix such as '37', taken from sysconfig when available,
# otherwise derived from sys.version_info.
VER_SUFFIX = sysconfig.get_config_var('py_version_nodot')
if not VER_SUFFIX:   # pragma: no cover
    VER_SUFFIX = '%s%s' % sys.version_info[:2]
PYVER = 'py' + VER_SUFFIX      # generic Python tag, e.g. 'py37'
IMPVER = IMP_PREFIX + VER_SUFFIX    # implementation tag, e.g. 'cp37'

# Platform tag with '-' and '.' normalised to '_', e.g. 'linux_x86_64'.
ARCH = distutils.util.get_platform().replace('-', '_').replace('.', '_')

# ABI tag: prefer the interpreter's SOABI; otherwise derive one from the
# interpreter's build-time configuration flags.
ABI = sysconfig.get_config_var('SOABI')
if ABI and ABI.startswith('cpython-'):
    ABI = ABI.replace('cpython-', 'cp')
else:
    def _derive_abi():
        # Suffixes mirror CPython's ABI flags: d=debug build, m=pymalloc,
        # u=wide unicode (Py_UNICODE_SIZE == 4).
        parts = ['cp', VER_SUFFIX]
        if sysconfig.get_config_var('Py_DEBUG'):
            parts.append('d')
        if sysconfig.get_config_var('WITH_PYMALLOC'):
            parts.append('m')
        if sysconfig.get_config_var('Py_UNICODE_SIZE') == 4:
            parts.append('u')
        return ''.join(parts)
    ABI = _derive_abi()
    del _derive_abi

# Full wheel filename: name-version(-build)?-pyver-abi-arch.whl; each tag
# component may itself be a '.'-separated list (compressed tag sets).
FILENAME_RE = re.compile(r'''
(?P<nm>[^-]+)
-(?P<vn>\d+[^-]*)
(-(?P<bn>\d+[^-]*))?
-(?P<py>\w+\d+(\.\w+\d+)*)
-(?P<bi>\w+)
-(?P<ar>\w+(\.\w+)*)
\.whl$
''', re.IGNORECASE | re.VERBOSE)

# Bare name-version(-build)? form, used when no tags/extension are given.
NAME_VERSION_RE = re.compile(r'''
(?P<nm>[^-]+)
-(?P<vn>\d+[^-]*)
(-(?P<bn>\d+[^-]*))?$
''', re.IGNORECASE | re.VERBOSE)

# Shebang handling for scripts packaged in wheels. These are bytes
# patterns because script files are processed as binary data.
SHEBANG_RE = re.compile(br'\s*#![^\r\n]*')
SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$')
SHEBANG_PYTHON = b'#!python'
SHEBANG_PYTHONW = b'#!pythonw'

# Normalise filesystem paths to the '/' separators used inside archives.
if os.sep == '/':
    to_posix = lambda o: o
else:
    to_posix = lambda o: o.replace(os.sep, '/')
| 96 | |
| 97 | |
class Mounter(object):
    """
    An import hook (old-style finder/loader) which maps extension-module
    names from mounted wheels to the shared libraries extracted for them.
    """
    def __init__(self):
        # wheel path -> iterable of (module name, library path) pairs
        self.impure_wheels = {}
        # module name -> library path, across all mounted wheels
        self.libs = {}

    def add(self, pathname, extensions):
        """Register the extensions of the wheel at *pathname*."""
        self.impure_wheels[pathname] = extensions
        self.libs.update(extensions)

    def remove(self, pathname):
        """Forget the extensions registered for the wheel at *pathname*."""
        for name, _ in self.impure_wheels.pop(pathname):
            if name in self.libs:
                del self.libs[name]

    def find_module(self, fullname, path=None):
        # Claim the import only if we know about this extension module.
        if fullname in self.libs:
            return self
        return None

    def load_module(self, fullname):
        # Honour an already-imported module, per the import protocol.
        if fullname in sys.modules:
            return sys.modules[fullname]
        if fullname not in self.libs:
            raise ImportError('unable to find extension for %s' % fullname)
        mod = imp.load_dynamic(fullname, self.libs[fullname])
        mod.__loader__ = self
        pkg, _, _ = fullname.rpartition('.')
        if pkg:
            mod.__package__ = pkg
        return mod

_hook = Mounter()
| 134 | |
| 135 | |
| 136 class Wheel(object): | |
| 137 """ | |
| 138 Class to build and install from Wheel files (PEP 427). | |
| 139 """ | |
| 140 | |
| 141 wheel_version = (1, 1) | |
| 142 hash_kind = 'sha256' | |
| 143 | |
| 144 def __init__(self, filename=None, sign=False, verify=False): | |
| 145 """ | |
| 146 Initialise an instance using a (valid) filename. | |
| 147 """ | |
| 148 self.sign = sign | |
| 149 self.should_verify = verify | |
| 150 self.buildver = '' | |
| 151 self.pyver = [PYVER] | |
| 152 self.abi = ['none'] | |
| 153 self.arch = ['any'] | |
| 154 self.dirname = os.getcwd() | |
| 155 if filename is None: | |
| 156 self.name = 'dummy' | |
| 157 self.version = '0.1' | |
| 158 self._filename = self.filename | |
| 159 else: | |
| 160 m = NAME_VERSION_RE.match(filename) | |
| 161 if m: | |
| 162 info = m.groupdict('') | |
| 163 self.name = info['nm'] | |
| 164 # Reinstate the local version separator | |
| 165 self.version = info['vn'].replace('_', '-') | |
| 166 self.buildver = info['bn'] | |
| 167 self._filename = self.filename | |
| 168 else: | |
| 169 dirname, filename = os.path.split(filename) | |
| 170 m = FILENAME_RE.match(filename) | |
| 171 if not m: | |
| 172 raise DistlibException('Invalid name or ' | |
| 173 'filename: %r' % filename) | |
| 174 if dirname: | |
| 175 self.dirname = os.path.abspath(dirname) | |
| 176 self._filename = filename | |
| 177 info = m.groupdict('') | |
| 178 self.name = info['nm'] | |
| 179 self.version = info['vn'] | |
| 180 self.buildver = info['bn'] | |
| 181 self.pyver = info['py'].split('.') | |
| 182 self.abi = info['bi'].split('.') | |
| 183 self.arch = info['ar'].split('.') | |
| 184 | |
| 185 @property | |
| 186 def filename(self): | |
| 187 """ | |
| 188 Build and return a filename from the various components. | |
| 189 """ | |
| 190 if self.buildver: | |
| 191 buildver = '-' + self.buildver | |
| 192 else: | |
| 193 buildver = '' | |
| 194 pyver = '.'.join(self.pyver) | |
| 195 abi = '.'.join(self.abi) | |
| 196 arch = '.'.join(self.arch) | |
| 197 # replace - with _ as a local version separator | |
| 198 version = self.version.replace('-', '_') | |
| 199 return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, | |
| 200 pyver, abi, arch) | |
| 201 | |
| 202 @property | |
| 203 def exists(self): | |
| 204 path = os.path.join(self.dirname, self.filename) | |
| 205 return os.path.isfile(path) | |
| 206 | |
| 207 @property | |
| 208 def tags(self): | |
| 209 for pyver in self.pyver: | |
| 210 for abi in self.abi: | |
| 211 for arch in self.arch: | |
| 212 yield pyver, abi, arch | |
| 213 | |
    @cached_property
    def metadata(self):
        """
        The distribution metadata parsed from the wheel's .dist-info
        directory, trying the newer metadata filenames first.

        :raises ValueError: if none of the candidate metadata files exist.
        """
        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        wrapper = codecs.getreader('utf-8')
        with ZipFile(pathname, 'r') as zf:
            wheel_metadata = self.get_wheel_metadata(zf)
            wv = wheel_metadata['Wheel-Version'].split('.', 1)
            file_version = tuple([int(i) for i in wv])
            # Pre-1.1 wheels may only carry the legacy 'METADATA' file.
            if file_version < (1, 1):
                fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, 'METADATA']
            else:
                fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME]
            result = None
            for fn in fns:
                try:
                    metadata_filename = posixpath.join(info_dir, fn)
                    with zf.open(metadata_filename) as bf:
                        wf = wrapper(bf)
                        result = Metadata(fileobj=wf)
                        if result:
                            break
                except KeyError:
                    # Entry not present in the zip; try the next candidate.
                    pass
            if not result:
                raise ValueError('Invalid wheel, because metadata is '
                                 'missing: looked in %s' % ', '.join(fns))
        return result
| 243 | |
| 244 def get_wheel_metadata(self, zf): | |
| 245 name_ver = '%s-%s' % (self.name, self.version) | |
| 246 info_dir = '%s.dist-info' % name_ver | |
| 247 metadata_filename = posixpath.join(info_dir, 'WHEEL') | |
| 248 with zf.open(metadata_filename) as bf: | |
| 249 wf = codecs.getreader('utf-8')(bf) | |
| 250 message = message_from_file(wf) | |
| 251 return dict(message) | |
| 252 | |
| 253 @cached_property | |
| 254 def info(self): | |
| 255 pathname = os.path.join(self.dirname, self.filename) | |
| 256 with ZipFile(pathname, 'r') as zf: | |
| 257 result = self.get_wheel_metadata(zf) | |
| 258 return result | |
| 259 | |
    def process_shebang(self, data):
        """
        Rewrite the shebang of a script (given as bytes) to the generic
        '#!python' / '#!pythonw' form, preserving any interpreter
        arguments; if no shebang is present, prepend one using the
        file's own line-ending convention.
        """
        m = SHEBANG_RE.match(data)
        if m:
            end = m.end()
            shebang, data_after_shebang = data[:end], data[end:]
            # Preserve any arguments after the interpreter
            if b'pythonw' in shebang.lower():
                shebang_python = SHEBANG_PYTHONW
            else:
                shebang_python = SHEBANG_PYTHON
            m = SHEBANG_DETAIL_RE.match(shebang)
            if m:
                args = b' ' + m.groups()[-1]
            else:
                args = b''
            shebang = shebang_python + args
            data = shebang + data_after_shebang
        else:
            # No shebang: detect the line terminator from the first line
            # break found ('\n', '\r\n' or bare '\r') and prepend one.
            cr = data.find(b'\r')
            lf = data.find(b'\n')
            if cr < 0 or cr > lf:
                term = b'\n'
            else:
                if data[cr:cr + 2] == b'\r\n':
                    term = b'\r\n'
                else:
                    term = b'\r'
            data = SHEBANG_PYTHON + term + data
        return data
| 289 | |
| 290 def get_hash(self, data, hash_kind=None): | |
| 291 if hash_kind is None: | |
| 292 hash_kind = self.hash_kind | |
| 293 try: | |
| 294 hasher = getattr(hashlib, hash_kind) | |
| 295 except AttributeError: | |
| 296 raise DistlibException('Unsupported hash algorithm: %r' % hash_kind) | |
| 297 result = hasher(data).digest() | |
| 298 result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii') | |
| 299 return hash_kind, result | |
| 300 | |
| 301 def write_record(self, records, record_path, base): | |
| 302 records = list(records) # make a copy for sorting | |
| 303 p = to_posix(os.path.relpath(record_path, base)) | |
| 304 records.append((p, '', '')) | |
| 305 records.sort() | |
| 306 with CSVWriter(record_path) as writer: | |
| 307 for row in records: | |
| 308 writer.writerow(row) | |
| 309 | |
| 310 def write_records(self, info, libdir, archive_paths): | |
| 311 records = [] | |
| 312 distinfo, info_dir = info | |
| 313 hasher = getattr(hashlib, self.hash_kind) | |
| 314 for ap, p in archive_paths: | |
| 315 with open(p, 'rb') as f: | |
| 316 data = f.read() | |
| 317 digest = '%s=%s' % self.get_hash(data) | |
| 318 size = os.path.getsize(p) | |
| 319 records.append((ap, digest, size)) | |
| 320 | |
| 321 p = os.path.join(distinfo, 'RECORD') | |
| 322 self.write_record(records, p, libdir) | |
| 323 ap = to_posix(os.path.join(info_dir, 'RECORD')) | |
| 324 archive_paths.append((ap, p)) | |
| 325 | |
| 326 def build_zip(self, pathname, archive_paths): | |
| 327 with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf: | |
| 328 for ap, p in archive_paths: | |
| 329 logger.debug('Wrote %s to %s in wheel', p, ap) | |
| 330 zf.write(p, ap) | |
| 331 | |
    def build(self, paths, tags=None, wheel_version=None):
        """
        Build a wheel from files in specified paths, and use any specified tags
        when determining the name of the wheel.

        :param paths: mapping with a 'purelib' or 'platlib' key (the library
                      root) and optional 'data', 'headers', 'scripts' keys.
        :param tags: optional dict overriding 'pyver', 'abi', 'arch' tags.
        :param wheel_version: optional (major, minor) tuple written to the
                              WHEEL file; defaults to ``self.wheel_version``.
        :return: the pathname of the wheel written.
        """
        if tags is None:
            tags = {}

        # 'platlib' present means a non-pure wheel with concrete tags;
        # otherwise build a pure ('none'/'any') wheel.
        libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0]
        if libkey == 'platlib':
            is_pure = 'false'
            default_pyver = [IMPVER]
            default_abi = [ABI]
            default_arch = [ARCH]
        else:
            is_pure = 'true'
            default_pyver = [PYVER]
            default_abi = ['none']
            default_arch = ['any']

        self.pyver = tags.get('pyver', default_pyver)
        self.abi = tags.get('abi', default_abi)
        self.arch = tags.get('arch', default_arch)

        libdir = paths[libkey]

        name_ver = '%s-%s' % (self.name, self.version)
        data_dir = '%s.data' % name_ver
        info_dir = '%s.dist-info' % name_ver

        archive_paths = []

        # First, stuff which is not in site-packages
        for key in ('data', 'headers', 'scripts'):
            if key not in paths:
                continue
            path = paths[key]
            if os.path.isdir(path):
                for root, dirs, files in os.walk(path):
                    for fn in files:
                        p = fsdecode(os.path.join(root, fn))
                        rp = os.path.relpath(p, path)
                        ap = to_posix(os.path.join(data_dir, key, rp))
                        archive_paths.append((ap, p))
                        # Scripts get their shebang normalised in place
                        # (native launchers ending in .exe are left alone).
                        if key == 'scripts' and not p.endswith('.exe'):
                            with open(p, 'rb') as f:
                                data = f.read()
                            data = self.process_shebang(data)
                            with open(p, 'wb') as f:
                                f.write(data)

        # Now, stuff which is in site-packages, other than the
        # distinfo stuff.
        path = libdir
        distinfo = None
        for root, dirs, files in os.walk(path):
            if root == path:
                # At the top level only, save distinfo for later
                # and skip it for now
                for i, dn in enumerate(dirs):
                    dn = fsdecode(dn)
                    if dn.endswith('.dist-info'):
                        distinfo = os.path.join(root, dn)
                        # Removing from dirs prevents os.walk descending.
                        del dirs[i]
                        break
                assert distinfo, '.dist-info directory expected, not found'

            for fn in files:
                # comment out next suite to leave .pyc files in
                if fsdecode(fn).endswith(('.pyc', '.pyo')):
                    continue
                p = os.path.join(root, fn)
                rp = to_posix(os.path.relpath(p, path))
                archive_paths.append((rp, p))

        # Now distinfo. Assumed to be flat, i.e. os.listdir is enough.
        # RECORD/INSTALLER/SHARED/WHEEL are regenerated or install-time
        # files, so they are excluded here.
        files = os.listdir(distinfo)
        for fn in files:
            if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'):
                p = fsdecode(os.path.join(distinfo, fn))
                ap = to_posix(os.path.join(info_dir, fn))
                archive_paths.append((ap, p))

        # Write the WHEEL metadata file with one Tag line per tag triple.
        wheel_metadata = [
            'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version),
            'Generator: distlib %s' % __version__,
            'Root-Is-Purelib: %s' % is_pure,
        ]
        for pyver, abi, arch in self.tags:
            wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch))
        p = os.path.join(distinfo, 'WHEEL')
        with open(p, 'w') as f:
            f.write('\n'.join(wheel_metadata))
        ap = to_posix(os.path.join(info_dir, 'WHEEL'))
        archive_paths.append((ap, p))

        # Now, at last, RECORD.
        # Paths in here are archive paths - nothing else makes sense.
        self.write_records((distinfo, info_dir), libdir, archive_paths)
        # Now, ready to build the zip file
        pathname = os.path.join(self.dirname, self.filename)
        self.build_zip(pathname, archive_paths)
        return pathname
| 435 | |
| 436 def skip_entry(self, arcname): | |
| 437 """ | |
| 438 Determine whether an archive entry should be skipped when verifying | |
| 439 or installing. | |
| 440 """ | |
| 441 # The signature file won't be in RECORD, | |
| 442 # and we don't currently don't do anything with it | |
| 443 # We also skip directories, as they won't be in RECORD | |
| 444 # either. See: | |
| 445 # | |
| 446 # https://github.com/pypa/wheel/issues/294 | |
| 447 # https://github.com/pypa/wheel/issues/287 | |
| 448 # https://github.com/pypa/wheel/pull/289 | |
| 449 # | |
| 450 return arcname.endswith(('/', '/RECORD.jws')) | |
| 451 | |
    def install(self, paths, maker, **kwargs):
        """
        Install a wheel to the specified paths. If kwarg ``warner`` is
        specified, it should be a callable, which will be called with two
        tuples indicating the wheel version of this software and the wheel
        version in the file, if there is a discrepancy in the versions.
        This can be used to issue any warnings to raise any exceptions.
        If kwarg ``lib_only`` is True, only the purelib/platlib files are
        installed, and the headers, scripts, data and dist-info metadata are
        not written. If kwarg ``bytecode_hashed_invalidation`` is True, written
        bytecode will try to use file-hash based invalidation (PEP-552) on
        supported interpreter versions (CPython 2.7+).

        The return value is a :class:`InstalledDistribution` instance unless
        ``options.lib_only`` is True, in which case the return value is ``None``.
        """

        dry_run = maker.dry_run
        warner = kwargs.get('warner')
        lib_only = kwargs.get('lib_only', False)
        bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False)

        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        data_dir = '%s.data' % name_ver
        info_dir = '%s.dist-info' % name_ver

        metadata_name = posixpath.join(info_dir, METADATA_FILENAME)
        wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
        record_name = posixpath.join(info_dir, 'RECORD')

        wrapper = codecs.getreader('utf-8')

        with ZipFile(pathname, 'r') as zf:
            # Check the wheel format version and warn on any mismatch.
            with zf.open(wheel_metadata_name) as bwf:
                wf = wrapper(bwf)
                message = message_from_file(wf)
            wv = message['Wheel-Version'].split('.', 1)
            file_version = tuple([int(i) for i in wv])
            if (file_version != self.wheel_version) and warner:
                warner(self.wheel_version, file_version)

            if message['Root-Is-Purelib'] == 'true':
                libdir = paths['purelib']
            else:
                libdir = paths['platlib']

            # Load RECORD so each entry's size/digest can be verified.
            records = {}
            with zf.open(record_name) as bf:
                with CSVReader(stream=bf) as reader:
                    for row in reader:
                        p = row[0]
                        records[p] = row

            data_pfx = posixpath.join(data_dir, '')
            info_pfx = posixpath.join(info_dir, '')
            script_pfx = posixpath.join(data_dir, 'scripts', '')

            # make a new instance rather than a copy of maker's,
            # as we mutate it
            fileop = FileOperator(dry_run=dry_run)
            fileop.record = True    # so we can rollback if needed

            bc = not sys.dont_write_bytecode    # Double negatives. Lovely!

            outfiles = []   # for RECORD writing

            # for script copying/shebang processing
            workdir = tempfile.mkdtemp()
            # set target dir later
            # we default add_launchers to False, as the
            # Python Launcher should be used instead
            maker.source_dir = workdir
            maker.target_dir = None
            try:
                for zinfo in zf.infolist():
                    arcname = zinfo.filename
                    if isinstance(arcname, text_type):
                        u_arcname = arcname
                    else:
                        u_arcname = arcname.decode('utf-8')
                    if self.skip_entry(u_arcname):
                        continue
                    # Verify each entry against its RECORD row before
                    # writing anything.
                    row = records[u_arcname]
                    if row[2] and str(zinfo.file_size) != row[2]:
                        raise DistlibException('size mismatch for '
                                               '%s' % u_arcname)
                    if row[1]:
                        kind, value = row[1].split('=', 1)
                        with zf.open(arcname) as bf:
                            data = bf.read()
                        _, digest = self.get_hash(data, kind)
                        if digest != value:
                            raise DistlibException('digest mismatch for '
                                                   '%s' % arcname)

                    if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
                        logger.debug('lib_only: skipping %s', u_arcname)
                        continue
                    is_script = (u_arcname.startswith(script_pfx)
                                 and not u_arcname.endswith('.exe'))

                    if u_arcname.startswith(data_pfx):
                        # .data entries map to the category directory
                        # named by their second path component.
                        _, where, rp = u_arcname.split('/', 2)
                        outfile = os.path.join(paths[where], convert_path(rp))
                    else:
                        # meant for site-packages.
                        if u_arcname in (wheel_metadata_name, record_name):
                            continue
                        outfile = os.path.join(libdir, convert_path(u_arcname))
                    if not is_script:
                        with zf.open(arcname) as bf:
                            fileop.copy_stream(bf, outfile)
                        outfiles.append(outfile)
                        # Double check the digest of the written file
                        if not dry_run and row[1]:
                            with open(outfile, 'rb') as bf:
                                data = bf.read()
                                _, newdigest = self.get_hash(data, kind)
                                if newdigest != digest:
                                    raise DistlibException('digest mismatch '
                                                           'on write for '
                                                           '%s' % outfile)
                        if bc and outfile.endswith('.py'):
                            try:
                                pyc = fileop.byte_compile(outfile,
                                    hashed_invalidation=bc_hashed_invalidation)
                                outfiles.append(pyc)
                            except Exception:
                                # Don't give up if byte-compilation fails,
                                # but log it and perhaps warn the user
                                logger.warning('Byte-compilation failed',
                                               exc_info=True)
                    else:
                        # Scripts are staged in workdir, then regenerated
                        # by the maker into their final directory.
                        fn = os.path.basename(convert_path(arcname))
                        workname = os.path.join(workdir, fn)
                        with zf.open(arcname) as bf:
                            fileop.copy_stream(bf, workname)

                        dn, fn = os.path.split(outfile)
                        maker.target_dir = dn
                        filenames = maker.make(fn)
                        fileop.set_executable_mode(filenames)
                        outfiles.extend(filenames)

                if lib_only:
                    logger.debug('lib_only: returning None')
                    dist = None
                else:
                    # Generate scripts

                    # Try to get pydist.json so we can see if there are
                    # any commands to generate. If this fails (e.g. because
                    # of a legacy wheel), log a warning but don't give up.
                    commands = None
                    file_version = self.info['Wheel-Version']
                    if file_version == '1.0':
                        # Use legacy info
                        ep = posixpath.join(info_dir, 'entry_points.txt')
                        try:
                            with zf.open(ep) as bwf:
                                epdata = read_exports(bwf)
                            commands = {}
                            for key in ('console', 'gui'):
                                k = '%s_scripts' % key
                                if k in epdata:
                                    commands['wrap_%s' % key] = d = {}
                                    for v in epdata[k].values():
                                        s = '%s:%s' % (v.prefix, v.suffix)
                                        if v.flags:
                                            s += ' %s' % v.flags
                                        d[v.name] = s
                        except Exception:
                            logger.warning('Unable to read legacy script '
                                           'metadata, so cannot generate '
                                           'scripts')
                    else:
                        try:
                            with zf.open(metadata_name) as bwf:
                                wf = wrapper(bwf)
                                commands = json.load(wf).get('extensions')
                                if commands:
                                    commands = commands.get('python.commands')
                        except Exception:
                            logger.warning('Unable to read JSON metadata, so '
                                           'cannot generate scripts')
                    if commands:
                        console_scripts = commands.get('wrap_console', {})
                        gui_scripts = commands.get('wrap_gui', {})
                        if console_scripts or gui_scripts:
                            script_dir = paths.get('scripts', '')
                            if not os.path.isdir(script_dir):
                                raise ValueError('Valid script path not '
                                                 'specified')
                            maker.target_dir = script_dir
                            for k, v in console_scripts.items():
                                script = '%s = %s' % (k, v)
                                filenames = maker.make(script)
                                fileop.set_executable_mode(filenames)

                            if gui_scripts:
                                options = {'gui': True }
                                for k, v in gui_scripts.items():
                                    script = '%s = %s' % (k, v)
                                    filenames = maker.make(script, options)
                                    fileop.set_executable_mode(filenames)

                    p = os.path.join(libdir, info_dir)
                    dist = InstalledDistribution(p)

                    # Write SHARED
                    paths = dict(paths)     # don't change passed in dict
                    del paths['purelib']
                    del paths['platlib']
                    paths['lib'] = libdir
                    p = dist.write_shared_locations(paths, dry_run)
                    if p:
                        outfiles.append(p)

                    # Write RECORD
                    dist.write_installed_files(outfiles, paths['prefix'],
                                               dry_run)
                return dist
            except Exception:  # pragma: no cover
                logger.exception('installation failed.')
                # Undo everything written so far.
                fileop.rollback()
                raise
            finally:
                shutil.rmtree(workdir)
| 681 | |
    def _get_dylib_cache(self):
        """
        Return the module-level Cache used for extracted extension
        modules, creating it on first use.
        """
        global cache
        if cache is None:
            # Use native string to avoid issues on 2.x: see Python #20140.
            # The cache is keyed by Python major.minor so different
            # interpreter versions don't share extracted extensions.
            base = os.path.join(get_cache_base(), str('dylib-cache'),
                                '%s.%s' % sys.version_info[:2])
            cache = Cache(base)
        return cache
| 690 | |
    def _get_extensions(self):
        """
        Return a list of (module name, extracted path) pairs for the
        extension modules listed in the wheel's EXTENSIONS file,
        extracting each into the dylib cache when the cached copy is
        missing or older than the archive entry. Returns an empty list
        if the wheel has no EXTENSIONS file.
        """
        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        arcname = posixpath.join(info_dir, 'EXTENSIONS')
        wrapper = codecs.getreader('utf-8')
        result = []
        with ZipFile(pathname, 'r') as zf:
            try:
                with zf.open(arcname) as bf:
                    wf = wrapper(bf)
                    extensions = json.load(wf)
                    cache = self._get_dylib_cache()
                    prefix = cache.prefix_to_dir(pathname)
                    cache_base = os.path.join(cache.base, prefix)
                    if not os.path.isdir(cache_base):
                        os.makedirs(cache_base)
                    for name, relpath in extensions.items():
                        dest = os.path.join(cache_base, convert_path(relpath))
                        if not os.path.exists(dest):
                            extract = True
                        else:
                            # Re-extract only if the archive entry is newer
                            # than the cached file on disk.
                            file_time = os.stat(dest).st_mtime
                            file_time = datetime.datetime.fromtimestamp(file_time)
                            info = zf.getinfo(relpath)
                            wheel_time = datetime.datetime(*info.date_time)
                            extract = wheel_time > file_time
                        if extract:
                            zf.extract(relpath, cache_base)
                        result.append((name, dest))
            except KeyError:
                # No EXTENSIONS entry in the wheel: nothing to do.
                pass
        return result
| 724 | |
    def is_compatible(self):
        """
        Determine if a wheel is compatible with the running system.
        """
        # Delegates to the module-level is_compatible() helper.
        return is_compatible(self)
| 730 | |
| 731 def is_mountable(self): | |
| 732 """ | |
| 733 Determine if a wheel is asserted as mountable by its metadata. | |
| 734 """ | |
| 735 return True # for now - metadata details TBD | |
| 736 | |
| 737 def mount(self, append=False): | |
| 738 pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) | |
| 739 if not self.is_compatible(): | |
| 740 msg = 'Wheel %s not compatible with this Python.' % pathname | |
| 741 raise DistlibException(msg) | |
| 742 if not self.is_mountable(): | |
| 743 msg = 'Wheel %s is marked as not mountable.' % pathname | |
| 744 raise DistlibException(msg) | |
| 745 if pathname in sys.path: | |
| 746 logger.debug('%s already in path', pathname) | |
| 747 else: | |
| 748 if append: | |
| 749 sys.path.append(pathname) | |
| 750 else: | |
| 751 sys.path.insert(0, pathname) | |
| 752 extensions = self._get_extensions() | |
| 753 if extensions: | |
| 754 if _hook not in sys.meta_path: | |
| 755 sys.meta_path.append(_hook) | |
| 756 _hook.add(pathname, extensions) | |
| 757 | |
| 758 def unmount(self): | |
| 759 pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) | |
| 760 if pathname not in sys.path: | |
| 761 logger.debug('%s not in path', pathname) | |
| 762 else: | |
| 763 sys.path.remove(pathname) | |
| 764 if pathname in _hook.impure_wheels: | |
| 765 _hook.remove(pathname) | |
| 766 if not _hook.impure_wheels: | |
| 767 if _hook in sys.meta_path: | |
| 768 sys.meta_path.remove(_hook) | |
| 769 | |
    def verify(self):
        """
        Verify the wheel's contents: reject entries with '..' in their
        directory parts and check each entry's size and digest against
        the RECORD file.

        :raises DistlibException: on an invalid entry or any size/digest
            mismatch.
        """
        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        data_dir = '%s.data' % name_ver
        info_dir = '%s.dist-info' % name_ver

        metadata_name = posixpath.join(info_dir, METADATA_FILENAME)
        wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
        record_name = posixpath.join(info_dir, 'RECORD')

        wrapper = codecs.getreader('utf-8')

        with ZipFile(pathname, 'r') as zf:
            with zf.open(wheel_metadata_name) as bwf:
                wf = wrapper(bwf)
                message = message_from_file(wf)
            wv = message['Wheel-Version'].split('.', 1)
            file_version = tuple([int(i) for i in wv])
            # TODO version verification

            # Load RECORD rows keyed by archive path.
            records = {}
            with zf.open(record_name) as bf:
                with CSVReader(stream=bf) as reader:
                    for row in reader:
                        p = row[0]
                        records[p] = row

            for zinfo in zf.infolist():
                arcname = zinfo.filename
                if isinstance(arcname, text_type):
                    u_arcname = arcname
                else:
                    u_arcname = arcname.decode('utf-8')
                # See issue #115: some wheels have .. in their entries, but
                # in the filename ... e.g. __main__..py ! So the check is
                # updated to look for .. in the directory portions
                p = u_arcname.split('/')
                if '..' in p:
                    raise DistlibException('invalid entry in '
                                           'wheel: %r' % u_arcname)

                if self.skip_entry(u_arcname):
                    continue
                row = records[u_arcname]
                if row[2] and str(zinfo.file_size) != row[2]:
                    raise DistlibException('size mismatch for '
                                           '%s' % u_arcname)
                if row[1]:
                    # RECORD digest field has the form 'kind=value'.
                    kind, value = row[1].split('=', 1)
                    with zf.open(arcname) as bf:
                        data = bf.read()
                        _, digest = self.get_hash(data, kind)
                        if digest != value:
                            raise DistlibException('digest mismatch for '
                                                   '%s' % arcname)
| 825 | |
| 826 def update(self, modifier, dest_dir=None, **kwargs): | |
| 827 """ | |
| 828 Update the contents of a wheel in a generic way. The modifier should | |
| 829 be a callable which expects a dictionary argument: its keys are | |
| 830 archive-entry paths, and its values are absolute filesystem paths | |
| 831 where the contents the corresponding archive entries can be found. The | |
| 832 modifier is free to change the contents of the files pointed to, add | |
| 833 new entries and remove entries, before returning. This method will | |
| 834 extract the entire contents of the wheel to a temporary location, call | |
| 835 the modifier, and then use the passed (and possibly updated) | |
| 836 dictionary to write a new wheel. If ``dest_dir`` is specified, the new | |
| 837 wheel is written there -- otherwise, the original wheel is overwritten. | |
| 838 | |
| 839 The modifier should return True if it updated the wheel, else False. | |
| 840 This method returns the same value the modifier returns. | |
| 841 """ | |
| 842 | |
| 843 def get_version(path_map, info_dir): | |
| 844 version = path = None | |
| 845 key = '%s/%s' % (info_dir, METADATA_FILENAME) | |
| 846 if key not in path_map: | |
| 847 key = '%s/PKG-INFO' % info_dir | |
| 848 if key in path_map: | |
| 849 path = path_map[key] | |
| 850 version = Metadata(path=path).version | |
| 851 return version, path | |
| 852 | |
| 853 def update_version(version, path): | |
| 854 updated = None | |
| 855 try: | |
| 856 v = NormalizedVersion(version) | |
| 857 i = version.find('-') | |
| 858 if i < 0: | |
| 859 updated = '%s+1' % version | |
| 860 else: | |
| 861 parts = [int(s) for s in version[i + 1:].split('.')] | |
| 862 parts[-1] += 1 | |
| 863 updated = '%s+%s' % (version[:i], | |
| 864 '.'.join(str(i) for i in parts)) | |
| 865 except UnsupportedVersionError: | |
| 866 logger.debug('Cannot update non-compliant (PEP-440) ' | |
| 867 'version %r', version) | |
| 868 if updated: | |
| 869 md = Metadata(path=path) | |
| 870 md.version = updated | |
| 871 legacy = not path.endswith(METADATA_FILENAME) | |
| 872 md.write(path=path, legacy=legacy) | |
| 873 logger.debug('Version updated from %r to %r', version, | |
| 874 updated) | |
| 875 | |
| 876 pathname = os.path.join(self.dirname, self.filename) | |
| 877 name_ver = '%s-%s' % (self.name, self.version) | |
| 878 info_dir = '%s.dist-info' % name_ver | |
| 879 record_name = posixpath.join(info_dir, 'RECORD') | |
| 880 with tempdir() as workdir: | |
| 881 with ZipFile(pathname, 'r') as zf: | |
| 882 path_map = {} | |
| 883 for zinfo in zf.infolist(): | |
| 884 arcname = zinfo.filename | |
| 885 if isinstance(arcname, text_type): | |
| 886 u_arcname = arcname | |
| 887 else: | |
| 888 u_arcname = arcname.decode('utf-8') | |
| 889 if u_arcname == record_name: | |
| 890 continue | |
| 891 if '..' in u_arcname: | |
| 892 raise DistlibException('invalid entry in ' | |
| 893 'wheel: %r' % u_arcname) | |
| 894 zf.extract(zinfo, workdir) | |
| 895 path = os.path.join(workdir, convert_path(u_arcname)) | |
| 896 path_map[u_arcname] = path | |
| 897 | |
| 898 # Remember the version. | |
| 899 original_version, _ = get_version(path_map, info_dir) | |
| 900 # Files extracted. Call the modifier. | |
| 901 modified = modifier(path_map, **kwargs) | |
| 902 if modified: | |
| 903 # Something changed - need to build a new wheel. | |
| 904 current_version, path = get_version(path_map, info_dir) | |
| 905 if current_version and (current_version == original_version): | |
| 906 # Add or update local version to signify changes. | |
| 907 update_version(current_version, path) | |
| 908 # Decide where the new wheel goes. | |
| 909 if dest_dir is None: | |
| 910 fd, newpath = tempfile.mkstemp(suffix='.whl', | |
| 911 prefix='wheel-update-', | |
| 912 dir=workdir) | |
| 913 os.close(fd) | |
| 914 else: | |
| 915 if not os.path.isdir(dest_dir): | |
| 916 raise DistlibException('Not a directory: %r' % dest_dir) | |
| 917 newpath = os.path.join(dest_dir, self.filename) | |
| 918 archive_paths = list(path_map.items()) | |
| 919 distinfo = os.path.join(workdir, info_dir) | |
| 920 info = distinfo, info_dir | |
| 921 self.write_records(info, workdir, archive_paths) | |
| 922 self.build_zip(newpath, archive_paths) | |
| 923 if dest_dir is None: | |
| 924 shutil.copyfile(newpath, pathname) | |
| 925 return modified | |
| 926 | |
def compatible_tags():
    """
    Return the set of (pyver, abi, arch) tuples compatible with this Python.
    """
    # Interpreter versions, most specific first: the running X.Y, followed by
    # every earlier minor release of the same major version down to X.0.
    pyvers = [VER_SUFFIX]
    vmajor = VER_SUFFIX[0]
    pyvers.extend('%s%d' % (vmajor, n)
                  for n in range(sys.version_info[1] - 1, -1, -1))

    # ABIs advertised via the interpreter's extension-module suffixes
    # ('.abiX...'), sorted; the primary ABI goes first and 'none' last.
    abis = sorted(sfx.split('.', 2)[1]
                  for sfx, _, _ in imp.get_suffixes()
                  if sfx.startswith('.abi'))
    if ABI != 'none':
        abis.insert(0, ABI)
    abis.append('none')

    arches = [ARCH]
    if sys.platform == 'darwin':
        # macOS platform tags look like e.g. macosx_10_9_x86_64. Older OS
        # minor versions and fat/universal binary families containing this
        # CPU are also acceptable.
        m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH)
        if m:
            plat, os_major, os_minor, cpu = m.groups()
            # Map each fat/universal family to the CPU types it contains.
            families = {
                'fat': ('i386', 'ppc'),
                'fat3': ('i386', 'ppc', 'x86_64'),
                'fat64': ('ppc64', 'x86_64'),
                'intel': ('i386', 'x86_64'),
                'universal': ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'),
            }
            candidates = [cpu]
            candidates.extend(fam for fam, members in families.items()
                              if cpu in members)
            for minor_v in range(int(os_minor), -1, -1):
                for cand in candidates:
                    tag = '%s_%s_%s_%s' % (plat, os_major, minor_v, cand)
                    if tag != ARCH:  # the exact tag is already in the list
                        arches.append(tag)

    tags = []
    # Most specific - our Python version, ABI and arch.
    for abi in abis:
        tags.extend((''.join((IMP_PREFIX, pyvers[0])), abi, arch)
                    for arch in arches)

    # No ABI / arch dependency, but still implementation-specific.
    tags.append((''.join((IMP_PREFIX, pyvers[0][0])), 'none', 'any'))
    tags.extend((''.join((IMP_PREFIX, ver)), 'none', 'any') for ver in pyvers)

    # Fully generic 'py' tags - no implementation, ABI or arch dependency.
    tags.append((''.join(('py', pyvers[0][0])), 'none', 'any'))
    tags.extend((''.join(('py', ver)), 'none', 'any') for ver in pyvers)
    return set(tags)
| 987 | |
| 988 | |
# Compute the full compatibility tag set for the running interpreter once,
# at import time; is_compatible() below uses it as its default tag set.
COMPATIBLE_TAGS = compatible_tags()

# The helper is only needed to build the constant above; remove it from the
# module namespace so it is not exposed as part of the public API.
del compatible_tags
| 992 | |
| 993 | |
def is_compatible(wheel, tags=None):
    """
    Return True if ``wheel`` matches any of the given compatibility tags.

    ``wheel`` may be a Wheel instance or a wheel filename; ``tags`` is an
    iterable of (pyver, abi, arch) tuples, defaulting to COMPATIBLE_TAGS.
    """
    if not isinstance(wheel, Wheel):
        wheel = Wheel(wheel)  # assume it's a filename
    candidates = COMPATIBLE_TAGS if tags is None else tags
    return any(ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch
               for ver, abi, arch in candidates)
