Mercurial > repos > guerler > springsuite
comparison planemo/lib/python3.7/site-packages/distlib/wheel.py @ 0:d30785e31577 draft
"planemo upload commit 6eee67778febed82ddd413c3ca40b3183a3898f1"
| author | guerler |
|---|---|
| date | Fri, 31 Jul 2020 00:18:57 -0400 |
| parents | |
| children |
comparison
equal
deleted
inserted
replaced
| -1:000000000000 | 0:d30785e31577 |
|---|---|
| 1 # -*- coding: utf-8 -*- | |
| 2 # | |
| 3 # Copyright (C) 2013-2017 Vinay Sajip. | |
| 4 # Licensed to the Python Software Foundation under a contributor agreement. | |
| 5 # See LICENSE.txt and CONTRIBUTORS.txt. | |
| 6 # | |
| 7 from __future__ import unicode_literals | |
| 8 | |
| 9 import base64 | |
| 10 import codecs | |
| 11 import datetime | |
| 12 import distutils.util | |
| 13 from email import message_from_file | |
| 14 import hashlib | |
| 15 import imp | |
| 16 import json | |
| 17 import logging | |
| 18 import os | |
| 19 import posixpath | |
| 20 import re | |
| 21 import shutil | |
| 22 import sys | |
| 23 import tempfile | |
| 24 import zipfile | |
| 25 | |
| 26 from . import __version__, DistlibException | |
| 27 from .compat import sysconfig, ZipFile, fsdecode, text_type, filter | |
| 28 from .database import InstalledDistribution | |
| 29 from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME, | |
| 30 LEGACY_METADATA_FILENAME) | |
| 31 from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache, | |
| 32 cached_property, get_cache_base, read_exports, tempdir) | |
| 33 from .version import NormalizedVersion, UnsupportedVersionError | |
| 34 | |
logger = logging.getLogger(__name__)

cache = None    # created when needed

# Interpreter implementation prefix used in compatibility tags (PEP 425):
# 'pp' = PyPy, 'jy' = Jython, 'ip' = IronPython, 'cp' = CPython.
if hasattr(sys, 'pypy_version_info'):  # pragma: no cover
    IMP_PREFIX = 'pp'
elif sys.platform.startswith('java'):  # pragma: no cover
    IMP_PREFIX = 'jy'
elif sys.platform == 'cli':  # pragma: no cover
    IMP_PREFIX = 'ip'
else:
    IMP_PREFIX = 'cp'

# Version suffix such as '37' for Python 3.7; fall back to building it
# from sys.version_info when sysconfig doesn't provide it.
VER_SUFFIX = sysconfig.get_config_var('py_version_nodot')
if not VER_SUFFIX:   # pragma: no cover
    VER_SUFFIX = '%s%s' % sys.version_info[:2]
PYVER = 'py' + VER_SUFFIX           # generic Python tag, e.g. 'py37'
IMPVER = IMP_PREFIX + VER_SUFFIX    # implementation tag, e.g. 'cp37'

# Platform tag, normalised the way PEP 425 requires (e.g. 'linux_x86_64').
ARCH = distutils.util.get_platform().replace('-', '_').replace('.', '_')

# ABI tag: prefer the interpreter's SOABI config value; otherwise derive a
# tag from the relevant build-time flags (debug / pymalloc / wide unicode).
ABI = sysconfig.get_config_var('SOABI')
if ABI and ABI.startswith('cpython-'):
    ABI = ABI.replace('cpython-', 'cp')
else:
    def _derive_abi():
        # Mirrors the suffixes CPython itself appends for these builds.
        parts = ['cp', VER_SUFFIX]
        if sysconfig.get_config_var('Py_DEBUG'):
            parts.append('d')
        if sysconfig.get_config_var('WITH_PYMALLOC'):
            parts.append('m')
        if sysconfig.get_config_var('Py_UNICODE_SIZE') == 4:
            parts.append('u')
        return ''.join(parts)
    ABI = _derive_abi()
    del _derive_abi

# Parses a full wheel filename into name/version/build/python/abi/platform.
FILENAME_RE = re.compile(r'''
(?P<nm>[^-]+)
-(?P<vn>\d+[^-]*)
(-(?P<bn>\d+[^-]*))?
-(?P<py>\w+\d+(\.\w+\d+)*)
-(?P<bi>\w+)
-(?P<ar>\w+(\.\w+)*)
\.whl$
''', re.IGNORECASE | re.VERBOSE)

# Parses a bare 'name-version[-build]' specification (no tags, no '.whl').
NAME_VERSION_RE = re.compile(r'''
(?P<nm>[^-]+)
-(?P<vn>\d+[^-]*)
(-(?P<bn>\d+[^-]*))?$
''', re.IGNORECASE | re.VERBOSE)

# Shebang handling for scripts extracted from wheels: SHEBANG_RE matches a
# leading '#!' line; SHEBANG_DETAIL_RE splits it into interpreter and
# arguments so that the arguments can be preserved on rewrite.
SHEBANG_RE = re.compile(br'\s*#![^\r\n]*')
SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$')
SHEBANG_PYTHON = b'#!python'
SHEBANG_PYTHONW = b'#!pythonw'

# Convert native paths to the '/'-separated form used inside zip archives.
if os.sep == '/':
    to_posix = lambda o: o
else:
    to_posix = lambda o: o.replace(os.sep, '/')
| 97 | |
| 98 | |
class Mounter(object):
    """
    A sys.meta_path hook which makes C extension modules contained in
    mounted wheels importable.

    Implements the legacy finder/loader protocol (find_module /
    load_module), consistent with this module's use of the ``imp`` API.
    """
    def __init__(self):
        # wheel pathname -> iterable of (dotted name, extension path) pairs
        self.impure_wheels = {}
        # dotted module name -> filesystem path of the extracted extension
        self.libs = {}

    def add(self, pathname, extensions):
        """Register the (name, path) extension pairs of a mounted wheel."""
        self.impure_wheels[pathname] = extensions
        self.libs.update(extensions)

    def remove(self, pathname):
        """Forget the extensions belonging to an unmounted wheel."""
        for name, _path in self.impure_wheels.pop(pathname):
            if name in self.libs:
                del self.libs[name]

    def find_module(self, fullname, path=None):
        """Claim the import when *fullname* is a registered extension."""
        return self if fullname in self.libs else None

    def load_module(self, fullname):
        """Load a registered extension module, honouring sys.modules."""
        try:
            return sys.modules[fullname]
        except KeyError:
            pass
        if fullname not in self.libs:
            raise ImportError('unable to find extension for %s' % fullname)
        result = imp.load_dynamic(fullname, self.libs[fullname])
        result.__loader__ = self
        parts = fullname.rsplit('.', 1)
        if len(parts) > 1:
            result.__package__ = parts[0]
        return result

# The single Mounter instance shared by all mounted wheels.
_hook = Mounter()
| 135 | |
| 136 | |
class Wheel(object):
    """
    Class to build and install from Wheel files (PEP 427).
    """

    # Version of the wheel specification this implementation writes and
    # expects (compared against the Wheel-Version header in WHEEL).
    wheel_version = (1, 1)
    # Default hashlib algorithm used when hashing files for RECORD.
    hash_kind = 'sha256'
| 145 def __init__(self, filename=None, sign=False, verify=False): | |
| 146 """ | |
| 147 Initialise an instance using a (valid) filename. | |
| 148 """ | |
| 149 self.sign = sign | |
| 150 self.should_verify = verify | |
| 151 self.buildver = '' | |
| 152 self.pyver = [PYVER] | |
| 153 self.abi = ['none'] | |
| 154 self.arch = ['any'] | |
| 155 self.dirname = os.getcwd() | |
| 156 if filename is None: | |
| 157 self.name = 'dummy' | |
| 158 self.version = '0.1' | |
| 159 self._filename = self.filename | |
| 160 else: | |
| 161 m = NAME_VERSION_RE.match(filename) | |
| 162 if m: | |
| 163 info = m.groupdict('') | |
| 164 self.name = info['nm'] | |
| 165 # Reinstate the local version separator | |
| 166 self.version = info['vn'].replace('_', '-') | |
| 167 self.buildver = info['bn'] | |
| 168 self._filename = self.filename | |
| 169 else: | |
| 170 dirname, filename = os.path.split(filename) | |
| 171 m = FILENAME_RE.match(filename) | |
| 172 if not m: | |
| 173 raise DistlibException('Invalid name or ' | |
| 174 'filename: %r' % filename) | |
| 175 if dirname: | |
| 176 self.dirname = os.path.abspath(dirname) | |
| 177 self._filename = filename | |
| 178 info = m.groupdict('') | |
| 179 self.name = info['nm'] | |
| 180 self.version = info['vn'] | |
| 181 self.buildver = info['bn'] | |
| 182 self.pyver = info['py'].split('.') | |
| 183 self.abi = info['bi'].split('.') | |
| 184 self.arch = info['ar'].split('.') | |
| 185 | |
| 186 @property | |
| 187 def filename(self): | |
| 188 """ | |
| 189 Build and return a filename from the various components. | |
| 190 """ | |
| 191 if self.buildver: | |
| 192 buildver = '-' + self.buildver | |
| 193 else: | |
| 194 buildver = '' | |
| 195 pyver = '.'.join(self.pyver) | |
| 196 abi = '.'.join(self.abi) | |
| 197 arch = '.'.join(self.arch) | |
| 198 # replace - with _ as a local version separator | |
| 199 version = self.version.replace('-', '_') | |
| 200 return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, | |
| 201 pyver, abi, arch) | |
| 202 | |
| 203 @property | |
| 204 def exists(self): | |
| 205 path = os.path.join(self.dirname, self.filename) | |
| 206 return os.path.isfile(path) | |
| 207 | |
| 208 @property | |
| 209 def tags(self): | |
| 210 for pyver in self.pyver: | |
| 211 for abi in self.abi: | |
| 212 for arch in self.arch: | |
| 213 yield pyver, abi, arch | |
| 214 | |
    @cached_property
    def metadata(self):
        """
        The distribution metadata parsed from this wheel's .dist-info
        directory: the JSON metadata file is tried first, then the legacy
        email-style METADATA. Raises ValueError if neither can be read.
        """
        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        wrapper = codecs.getreader('utf-8')
        with ZipFile(pathname, 'r') as zf:
            wheel_metadata = self.get_wheel_metadata(zf)
            wv = wheel_metadata['Wheel-Version'].split('.', 1)
            file_version = tuple([int(i) for i in wv])
            # The version-dependent candidate lists below are intentionally
            # disabled; METADATA_FILENAME is currently never consulted.
            # if file_version < (1, 1):
            #     fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME,
            #            LEGACY_METADATA_FILENAME]
            # else:
            #     fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME]
            fns = [WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME]
            result = None
            for fn in fns:
                try:
                    metadata_filename = posixpath.join(info_dir, fn)
                    with zf.open(metadata_filename) as bf:
                        wf = wrapper(bf)
                        result = Metadata(fileobj=wf)
                        if result:
                            break
                except KeyError:
                    # Candidate not present in the archive; try the next one.
                    pass
            if not result:
                raise ValueError('Invalid wheel, because metadata is '
                                 'missing: looked in %s' % ', '.join(fns))
        return result
| 246 | |
| 247 def get_wheel_metadata(self, zf): | |
| 248 name_ver = '%s-%s' % (self.name, self.version) | |
| 249 info_dir = '%s.dist-info' % name_ver | |
| 250 metadata_filename = posixpath.join(info_dir, 'WHEEL') | |
| 251 with zf.open(metadata_filename) as bf: | |
| 252 wf = codecs.getreader('utf-8')(bf) | |
| 253 message = message_from_file(wf) | |
| 254 return dict(message) | |
| 255 | |
| 256 @cached_property | |
| 257 def info(self): | |
| 258 pathname = os.path.join(self.dirname, self.filename) | |
| 259 with ZipFile(pathname, 'r') as zf: | |
| 260 result = self.get_wheel_metadata(zf) | |
| 261 return result | |
| 262 | |
| 263 def process_shebang(self, data): | |
| 264 m = SHEBANG_RE.match(data) | |
| 265 if m: | |
| 266 end = m.end() | |
| 267 shebang, data_after_shebang = data[:end], data[end:] | |
| 268 # Preserve any arguments after the interpreter | |
| 269 if b'pythonw' in shebang.lower(): | |
| 270 shebang_python = SHEBANG_PYTHONW | |
| 271 else: | |
| 272 shebang_python = SHEBANG_PYTHON | |
| 273 m = SHEBANG_DETAIL_RE.match(shebang) | |
| 274 if m: | |
| 275 args = b' ' + m.groups()[-1] | |
| 276 else: | |
| 277 args = b'' | |
| 278 shebang = shebang_python + args | |
| 279 data = shebang + data_after_shebang | |
| 280 else: | |
| 281 cr = data.find(b'\r') | |
| 282 lf = data.find(b'\n') | |
| 283 if cr < 0 or cr > lf: | |
| 284 term = b'\n' | |
| 285 else: | |
| 286 if data[cr:cr + 2] == b'\r\n': | |
| 287 term = b'\r\n' | |
| 288 else: | |
| 289 term = b'\r' | |
| 290 data = SHEBANG_PYTHON + term + data | |
| 291 return data | |
| 292 | |
| 293 def get_hash(self, data, hash_kind=None): | |
| 294 if hash_kind is None: | |
| 295 hash_kind = self.hash_kind | |
| 296 try: | |
| 297 hasher = getattr(hashlib, hash_kind) | |
| 298 except AttributeError: | |
| 299 raise DistlibException('Unsupported hash algorithm: %r' % hash_kind) | |
| 300 result = hasher(data).digest() | |
| 301 result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii') | |
| 302 return hash_kind, result | |
| 303 | |
| 304 def write_record(self, records, record_path, base): | |
| 305 records = list(records) # make a copy, as mutated | |
| 306 p = to_posix(os.path.relpath(record_path, base)) | |
| 307 records.append((p, '', '')) | |
| 308 with CSVWriter(record_path) as writer: | |
| 309 for row in records: | |
| 310 writer.writerow(row) | |
| 311 | |
| 312 def write_records(self, info, libdir, archive_paths): | |
| 313 records = [] | |
| 314 distinfo, info_dir = info | |
| 315 hasher = getattr(hashlib, self.hash_kind) | |
| 316 for ap, p in archive_paths: | |
| 317 with open(p, 'rb') as f: | |
| 318 data = f.read() | |
| 319 digest = '%s=%s' % self.get_hash(data) | |
| 320 size = os.path.getsize(p) | |
| 321 records.append((ap, digest, size)) | |
| 322 | |
| 323 p = os.path.join(distinfo, 'RECORD') | |
| 324 self.write_record(records, p, libdir) | |
| 325 ap = to_posix(os.path.join(info_dir, 'RECORD')) | |
| 326 archive_paths.append((ap, p)) | |
| 327 | |
| 328 def build_zip(self, pathname, archive_paths): | |
| 329 with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf: | |
| 330 for ap, p in archive_paths: | |
| 331 logger.debug('Wrote %s to %s in wheel', p, ap) | |
| 332 zf.write(p, ap) | |
| 333 | |
    def build(self, paths, tags=None, wheel_version=None):
        """
        Build a wheel from files in specified paths, and use any specified tags
        when determining the name of the wheel.

        :param paths: mapping of location keys ('purelib' or 'platlib',
                      plus optionally 'data', 'headers', 'scripts') to
                      directories containing the files to archive.
        :param tags: optional dict overriding the default 'pyver', 'abi'
                     and 'arch' tag lists.
        :param wheel_version: optional (major, minor) pair written as
                              Wheel-Version; defaults to self.wheel_version.
        :return: the pathname of the wheel written.
        """
        if tags is None:
            tags = {}

        # A 'platlib' key means the wheel is platform-specific; that
        # determines both Root-Is-Purelib and the default tags.
        libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0]
        if libkey == 'platlib':
            is_pure = 'false'
            default_pyver = [IMPVER]
            default_abi = [ABI]
            default_arch = [ARCH]
        else:
            is_pure = 'true'
            default_pyver = [PYVER]
            default_abi = ['none']
            default_arch = ['any']

        self.pyver = tags.get('pyver', default_pyver)
        self.abi = tags.get('abi', default_abi)
        self.arch = tags.get('arch', default_arch)

        libdir = paths[libkey]

        name_ver = '%s-%s' % (self.name, self.version)
        data_dir = '%s.data' % name_ver
        info_dir = '%s.dist-info' % name_ver

        archive_paths = []

        # First, stuff which is not in site-packages
        for key in ('data', 'headers', 'scripts'):
            if key not in paths:
                continue
            path = paths[key]
            if os.path.isdir(path):
                for root, dirs, files in os.walk(path):
                    for fn in files:
                        p = fsdecode(os.path.join(root, fn))
                        rp = os.path.relpath(p, path)
                        ap = to_posix(os.path.join(data_dir, key, rp))
                        archive_paths.append((ap, p))
                        if key == 'scripts' and not p.endswith('.exe'):
                            # Normalise script shebangs in place before
                            # they are archived.
                            with open(p, 'rb') as f:
                                data = f.read()
                            data = self.process_shebang(data)
                            with open(p, 'wb') as f:
                                f.write(data)

        # Now, stuff which is in site-packages, other than the
        # distinfo stuff.
        path = libdir
        distinfo = None
        for root, dirs, files in os.walk(path):
            if root == path:
                # At the top level only, save distinfo for later
                # and skip it for now
                for i, dn in enumerate(dirs):
                    dn = fsdecode(dn)
                    if dn.endswith('.dist-info'):
                        distinfo = os.path.join(root, dn)
                        del dirs[i]
                        break
                assert distinfo, '.dist-info directory expected, not found'

            for fn in files:
                # comment out next suite to leave .pyc files in
                if fsdecode(fn).endswith(('.pyc', '.pyo')):
                    continue
                p = os.path.join(root, fn)
                rp = to_posix(os.path.relpath(p, path))
                archive_paths.append((rp, p))

        # Now distinfo. Assumed to be flat, i.e. os.listdir is enough.
        # Installer-generated files are regenerated, so not archived.
        files = os.listdir(distinfo)
        for fn in files:
            if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'):
                p = fsdecode(os.path.join(distinfo, fn))
                ap = to_posix(os.path.join(info_dir, fn))
                archive_paths.append((ap, p))

        wheel_metadata = [
            'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version),
            'Generator: distlib %s' % __version__,
            'Root-Is-Purelib: %s' % is_pure,
        ]
        for pyver, abi, arch in self.tags:
            wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch))
        p = os.path.join(distinfo, 'WHEEL')
        with open(p, 'w') as f:
            f.write('\n'.join(wheel_metadata))
        ap = to_posix(os.path.join(info_dir, 'WHEEL'))
        archive_paths.append((ap, p))

        # sort the entries by archive path. Not needed by any spec, but it
        # keeps the archive listing and RECORD tidier than they would otherwise
        # be. Use the number of path segments to keep directory entries together,
        # and keep the dist-info stuff at the end.
        def sorter(t):
            ap = t[0]
            n = ap.count('/')
            if '.dist-info' in ap:
                n += 10000
            return (n, ap)
        archive_paths = sorted(archive_paths, key=sorter)

        # Now, at last, RECORD.
        # Paths in here are archive paths - nothing else makes sense.
        self.write_records((distinfo, info_dir), libdir, archive_paths)
        # Now, ready to build the zip file
        pathname = os.path.join(self.dirname, self.filename)
        self.build_zip(pathname, archive_paths)
        return pathname
| 449 | |
| 450 def skip_entry(self, arcname): | |
| 451 """ | |
| 452 Determine whether an archive entry should be skipped when verifying | |
| 453 or installing. | |
| 454 """ | |
| 455 # The signature file won't be in RECORD, | |
| 456 # and we don't currently don't do anything with it | |
| 457 # We also skip directories, as they won't be in RECORD | |
| 458 # either. See: | |
| 459 # | |
| 460 # https://github.com/pypa/wheel/issues/294 | |
| 461 # https://github.com/pypa/wheel/issues/287 | |
| 462 # https://github.com/pypa/wheel/pull/289 | |
| 463 # | |
| 464 return arcname.endswith(('/', '/RECORD.jws')) | |
| 465 | |
    def install(self, paths, maker, **kwargs):
        """
        Install a wheel to the specified paths. If kwarg ``warner`` is
        specified, it should be a callable, which will be called with two
        tuples indicating the wheel version of this software and the wheel
        version in the file, if there is a discrepancy in the versions.
        This can be used to issue any warnings or raise any exceptions.
        If kwarg ``lib_only`` is True, only the purelib/platlib files are
        installed, and the headers, scripts, data and dist-info metadata are
        not written. If kwarg ``bytecode_hashed_invalidation`` is True, written
        bytecode will try to use file-hash based invalidation (PEP-552) on
        supported interpreter versions (CPython >= 3.7).

        The return value is a :class:`InstalledDistribution` instance unless
        ``options.lib_only`` is True, in which case the return value is ``None``.
        """

        dry_run = maker.dry_run
        warner = kwargs.get('warner')
        lib_only = kwargs.get('lib_only', False)
        bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False)

        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        data_dir = '%s.data' % name_ver
        info_dir = '%s.dist-info' % name_ver

        metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
        wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
        record_name = posixpath.join(info_dir, 'RECORD')

        wrapper = codecs.getreader('utf-8')

        with ZipFile(pathname, 'r') as zf:
            # Check the wheel format version first, giving the caller a
            # chance (via ``warner``) to warn or abort on a mismatch.
            with zf.open(wheel_metadata_name) as bwf:
                wf = wrapper(bwf)
                message = message_from_file(wf)
            wv = message['Wheel-Version'].split('.', 1)
            file_version = tuple([int(i) for i in wv])
            if (file_version != self.wheel_version) and warner:
                warner(self.wheel_version, file_version)

            if message['Root-Is-Purelib'] == 'true':
                libdir = paths['purelib']
            else:
                libdir = paths['platlib']

            # Load RECORD, keyed by archive path, for size/digest checks.
            records = {}
            with zf.open(record_name) as bf:
                with CSVReader(stream=bf) as reader:
                    for row in reader:
                        p = row[0]
                        records[p] = row

            data_pfx = posixpath.join(data_dir, '')
            info_pfx = posixpath.join(info_dir, '')
            script_pfx = posixpath.join(data_dir, 'scripts', '')

            # make a new instance rather than a copy of maker's,
            # as we mutate it
            fileop = FileOperator(dry_run=dry_run)
            fileop.record = True    # so we can rollback if needed

            bc = not sys.dont_write_bytecode    # Double negatives. Lovely!

            outfiles = []   # for RECORD writing

            # for script copying/shebang processing
            workdir = tempfile.mkdtemp()
            # set target dir later
            # we default add_launchers to False, as the
            # Python Launcher should be used instead
            maker.source_dir = workdir
            maker.target_dir = None
            try:
                for zinfo in zf.infolist():
                    arcname = zinfo.filename
                    if isinstance(arcname, text_type):
                        u_arcname = arcname
                    else:
                        u_arcname = arcname.decode('utf-8')
                    if self.skip_entry(u_arcname):
                        continue
                    # Verify the entry against RECORD before writing it.
                    row = records[u_arcname]
                    if row[2] and str(zinfo.file_size) != row[2]:
                        raise DistlibException('size mismatch for '
                                               '%s' % u_arcname)
                    if row[1]:
                        kind, value = row[1].split('=', 1)
                        with zf.open(arcname) as bf:
                            data = bf.read()
                        _, digest = self.get_hash(data, kind)
                        if digest != value:
                            raise DistlibException('digest mismatch for '
                                                   '%s' % arcname)

                    if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
                        logger.debug('lib_only: skipping %s', u_arcname)
                        continue
                    is_script = (u_arcname.startswith(script_pfx)
                                 and not u_arcname.endswith('.exe'))

                    if u_arcname.startswith(data_pfx):
                        # Entries under <name>.data/<where>/ install into
                        # the location given by paths[where].
                        _, where, rp = u_arcname.split('/', 2)
                        outfile = os.path.join(paths[where], convert_path(rp))
                    else:
                        # meant for site-packages.
                        if u_arcname in (wheel_metadata_name, record_name):
                            continue
                        outfile = os.path.join(libdir, convert_path(u_arcname))
                    if not is_script:
                        with zf.open(arcname) as bf:
                            fileop.copy_stream(bf, outfile)
                        outfiles.append(outfile)
                        # Double check the digest of the written file
                        if not dry_run and row[1]:
                            with open(outfile, 'rb') as bf:
                                data = bf.read()
                                _, newdigest = self.get_hash(data, kind)
                                if newdigest != digest:
                                    raise DistlibException('digest mismatch '
                                                           'on write for '
                                                           '%s' % outfile)
                        if bc and outfile.endswith('.py'):
                            try:
                                pyc = fileop.byte_compile(outfile,
                                    hashed_invalidation=bc_hashed_invalidation)
                                outfiles.append(pyc)
                            except Exception:
                                # Don't give up if byte-compilation fails,
                                # but log it and perhaps warn the user
                                logger.warning('Byte-compilation failed',
                                               exc_info=True)
                    else:
                        # Scripts are copied to the work area and
                        # regenerated via ``maker`` so shebangs and
                        # launchers are handled properly.
                        fn = os.path.basename(convert_path(arcname))
                        workname = os.path.join(workdir, fn)
                        with zf.open(arcname) as bf:
                            fileop.copy_stream(bf, workname)

                        dn, fn = os.path.split(outfile)
                        maker.target_dir = dn
                        filenames = maker.make(fn)
                        fileop.set_executable_mode(filenames)
                        outfiles.extend(filenames)

                if lib_only:
                    logger.debug('lib_only: returning None')
                    dist = None
                else:
                    # Generate scripts

                    # Try to get pydist.json so we can see if there are
                    # any commands to generate. If this fails (e.g. because
                    # of a legacy wheel), log a warning but don't give up.
                    commands = None
                    file_version = self.info['Wheel-Version']
                    if file_version == '1.0':
                        # Use legacy info
                        ep = posixpath.join(info_dir, 'entry_points.txt')
                        try:
                            with zf.open(ep) as bwf:
                                epdata = read_exports(bwf)
                            commands = {}
                            for key in ('console', 'gui'):
                                k = '%s_scripts' % key
                                if k in epdata:
                                    commands['wrap_%s' % key] = d = {}
                                    for v in epdata[k].values():
                                        s = '%s:%s' % (v.prefix, v.suffix)
                                        if v.flags:
                                            s += ' [%s]' % ','.join(v.flags)
                                        d[v.name] = s
                        except Exception:
                            logger.warning('Unable to read legacy script '
                                           'metadata, so cannot generate '
                                           'scripts')
                    else:
                        try:
                            with zf.open(metadata_name) as bwf:
                                wf = wrapper(bwf)
                                commands = json.load(wf).get('extensions')
                                if commands:
                                    commands = commands.get('python.commands')
                        except Exception:
                            logger.warning('Unable to read JSON metadata, so '
                                           'cannot generate scripts')
                    if commands:
                        console_scripts = commands.get('wrap_console', {})
                        gui_scripts = commands.get('wrap_gui', {})
                        if console_scripts or gui_scripts:
                            script_dir = paths.get('scripts', '')
                            if not os.path.isdir(script_dir):
                                raise ValueError('Valid script path not '
                                                 'specified')
                            maker.target_dir = script_dir
                            for k, v in console_scripts.items():
                                script = '%s = %s' % (k, v)
                                filenames = maker.make(script)
                                fileop.set_executable_mode(filenames)

                            if gui_scripts:
                                options = {'gui': True }
                                for k, v in gui_scripts.items():
                                    script = '%s = %s' % (k, v)
                                    filenames = maker.make(script, options)
                                    fileop.set_executable_mode(filenames)

                    p = os.path.join(libdir, info_dir)
                    dist = InstalledDistribution(p)

                    # Write SHARED
                    paths = dict(paths)     # don't change passed in dict
                    del paths['purelib']
                    del paths['platlib']
                    paths['lib'] = libdir
                    p = dist.write_shared_locations(paths, dry_run)
                    if p:
                        outfiles.append(p)

                    # Write RECORD
                    dist.write_installed_files(outfiles, paths['prefix'],
                                               dry_run)
                return dist
            except Exception:  # pragma: no cover
                logger.exception('installation failed.')
                fileop.rollback()
                raise
            finally:
                shutil.rmtree(workdir)
| 695 | |
    def _get_dylib_cache(self):
        """
        Return the process-wide Cache used when extracting C extension
        modules from wheels, creating it on first use (the module-level
        ``cache`` global is the singleton holder).
        """
        global cache
        if cache is None:
            # Use native string to avoid issues on 2.x: see Python #20140.
            base = os.path.join(get_cache_base(), str('dylib-cache'),
                                '%s.%s' % sys.version_info[:2])
            cache = Cache(base)
        return cache
| 704 | |
    def _get_extensions(self):
        """
        Extract any C extension modules listed in the wheel's EXTENSIONS
        file into the dylib cache (refreshing stale copies) and return a
        list of (module name, extracted path) pairs. An empty list is
        returned when the wheel declares no extensions.
        """
        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        arcname = posixpath.join(info_dir, 'EXTENSIONS')
        wrapper = codecs.getreader('utf-8')
        result = []
        with ZipFile(pathname, 'r') as zf:
            try:
                with zf.open(arcname) as bf:
                    wf = wrapper(bf)
                    extensions = json.load(wf)
                    cache = self._get_dylib_cache()
                    prefix = cache.prefix_to_dir(pathname)
                    cache_base = os.path.join(cache.base, prefix)
                    if not os.path.isdir(cache_base):
                        os.makedirs(cache_base)
                    for name, relpath in extensions.items():
                        dest = os.path.join(cache_base, convert_path(relpath))
                        if not os.path.exists(dest):
                            extract = True
                        else:
                            # Re-extract only when the copy inside the wheel
                            # is newer than the cached file on disk.
                            file_time = os.stat(dest).st_mtime
                            file_time = datetime.datetime.fromtimestamp(file_time)
                            info = zf.getinfo(relpath)
                            wheel_time = datetime.datetime(*info.date_time)
                            extract = wheel_time > file_time
                        if extract:
                            zf.extract(relpath, cache_base)
                        result.append((name, dest))
            except KeyError:
                # No EXTENSIONS entry: the wheel has no extensions.
                pass
        return result
| 738 | |
    def is_compatible(self):
        """
        Determine if a wheel is compatible with the running system.
        """
        # Delegates to the module-level is_compatible() function (defined
        # later in this module), which the method name shadows here.
        return is_compatible(self)
| 744 | |
    def is_mountable(self):
        """
        Determine if a wheel is asserted as mountable by its metadata.
        """
        # NOTE(review): always True pending a metadata flag for
        # mountability; mount() calls this before adding to sys.path.
        return True # for now - metadata details TBD
| 750 | |
| 751 def mount(self, append=False): | |
| 752 pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) | |
| 753 if not self.is_compatible(): | |
| 754 msg = 'Wheel %s not compatible with this Python.' % pathname | |
| 755 raise DistlibException(msg) | |
| 756 if not self.is_mountable(): | |
| 757 msg = 'Wheel %s is marked as not mountable.' % pathname | |
| 758 raise DistlibException(msg) | |
| 759 if pathname in sys.path: | |
| 760 logger.debug('%s already in path', pathname) | |
| 761 else: | |
| 762 if append: | |
| 763 sys.path.append(pathname) | |
| 764 else: | |
| 765 sys.path.insert(0, pathname) | |
| 766 extensions = self._get_extensions() | |
| 767 if extensions: | |
| 768 if _hook not in sys.meta_path: | |
| 769 sys.meta_path.append(_hook) | |
| 770 _hook.add(pathname, extensions) | |
| 771 | |
| 772 def unmount(self): | |
| 773 pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) | |
| 774 if pathname not in sys.path: | |
| 775 logger.debug('%s not in path', pathname) | |
| 776 else: | |
| 777 sys.path.remove(pathname) | |
| 778 if pathname in _hook.impure_wheels: | |
| 779 _hook.remove(pathname) | |
| 780 if not _hook.impure_wheels: | |
| 781 if _hook in sys.meta_path: | |
| 782 sys.meta_path.remove(_hook) | |
| 783 | |
    def verify(self):
        """
        Verify the wheel's contents against its RECORD: every archive
        entry must match the size and digest RECORD declares for it, and
        no entry may contain a '..' path segment.

        :raises DistlibException: on any mismatch or unsafe entry.
        """
        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        data_dir = '%s.data' % name_ver
        info_dir = '%s.dist-info' % name_ver

        metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
        wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
        record_name = posixpath.join(info_dir, 'RECORD')

        wrapper = codecs.getreader('utf-8')

        with ZipFile(pathname, 'r') as zf:
            with zf.open(wheel_metadata_name) as bwf:
                wf = wrapper(bwf)
                message = message_from_file(wf)
            wv = message['Wheel-Version'].split('.', 1)
            file_version = tuple([int(i) for i in wv])
            # TODO version verification

            # Load RECORD, keyed by archive path.
            records = {}
            with zf.open(record_name) as bf:
                with CSVReader(stream=bf) as reader:
                    for row in reader:
                        p = row[0]
                        records[p] = row

            for zinfo in zf.infolist():
                arcname = zinfo.filename
                if isinstance(arcname, text_type):
                    u_arcname = arcname
                else:
                    u_arcname = arcname.decode('utf-8')
                # See issue #115: some wheels have .. in their entries, but
                # in the filename ... e.g. __main__..py ! So the check is
                # updated to look for .. in the directory portions
                p = u_arcname.split('/')
                if '..' in p:
                    raise DistlibException('invalid entry in '
                                           'wheel: %r' % u_arcname)

                if self.skip_entry(u_arcname):
                    continue
                row = records[u_arcname]
                if row[2] and str(zinfo.file_size) != row[2]:
                    raise DistlibException('size mismatch for '
                                           '%s' % u_arcname)
                if row[1]:
                    kind, value = row[1].split('=', 1)
                    with zf.open(arcname) as bf:
                        data = bf.read()
                        _, digest = self.get_hash(data, kind)
                        if digest != value:
                            raise DistlibException('digest mismatch for '
                                                   '%s' % arcname)
| 839 | |
    def update(self, modifier, dest_dir=None, **kwargs):
        """
        Update the contents of a wheel in a generic way. The modifier should
        be a callable which expects a dictionary argument: its keys are
        archive-entry paths, and its values are absolute filesystem paths
        where the contents the corresponding archive entries can be found. The
        modifier is free to change the contents of the files pointed to, add
        new entries and remove entries, before returning. This method will
        extract the entire contents of the wheel to a temporary location, call
        the modifier, and then use the passed (and possibly updated)
        dictionary to write a new wheel. If ``dest_dir`` is specified, the new
        wheel is written there -- otherwise, the original wheel is overwritten.

        The modifier should return True if it updated the wheel, else False.
        This method returns the same value the modifier returns.
        """

        def get_version(path_map, info_dir):
            # Return (version, metadata_path) for the extracted wheel, or
            # (None, None) if no metadata file was found. Prefers the legacy
            # metadata filename, falling back to PKG-INFO.
            version = path = None
            key = '%s/%s' % (info_dir, LEGACY_METADATA_FILENAME)
            if key not in path_map:
                key = '%s/PKG-INFO' % info_dir
            if key in path_map:
                path = path_map[key]
                version = Metadata(path=path).version
            return version, path

        def update_version(version, path):
            # Bump the version's local segment ('+...') in the metadata file
            # at `path` to signal that the wheel contents were modified.
            updated = None
            try:
                # Constructed only to validate PEP 440 compliance; raises
                # UnsupportedVersionError for non-compliant versions.
                v = NormalizedVersion(version)
                i = version.find('-')
                if i < 0:
                    # No local segment yet -- add one.
                    updated = '%s+1' % version
                else:
                    # Increment the last component of the existing segment.
                    parts = [int(s) for s in version[i + 1:].split('.')]
                    parts[-1] += 1
                    updated = '%s+%s' % (version[:i],
                                         '.'.join(str(i) for i in parts))
            except UnsupportedVersionError:
                logger.debug('Cannot update non-compliant (PEP-440) '
                             'version %r', version)
            if updated:
                # Rewrite the metadata in place, preserving its format.
                md = Metadata(path=path)
                md.version = updated
                legacy = path.endswith(LEGACY_METADATA_FILENAME)
                md.write(path=path, legacy=legacy)
                logger.debug('Version updated from %r to %r', version,
                             updated)

        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        record_name = posixpath.join(info_dir, 'RECORD')
        with tempdir() as workdir:
            with ZipFile(pathname, 'r') as zf:
                # Extract everything except RECORD (it will be regenerated),
                # building a map of archive entry -> extracted filesystem path.
                path_map = {}
                for zinfo in zf.infolist():
                    arcname = zinfo.filename
                    if isinstance(arcname, text_type):
                        u_arcname = arcname
                    else:
                        u_arcname = arcname.decode('utf-8')
                    if u_arcname == record_name:
                        continue
                    # Guard against path traversal via '..' entries.
                    if '..' in u_arcname:
                        raise DistlibException('invalid entry in '
                                               'wheel: %r' % u_arcname)
                    zf.extract(zinfo, workdir)
                    path = os.path.join(workdir, convert_path(u_arcname))
                    path_map[u_arcname] = path

            # Remember the version.
            original_version, _ = get_version(path_map, info_dir)
            # Files extracted. Call the modifier.
            modified = modifier(path_map, **kwargs)
            if modified:
                # Something changed - need to build a new wheel.
                current_version, path = get_version(path_map, info_dir)
                if current_version and (current_version == original_version):
                    # Add or update local version to signify changes.
                    update_version(current_version, path)
                # Decide where the new wheel goes.
                if dest_dir is None:
                    # Build in the workdir, then overwrite the original below.
                    fd, newpath = tempfile.mkstemp(suffix='.whl',
                                                   prefix='wheel-update-',
                                                   dir=workdir)
                    os.close(fd)
                else:
                    if not os.path.isdir(dest_dir):
                        raise DistlibException('Not a directory: %r' % dest_dir)
                    newpath = os.path.join(dest_dir, self.filename)
                archive_paths = list(path_map.items())
                distinfo = os.path.join(workdir, info_dir)
                info = distinfo, info_dir
                # Regenerate RECORD, then zip everything into the new wheel.
                self.write_records(info, workdir, archive_paths)
                self.build_zip(newpath, archive_paths)
                if dest_dir is None:
                    shutil.copyfile(newpath, pathname)
        return modified
| 940 | |
def compatible_tags():
    """
    Return (pyver, abi, arch) tuples compatible with this Python.
    """
    # Version tags: the running interpreter first, then every older minor
    # release of the same major series, newest to oldest.
    py_major = VER_SUFFIX[0]
    versions = [VER_SUFFIX]
    versions.extend(py_major + str(n)
                    for n in range(sys.version_info[1] - 1, -1, -1))

    # ABI tags derived from the extension-module suffixes, with the native
    # ABI (if any) first and the wildcard 'none' last.
    abis = sorted(suffix.split('.', 2)[1]
                  for suffix, _, _ in imp.get_suffixes()
                  if suffix.startswith('.abi'))
    if ABI != 'none':
        abis.insert(0, ABI)
    abis.append('none')

    # Architecture tags: the native one, plus -- on macOS -- the same and
    # older OS versions with the fat/universal variants this CPU supports.
    arches = [ARCH]
    if sys.platform == 'darwin':
        m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH)
        if m:
            name, os_major, os_minor, cpu = m.groups()
            variants = [cpu]
            if cpu in ('i386', 'ppc'):
                variants.append('fat')
            if cpu in ('i386', 'ppc', 'x86_64'):
                variants.append('fat3')
            if cpu in ('ppc64', 'x86_64'):
                variants.append('fat64')
            if cpu in ('i386', 'x86_64'):
                variants.append('intel')
            if cpu in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'):
                variants.append('universal')
            for minor in range(int(os_minor), -1, -1):
                for variant in variants:
                    tag = '%s_%s_%s_%s' % (name, os_major, minor, variant)
                    if tag != ARCH:  # the native tag is already present
                        arches.append(tag)

    result = []
    # Most specific -- our Python version, each ABI and arch combination.
    for abi in abis:
        for arch in arches:
            result.append((IMP_PREFIX + versions[0], abi, arch))

    # Implementation-specific tags with no ABI / arch dependency.
    for i, version in enumerate(versions):
        result.append((IMP_PREFIX + version, 'none', 'any'))
        if i == 0:
            # Major-version-only tag, e.g. 'cp3'.
            result.append((IMP_PREFIX + version[0], 'none', 'any'))

    # Generic 'py' tags: no implementation, ABI or arch dependency.
    for i, version in enumerate(versions):
        result.append(('py' + version, 'none', 'any'))
        if i == 0:
            result.append(('py' + version[0], 'none', 'any'))
    return set(result)
| 1001 | |
| 1002 | |
# Compute the tag set once at import time; the helper is deleted right
# after so only the cached result remains in the module namespace.
COMPATIBLE_TAGS = compatible_tags()

del compatible_tags
| 1006 | |
| 1007 | |
def is_compatible(wheel, tags=None):
    """
    Return True if the given wheel matches any of the supplied (or default)
    compatibility tags.

    ``wheel`` may be a ``Wheel`` instance or a wheel filename; ``tags``
    defaults to the tags compatible with the running interpreter.
    """
    if not isinstance(wheel, Wheel):
        wheel = Wheel(wheel)  # assume it's a filename
    if tags is None:
        tags = COMPATIBLE_TAGS
    return any(ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch
               for ver, abi, arch in tags)
