comparison: lib/python3.8/site-packages/pip/_internal/operations/prepare.py @ 1:64071f2a4cf0 (draft, default, tip)
description: Deleted selected files
author:      guerler
date:        Mon, 27 Jul 2020 03:55:49 -0400
parents:     9e54283cc701
children:    (none)
1 """Prepares a distribution for installation | |
2 """ | |
3 | |
4 # The following comment should be removed at some point in the future. | |
5 # mypy: strict-optional=False | |
6 | |
7 import logging | |
8 import mimetypes | |
9 import os | |
10 import shutil | |
11 import sys | |
12 | |
13 from pip._vendor import requests | |
14 from pip._vendor.six import PY2 | |
15 | |
16 from pip._internal.distributions import ( | |
17 make_distribution_for_install_requirement, | |
18 ) | |
19 from pip._internal.distributions.installed import InstalledDistribution | |
20 from pip._internal.exceptions import ( | |
21 DirectoryUrlHashUnsupported, | |
22 HashMismatch, | |
23 HashUnpinned, | |
24 InstallationError, | |
25 PreviousBuildDirError, | |
26 VcsHashUnsupported, | |
27 ) | |
28 from pip._internal.utils.filesystem import copy2_fixed | |
29 from pip._internal.utils.hashes import MissingHashes | |
30 from pip._internal.utils.logging import indent_log | |
31 from pip._internal.utils.marker_files import write_delete_marker_file | |
32 from pip._internal.utils.misc import ( | |
33 ask_path_exists, | |
34 backup_dir, | |
35 display_path, | |
36 hide_url, | |
37 path_to_display, | |
38 rmtree, | |
39 ) | |
40 from pip._internal.utils.temp_dir import TempDirectory | |
41 from pip._internal.utils.typing import MYPY_CHECK_RUNNING | |
42 from pip._internal.utils.unpacking import unpack_file | |
43 from pip._internal.vcs import vcs | |
44 | |
45 if MYPY_CHECK_RUNNING: | |
46 from typing import ( | |
47 Callable, List, Optional, Tuple, | |
48 ) | |
49 | |
50 from mypy_extensions import TypedDict | |
51 | |
52 from pip._internal.distributions import AbstractDistribution | |
53 from pip._internal.index.package_finder import PackageFinder | |
54 from pip._internal.models.link import Link | |
55 from pip._internal.network.download import Downloader | |
56 from pip._internal.req.req_install import InstallRequirement | |
57 from pip._internal.req.req_tracker import RequirementTracker | |
58 from pip._internal.utils.hashes import Hashes | |
59 | |
60 if PY2: | |
61 CopytreeKwargs = TypedDict( | |
62 'CopytreeKwargs', | |
63 { | |
64 'ignore': Callable[[str, List[str]], List[str]], | |
65 'symlinks': bool, | |
66 }, | |
67 total=False, | |
68 ) | |
69 else: | |
70 CopytreeKwargs = TypedDict( | |
71 'CopytreeKwargs', | |
72 { | |
73 'copy_function': Callable[[str, str], None], | |
74 'ignore': Callable[[str, List[str]], List[str]], | |
75 'ignore_dangling_symlinks': bool, | |
76 'symlinks': bool, | |
77 }, | |
78 total=False, | |
79 ) | |
80 | |
logger = logging.getLogger(__name__)


def _get_prepared_distribution(
    req,  # type: InstallRequirement
    req_tracker,  # type: RequirementTracker
    finder,  # type: PackageFinder
    build_isolation  # type: bool
):
    # type: (...) -> AbstractDistribution
    """Prepare a distribution for installation.
    """
    abstract_dist = make_distribution_for_install_requirement(req)
    with req_tracker.track(req):
        abstract_dist.prepare_distribution_metadata(finder, build_isolation)
    return abstract_dist


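# unpack_vcs_link dispatches on the link's URL scheme (e.g. "git+https")
# and delegates the checkout to the matching VCS backend, which exports
# the requested revision into `location`.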
def unpack_vcs_link(link, location):
    # type: (Link, str) -> None
    vcs_backend = vcs.get_backend_for_scheme(link.scheme)
    assert vcs_backend is not None
    vcs_backend.unpack(location, url=hide_url(link.url))


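# _copy_file saves an already-fetched archive into `location` (typically a
# --download directory), prompting interactively when a file of the same
# name is already present: (i)gnore keeps the existing file, (w)ipe deletes
# it, (b)ackup renames it out of the way, and (a)bort exits pip.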
def _copy_file(filename, location, link):
    # type: (str, str, Link) -> None
    copy = True
    download_location = os.path.join(location, link.filename)
    if os.path.exists(download_location):
        response = ask_path_exists(
            'The file {} exists. (i)gnore, (w)ipe, (b)ackup, (a)bort'.format(
                display_path(download_location)
            ),
            ('i', 'w', 'b', 'a'),
        )
        if response == 'i':
            copy = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(download_location))
            os.remove(download_location)
        elif response == 'b':
            dest_file = backup_dir(download_location)
            logger.warning(
                'Backing up %s to %s',
                display_path(download_location),
                display_path(dest_file),
            )
            shutil.move(download_location, dest_file)
        elif response == 'a':
            sys.exit(-1)
    if copy:
        shutil.copy(filename, download_location)
        logger.info('Saved %s', display_path(download_location))


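# unpack_http_url reuses a previously downloaded archive when a valid copy
# exists in `download_dir`; otherwise it downloads into a managed temp dir.
# Either way the archive is unpacked into `location` so its dependencies
# can be parsed, and the path to the still-packed file is returned.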
def unpack_http_url(
    link,  # type: Link
    location,  # type: str
    downloader,  # type: Downloader
    download_dir=None,  # type: Optional[str]
    hashes=None,  # type: Optional[Hashes]
):
    # type: (...) -> str
    temp_dir = TempDirectory(kind="unpack", globally_managed=True)
    # If a download dir is specified, is the file already downloaded there?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(
            link, download_dir, hashes
        )

    if already_downloaded_path:
        from_path = already_downloaded_path
        content_type = mimetypes.guess_type(from_path)[0]
    else:
        # let's download to a tmp dir
        from_path, content_type = _download_http_url(
            link, downloader, temp_dir.path, hashes
        )

    # unpack the archive to the build dir location. even when only
    # downloading archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type)

    return from_path


def _copy2_ignoring_special_files(src, dest):
    # type: (str, str) -> None
    """Copying special files is not supported, but as a convenience to users
    we skip errors copying them. This supports tools that may create e.g.
    socket files in the project source directory.
    """
    try:
        copy2_fixed(src, dest)
    except shutil.SpecialFileError as e:
        # SpecialFileError may be raised due to either the source or
        # destination. If the destination was the cause then we would actually
        # care, but since the destination directory is deleted prior to
        # copy we ignore all of them assuming it is caused by the source.
        logger.warning(
            "Ignoring special file error '%s' encountered copying %s to %s.",
            str(e),
            path_to_display(src),
            path_to_display(dest),
        )


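# _copy_source_tree mirrors a local project directory into the build dir.
# Top-level '.tox' and '.nox' directories are skipped because copying them
# can be very slow (see https://github.com/pypa/pip/pull/6770).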
def _copy_source_tree(source, target):
    # type: (str, str) -> None
    def ignore(d, names):
        # type: (str, List[str]) -> List[str]
        # Pulling in those directories can potentially be very slow,
        # exclude the following directories if they appear in the top
        # level dir (and only it).
        # See discussion at https://github.com/pypa/pip/pull/6770
        return ['.tox', '.nox'] if d == source else []

    kwargs = dict(ignore=ignore, symlinks=True)  # type: CopytreeKwargs

    if not PY2:
        # Python 2 does not support copy_function, so we only ignore
        # errors on special file copy in Python 3.
        kwargs['copy_function'] = _copy2_ignoring_special_files

    shutil.copytree(source, target, **kwargs)


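# unpack_file_url handles file:// links. A link to a local directory is
# copied straight into `location`; a link to a local archive is hash-checked
# (when hashes are provided) and unpacked, preferring an already-downloaded
# copy from `download_dir` when one exists.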
def unpack_file_url(
    link,  # type: Link
    location,  # type: str
    download_dir=None,  # type: Optional[str]
    hashes=None  # type: Optional[Hashes]
):
    # type: (...) -> Optional[str]
    """Unpack link into location.
    """
    link_path = link.file_path
    # If it's a url to a local directory
    if link.is_existing_dir():
        if os.path.isdir(location):
            rmtree(location)
        _copy_source_tree(link_path, location)
        return None

    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(
            link, download_dir, hashes
        )

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(from_path)

    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type)

    return from_path


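# unpack_url is the single entry point used by RequirementPreparer below:
# it dispatches to unpack_vcs_link for VCS URLs, unpack_file_url for
# file:// URLs, and unpack_http_url for everything else.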
def unpack_url(
    link,  # type: Link
    location,  # type: str
    downloader,  # type: Downloader
    download_dir=None,  # type: Optional[str]
    hashes=None,  # type: Optional[Hashes]
):
    # type: (...) -> Optional[str]
    """Unpack link into location, downloading if required.

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if link.is_vcs:
        unpack_vcs_link(link, location)
        return None

    # file urls
    elif link.is_file:
        return unpack_file_url(link, location, download_dir, hashes=hashes)

    # http urls
    else:
        return unpack_http_url(
            link,
            location,
            downloader,
            download_dir,
            hashes=hashes,
        )


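# _download_http_url streams the response body to disk chunk by chunk, then
# verifies the completed file against `hashes` (when provided) before
# returning its path together with the server-reported content type.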
def _download_http_url(
    link,  # type: Link
    downloader,  # type: Downloader
    temp_dir,  # type: str
    hashes,  # type: Optional[Hashes]
):
    # type: (...) -> Tuple[str, str]
    """Download link url into temp_dir using provided session"""
    download = downloader(link)

    file_path = os.path.join(temp_dir, download.filename)
    with open(file_path, 'wb') as content_file:
        for chunk in download.chunks:
            content_file.write(chunk)

    if hashes:
        hashes.check_against_path(file_path)

    return file_path, download.response.headers.get('content-type', '')


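# A stale file in the download dir is not trusted: on a hash mismatch it is
# deleted and None is returned so the caller re-downloads from the index.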
def _check_download_dir(link, download_dir, hashes):
    # type: (Link, str, Optional[Hashes]) -> Optional[str]
    """Check download_dir for a previously downloaded file with the correct
    hash. If a matching file is found, return its path; otherwise return
    None.
    """
    download_path = os.path.join(download_dir, link.filename)

    if not os.path.exists(download_path):
        return None

    # If already downloaded, does its hash match?
    logger.info('File was already downloaded %s', download_path)
    if hashes:
        try:
            hashes.check_against_path(download_path)
        except HashMismatch:
            logger.warning(
                'Previously-downloaded file %s has bad hash. '
                'Re-downloading.',
                download_path
            )
            os.unlink(download_path)
            return None
    return download_path


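# RequirementPreparer ties the helpers above together: given an
# InstallRequirement, it resolves where the source should live, enforces the
# hash-checking policy, fetches and unpacks the distribution, and returns an
# AbstractDistribution ready for metadata preparation.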
class RequirementPreparer(object):
    """Prepares a Requirement
    """

    def __init__(
        self,
        build_dir,  # type: str
        download_dir,  # type: Optional[str]
        src_dir,  # type: str
        wheel_download_dir,  # type: Optional[str]
        build_isolation,  # type: bool
        req_tracker,  # type: RequirementTracker
        downloader,  # type: Downloader
        finder,  # type: PackageFinder
        require_hashes,  # type: bool
        use_user_site,  # type: bool
    ):
        # type: (...) -> None
        super(RequirementPreparer, self).__init__()

        self.src_dir = src_dir
        self.build_dir = build_dir
        self.req_tracker = req_tracker
        self.downloader = downloader
        self.finder = finder

        # Where still-packed archives should be written to. If None, they are
        # not saved, and are deleted immediately after unpacking.
        self.download_dir = download_dir

        # Where still-packed .whl files should be written to. If None, they
        # are written to the download_dir parameter. Kept separate from
        # download_dir so that 'pip wheel' can keep only wheel archives.
        self.wheel_download_dir = wheel_download_dir

        # NOTE
        # download_dir and wheel_download_dir overlap semantically and may
        # be combined if we're willing to have non-wheel archives present in
        # the wheelhouse output by 'pip wheel'.

        # Is build isolation allowed?
        self.build_isolation = build_isolation

        # Should hash-checking be required?
        self.require_hashes = require_hashes

        # Should install in user site-packages?
        self.use_user_site = use_user_site

    @property
    def _download_should_save(self):
        # type: () -> bool
        if not self.download_dir:
            return False

        if os.path.exists(self.download_dir):
            return True

        logger.critical('Could not find download directory')
        raise InstallationError(
            "Could not find or access download directory '{}'"
            .format(self.download_dir))
    def prepare_linked_requirement(
        self,
        req,  # type: InstallRequirement
    ):
        # type: (...) -> AbstractDistribution
        """Prepare a requirement that would be obtained from req.link
        """
        assert req.link
        link = req.link

        # TODO: Break up into smaller functions
        if link.scheme == 'file':
            path = link.file_path
            logger.info('Processing %s', display_path(path))
        else:
            logger.info('Collecting %s', req.req or req)

        with indent_log():
            # @@ if filesystem packages are not marked
            # editable in a req, a non deterministic error
            # occurs when the script attempts to unpack the
            # build directory
            # Since source_dir is only set for editable requirements.
            assert req.source_dir is None
            req.ensure_has_source_dir(self.build_dir)
            # If a checkout exists, it's unwise to keep going. version
            # inconsistencies are logged later, but do not fail the
            # installation.
            # FIXME: this won't upgrade when there's an existing
            # package unpacked in `req.source_dir`
            if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
                raise PreviousBuildDirError(
                    "pip can't proceed with requirement '{}' due to a"
                    " pre-existing build directory ({}). This is "
                    "likely due to a previous installation that failed"
                    ". pip is being responsible and not assuming it "
                    "can delete this. Please delete it and try again."
                    .format(req, req.source_dir)
                )

            # Now that we have the real link, we can tell what kind of
            # requirements we have and raise some more informative errors
            # than otherwise. (For example, we can raise VcsHashUnsupported
            # for a VCS URL rather than HashMissing.)
            if self.require_hashes:
                # We could check these first 2 conditions inside
                # unpack_url and save repetition of conditions, but then
                # we would report less-useful error messages for
                # unhashable requirements, complaining that there's no
                # hash provided.
                if link.is_vcs:
                    raise VcsHashUnsupported()
                elif link.is_existing_dir():
                    raise DirectoryUrlHashUnsupported()
                if not req.original_link and not req.is_pinned:
                    # Unpinned packages are asking for trouble when a new
                    # version is uploaded. This isn't a security check, but
                    # it saves users a surprising hash mismatch in the
                    # future.
                    #
                    # file:/// URLs aren't pinnable, so don't complain
                    # about them not being pinned.
                    raise HashUnpinned()

            hashes = req.hashes(trust_internet=not self.require_hashes)
            if self.require_hashes and not hashes:
                # Known-good hashes are missing for this requirement, so
                # shim it with a facade object that will provoke hash
                # computation and then raise a HashMissing exception
                # showing the user what the hash should be.
                hashes = MissingHashes()

            download_dir = self.download_dir
            if link.is_wheel and self.wheel_download_dir:
                # when doing 'pip wheel' we download wheels to a
                # dedicated dir.
                download_dir = self.wheel_download_dir

            try:
                local_path = unpack_url(
                    link, req.source_dir, self.downloader, download_dir,
                    hashes=hashes,
                )
            except requests.HTTPError as exc:
                logger.critical(
                    'Could not install requirement %s because of error %s',
                    req,
                    exc,
                )
                raise InstallationError(
                    'Could not install requirement {} because of HTTP '
                    'error {} for URL {}'.format(req, exc, link)
                )

            # For use in later processing, preserve the file path on the
            # requirement.
            if local_path:
                req.local_file_path = local_path

            if link.is_wheel:
                if download_dir:
                    # When downloading, we only unpack wheels to get
                    # metadata.
                    autodelete_unpacked = True
                else:
                    # When installing a wheel, we use the unpacked
                    # wheel.
                    autodelete_unpacked = False
            else:
                # We always delete unpacked sdists after pip runs.
                autodelete_unpacked = True
            if autodelete_unpacked:
                write_delete_marker_file(req.source_dir)

            abstract_dist = _get_prepared_distribution(
                req, self.req_tracker, self.finder, self.build_isolation,
            )

            if download_dir:
                if link.is_existing_dir():
                    logger.info('Link is a directory, ignoring download_dir')
                elif local_path and not os.path.exists(
                    os.path.join(download_dir, link.filename)
                ):
                    _copy_file(local_path, download_dir, link)

            if self._download_should_save:
                # Make a .zip of the source_dir we already created.
                if link.is_vcs:
                    req.archive(self.download_dir)
        return abstract_dist

    def prepare_editable_requirement(
        self,
        req,  # type: InstallRequirement
    ):
        # type: (...) -> AbstractDistribution
        """Prepare an editable requirement
        """
        assert req.editable, "cannot prepare a non-editable req as editable"

        logger.info('Obtaining %s', req)

        with indent_log():
            if self.require_hashes:
                raise InstallationError(
                    'The editable requirement {} cannot be installed when '
                    'requiring hashes, because there is no single file to '
                    'hash.'.format(req)
                )
            req.ensure_has_source_dir(self.src_dir)
            req.update_editable(not self._download_should_save)

            abstract_dist = _get_prepared_distribution(
                req, self.req_tracker, self.finder, self.build_isolation,
            )

            if self._download_should_save:
                req.archive(self.download_dir)
            req.check_if_exists(self.use_user_site)

        return abstract_dist

    def prepare_installed_requirement(
        self,
        req,  # type: InstallRequirement
        skip_reason  # type: str
    ):
        # type: (...) -> AbstractDistribution
        """Prepare an already-installed requirement
        """
        assert req.satisfied_by, "req should have been satisfied but isn't"
        assert skip_reason is not None, (
            "did not get a skip reason, but req.satisfied_by "
            "is set to {}".format(req.satisfied_by)
        )
        logger.info(
            'Requirement %s: %s (%s)',
            skip_reason, req, req.satisfied_by.version
        )
        with indent_log():
            if self.require_hashes:
                logger.debug(
                    'Since it is already installed, we are trusting this '
                    'package without checking its hash. To ensure a '
                    'completely repeatable environment, install into an '
                    'empty virtualenv.'
                )
            abstract_dist = InstalledDistribution(req)

        return abstract_dist
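

# Illustrative only: a minimal sketch of how the module-level helpers fit
# together outside of RequirementPreparer. The URL and digest below are
# hypothetical placeholders, and a real call needs a Downloader built from
# an active PipSession.
#
#     from pip._internal.models.link import Link
#     from pip._internal.utils.hashes import Hashes
#
#     link = Link('https://example.com/pkg-1.0.tar.gz')       # hypothetical URL
#     hashes = Hashes({'sha256': ['<expected-hex-digest>']})  # placeholder digest
#     # local_path = unpack_url(link, build_dir, downloader,
#     #                         download_dir=None, hashes=hashes)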