Mercurial > repos > guerler > hhblits
comparison lib/python3.8/site-packages/pip/_internal/req/req_install.py @ 1:64071f2a4cf0 draft default tip
Deleted selected files
author | guerler |
---|---|
date | Mon, 27 Jul 2020 03:55:49 -0400 |
parents | 9e54283cc701 |
children |
comparison
equal
deleted
inserted
replaced
0:9e54283cc701 | 1:64071f2a4cf0 |
---|---|
1 # The following comment should be removed at some point in the future. | |
2 # mypy: strict-optional=False | |
3 | |
4 from __future__ import absolute_import | |
5 | |
6 import logging | |
7 import os | |
8 import shutil | |
9 import sys | |
10 import zipfile | |
11 | |
12 from pip._vendor import pkg_resources, six | |
13 from pip._vendor.packaging.requirements import Requirement | |
14 from pip._vendor.packaging.utils import canonicalize_name | |
15 from pip._vendor.packaging.version import Version | |
16 from pip._vendor.packaging.version import parse as parse_version | |
17 from pip._vendor.pep517.wrappers import Pep517HookCaller | |
18 | |
19 from pip._internal import pep425tags | |
20 from pip._internal.build_env import NoOpBuildEnvironment | |
21 from pip._internal.exceptions import InstallationError | |
22 from pip._internal.locations import get_scheme | |
23 from pip._internal.models.link import Link | |
24 from pip._internal.operations.build.metadata import generate_metadata | |
25 from pip._internal.operations.build.metadata_legacy import \ | |
26 generate_metadata as generate_metadata_legacy | |
27 from pip._internal.operations.install.editable_legacy import \ | |
28 install_editable as install_editable_legacy | |
29 from pip._internal.operations.install.legacy import install as install_legacy | |
30 from pip._internal.operations.install.wheel import install_wheel | |
31 from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path | |
32 from pip._internal.req.req_uninstall import UninstallPathSet | |
33 from pip._internal.utils.deprecation import deprecated | |
34 from pip._internal.utils.hashes import Hashes | |
35 from pip._internal.utils.logging import indent_log | |
36 from pip._internal.utils.marker_files import ( | |
37 PIP_DELETE_MARKER_FILENAME, | |
38 has_delete_marker_file, | |
39 write_delete_marker_file, | |
40 ) | |
41 from pip._internal.utils.misc import ( | |
42 ask_path_exists, | |
43 backup_dir, | |
44 display_path, | |
45 dist_in_site_packages, | |
46 dist_in_usersite, | |
47 get_installed_version, | |
48 hide_url, | |
49 redact_auth_from_url, | |
50 rmtree, | |
51 ) | |
52 from pip._internal.utils.packaging import get_metadata | |
53 from pip._internal.utils.temp_dir import TempDirectory | |
54 from pip._internal.utils.typing import MYPY_CHECK_RUNNING | |
55 from pip._internal.utils.virtualenv import running_under_virtualenv | |
56 from pip._internal.vcs import vcs | |
57 | |
58 if MYPY_CHECK_RUNNING: | |
59 from typing import ( | |
60 Any, Dict, Iterable, List, Optional, Sequence, Union, | |
61 ) | |
62 from pip._internal.build_env import BuildEnvironment | |
63 from pip._internal.cache import WheelCache | |
64 from pip._internal.index.package_finder import PackageFinder | |
65 from pip._vendor.pkg_resources import Distribution | |
66 from pip._vendor.packaging.specifiers import SpecifierSet | |
67 from pip._vendor.packaging.markers import Marker | |
68 | |
69 | |
70 logger = logging.getLogger(__name__) | |
71 | |
72 | |
def _get_dist(metadata_directory):
    # type: (str) -> Distribution
    """Build a pkg_resources.Distribution from a metadata directory.

    Chooses ``Distribution`` for ``.egg-info`` directories and
    ``DistInfoDistribution`` for ``.dist-info`` directories.
    """
    dist_dir = metadata_directory.rstrip(os.sep)

    # Pick the Distribution subclass matching the metadata format.
    if dist_dir.endswith(".egg-info"):
        dist_cls = pkg_resources.Distribution
    else:
        assert dist_dir.endswith(".dist-info")
        dist_cls = pkg_resources.DistInfoDistribution

    # Derive the project name from the directory name and wrap the
    # directory in a PathMetadata provider.
    base_dir, dist_dir_name = os.path.split(dist_dir)
    project_name = os.path.splitext(dist_dir_name)[0]
    path_metadata = pkg_resources.PathMetadata(base_dir, dist_dir)

    return dist_cls(
        base_dir,
        project_name=project_name,
        metadata=path_metadata,
    )
97 | |
98 | |
class InstallRequirement(object):
    """
    Represents something that may be installed later on, may have information
    about where to fetch the relevant requirement and also contains logic for
    installing the said requirement.
    """

    def __init__(
        self,
        req,  # type: Optional[Requirement]
        comes_from,  # type: Optional[Union[str, InstallRequirement]]
        source_dir=None,  # type: Optional[str]
        editable=False,  # type: bool
        link=None,  # type: Optional[Link]
        markers=None,  # type: Optional[Marker]
        use_pep517=None,  # type: Optional[bool]
        isolated=False,  # type: bool
        options=None,  # type: Optional[Dict[str, Any]]
        wheel_cache=None,  # type: Optional[WheelCache]
        constraint=False,  # type: bool
        extras=()  # type: Iterable[str]
    ):
        # type: (...) -> None
        assert req is None or isinstance(req, Requirement), req
        self.req = req
        self.comes_from = comes_from
        self.constraint = constraint
        # Normalize the source directory to an absolute path, if given.
        if source_dir is None:
            self.source_dir = None  # type: Optional[str]
        else:
            self.source_dir = os.path.normpath(os.path.abspath(source_dir))
        self.editable = editable

        self._wheel_cache = wheel_cache
        if link is None and req and req.url:
            # PEP 508 URL requirement
            link = Link(req.url)
        self.link = self.original_link = link
        # Path to any downloaded or already-existing package.
        self.local_file_path = None  # type: Optional[str]
        if self.link and self.link.is_file:
            self.local_file_path = self.link.file_path

        # Extras: an explicit argument wins; otherwise they are taken from
        # the requirement itself, normalized via safe_extra().
        if extras:
            self.extras = extras
        elif req:
            self.extras = {
                pkg_resources.safe_extra(extra) for extra in req.extras
            }
        else:
            self.extras = set()
        # Fall back to the requirement's own environment marker.
        if markers is None and req:
            markers = req.marker
        self.markers = markers

        # This holds the pkg_resources.Distribution object if this requirement
        # is already available:
        self.satisfied_by = None  # type: Optional[Distribution]
        # Whether the installation process should try to uninstall an existing
        # distribution before installing this requirement.
        self.should_reinstall = False
        # Temporary build location
        self._temp_build_dir = None  # type: Optional[TempDirectory]
        # Set to True after successful installation
        self.install_succeeded = None  # type: Optional[bool]
        self.options = options if options else {}
        # Set to True after successful preparation of this requirement
        self.prepared = False
        self.is_direct = False

        self.isolated = isolated
        self.build_env = NoOpBuildEnvironment()  # type: BuildEnvironment

        # For PEP 517, the directory where we request the project metadata
        # gets stored. We need this to pass to build_wheel, so the backend
        # can ensure that the wheel matches the metadata (see the PEP for
        # details).
        self.metadata_directory = None  # type: Optional[str]

        # The static build requirements (from pyproject.toml)
        self.pyproject_requires = None  # type: Optional[List[str]]

        # Build requirements that we will check are available
        self.requirements_to_check = []  # type: List[str]

        # The PEP 517 backend we should use to build the project
        self.pep517_backend = None  # type: Optional[Pep517HookCaller]

        # Are we using PEP 517 for this requirement?
        # After pyproject.toml has been loaded, the only valid values are True
        # and False. Before loading, None is valid (meaning "use the default").
        # Setting an explicit value before loading pyproject.toml is supported,
        # but after loading this flag should be treated as read only.
        self.use_pep517 = use_pep517
193 | |
194 def __str__(self): | |
195 # type: () -> str | |
196 if self.req: | |
197 s = str(self.req) | |
198 if self.link: | |
199 s += ' from %s' % redact_auth_from_url(self.link.url) | |
200 elif self.link: | |
201 s = redact_auth_from_url(self.link.url) | |
202 else: | |
203 s = '<InstallRequirement>' | |
204 if self.satisfied_by is not None: | |
205 s += ' in %s' % display_path(self.satisfied_by.location) | |
206 if self.comes_from: | |
207 if isinstance(self.comes_from, six.string_types): | |
208 comes_from = self.comes_from # type: Optional[str] | |
209 else: | |
210 comes_from = self.comes_from.from_path() | |
211 if comes_from: | |
212 s += ' (from %s)' % comes_from | |
213 return s | |
214 | |
215 def __repr__(self): | |
216 # type: () -> str | |
217 return '<%s object: %s editable=%r>' % ( | |
218 self.__class__.__name__, str(self), self.editable) | |
219 | |
220 def format_debug(self): | |
221 # type: () -> str | |
222 """An un-tested helper for getting state, for debugging. | |
223 """ | |
224 attributes = vars(self) | |
225 names = sorted(attributes) | |
226 | |
227 state = ( | |
228 "{}={!r}".format(attr, attributes[attr]) for attr in sorted(names) | |
229 ) | |
230 return '<{name} object: {{{state}}}>'.format( | |
231 name=self.__class__.__name__, | |
232 state=", ".join(state), | |
233 ) | |
234 | |
    def populate_link(self, finder, upgrade, require_hashes):
        # type: (PackageFinder, bool, bool) -> None
        """Ensure that if a link can be found for this, that it is found.

        Note that self.link may still be None - if Upgrade is False and the
        requirement is already installed.

        If require_hashes is True, don't use the wheel cache, because cached
        wheels, always built locally, have different hashes than the files
        downloaded from the index server and thus throw false hash mismatches.
        Furthermore, cached wheels at present have undeterministic contents due
        to file modification times.
        """
        if self.link is None:
            self.link = finder.find_requirement(self, upgrade)
        if self._wheel_cache is not None and not require_hashes:
            old_link = self.link
            # Only cached wheels matching the running interpreter's tags
            # are candidates.
            supported_tags = pep425tags.get_supported()
            self.link = self._wheel_cache.get(
                link=self.link,
                package_name=self.name,
                supported_tags=supported_tags,
            )
            if old_link != self.link:
                logger.debug('Using cached wheel link: %s', self.link)
260 | |
261 # Things that are valid for all kinds of requirements? | |
262 @property | |
263 def name(self): | |
264 # type: () -> Optional[str] | |
265 if self.req is None: | |
266 return None | |
267 return six.ensure_str(pkg_resources.safe_name(self.req.name)) | |
268 | |
    @property
    def specifier(self):
        # type: () -> SpecifierSet
        """Version specifier of the underlying requirement (self.req must
        be set)."""
        return self.req.specifier
273 | |
274 @property | |
275 def is_pinned(self): | |
276 # type: () -> bool | |
277 """Return whether I am pinned to an exact version. | |
278 | |
279 For example, some-package==1.2 is pinned; some-package>1.2 is not. | |
280 """ | |
281 specifiers = self.specifier | |
282 return (len(specifiers) == 1 and | |
283 next(iter(specifiers)).operator in {'==', '==='}) | |
284 | |
    @property
    def installed_version(self):
        # type: () -> Optional[str]
        """Version string of any installed distribution with this name."""
        return get_installed_version(self.name)
289 | |
290 def match_markers(self, extras_requested=None): | |
291 # type: (Optional[Iterable[str]]) -> bool | |
292 if not extras_requested: | |
293 # Provide an extra to safely evaluate the markers | |
294 # without matching any extra | |
295 extras_requested = ('',) | |
296 if self.markers is not None: | |
297 return any( | |
298 self.markers.evaluate({'extra': extra}) | |
299 for extra in extras_requested) | |
300 else: | |
301 return True | |
302 | |
303 @property | |
304 def has_hash_options(self): | |
305 # type: () -> bool | |
306 """Return whether any known-good hashes are specified as options. | |
307 | |
308 These activate --require-hashes mode; hashes specified as part of a | |
309 URL do not. | |
310 | |
311 """ | |
312 return bool(self.options.get('hashes', {})) | |
313 | |
314 def hashes(self, trust_internet=True): | |
315 # type: (bool) -> Hashes | |
316 """Return a hash-comparer that considers my option- and URL-based | |
317 hashes to be known-good. | |
318 | |
319 Hashes in URLs--ones embedded in the requirements file, not ones | |
320 downloaded from an index server--are almost peers with ones from | |
321 flags. They satisfy --require-hashes (whether it was implicitly or | |
322 explicitly activated) but do not activate it. md5 and sha224 are not | |
323 allowed in flags, which should nudge people toward good algos. We | |
324 always OR all hashes together, even ones from URLs. | |
325 | |
326 :param trust_internet: Whether to trust URL-based (#md5=...) hashes | |
327 downloaded from the internet, as by populate_link() | |
328 | |
329 """ | |
330 good_hashes = self.options.get('hashes', {}).copy() | |
331 link = self.link if trust_internet else self.original_link | |
332 if link and link.hash: | |
333 good_hashes.setdefault(link.hash_name, []).append(link.hash) | |
334 return Hashes(good_hashes) | |
335 | |
336 def from_path(self): | |
337 # type: () -> Optional[str] | |
338 """Format a nice indicator to show where this "comes from" | |
339 """ | |
340 if self.req is None: | |
341 return None | |
342 s = str(self.req) | |
343 if self.comes_from: | |
344 if isinstance(self.comes_from, six.string_types): | |
345 comes_from = self.comes_from | |
346 else: | |
347 comes_from = self.comes_from.from_path() | |
348 if comes_from: | |
349 s += '->' + comes_from | |
350 return s | |
351 | |
    def ensure_build_location(self, build_dir):
        # type: (str) -> str
        """Return the directory this requirement should be built in,
        creating a temporary directory when the project name is unknown.
        """
        assert build_dir is not None
        # Reuse any previously-created temporary build directory.
        if self._temp_build_dir is not None:
            assert self._temp_build_dir.path
            return self._temp_build_dir.path
        if self.req is None:
            # Some systems have /tmp as a symlink which confuses custom
            # builds (such as numpy). Thus, we ensure that the real path
            # is returned.
            self._temp_build_dir = TempDirectory(kind="req-build")

            return self._temp_build_dir.path
        if self.editable:
            name = self.name.lower()
        else:
            name = self.name
        # FIXME: Is there a better place to create the build_dir? (hg and bzr
        # need this)
        if not os.path.exists(build_dir):
            logger.debug('Creating directory %s', build_dir)
            os.makedirs(build_dir)
            # Mark the directory so remove_temporary_source() knows it is
            # safe to delete later.
            write_delete_marker_file(build_dir)
        return os.path.join(build_dir, name)
376 | |
    def _set_requirement(self):
        # type: () -> None
        """Set requirement after generating metadata.
        """
        assert self.req is None
        assert self.metadata is not None
        assert self.source_dir is not None

        # Construct a Requirement object from the generated metadata.
        # Use "===" (arbitrary equality) when the version does not parse
        # as a PEP 440 Version.
        if isinstance(parse_version(self.metadata["Version"]), Version):
            op = "=="
        else:
            op = "==="

        self.req = Requirement(
            "".join([
                self.metadata["Name"],
                op,
                self.metadata["Version"],
            ])
        )
398 | |
    def warn_on_mismatching_name(self):
        # type: () -> None
        """Warn when generated metadata reports a different project name
        than the requirement, and adopt the metadata name as authoritative.
        """
        metadata_name = canonicalize_name(self.metadata["Name"])
        if canonicalize_name(self.req.name) == metadata_name:
            # Everything is fine.
            return

        # If we're here, there's a mismatch. Log a warning about it.
        logger.warning(
            'Generating metadata for package %s '
            'produced metadata for project name %s. Fix your '
            '#egg=%s fragments.',
            self.name, metadata_name, self.name
        )
        self.req = Requirement(metadata_name)
414 | |
    def remove_temporary_source(self):
        # type: () -> None
        """Remove the source files from this requirement, if they are marked
        for deletion"""
        # Only delete directories carrying pip's delete-marker file, so we
        # never wipe a user-provided source tree.
        if self.source_dir and has_delete_marker_file(self.source_dir):
            logger.debug('Removing source in %s', self.source_dir)
            rmtree(self.source_dir)
        self.source_dir = None
        if self._temp_build_dir:
            self._temp_build_dir.cleanup()
            self._temp_build_dir = None
        self.build_env.cleanup()
427 | |
    def check_if_exists(self, use_user_site):
        # type: (bool) -> None
        """Find an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.should_reinstall appropriately.
        """
        if self.req is None:
            return
        # get_distribution() will resolve the entire list of requirements
        # anyway, and we've already determined that we need the requirement
        # in question, so strip the marker so that we don't try to
        # evaluate it.
        no_marker = Requirement(str(self.req))
        no_marker.marker = None
        try:
            self.satisfied_by = pkg_resources.get_distribution(str(no_marker))
        except pkg_resources.DistributionNotFound:
            # Nothing installed under this name: nothing more to record.
            return
        except pkg_resources.VersionConflict:
            # Something is installed but its version conflicts; decide
            # whether a reinstall is possible for the requested location.
            existing_dist = pkg_resources.get_distribution(
                self.req.name
            )
            if use_user_site:
                if dist_in_usersite(existing_dist):
                    self.should_reinstall = True
                elif (running_under_virtualenv() and
                        dist_in_site_packages(existing_dist)):
                    # A user-site install could not shadow the virtualenv's
                    # site-packages copy, so refuse rather than mislead.
                    raise InstallationError(
                        "Will not install to the user site because it will "
                        "lack sys.path precedence to %s in %s" %
                        (existing_dist.project_name, existing_dist.location)
                    )
            else:
                self.should_reinstall = True
        else:
            if self.editable and self.satisfied_by:
                self.should_reinstall = True
                # when installing editables, nothing pre-existing should ever
                # satisfy
                self.satisfied_by = None
468 | |
469 # Things valid for wheels | |
470 @property | |
471 def is_wheel(self): | |
472 # type: () -> bool | |
473 if not self.link: | |
474 return False | |
475 return self.link.is_wheel | |
476 | |
477 # Things valid for sdists | |
478 @property | |
479 def unpacked_source_directory(self): | |
480 # type: () -> str | |
481 return os.path.join( | |
482 self.source_dir, | |
483 self.link and self.link.subdirectory_fragment or '') | |
484 | |
    @property
    def setup_py_path(self):
        # type: () -> str
        """Path of ``setup.py`` inside the unpacked source directory."""
        assert self.source_dir, "No source dir for %s" % self
        setup_py = os.path.join(self.unpacked_source_directory, 'setup.py')

        # Python2 __file__ should not be unicode
        if six.PY2 and isinstance(setup_py, six.text_type):
            setup_py = setup_py.encode(sys.getfilesystemencoding())

        return setup_py
496 | |
    @property
    def pyproject_toml_path(self):
        # type: () -> str
        """Path where this requirement's ``pyproject.toml`` would live."""
        assert self.source_dir, "No source dir for %s" % self
        return make_pyproject_path(self.unpacked_source_directory)
502 | |
    def load_pyproject_toml(self):
        # type: () -> None
        """Load the pyproject.toml file.

        After calling this routine, all of the attributes related to PEP 517
        processing for this requirement have been set. In particular, the
        use_pep517 attribute can be used to determine whether we should
        follow the PEP 517 or legacy (setup.py) code path.
        """
        pyproject_toml_data = load_pyproject_toml(
            self.use_pep517,
            self.pyproject_toml_path,
            self.setup_py_path,
            str(self)
        )

        if pyproject_toml_data is None:
            # No usable pyproject.toml: use the legacy setup.py code path.
            self.use_pep517 = False
            return

        self.use_pep517 = True
        requires, backend, check, backend_path = pyproject_toml_data
        self.requirements_to_check = check
        self.pyproject_requires = requires
        # Hook caller used later for metadata generation and wheel builds.
        self.pep517_backend = Pep517HookCaller(
            self.unpacked_source_directory, backend, backend_path=backend_path,
        )
530 | |
    def _generate_metadata(self):
        # type: () -> str
        """Invokes metadata generator functions, with the required arguments.

        Returns the path of the directory containing the generated metadata.
        """
        if not self.use_pep517:
            # Legacy path: run "setup.py egg-info".
            assert self.unpacked_source_directory

            return generate_metadata_legacy(
                build_env=self.build_env,
                setup_py_path=self.setup_py_path,
                source_dir=self.unpacked_source_directory,
                editable=self.editable,
                isolated=self.isolated,
                details=self.name or "from {}".format(self.link)
            )

        # PEP 517 path: the backend must already have been selected by
        # load_pyproject_toml().
        assert self.pep517_backend is not None

        return generate_metadata(
            build_env=self.build_env,
            backend=self.pep517_backend,
        )
553 | |
    def prepare_metadata(self):
        # type: () -> None
        """Ensure that project metadata is available.

        Under PEP 517, call the backend hook to prepare the metadata.
        Under legacy processing, call setup.py egg-info.
        """
        assert self.source_dir

        with indent_log():
            self.metadata_directory = self._generate_metadata()

        # Act on the newly generated metadata, based on the name and version.
        if not self.name:
            # Name was unknown until now; derive the requirement from it.
            self._set_requirement()
        else:
            self.warn_on_mismatching_name()

        self.assert_source_matches_version()
573 | |
574 @property | |
575 def metadata(self): | |
576 # type: () -> Any | |
577 if not hasattr(self, '_metadata'): | |
578 self._metadata = get_metadata(self.get_dist()) | |
579 | |
580 return self._metadata | |
581 | |
    def get_dist(self):
        # type: () -> Distribution
        """Return a pkg_resources.Distribution built from our metadata
        directory."""
        return _get_dist(self.metadata_directory)
585 | |
    def assert_source_matches_version(self):
        # type: () -> None
        """Log whether the unpacked source's version satisfies the
        requirement's specifier (warning on mismatch, debug otherwise)."""
        assert self.source_dir
        version = self.metadata['version']
        if self.req.specifier and version not in self.req.specifier:
            logger.warning(
                'Requested %s, but installing version %s',
                self,
                version,
            )
        else:
            logger.debug(
                'Source in %s has version %s, which satisfies requirement %s',
                display_path(self.source_dir),
                version,
                self,
            )
603 | |
604 # For both source distributions and editables | |
605 def ensure_has_source_dir(self, parent_dir): | |
606 # type: (str) -> None | |
607 """Ensure that a source_dir is set. | |
608 | |
609 This will create a temporary build dir if the name of the requirement | |
610 isn't known yet. | |
611 | |
612 :param parent_dir: The ideal pip parent_dir for the source_dir. | |
613 Generally src_dir for editables and build_dir for sdists. | |
614 :return: self.source_dir | |
615 """ | |
616 if self.source_dir is None: | |
617 self.source_dir = self.ensure_build_location(parent_dir) | |
618 | |
    # For editable installations
    def update_editable(self, obtain=True):
        # type: (bool) -> None
        """Refresh an editable requirement's checkout from its VCS URL.

        :param obtain: if True, update/clone the working copy in place;
            otherwise export it without version-control metadata.
        """
        if not self.link:
            logger.debug(
                "Cannot update repository at %s; repository location is "
                "unknown",
                self.source_dir,
            )
            return
        assert self.editable
        assert self.source_dir
        if self.link.scheme == 'file':
            # Static paths don't get updated
            return
        # Split e.g. "git+https://..." into the VCS name and the real URL.
        assert '+' in self.link.url, "bad url: %r" % self.link.url
        vc_type, url = self.link.url.split('+', 1)
        vcs_backend = vcs.get_backend(vc_type)
        if vcs_backend:
            if not self.link.is_vcs:
                reason = (
                    "This form of VCS requirement is being deprecated: {}."
                ).format(
                    self.link.url
                )
                replacement = None
                if self.link.url.startswith("git+git@"):
                    replacement = (
                        "git+https://git@example.com/..., "
                        "git+ssh://git@example.com/..., "
                        "or the insecure git+git://git@example.com/..."
                    )
                deprecated(reason, replacement, gone_in="21.0", issue=7554)
            # Hide any credentials embedded in the URL from log output.
            hidden_url = hide_url(self.link.url)
            if obtain:
                vcs_backend.obtain(self.source_dir, url=hidden_url)
            else:
                vcs_backend.export(self.source_dir, url=hidden_url)
        else:
            assert 0, (
                'Unexpected version control type (in %s): %s'
                % (self.link, vc_type))
661 | |
    # Top-level Actions
    def uninstall(self, auto_confirm=False, verbose=False):
        # type: (bool, bool) -> Optional[UninstallPathSet]
        """
        Uninstall the distribution currently satisfying this requirement.

        Prompts before removing or modifying files unless
        ``auto_confirm`` is True.

        Refuses to delete or modify files outside of ``sys.prefix`` -
        thus uninstallation within a virtual environment can only
        modify that virtual environment, even if the virtualenv is
        linked to global site-packages.

        Returns the removed UninstallPathSet, or None if the distribution
        was not installed.
        """
        assert self.req
        try:
            dist = pkg_resources.get_distribution(self.req.name)
        except pkg_resources.DistributionNotFound:
            logger.warning("Skipping %s as it is not installed.", self.name)
            return None
        else:
            logger.info('Found existing installation: %s', dist)

        uninstalled_pathset = UninstallPathSet.from_dist(dist)
        uninstalled_pathset.remove(auto_confirm, verbose)
        return uninstalled_pathset
689 | |
690 def _get_archive_name(self, path, parentdir, rootdir): | |
691 # type: (str, str, str) -> str | |
692 | |
693 def _clean_zip_name(name, prefix): | |
694 # type: (str, str) -> str | |
695 assert name.startswith(prefix + os.path.sep), ( | |
696 "name %r doesn't start with prefix %r" % (name, prefix) | |
697 ) | |
698 name = name[len(prefix) + 1:] | |
699 name = name.replace(os.path.sep, '/') | |
700 return name | |
701 | |
702 path = os.path.join(parentdir, path) | |
703 name = _clean_zip_name(path, rootdir) | |
704 return self.name + '/' + name | |
705 | |
    def archive(self, build_dir):
        # type: (str) -> None
        """Saves archive to provided build_dir.

        Used for saving downloaded VCS requirements as part of `pip download`.
        """
        assert self.source_dir

        create_archive = True
        archive_name = '%s-%s.zip' % (self.name, self.metadata["version"])
        archive_path = os.path.join(build_dir, archive_name)

        # If the target archive already exists, ask the user what to do:
        # ignore (skip archiving), wipe, back up, or abort entirely.
        if os.path.exists(archive_path):
            response = ask_path_exists(
                'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' %
                display_path(archive_path), ('i', 'w', 'b', 'a'))
            if response == 'i':
                create_archive = False
            elif response == 'w':
                logger.warning('Deleting %s', display_path(archive_path))
                os.remove(archive_path)
            elif response == 'b':
                dest_file = backup_dir(archive_path)
                logger.warning(
                    'Backing up %s to %s',
                    display_path(archive_path),
                    display_path(dest_file),
                )
                shutil.move(archive_path, dest_file)
            elif response == 'a':
                sys.exit(-1)

        if not create_archive:
            return

        zip_output = zipfile.ZipFile(
            archive_path, 'w', zipfile.ZIP_DEFLATED, allowZip64=True,
        )
        with zip_output:
            dir = os.path.normcase(
                os.path.abspath(self.unpacked_source_directory)
            )
            for dirpath, dirnames, filenames in os.walk(dir):
                # Never ship pip's own egg-info scratch directory.
                if 'pip-egg-info' in dirnames:
                    dirnames.remove('pip-egg-info')
                for dirname in dirnames:
                    dir_arcname = self._get_archive_name(
                        dirname, parentdir=dirpath, rootdir=dir,
                    )
                    # Store directories explicitly with 0o755 permissions.
                    zipdir = zipfile.ZipInfo(dir_arcname + '/')
                    zipdir.external_attr = 0x1ED << 16  # 0o755
                    zip_output.writestr(zipdir, '')
                for filename in filenames:
                    # The delete-marker file is pip bookkeeping, not source.
                    if filename == PIP_DELETE_MARKER_FILENAME:
                        continue
                    file_arcname = self._get_archive_name(
                        filename, parentdir=dirpath, rootdir=dir,
                    )
                    filename = os.path.join(dirpath, filename)
                    zip_output.write(filename, file_arcname)

        logger.info('Saved %s', display_path(archive_path))
768 | |
    def install(
        self,
        install_options,  # type: List[str]
        global_options=None,  # type: Optional[Sequence[str]]
        root=None,  # type: Optional[str]
        home=None,  # type: Optional[str]
        prefix=None,  # type: Optional[str]
        warn_script_location=True,  # type: bool
        use_user_site=False,  # type: bool
        pycompile=True  # type: bool
    ):
        # type: (...) -> None
        """Install this requirement, dispatching between the editable,
        wheel and legacy (setup.py install) code paths.

        Sets self.install_succeeded to True on the editable/wheel paths.
        """
        # Compute the install locations for the requested destination
        # (user site, home, root, prefix).
        scheme = get_scheme(
            self.name,
            user=use_user_site,
            home=home,
            root=root,
            isolated=self.isolated,
            prefix=prefix,
        )

        global_options = global_options if global_options is not None else []
        if self.editable:
            install_editable_legacy(
                install_options,
                global_options,
                prefix=prefix,
                home=home,
                use_user_site=use_user_site,
                name=self.name,
                setup_py_path=self.setup_py_path,
                isolated=self.isolated,
                build_env=self.build_env,
                unpacked_source_directory=self.unpacked_source_directory,
            )
            self.install_succeeded = True
            return

        if self.is_wheel:
            assert self.local_file_path
            install_wheel(
                self.name,
                self.local_file_path,
                scheme=scheme,
                req_description=str(self.req),
                pycompile=pycompile,
                warn_script_location=warn_script_location,
            )
            self.install_succeeded = True
            return

        # Fallback: legacy "setup.py install".
        install_legacy(
            self,
            install_options=install_options,
            global_options=global_options,
            root=root,
            home=home,
            prefix=prefix,
            use_user_site=use_user_site,
            pycompile=pycompile,
            scheme=scheme,
        )