comparison planemo/lib/python3.7/site-packages/pip/_internal/operations/prepare.py @ 1:56ad4e20f292 draft

"planemo upload commit 6eee67778febed82ddd413c3ca40b3183a3898f1"
author guerler
date Fri, 31 Jul 2020 00:32:28 -0400
1 """Prepares a distribution for installation
2 """
3
4 import logging
5 import os
6
7 from pip._vendor import requests
8
9 from pip._internal.distributions import (
10 make_distribution_for_install_requirement,
11 )
12 from pip._internal.distributions.installed import InstalledDistribution
13 from pip._internal.download import (
14 is_dir_url, is_file_url, is_vcs_url, unpack_url, url_to_path,
15 )
16 from pip._internal.exceptions import (
17 DirectoryUrlHashUnsupported, HashUnpinned, InstallationError,
18 PreviousBuildDirError, VcsHashUnsupported,
19 )
20 from pip._internal.utils.compat import expanduser
21 from pip._internal.utils.hashes import MissingHashes
22 from pip._internal.utils.logging import indent_log
23 from pip._internal.utils.misc import display_path, normalize_path
24 from pip._internal.utils.typing import MYPY_CHECK_RUNNING
25
26 if MYPY_CHECK_RUNNING:
27 from typing import Optional
28
29 from pip._internal.distributions import AbstractDistribution
30 from pip._internal.download import PipSession
31 from pip._internal.index import PackageFinder
32 from pip._internal.req.req_install import InstallRequirement
33 from pip._internal.req.req_tracker import RequirementTracker
34
35 logger = logging.getLogger(__name__)
36
37
class RequirementPreparer(object):
    """Prepares a Requirement
    """

    def __init__(
        self,
        build_dir,  # type: str
        download_dir,  # type: Optional[str]
        src_dir,  # type: str
        wheel_download_dir,  # type: Optional[str]
        progress_bar,  # type: str
        build_isolation,  # type: bool
        req_tracker  # type: RequirementTracker
    ):
        # type: (...) -> None
        super(RequirementPreparer, self).__init__()

        self.src_dir = src_dir
        self.build_dir = build_dir
        self.req_tracker = req_tracker

        # Where still-packed archives should be written to. If None, they are
        # not saved, and are deleted immediately after unpacking.
        self.download_dir = download_dir

        # Where still-packed .whl files should be written to. If None, they are
        # written to the download_dir parameter. Kept separate from download_dir
        # to permit keeping only wheel archives for 'pip wheel'.
        if wheel_download_dir:
            wheel_download_dir = normalize_path(wheel_download_dir)
        self.wheel_download_dir = wheel_download_dir

        # NOTE
        # download_dir and wheel_download_dir overlap semantically and may
        # be combined if we're willing to have non-wheel archives present in
        # the wheelhouse output by 'pip wheel'.
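        # (Hedged example, not from the pip source: 'pip wheel --wheel-dir DIR'
        # is the case that supplies wheel_download_dir, while
        # 'pip download --dest DIR' supplies download_dir.)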

        self.progress_bar = progress_bar

        # Is build isolation allowed?
        self.build_isolation = build_isolation
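
        # Illustrative sketch, not part of pip: a caller (for example a pip
        # command) might construct the preparer roughly like this, with
        # hypothetical paths and an existing RequirementTracker:
        #
        #     preparer = RequirementPreparer(
        #         build_dir='/tmp/pip-build',    # hypothetical path
        #         download_dir=None,             # archives are not kept
        #         src_dir='/tmp/pip-src',        # hypothetical path
        #         wheel_download_dir=None,
        #         progress_bar='on',
        #         build_isolation=True,
        #         req_tracker=req_tracker,       # assumed to already exist
        #     )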

    @property
    def _download_should_save(self):
        # type: () -> bool
        # TODO: Modify to reduce indentation needed
        if self.download_dir:
            self.download_dir = expanduser(self.download_dir)
            if os.path.exists(self.download_dir):
                return True
            else:
                logger.critical('Could not find download directory')
                raise InstallationError(
                    "Could not find or access download directory '%s'"
                    % display_path(self.download_dir))
        return False

    def prepare_linked_requirement(
        self,
        req,  # type: InstallRequirement
        session,  # type: PipSession
        finder,  # type: PackageFinder
        upgrade_allowed,  # type: bool
        require_hashes  # type: bool
    ):
        # type: (...) -> AbstractDistribution
        """Prepare a requirement that would be obtained from req.link
        """
        # TODO: Break up into smaller functions
        if req.link and req.link.scheme == 'file':
            path = url_to_path(req.link.url)
            logger.info('Processing %s', display_path(path))
        else:
            logger.info('Collecting %s', req)

        with indent_log():
            # @@ if filesystem packages are not marked
            # editable in a req, a non-deterministic error
            # occurs when the script attempts to unpack the
            # build directory
            req.ensure_has_source_dir(self.build_dir)
            # If a checkout exists, it's unwise to keep going. Version
            # inconsistencies are logged later, but do not fail the
            # installation.
            # FIXME: this won't upgrade when there's an existing
            # package unpacked in `req.source_dir`
            if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
                raise PreviousBuildDirError(
                    "pip can't proceed with requirements '%s' due to a"
                    " pre-existing build directory (%s). This is "
                    "likely due to a previous installation that failed"
                    ". pip is being responsible and not assuming it "
                    "can delete this. Please delete it and try again."
                    % (req, req.source_dir)
                )
            req.populate_link(finder, upgrade_allowed, require_hashes)

            # We can't hit this spot and have populate_link return None.
            # req.satisfied_by is None here (because we're
            # guarded) and upgrade has no impact except when satisfied_by
            # is not None.
            # Then inside find_requirement existing_applicable -> False
            # If no new versions are found, DistributionNotFound is raised,
            # otherwise a result is guaranteed.
            assert req.link
            link = req.link

            # Now that we have the real link, we can tell what kind of
            # requirements we have and raise some more informative errors
            # than otherwise. (For example, we can raise VcsHashUnsupported
            # for a VCS URL rather than HashMissing.)
            if require_hashes:
                # We could check these first 2 conditions inside
                # unpack_url and save repetition of conditions, but then
                # we would report less-useful error messages for
                # unhashable requirements, complaining that there's no
                # hash provided.
                if is_vcs_url(link):
                    raise VcsHashUnsupported()
                elif is_file_url(link) and is_dir_url(link):
                    raise DirectoryUrlHashUnsupported()
                if not req.original_link and not req.is_pinned:
                    # Unpinned packages are asking for trouble when a new
                    # version is uploaded. This isn't a security check, but
                    # it saves users a surprising hash mismatch in the
                    # future.
                    #
                    # file:/// URLs aren't pinnable, so don't complain
                    # about them not being pinned.
                    raise HashUnpinned()

            hashes = req.hashes(trust_internet=not require_hashes)
            if require_hashes and not hashes:
                # Known-good hashes are missing for this requirement, so
                # shim it with a facade object that will provoke hash
                # computation and then raise a HashMissing exception
                # showing the user what the hash should be.
                hashes = MissingHashes()

            try:
                download_dir = self.download_dir
                # We always delete unpacked sdists after pip runs.
                autodelete_unpacked = True
                if req.link.is_wheel and self.wheel_download_dir:
                    # When doing 'pip wheel' we download wheels to a
                    # dedicated dir.
                    download_dir = self.wheel_download_dir
                if req.link.is_wheel:
                    if download_dir:
                        # When downloading, we only unpack wheels to get
                        # metadata.
                        autodelete_unpacked = True
                    else:
                        # When installing a wheel, we use the unpacked
                        # wheel.
                        autodelete_unpacked = False
                unpack_url(
                    req.link, req.source_dir,
                    download_dir, autodelete_unpacked,
                    session=session, hashes=hashes,
                    progress_bar=self.progress_bar
                )
            except requests.HTTPError as exc:
                logger.critical(
                    'Could not install requirement %s because of error %s',
                    req,
                    exc,
                )
                raise InstallationError(
                    'Could not install requirement %s because of HTTP '
                    'error %s for URL %s' %
                    (req, exc, req.link)
                )
            abstract_dist = make_distribution_for_install_requirement(req)
            with self.req_tracker.track(req):
                abstract_dist.prepare_distribution_metadata(
                    finder, self.build_isolation,
                )
            if self._download_should_save:
                # Make a .zip of the source_dir we already created.
                if not req.link.is_artifact:
                    req.archive(self.download_dir)
        return abstract_dist

    def prepare_editable_requirement(
        self,
        req,  # type: InstallRequirement
        require_hashes,  # type: bool
        use_user_site,  # type: bool
        finder  # type: PackageFinder
    ):
        # type: (...) -> AbstractDistribution
        """Prepare an editable requirement
        """
        assert req.editable, "cannot prepare a non-editable req as editable"

        logger.info('Obtaining %s', req)

        with indent_log():
            if require_hashes:
                raise InstallationError(
                    'The editable requirement %s cannot be installed when '
                    'requiring hashes, because there is no single file to '
                    'hash.' % req
                )
            req.ensure_has_source_dir(self.src_dir)
            req.update_editable(not self._download_should_save)

            abstract_dist = make_distribution_for_install_requirement(req)
            with self.req_tracker.track(req):
                abstract_dist.prepare_distribution_metadata(
                    finder, self.build_isolation,
                )

            if self._download_should_save:
                req.archive(self.download_dir)
            req.check_if_exists(use_user_site)

        return abstract_dist

    def prepare_installed_requirement(
        self,
        req,  # type: InstallRequirement
        require_hashes,  # type: bool
        skip_reason  # type: str
    ):
        # type: (...) -> AbstractDistribution
        """Prepare an already-installed requirement
        """
        assert req.satisfied_by, "req should have been satisfied but isn't"
        assert skip_reason is not None, (
            "did not get a skip reason even though req.satisfied_by "
            "is set to %r" % (req.satisfied_by,)
        )
        logger.info(
            'Requirement %s: %s (%s)',
            skip_reason, req, req.satisfied_by.version
        )
        with indent_log():
            if require_hashes:
                logger.debug(
                    'Since it is already installed, we are trusting this '
                    'package without checking its hash. To ensure a '
                    'completely repeatable environment, install into an '
                    'empty virtualenv.'
                )
            abstract_dist = InstalledDistribution(req)

        return abstract_dist