Mercurial repository: guerler / hhblits
File: lib/python3.8/site-packages/setuptools/command/egg_info.py @ 0:9e54283cc701 (draft)
Commit message: "planemo upload commit d12c32a45bcd441307e632fca6d9af7d60289d44"
Author:   guerler
Date:     Mon, 27 Jul 2020 03:47:31 -0400
Parents:  (none)
Children: (none)
Comparison: -1:000000000000 -> 0:9e54283cc701 (file added in this changeset)
1 """setuptools.command.egg_info | |
2 | |
3 Create a distribution's .egg-info directory and contents""" | |
4 | |
5 from distutils.filelist import FileList as _FileList | |
6 from distutils.errors import DistutilsInternalError | |
7 from distutils.util import convert_path | |
8 from distutils import log | |
9 import distutils.errors | |
10 import distutils.filelist | |
11 import os | |
12 import re | |
13 import sys | |
14 import io | |
15 import warnings | |
16 import time | |
17 import collections | |
18 | |
19 from setuptools.extern import six | |
20 from setuptools.extern.six.moves import map | |
21 | |
22 from setuptools import Command | |
23 from setuptools.command.sdist import sdist | |
24 from setuptools.command.sdist import walk_revctrl | |
25 from setuptools.command.setopt import edit_config | |
26 from setuptools.command import bdist_egg | |
27 from pkg_resources import ( | |
28 parse_requirements, safe_name, parse_version, | |
29 safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename) | |
30 import setuptools.unicode_utils as unicode_utils | |
31 from setuptools.glob import glob | |
32 | |
33 from setuptools.extern import packaging | |
34 from setuptools import SetuptoolsDeprecationWarning | |
35 | |
36 | |
37 def translate_pattern(glob): | |
38 """ | |
    Translate a file path glob like '*.txt' into a regular expression.
    This differs from fnmatch.translate which allows wildcards to match
    directory separators. It also knows about '**/' which matches any number of
    directories.
    """
    pat = ''

    # This will split on '/' within [character classes]. This is deliberate.
    chunks = glob.split(os.path.sep)

    sep = re.escape(os.sep)
    valid_char = '[^%s]' % (sep,)

    for c, chunk in enumerate(chunks):
        last_chunk = c == len(chunks) - 1

        # Chunks that are a literal ** are globstars. They match anything.
        if chunk == '**':
            if last_chunk:
                # Match anything if this is the last component
                pat += '.*'
            else:
                # Match '(name/)*'
                pat += '(?:%s+%s)*' % (valid_char, sep)
            continue  # Break here as the whole path component has been handled

        # Find any special characters in the remainder
        i = 0
        chunk_len = len(chunk)
        while i < chunk_len:
            char = chunk[i]
            if char == '*':
                # Match any number of name characters
                pat += valid_char + '*'
            elif char == '?':
                # Match a name character
                pat += valid_char
            elif char == '[':
                # Character class
                inner_i = i + 1
                # Skip initial !/] chars
                if inner_i < chunk_len and chunk[inner_i] == '!':
                    inner_i = inner_i + 1
                if inner_i < chunk_len and chunk[inner_i] == ']':
                    inner_i = inner_i + 1

                # Loop till the closing ] is found
                while inner_i < chunk_len and chunk[inner_i] != ']':
                    inner_i = inner_i + 1

                if inner_i >= chunk_len:
                    # Got to the end of the string without finding a closing ]
                    # Do not treat this as a matching group, but as a literal [
                    pat += re.escape(char)
                else:
                    # Grab the insides of the [brackets]
                    inner = chunk[i + 1:inner_i]
                    char_class = ''

                    # Class negation
                    if inner[0] == '!':
                        char_class = '^'
                        inner = inner[1:]

                    char_class += re.escape(inner)
                    pat += '[%s]' % (char_class,)

                    # Skip to the end ]
                    i = inner_i
            else:
                pat += re.escape(char)
            i += 1

        # Join each chunk with the dir separator
        if not last_chunk:
            pat += sep

    pat += r'\Z'
    return re.compile(pat, flags=re.MULTILINE | re.DOTALL)

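
# --- Illustrative sketch; not part of the original setuptools source. ---
# translate_pattern() turns a MANIFEST.in-style glob into a compiled regex in
# which '*' and '?' never cross a directory separator, while a '**' component
# may span any number of directories.  The helper below is hypothetical and is
# never called; it only demonstrates the behaviour (assuming a POSIX '/'
# separator).
def _example_translate_pattern():
    pat = translate_pattern('docs/**/*.txt')
    assert pat.match('docs/index.txt')
    assert pat.match('docs/api/v1/index.txt')
    # a lone '*' stops at the directory separator:
    assert not translate_pattern('*.txt').match('docs/index.txt')
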

class InfoCommon:
    tag_build = None
    tag_date = None

    @property
    def name(self):
        return safe_name(self.distribution.get_name())

    def tagged_version(self):
        version = self.distribution.get_version()
        # egg_info may be called more than once for a distribution,
        # in which case the version string already contains all tags.
        if self.vtags and version.endswith(self.vtags):
            return safe_version(version)
        return safe_version(version + self.vtags)

    def tags(self):
        version = ''
        if self.tag_build:
            version += self.tag_build
        if self.tag_date:
            version += time.strftime("-%Y%m%d")
        return version
    vtags = property(tags)

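
# --- Illustrative sketch; not part of the original setuptools source. ---
# InfoCommon combines the explicit build tag and an optional date stamp into a
# suffix; tagged_version() appends that suffix (once) to the distribution's
# version.  _ExampleInfo below is a hypothetical stand-in for a configured
# command object and is never instantiated by setuptools itself.
def _example_version_tags():
    class _ExampleInfo(InfoCommon):
        tag_build = '.dev'
        tag_date = True
    suffix = _ExampleInfo().vtags
    # e.g. '.dev-20200727' when built on 2020-07-27
    assert suffix.startswith('.dev-')
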

class egg_info(InfoCommon, Command):
    description = "create a distribution's .egg-info directory"

    user_options = [
        ('egg-base=', 'e', "directory containing .egg-info directories"
                           " (default: top of the source tree)"),
        ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
        ('tag-build=', 'b', "Specify explicit tag to add to version number"),
        ('no-date', 'D', "Don't include date stamp [default]"),
    ]

    boolean_options = ['tag-date']
    negative_opt = {
        'no-date': 'tag-date',
    }

    def initialize_options(self):
        self.egg_base = None
        self.egg_name = None
        self.egg_info = None
        self.egg_version = None
        self.broken_egg_info = False

    ####################################
    # allow the 'tag_svn_revision' to be detected and
    # set, supporting sdists built on older Setuptools.
    @property
    def tag_svn_revision(self):
        pass

    @tag_svn_revision.setter
    def tag_svn_revision(self, value):
        pass
    ####################################

    def save_version_info(self, filename):
        """
        Materialize the value of date into the
        build tag. Install build keys in a deterministic order
        to avoid arbitrary reordering on subsequent builds.
        """
        egg_info = collections.OrderedDict()
        # follow the order these keys would have been added
        # when PYTHONHASHSEED=0
        egg_info['tag_build'] = self.tags()
        egg_info['tag_date'] = 0
        edit_config(filename, dict(egg_info=egg_info))

    def finalize_options(self):
        # Note: we need to capture the current value returned
        # by `self.tagged_version()`, so we can later update
        # `self.distribution.metadata.version` without
        # repercussions.
        self.egg_name = self.name
        self.egg_version = self.tagged_version()
        parsed_version = parse_version(self.egg_version)

        try:
            is_version = isinstance(parsed_version, packaging.version.Version)
            spec = (
                "%s==%s" if is_version else "%s===%s"
            )
            list(
                parse_requirements(spec % (self.egg_name, self.egg_version))
            )
        except ValueError:
            raise distutils.errors.DistutilsOptionError(
                "Invalid distribution name or version syntax: %s-%s" %
                (self.egg_name, self.egg_version)
            )

        if self.egg_base is None:
            dirs = self.distribution.package_dir
            self.egg_base = (dirs or {}).get('', os.curdir)

        self.ensure_dirname('egg_base')
        self.egg_info = to_filename(self.egg_name) + '.egg-info'
        if self.egg_base != os.curdir:
            self.egg_info = os.path.join(self.egg_base, self.egg_info)
        if '-' in self.egg_name:
            self.check_broken_egg_info()

        # Set package version for the benefit of dumber commands
        # (e.g. sdist, bdist_wininst, etc.)
        #
        self.distribution.metadata.version = self.egg_version

        # If we bootstrapped around the lack of a PKG-INFO, as might be the
        # case in a fresh checkout, make sure that any special tags get added
        # to the version info
        #
        pd = self.distribution._patched_dist
        if pd is not None and pd.key == self.egg_name.lower():
            pd._version = self.egg_version
            pd._parsed_version = parse_version(self.egg_version)
            self.distribution._patched_dist = None

    def write_or_delete_file(self, what, filename, data, force=False):
        """Write `data` to `filename` or delete if empty

        If `data` is non-empty, this routine is the same as ``write_file()``.
        If `data` is empty but not ``None``, this is the same as calling
        ``delete_file(filename)``. If `data` is ``None``, then this is a no-op
        unless `filename` exists, in which case a warning is issued about the
        orphaned file (if `force` is false), or deleted (if `force` is true).
        """
        if data:
            self.write_file(what, filename, data)
        elif os.path.exists(filename):
            if data is None and not force:
                log.warn(
                    "%s not set in setup(), but %s exists", what, filename
                )
                return
            else:
                self.delete_file(filename)

    def write_file(self, what, filename, data):
        """Write `data` to `filename` (if not a dry run) after announcing it

        `what` is used in a log message to identify what is being written
        to the file.
        """
        log.info("writing %s to %s", what, filename)
        if not six.PY2:
            data = data.encode("utf-8")
        if not self.dry_run:
            f = open(filename, 'wb')
            f.write(data)
            f.close()

    def delete_file(self, filename):
        """Delete `filename` (if not a dry run) after announcing it"""
        log.info("deleting %s", filename)
        if not self.dry_run:
            os.unlink(filename)

    def run(self):
        self.mkpath(self.egg_info)
        os.utime(self.egg_info, None)
        installer = self.distribution.fetch_build_egg
        for ep in iter_entry_points('egg_info.writers'):
            ep.require(installer=installer)
            writer = ep.resolve()
            writer(self, ep.name, os.path.join(self.egg_info, ep.name))

        # Get rid of native_libs.txt if it was put there by older bdist_egg
        nl = os.path.join(self.egg_info, "native_libs.txt")
        if os.path.exists(nl):
            self.delete_file(nl)

        self.find_sources()

    def find_sources(self):
        """Generate SOURCES.txt manifest file"""
        manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
        mm = manifest_maker(self.distribution)
        mm.manifest = manifest_filename
        mm.run()
        self.filelist = mm.filelist

    def check_broken_egg_info(self):
        bei = self.egg_name + '.egg-info'
        if self.egg_base != os.curdir:
            bei = os.path.join(self.egg_base, bei)
        if os.path.exists(bei):
            log.warn(
                "-" * 78 + '\n'
                "Note: Your current .egg-info directory has a '-' in its name;"
                '\nthis will not work correctly with "setup.py develop".\n\n'
                'Please rename %s to %s to correct this problem.\n' + '-' * 78,
                bei, self.egg_info
            )
            self.broken_egg_info = self.egg_info
            self.egg_info = bei  # make it work for now

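
# --- Illustrative sketch; not part of the original setuptools source. ---
# Typical invocation is through the command line, e.g.
#     python setup.py egg_info --tag-build .dev --tag-date
# run() then creates the .egg-info directory and delegates each file in it to
# a writer looked up from the 'egg_info.writers' entry-point group; the
# write_* helpers at the bottom of this module are the writers setuptools
# registers there.  A hypothetical third-party writer only has to follow the
# same (cmd, basename, filename) signature:
def _example_writer(cmd, basename, filename):
    # 'basename' is the entry-point name (also the file name inside
    # .egg-info); an empty or None payload would delete a stale file instead.
    cmd.write_or_delete_file('example metadata', filename,
                             'generated by a hypothetical plugin\n')
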

class FileList(_FileList):
    # Implementations of the various MANIFEST.in commands

    def process_template_line(self, line):
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words. 'action' is always
        # defined: it's the first word of the line. Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dir_pattern).
        (action, patterns, dir, dir_pattern) = self._parse_template_line(line)

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.
        if action == 'include':
            self.debug_print("include " + ' '.join(patterns))
            for pattern in patterns:
                if not self.include(pattern):
                    log.warn("warning: no files found matching '%s'", pattern)

        elif action == 'exclude':
            self.debug_print("exclude " + ' '.join(patterns))
            for pattern in patterns:
                if not self.exclude(pattern):
                    log.warn(("warning: no previously-included files "
                              "found matching '%s'"), pattern)

        elif action == 'global-include':
            self.debug_print("global-include " + ' '.join(patterns))
            for pattern in patterns:
                if not self.global_include(pattern):
                    log.warn(("warning: no files found matching '%s' "
                              "anywhere in distribution"), pattern)

        elif action == 'global-exclude':
            self.debug_print("global-exclude " + ' '.join(patterns))
            for pattern in patterns:
                if not self.global_exclude(pattern):
                    log.warn(("warning: no previously-included files matching "
                              "'%s' found anywhere in distribution"),
                             pattern)

        elif action == 'recursive-include':
            self.debug_print("recursive-include %s %s" %
                             (dir, ' '.join(patterns)))
            for pattern in patterns:
                if not self.recursive_include(dir, pattern):
                    log.warn(("warning: no files found matching '%s' "
                              "under directory '%s'"),
                             pattern, dir)

        elif action == 'recursive-exclude':
            self.debug_print("recursive-exclude %s %s" %
                             (dir, ' '.join(patterns)))
            for pattern in patterns:
                if not self.recursive_exclude(dir, pattern):
                    log.warn(("warning: no previously-included files matching "
                              "'%s' found under directory '%s'"),
                             pattern, dir)

        elif action == 'graft':
            self.debug_print("graft " + dir_pattern)
            if not self.graft(dir_pattern):
                log.warn("warning: no directories found matching '%s'",
                         dir_pattern)

        elif action == 'prune':
            self.debug_print("prune " + dir_pattern)
            if not self.prune(dir_pattern):
                log.warn(("no previously-included directories found "
                          "matching '%s'"), dir_pattern)

        else:
            raise DistutilsInternalError(
                "this cannot happen: invalid action '%s'" % action)

    def _remove_files(self, predicate):
        """
        Remove all files from the file list that match the predicate.
        Return True if any matching files were removed
        """
        found = False
        for i in range(len(self.files) - 1, -1, -1):
            if predicate(self.files[i]):
                self.debug_print(" removing " + self.files[i])
                del self.files[i]
                found = True
        return found

    def include(self, pattern):
        """Include files that match 'pattern'."""
        found = [f for f in glob(pattern) if not os.path.isdir(f)]
        self.extend(found)
        return bool(found)

    def exclude(self, pattern):
        """Exclude files that match 'pattern'."""
        match = translate_pattern(pattern)
        return self._remove_files(match.match)

    def recursive_include(self, dir, pattern):
        """
        Include all files anywhere in 'dir/' that match the pattern.
        """
        full_pattern = os.path.join(dir, '**', pattern)
        found = [f for f in glob(full_pattern, recursive=True)
                 if not os.path.isdir(f)]
        self.extend(found)
        return bool(found)

    def recursive_exclude(self, dir, pattern):
        """
        Exclude any file anywhere in 'dir/' that matches the pattern.
436 """ | |
437 match = translate_pattern(os.path.join(dir, '**', pattern)) | |
438 return self._remove_files(match.match) | |
439 | |
440 def graft(self, dir): | |
441 """Include all files from 'dir/'.""" | |
442 found = [ | |
443 item | |
444 for match_dir in glob(dir) | |
445 for item in distutils.filelist.findall(match_dir) | |
446 ] | |
447 self.extend(found) | |
448 return bool(found) | |
449 | |
450 def prune(self, dir): | |
451 """Filter out files from 'dir/'.""" | |
452 match = translate_pattern(os.path.join(dir, '**')) | |
453 return self._remove_files(match.match) | |
454 | |
455 def global_include(self, pattern): | |
456 """ | |
457 Include all files anywhere in the current directory that match the | |
458 pattern. This is very inefficient on large file trees. | |
459 """ | |
460 if self.allfiles is None: | |
461 self.findall() | |
462 match = translate_pattern(os.path.join('**', pattern)) | |
463 found = [f for f in self.allfiles if match.match(f)] | |
464 self.extend(found) | |
465 return bool(found) | |
466 | |
467 def global_exclude(self, pattern): | |
468 """ | |
469 Exclude all files anywhere that match the pattern. | |
470 """ | |
471 match = translate_pattern(os.path.join('**', pattern)) | |
472 return self._remove_files(match.match) | |
473 | |
474 def append(self, item): | |
475 if item.endswith('\r'): # Fix older sdists built on Windows | |
476 item = item[:-1] | |
477 path = convert_path(item) | |
478 | |
479 if self._safe_path(path): | |
480 self.files.append(path) | |
481 | |
482 def extend(self, paths): | |
483 self.files.extend(filter(self._safe_path, paths)) | |
484 | |
485 def _repair(self): | |
486 """ | |
487 Replace self.files with only safe paths | |
488 | |
489 Because some owners of FileList manipulate the underlying | |
490 ``files`` attribute directly, this method must be called to | |
491 repair those paths. | |
492 """ | |
493 self.files = list(filter(self._safe_path, self.files)) | |
494 | |
495 def _safe_path(self, path): | |
496 enc_warn = "'%s' not %s encodable -- skipping" | |
497 | |
        # To avoid accidental trans-coding errors, first decode to unicode
        u_path = unicode_utils.filesys_decode(path)
        if u_path is None:
            log.warn("'%s' in unexpected encoding -- skipping" % path)
            return False

        # Must ensure utf-8 encodability
        utf8_path = unicode_utils.try_encode(u_path, "utf-8")
        if utf8_path is None:
            log.warn(enc_warn, path, 'utf-8')
            return False

        try:
            # accept if either way checks out
            if os.path.exists(u_path) or os.path.exists(utf8_path):
                return True
            # this will catch any encode errors decoding u_path
        except UnicodeEncodeError:
            log.warn(enc_warn, path, sys.getfilesystemencoding())

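
# --- Illustrative sketch; not part of the original setuptools source. ---
# process_template_line() maps each MANIFEST.in command onto one of the
# FileList methods above, e.g. 'include README.rst' -> include('README.rst'),
# 'recursive-include docs *.txt' -> recursive_include('docs', '*.txt'),
# 'graft data' -> graft('data') and 'prune build' -> prune('build').  The
# template lines below are hypothetical; missing files only trigger warnings.
def _example_manifest_template():
    filelist = FileList()
    for line in ("include README.rst",
                 "recursive-include docs *.txt",
                 "prune build"):
        filelist.process_template_line(line)
    return filelist.files

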
class manifest_maker(sdist):
    template = "MANIFEST.in"

    def initialize_options(self):
        self.use_defaults = 1
        self.prune = 1
        self.manifest_only = 1
        self.force_manifest = 1

    def finalize_options(self):
        pass

    def run(self):
        self.filelist = FileList()
        if not os.path.exists(self.manifest):
            self.write_manifest()  # it must exist so it'll get in the list
        self.add_defaults()
        if os.path.exists(self.template):
            self.read_template()
        self.prune_file_list()
        self.filelist.sort()
        self.filelist.remove_duplicates()
        self.write_manifest()

    def _manifest_normalize(self, path):
        path = unicode_utils.filesys_decode(path)
        return path.replace(os.sep, '/')

    def write_manifest(self):
        """
        Write the file list in 'self.filelist' to the manifest file
        named by 'self.manifest'.
        """
        self.filelist._repair()

        # _repair() ensures encodability, but not that the paths are unicode
        files = [self._manifest_normalize(f) for f in self.filelist.files]
        msg = "writing manifest file '%s'" % self.manifest
        self.execute(write_file, (self.manifest, files), msg)

    def warn(self, msg):
        if not self._should_suppress_warning(msg):
            sdist.warn(self, msg)

    @staticmethod
    def _should_suppress_warning(msg):
        """
        suppress missing-file warnings from sdist
        """
        return re.match(r"standard file .*not found", msg)

    def add_defaults(self):
        sdist.add_defaults(self)
        self.check_license()
        self.filelist.append(self.template)
        self.filelist.append(self.manifest)
        rcfiles = list(walk_revctrl())
        if rcfiles:
            self.filelist.extend(rcfiles)
        elif os.path.exists(self.manifest):
            self.read_manifest()

        if os.path.exists("setup.py"):
            # setup.py should be included by default, even if it's not
            # the script called to create the sdist
            self.filelist.append("setup.py")

        ei_cmd = self.get_finalized_command('egg_info')
        self.filelist.graft(ei_cmd.egg_info)

    def prune_file_list(self):
        build = self.get_finalized_command('build')
        base_dir = self.distribution.get_fullname()
        self.filelist.prune(build.build_base)
        self.filelist.prune(base_dir)
        sep = re.escape(os.sep)
        self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep,
                                      is_regex=1)


def write_file(filename, contents):
    """Create a file with the specified name and write 'contents' (a
    sequence of strings without line terminators) to it.
    """
    contents = "\n".join(contents)

    # assuming the contents has been vetted for utf-8 encoding
    contents = contents.encode("utf-8")

    with open(filename, "wb") as f:  # always write POSIX-style manifest
        f.write(contents)


def write_pkg_info(cmd, basename, filename):
    log.info("writing %s", filename)
    if not cmd.dry_run:
        metadata = cmd.distribution.metadata
        metadata.version, oldver = cmd.egg_version, metadata.version
        metadata.name, oldname = cmd.egg_name, metadata.name

        try:
            # write unescaped data to PKG-INFO, so older pkg_resources
            # can still parse it
            metadata.write_pkg_info(cmd.egg_info)
        finally:
            metadata.name, metadata.version = oldname, oldver

        safe = getattr(cmd.distribution, 'zip_safe', None)

        bdist_egg.write_safety_flag(cmd.egg_info, safe)


def warn_depends_obsolete(cmd, basename, filename):
    if os.path.exists(filename):
        log.warn(
            "WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
            "Use the install_requires/extras_require setup() args instead."
        )


def _write_requirements(stream, reqs):
    lines = yield_lines(reqs or ())

    def append_cr(line):
        return line + '\n'
    lines = map(append_cr, lines)
    stream.writelines(lines)


def write_requirements(cmd, basename, filename):
    dist = cmd.distribution
    data = six.StringIO()
    _write_requirements(data, dist.install_requires)
    extras_require = dist.extras_require or {}
    for extra in sorted(extras_require):
        data.write('\n[{extra}]\n'.format(**vars()))
        _write_requirements(data, extras_require[extra])
    cmd.write_or_delete_file("requirements", filename, data.getvalue())

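
# --- Illustrative sketch; not part of the original setuptools source. ---
# write_requirements() renders install_requires first, then one "[extra]"
# section per key of extras_require, which is the layout of
# .egg-info/requires.txt.  The requirement strings below are hypothetical.
def _example_requires_txt():
    buf = six.StringIO()
    _write_requirements(buf, ['requests>=2.0'])
    buf.write('\n[security]\n')
    _write_requirements(buf, ['cryptography'])
    # -> "requests>=2.0\n\n[security]\ncryptography\n"
    return buf.getvalue()
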

def write_setup_requirements(cmd, basename, filename):
    data = io.StringIO()
    _write_requirements(data, cmd.distribution.setup_requires)
    cmd.write_or_delete_file("setup-requirements", filename, data.getvalue())


def write_toplevel_names(cmd, basename, filename):
    pkgs = dict.fromkeys(
        [
            k.split('.', 1)[0]
            for k in cmd.distribution.iter_distribution_names()
        ]
    )
    cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n')


def overwrite_arg(cmd, basename, filename):
    write_arg(cmd, basename, filename, True)


def write_arg(cmd, basename, filename, force=False):
    argname = os.path.splitext(basename)[0]
    value = getattr(cmd.distribution, argname, None)
    if value is not None:
        value = '\n'.join(value) + '\n'
    cmd.write_or_delete_file(argname, filename, value, force)


def write_entries(cmd, basename, filename):
    ep = cmd.distribution.entry_points

    if isinstance(ep, six.string_types) or ep is None:
        data = ep
    elif ep is not None:
        data = []
        for section, contents in sorted(ep.items()):
            if not isinstance(contents, six.string_types):
                contents = EntryPoint.parse_group(section, contents)
                contents = '\n'.join(sorted(map(str, contents.values())))
            data.append('[%s]\n%s\n\n' % (section, contents))
        data = ''.join(data)

    cmd.write_or_delete_file('entry points', filename, data, True)

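
# --- Illustrative sketch; not part of the original setuptools source. ---
# write_entries() serialises distribution.entry_points into the
# entry_points.txt format: one "[section]" header per group followed by
# "name = module:attrs" lines.  The console script below is hypothetical.
def _example_entry_points_txt():
    group = EntryPoint.parse_group(
        'console_scripts', ['example-tool = example.cli:main'])
    return '[console_scripts]\n%s\n' % '\n'.join(
        sorted(str(ep) for ep in group.values()))
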

def get_pkg_info_revision():
    """
    Get a -r### off of PKG-INFO Version in case this is an sdist of
    a subversion revision.
    """
    warnings.warn(
        "get_pkg_info_revision is deprecated.", EggInfoDeprecationWarning)
    if os.path.exists('PKG-INFO'):
        with io.open('PKG-INFO') as f:
            for line in f:
                match = re.match(r"Version:.*-r(\d+)\s*$", line)
                if match:
                    return int(match.group(1))
    return 0

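
# --- Illustrative sketch; not part of the original setuptools source. ---
# The deprecated helper above only recognises an old Subversion-style revision
# tag in the PKG-INFO "Version:" header; the header line below is made up.
def _example_pkg_info_revision_line():
    assert re.match(r"Version:.*-r(\d+)\s*$", "Version: 0.6c11-r88846")
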

class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning):
    """Deprecated behavior warning for EggInfo, bypassing suppression."""