comparison: planemo/lib/python3.7/site-packages/setuptools/command/egg_info.py @ 1:56ad4e20f292 (draft)
"planemo upload commit 6eee67778febed82ddd413c3ca40b3183a3898f1"
author: guerler
date: Fri, 31 Jul 2020 00:32:28 -0400
compared revisions: 0:d30785e31577 -> 1:56ad4e20f292
1 """setuptools.command.egg_info | |
2 | |
3 Create a distribution's .egg-info directory and contents""" | |
4 | |
5 from distutils.filelist import FileList as _FileList | |
6 from distutils.errors import DistutilsInternalError | |
7 from distutils.util import convert_path | |
8 from distutils import log | |
9 import distutils.errors | |
10 import distutils.filelist | |
11 import os | |
12 import re | |
13 import sys | |
14 import io | |
15 import warnings | |
16 import time | |
17 import collections | |
18 | |
19 from setuptools.extern import six | |
20 from setuptools.extern.six.moves import map | |
21 | |
22 from setuptools import Command | |
23 from setuptools.command.sdist import sdist | |
24 from setuptools.command.sdist import walk_revctrl | |
25 from setuptools.command.setopt import edit_config | |
26 from setuptools.command import bdist_egg | |
27 from pkg_resources import ( | |
28 parse_requirements, safe_name, parse_version, | |
29 safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename) | |
30 import setuptools.unicode_utils as unicode_utils | |
31 from setuptools.glob import glob | |
32 | |
33 from setuptools.extern import packaging | |
34 from setuptools import SetuptoolsDeprecationWarning | |
35 | |
def translate_pattern(glob):
    """
    Translate a file path glob like '*.txt' into a regular expression.
    This differs from fnmatch.translate, which allows wildcards to match
    directory separators. It also knows about '**/', which matches any
    number of directories.
    """
    pat = ''

    # This will split on '/' within [character classes]. This is deliberate.
    chunks = glob.split(os.path.sep)

    sep = re.escape(os.sep)
    valid_char = '[^%s]' % (sep,)

    for c, chunk in enumerate(chunks):
        last_chunk = c == len(chunks) - 1

        # Chunks that are a literal ** are globstars. They match anything.
        if chunk == '**':
            if last_chunk:
                # Match anything if this is the last component
                pat += '.*'
            else:
                # Match '(name/)*'
                pat += '(?:%s+%s)*' % (valid_char, sep)
            continue  # Break here as the whole path component has been handled

        # Find any special characters in the remainder
        i = 0
        chunk_len = len(chunk)
        while i < chunk_len:
            char = chunk[i]
            if char == '*':
                # Match any number of name characters
                pat += valid_char + '*'
            elif char == '?':
                # Match a name character
                pat += valid_char
            elif char == '[':
                # Character class
                inner_i = i + 1
                # Skip initial !/] chars
                if inner_i < chunk_len and chunk[inner_i] == '!':
                    inner_i = inner_i + 1
                if inner_i < chunk_len and chunk[inner_i] == ']':
                    inner_i = inner_i + 1

                # Loop till the closing ] is found
                while inner_i < chunk_len and chunk[inner_i] != ']':
                    inner_i = inner_i + 1

                if inner_i >= chunk_len:
                    # Got to the end of the string without finding a closing ]
                    # Do not treat this as a matching group, but as a literal [
                    pat += re.escape(char)
                else:
                    # Grab the insides of the [brackets]
                    inner = chunk[i + 1:inner_i]
                    char_class = ''

                    # Class negation
                    if inner[0] == '!':
                        char_class = '^'
                        inner = inner[1:]

                    char_class += re.escape(inner)
                    pat += '[%s]' % (char_class,)

                    # Skip to the end ]
                    i = inner_i
            else:
                pat += re.escape(char)
            i += 1

        # Join each chunk with the dir separator
        if not last_chunk:
            pat += sep

    pat += r'\Z'
    return re.compile(pat, flags=re.MULTILINE | re.DOTALL)


class InfoCommon:
    tag_build = None
    tag_date = None

    @property
    def name(self):
        return safe_name(self.distribution.get_name())

    def tagged_version(self):
        version = self.distribution.get_version()
        # egg_info may be called more than once for a distribution,
        # in which case the version string already contains all tags.
        if self.vtags and version.endswith(self.vtags):
            return safe_version(version)
        return safe_version(version + self.vtags)

    def tags(self):
        version = ''
        if self.tag_build:
            version += self.tag_build
        if self.tag_date:
            version += time.strftime("-%Y%m%d")
        return version
    vtags = property(tags)


class egg_info(InfoCommon, Command):
    description = "create a distribution's .egg-info directory"

    user_options = [
        ('egg-base=', 'e', "directory containing .egg-info directories"
                           " (default: top of the source tree)"),
        ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
        ('tag-build=', 'b', "Specify explicit tag to add to version number"),
        ('no-date', 'D', "Don't include date stamp [default]"),
    ]

    boolean_options = ['tag-date']
    negative_opt = {
        'no-date': 'tag-date',
    }

    def initialize_options(self):
        self.egg_base = None
        self.egg_name = None
        self.egg_info = None
        self.egg_version = None
        self.broken_egg_info = False

    ####################################
    # allow the 'tag_svn_revision' to be detected and
    # set, supporting sdists built on older Setuptools.
    @property
    def tag_svn_revision(self):
        pass

    @tag_svn_revision.setter
    def tag_svn_revision(self, value):
        pass
    ####################################

    def save_version_info(self, filename):
        """
        Materialize the value of date into the
        build tag. Install build keys in a deterministic order
        to avoid arbitrary reordering on subsequent builds.
        """
        egg_info = collections.OrderedDict()
        # follow the order these keys would have been added
        # when PYTHONHASHSEED=0
        egg_info['tag_build'] = self.tags()
        egg_info['tag_date'] = 0
        edit_config(filename, dict(egg_info=egg_info))

    def finalize_options(self):
        # Note: we need to capture the current value returned
        # by `self.tagged_version()`, so we can later update
        # `self.distribution.metadata.version` without
        # repercussions.
        self.egg_name = self.name
        self.egg_version = self.tagged_version()
        parsed_version = parse_version(self.egg_version)

        try:
            is_version = isinstance(parsed_version, packaging.version.Version)
            spec = (
                "%s==%s" if is_version else "%s===%s"
            )
            list(
                parse_requirements(spec % (self.egg_name, self.egg_version))
            )
        except ValueError:
            raise distutils.errors.DistutilsOptionError(
                "Invalid distribution name or version syntax: %s-%s" %
                (self.egg_name, self.egg_version)
            )

        if self.egg_base is None:
            dirs = self.distribution.package_dir
            self.egg_base = (dirs or {}).get('', os.curdir)

        self.ensure_dirname('egg_base')
        self.egg_info = to_filename(self.egg_name) + '.egg-info'
        if self.egg_base != os.curdir:
            self.egg_info = os.path.join(self.egg_base, self.egg_info)
        if '-' in self.egg_name:
            self.check_broken_egg_info()

        # Set package version for the benefit of dumber commands
        # (e.g. sdist, bdist_wininst, etc.)
        #
        self.distribution.metadata.version = self.egg_version

        # If we bootstrapped around the lack of a PKG-INFO, as might be the
        # case in a fresh checkout, make sure that any special tags get added
        # to the version info
        #
        pd = self.distribution._patched_dist
        if pd is not None and pd.key == self.egg_name.lower():
            pd._version = self.egg_version
            pd._parsed_version = parse_version(self.egg_version)
            self.distribution._patched_dist = None

    def write_or_delete_file(self, what, filename, data, force=False):
        """Write `data` to `filename` or delete if empty

        If `data` is non-empty, this routine is the same as ``write_file()``.
        If `data` is empty but not ``None``, this is the same as calling
        ``delete_file(filename)``.  If `data` is ``None``, then this is a no-op
        unless `filename` exists, in which case a warning is issued about the
        orphaned file (if `force` is false), or deleted (if `force` is true).
        """
        if data:
            self.write_file(what, filename, data)
        elif os.path.exists(filename):
            if data is None and not force:
                log.warn(
                    "%s not set in setup(), but %s exists", what, filename
                )
                return
            else:
                self.delete_file(filename)

    def write_file(self, what, filename, data):
        """Write `data` to `filename` (if not a dry run) after announcing it

        `what` is used in a log message to identify what is being written
        to the file.
        """
        log.info("writing %s to %s", what, filename)
        if six.PY3:
            data = data.encode("utf-8")
        if not self.dry_run:
            f = open(filename, 'wb')
            f.write(data)
            f.close()

    def delete_file(self, filename):
        """Delete `filename` (if not a dry run) after announcing it"""
        log.info("deleting %s", filename)
        if not self.dry_run:
            os.unlink(filename)

    def run(self):
        self.mkpath(self.egg_info)
        os.utime(self.egg_info, None)
        installer = self.distribution.fetch_build_egg
        for ep in iter_entry_points('egg_info.writers'):
            ep.require(installer=installer)
            writer = ep.resolve()
            writer(self, ep.name, os.path.join(self.egg_info, ep.name))

        # Get rid of native_libs.txt if it was put there by older bdist_egg
        nl = os.path.join(self.egg_info, "native_libs.txt")
        if os.path.exists(nl):
            self.delete_file(nl)

        self.find_sources()

    def find_sources(self):
        """Generate SOURCES.txt manifest file"""
        manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
        mm = manifest_maker(self.distribution)
        mm.manifest = manifest_filename
        mm.run()
        self.filelist = mm.filelist

    def check_broken_egg_info(self):
        bei = self.egg_name + '.egg-info'
        if self.egg_base != os.curdir:
            bei = os.path.join(self.egg_base, bei)
        if os.path.exists(bei):
            log.warn(
                "-" * 78 + '\n'
                "Note: Your current .egg-info directory has a '-' in its name;"
                '\nthis will not work correctly with "setup.py develop".\n\n'
                'Please rename %s to %s to correct this problem.\n' + '-' * 78,
                bei, self.egg_info
            )
            self.broken_egg_info = self.egg_info
            self.egg_info = bei  # make it work for now


class FileList(_FileList):
    # Implementations of the various MANIFEST.in commands

    def process_template_line(self, line):
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words. 'action' is always
        # defined: it's the first word of the line. Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dir_pattern).
        (action, patterns, dir, dir_pattern) = self._parse_template_line(line)

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.
        if action == 'include':
            self.debug_print("include " + ' '.join(patterns))
            for pattern in patterns:
                if not self.include(pattern):
                    log.warn("warning: no files found matching '%s'", pattern)

        elif action == 'exclude':
            self.debug_print("exclude " + ' '.join(patterns))
            for pattern in patterns:
                if not self.exclude(pattern):
                    log.warn(("warning: no previously-included files "
                              "found matching '%s'"), pattern)

        elif action == 'global-include':
            self.debug_print("global-include " + ' '.join(patterns))
            for pattern in patterns:
                if not self.global_include(pattern):
                    log.warn(("warning: no files found matching '%s' "
                              "anywhere in distribution"), pattern)

        elif action == 'global-exclude':
            self.debug_print("global-exclude " + ' '.join(patterns))
            for pattern in patterns:
                if not self.global_exclude(pattern):
                    log.warn(("warning: no previously-included files matching "
                              "'%s' found anywhere in distribution"),
                             pattern)

        elif action == 'recursive-include':
            self.debug_print("recursive-include %s %s" %
                             (dir, ' '.join(patterns)))
            for pattern in patterns:
                if not self.recursive_include(dir, pattern):
                    log.warn(("warning: no files found matching '%s' "
                              "under directory '%s'"),
                             pattern, dir)

        elif action == 'recursive-exclude':
            self.debug_print("recursive-exclude %s %s" %
                             (dir, ' '.join(patterns)))
            for pattern in patterns:
                if not self.recursive_exclude(dir, pattern):
                    log.warn(("warning: no previously-included files matching "
                              "'%s' found under directory '%s'"),
                             pattern, dir)

        elif action == 'graft':
            self.debug_print("graft " + dir_pattern)
            if not self.graft(dir_pattern):
                log.warn("warning: no directories found matching '%s'",
                         dir_pattern)

        elif action == 'prune':
            self.debug_print("prune " + dir_pattern)
            if not self.prune(dir_pattern):
                log.warn(("no previously-included directories found "
                          "matching '%s'"), dir_pattern)

        else:
            raise DistutilsInternalError(
                "this cannot happen: invalid action '%s'" % action)

    def _remove_files(self, predicate):
        """
        Remove all files from the file list that match the predicate.
        Return True if any matching files were removed
        """
        found = False
        for i in range(len(self.files) - 1, -1, -1):
            if predicate(self.files[i]):
                self.debug_print(" removing " + self.files[i])
                del self.files[i]
                found = True
        return found

    def include(self, pattern):
        """Include files that match 'pattern'."""
        found = [f for f in glob(pattern) if not os.path.isdir(f)]
        self.extend(found)
        return bool(found)

    def exclude(self, pattern):
        """Exclude files that match 'pattern'."""
        match = translate_pattern(pattern)
        return self._remove_files(match.match)

    def recursive_include(self, dir, pattern):
        """
        Include all files anywhere in 'dir/' that match the pattern.
        """
        full_pattern = os.path.join(dir, '**', pattern)
        found = [f for f in glob(full_pattern, recursive=True)
                 if not os.path.isdir(f)]
        self.extend(found)
        return bool(found)

    def recursive_exclude(self, dir, pattern):
        """
        Exclude any file anywhere in 'dir/' that matches the pattern.
        """
        match = translate_pattern(os.path.join(dir, '**', pattern))
        return self._remove_files(match.match)

    def graft(self, dir):
        """Include all files from 'dir/'."""
        found = [
            item
            for match_dir in glob(dir)
            for item in distutils.filelist.findall(match_dir)
        ]
        self.extend(found)
        return bool(found)

    def prune(self, dir):
        """Filter out files from 'dir/'."""
        match = translate_pattern(os.path.join(dir, '**'))
        return self._remove_files(match.match)

    def global_include(self, pattern):
        """
        Include all files anywhere in the current directory that match the
        pattern. This is very inefficient on large file trees.
        """
        if self.allfiles is None:
            self.findall()
        match = translate_pattern(os.path.join('**', pattern))
        found = [f for f in self.allfiles if match.match(f)]
        self.extend(found)
        return bool(found)

    def global_exclude(self, pattern):
        """
        Exclude all files anywhere that match the pattern.
        """
        match = translate_pattern(os.path.join('**', pattern))
        return self._remove_files(match.match)

    def append(self, item):
        if item.endswith('\r'):  # Fix older sdists built on Windows
            item = item[:-1]
        path = convert_path(item)

        if self._safe_path(path):
            self.files.append(path)

    def extend(self, paths):
        self.files.extend(filter(self._safe_path, paths))

    def _repair(self):
        """
        Replace self.files with only safe paths

        Because some owners of FileList manipulate the underlying
        ``files`` attribute directly, this method must be called to
        repair those paths.
        """
        self.files = list(filter(self._safe_path, self.files))

    def _safe_path(self, path):
        enc_warn = "'%s' not %s encodable -- skipping"

        # To avoid accidental trans-coding errors, decode to unicode first
        u_path = unicode_utils.filesys_decode(path)
        if u_path is None:
            log.warn("'%s' in unexpected encoding -- skipping" % path)
            return False

        # Must ensure utf-8 encodability
        utf8_path = unicode_utils.try_encode(u_path, "utf-8")
        if utf8_path is None:
            log.warn(enc_warn, path, 'utf-8')
            return False

        try:
            # accept if either way checks out
            if os.path.exists(u_path) or os.path.exists(utf8_path):
                return True
        # this will catch any encode errors decoding u_path
        except UnicodeEncodeError:
            log.warn(enc_warn, path, sys.getfilesystemencoding())


class manifest_maker(sdist):
    template = "MANIFEST.in"

    def initialize_options(self):
        self.use_defaults = 1
        self.prune = 1
        self.manifest_only = 1
        self.force_manifest = 1

    def finalize_options(self):
        pass

    def run(self):
        self.filelist = FileList()
        if not os.path.exists(self.manifest):
            self.write_manifest()  # it must exist so it'll get in the list
        self.add_defaults()
        if os.path.exists(self.template):
            self.read_template()
        self.prune_file_list()
        self.filelist.sort()
        self.filelist.remove_duplicates()
        self.write_manifest()

    def _manifest_normalize(self, path):
        path = unicode_utils.filesys_decode(path)
        return path.replace(os.sep, '/')

    def write_manifest(self):
        """
        Write the file list in 'self.filelist' to the manifest file
        named by 'self.manifest'.
        """
        self.filelist._repair()

        # Now _repair should have ensured encodability, but not unicode
        files = [self._manifest_normalize(f) for f in self.filelist.files]
        msg = "writing manifest file '%s'" % self.manifest
        self.execute(write_file, (self.manifest, files), msg)

    def warn(self, msg):
        if not self._should_suppress_warning(msg):
            sdist.warn(self, msg)

    @staticmethod
    def _should_suppress_warning(msg):
        """
        suppress missing-file warnings from sdist
        """
        return re.match(r"standard file .*not found", msg)

    def add_defaults(self):
        sdist.add_defaults(self)
        self.check_license()
        self.filelist.append(self.template)
        self.filelist.append(self.manifest)
        rcfiles = list(walk_revctrl())
        if rcfiles:
            self.filelist.extend(rcfiles)
        elif os.path.exists(self.manifest):
            self.read_manifest()

        if os.path.exists("setup.py"):
            # setup.py should be included by default, even if it's not
            # the script called to create the sdist
            self.filelist.append("setup.py")

        ei_cmd = self.get_finalized_command('egg_info')
        self.filelist.graft(ei_cmd.egg_info)

    def prune_file_list(self):
        build = self.get_finalized_command('build')
        base_dir = self.distribution.get_fullname()
        self.filelist.prune(build.build_base)
        self.filelist.prune(base_dir)
        sep = re.escape(os.sep)
        self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep,
                                      is_regex=1)


def write_file(filename, contents):
    """Create a file with the specified name and write 'contents' (a
    sequence of strings without line terminators) to it.
    """
    contents = "\n".join(contents)

    # assuming the contents has been vetted for utf-8 encoding
    contents = contents.encode("utf-8")

    with open(filename, "wb") as f:  # always write POSIX-style manifest
        f.write(contents)


def write_pkg_info(cmd, basename, filename):
    log.info("writing %s", filename)
    if not cmd.dry_run:
        metadata = cmd.distribution.metadata
        metadata.version, oldver = cmd.egg_version, metadata.version
        metadata.name, oldname = cmd.egg_name, metadata.name

        try:
            # write unescaped data to PKG-INFO, so older pkg_resources
            # can still parse it
            metadata.write_pkg_info(cmd.egg_info)
        finally:
            metadata.name, metadata.version = oldname, oldver

        safe = getattr(cmd.distribution, 'zip_safe', None)

        bdist_egg.write_safety_flag(cmd.egg_info, safe)


def warn_depends_obsolete(cmd, basename, filename):
    if os.path.exists(filename):
        log.warn(
            "WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
            "Use the install_requires/extras_require setup() args instead."
        )


def _write_requirements(stream, reqs):
    lines = yield_lines(reqs or ())
    append_cr = lambda line: line + '\n'
    lines = map(append_cr, lines)
    stream.writelines(lines)


def write_requirements(cmd, basename, filename):
    dist = cmd.distribution
    data = six.StringIO()
    _write_requirements(data, dist.install_requires)
    extras_require = dist.extras_require or {}
    for extra in sorted(extras_require):
        data.write('\n[{extra}]\n'.format(**vars()))
        _write_requirements(data, extras_require[extra])
    cmd.write_or_delete_file("requirements", filename, data.getvalue())


def write_setup_requirements(cmd, basename, filename):
    data = io.StringIO()
    _write_requirements(data, cmd.distribution.setup_requires)
    cmd.write_or_delete_file("setup-requirements", filename, data.getvalue())


def write_toplevel_names(cmd, basename, filename):
    pkgs = dict.fromkeys(
        [
            k.split('.', 1)[0]
            for k in cmd.distribution.iter_distribution_names()
        ]
    )
    cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n')


def overwrite_arg(cmd, basename, filename):
    write_arg(cmd, basename, filename, True)


def write_arg(cmd, basename, filename, force=False):
    argname = os.path.splitext(basename)[0]
    value = getattr(cmd.distribution, argname, None)
    if value is not None:
        value = '\n'.join(value) + '\n'
    cmd.write_or_delete_file(argname, filename, value, force)


def write_entries(cmd, basename, filename):
    ep = cmd.distribution.entry_points

    if isinstance(ep, six.string_types) or ep is None:
        data = ep
    elif ep is not None:
        data = []
        for section, contents in sorted(ep.items()):
            if not isinstance(contents, six.string_types):
                contents = EntryPoint.parse_group(section, contents)
                contents = '\n'.join(sorted(map(str, contents.values())))
            data.append('[%s]\n%s\n\n' % (section, contents))
        data = ''.join(data)

    cmd.write_or_delete_file('entry points', filename, data, True)


def get_pkg_info_revision():
    """
    Get a -r### off of PKG-INFO Version in case this is an sdist of
    a subversion revision.
    """
    warnings.warn(
        "get_pkg_info_revision is deprecated.", EggInfoDeprecationWarning)
    if os.path.exists('PKG-INFO'):
        with io.open('PKG-INFO') as f:
            for line in f:
                match = re.match(r"Version:.*-r(\d+)\s*$", line)
                if match:
                    return int(match.group(1))
    return 0


class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning):
    """Class for warning about deprecations in egg_info in setuptools.
    Not ignored by default, unlike DeprecationWarning."""