env/lib/python3.7/site-packages/ruamel/yaml/parser.py @ 0:26e78fe6e8c4 (draft)

"planemo upload commit c699937486c35866861690329de38ec1a5d9f783"

| author | shellac |
|---|---|
| date | Sat, 02 May 2020 07:14:21 -0400 |
| parents | |
| children | |
```python
# coding: utf-8

from __future__ import absolute_import

# The following YAML grammar is LL(1) and is parsed by a recursive descent
# parser.
#
# stream            ::= STREAM-START implicit_document? explicit_document*
#                       STREAM-END
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
# block_node_or_indentless_sequence ::=
#                       ALIAS
#                       | properties (block_content | indentless_block_sequence)?
#                       | block_content
#                       | indentless_block_sequence
# block_node        ::= ALIAS
#                       | properties block_content?
#                       | block_content
# flow_node         ::= ALIAS
#                       | properties flow_content?
#                       | flow_content
# properties        ::= TAG ANCHOR? | ANCHOR TAG?
# block_content     ::= block_collection | flow_collection | SCALAR
# flow_content      ::= flow_collection | SCALAR
# block_collection  ::= block_sequence | block_mapping
# flow_collection   ::= flow_sequence | flow_mapping
# block_sequence    ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)*
#                       BLOCK-END
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
# block_mapping     ::= BLOCK-MAPPING-START
#                       ((KEY block_node_or_indentless_sequence?)?
#                       (VALUE block_node_or_indentless_sequence?)?)*
#                       BLOCK-END
# flow_sequence     ::= FLOW-SEQUENCE-START
#                       (flow_sequence_entry FLOW-ENTRY)*
#                       flow_sequence_entry?
#                       FLOW-SEQUENCE-END
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
# flow_mapping      ::= FLOW-MAPPING-START
#                       (flow_mapping_entry FLOW-ENTRY)*
#                       flow_mapping_entry?
#                       FLOW-MAPPING-END
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
#
# FIRST sets:
#
# stream: { STREAM-START }
# explicit_document: { DIRECTIVE DOCUMENT-START }
# implicit_document: FIRST(block_node)
# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START
#               BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START
#                  FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_sequence: { BLOCK-SEQUENCE-START }
# block_mapping: { BLOCK-MAPPING-START }
# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR
#               BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START
#               FLOW-MAPPING-START BLOCK-ENTRY }
# indentless_sequence: { ENTRY }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_sequence: { FLOW-SEQUENCE-START }
# flow_mapping: { FLOW-MAPPING-START }
# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START
#                        FLOW-MAPPING-START KEY }
# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START
#                       FLOW-MAPPING-START KEY }
```
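The grammar above describes the event stream the parser emits, not a parse tree. A minimal sketch of what that stream looks like for a small document, assuming the PyYAML-compatible top-level `parse` helper that ruamel.yaml releases of this era still export (the new `YAML()` API does not need it):

```python
# Sketch: enumerate the events produced for a tiny document.
# `ruamel.yaml.parse` is the old-API helper; hedged assumption for 0.15/0.16.
import ruamel.yaml

document = u"""\
---
name: guppy
tags: [ont, basecaller]
"""

for event in ruamel.yaml.parse(document):
    # Expected shape: StreamStart, DocumentStart, MappingStart,
    # Scalar(name), Scalar(guppy), Scalar(tags), SequenceStart,
    # Scalar(ont), Scalar(basecaller), SequenceEnd, MappingEnd,
    # DocumentEnd, StreamEnd.
    print(type(event).__name__)
```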
```python
# need the full path in the import, as pkg_resources tries to load parser.py in __init__.py
# only to then do nothing with the package afterwards
# (and for Jython too)


from ruamel.yaml.error import MarkedYAMLError
from ruamel.yaml.tokens import *  # NOQA
from ruamel.yaml.events import *  # NOQA
from ruamel.yaml.scanner import Scanner, RoundTripScanner, ScannerError  # NOQA
from ruamel.yaml.compat import utf8, nprint, nprintf  # NOQA

if False:  # MYPY
    from typing import Any, Dict, Optional, List  # NOQA

__all__ = ['Parser', 'RoundTripParser', 'ParserError']


class ParserError(MarkedYAMLError):
    pass


class Parser(object):
    # Since writing a recursive descent parser is a straightforward task, we
    # do not give many comments here.

    DEFAULT_TAGS = {u'!': u'!', u'!!': u'tag:yaml.org,2002:'}

    def __init__(self, loader):
        # type: (Any) -> None
        self.loader = loader
        if self.loader is not None and getattr(self.loader, '_parser', None) is None:
            self.loader._parser = self
        self.reset_parser()

    def reset_parser(self):
        # type: () -> None
        # Reset the state attributes (to clear self-references)
        self.current_event = None
        self.yaml_version = None
        self.tag_handles = {}  # type: Dict[Any, Any]
        self.states = []  # type: List[Any]
        self.marks = []  # type: List[Any]
        self.state = self.parse_stream_start  # type: Any

    def dispose(self):
        # type: () -> None
        self.reset_parser()

    @property
    def scanner(self):
        # type: () -> Any
        if hasattr(self.loader, 'typ'):
            return self.loader.scanner
        return self.loader._scanner

    @property
    def resolver(self):
        # type: () -> Any
        if hasattr(self.loader, 'typ'):
            return self.loader.resolver
        return self.loader._resolver

    def check_event(self, *choices):
        # type: (Any) -> bool
        # Check the type of the next event.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        if self.current_event is not None:
            if not choices:
                return True
            for choice in choices:
                if isinstance(self.current_event, choice):
                    return True
        return False

    def peek_event(self):
        # type: () -> Any
        # Get the next event.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        return self.current_event

    def get_event(self):
        # type: () -> Any
        # Get the next event and proceed further.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        value = self.current_event
        self.current_event = None
        return value
```
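`check_event`/`peek_event`/`get_event` form the whole public surface of this class; the composer drives the state machine exclusively through them. A sketch of that protocol, against any `parser` object a loader has wired up (only the three methods are assumed):

```python
# Sketch: the look-ahead/consume loop a composer runs on a Parser.
from ruamel.yaml.events import ScalarEvent, StreamEndEvent

def drain(parser):
    # check_event() with no arguments asks "is there another event?";
    # with event classes it asks "is the next event one of these?".
    while not parser.check_event(StreamEndEvent):
        event = parser.peek_event()   # look at the next event without consuming it
        if isinstance(event, ScalarEvent):
            print('scalar:', event.value)
        parser.get_event()            # consume it and advance self.state
```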
```python
    # stream            ::= STREAM-START implicit_document? explicit_document*
    #                       STREAM-END
    # implicit_document ::= block_node DOCUMENT-END*
    # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*

    def parse_stream_start(self):
        # type: () -> Any
        # Parse the stream start.
        token = self.scanner.get_token()
        token.move_comment(self.scanner.peek_token())
        event = StreamStartEvent(token.start_mark, token.end_mark, encoding=token.encoding)

        # Prepare the next state.
        self.state = self.parse_implicit_document_start

        return event

    def parse_implicit_document_start(self):
        # type: () -> Any
        # Parse an implicit document.
        if not self.scanner.check_token(DirectiveToken, DocumentStartToken, StreamEndToken):
            self.tag_handles = self.DEFAULT_TAGS
            token = self.scanner.peek_token()
            start_mark = end_mark = token.start_mark
            event = DocumentStartEvent(start_mark, end_mark, explicit=False)

            # Prepare the next state.
            self.states.append(self.parse_document_end)
            self.state = self.parse_block_node

            return event

        else:
            return self.parse_document_start()

    def parse_document_start(self):
        # type: () -> Any
        # Parse any extra document end indicators.
        while self.scanner.check_token(DocumentEndToken):
            self.scanner.get_token()
        # Parse an explicit document.
        if not self.scanner.check_token(StreamEndToken):
            token = self.scanner.peek_token()
            start_mark = token.start_mark
            version, tags = self.process_directives()
            if not self.scanner.check_token(DocumentStartToken):
                raise ParserError(
                    None,
                    None,
                    "expected '<document start>', but found %r" % self.scanner.peek_token().id,
                    self.scanner.peek_token().start_mark,
                )
            token = self.scanner.get_token()
            end_mark = token.end_mark
            event = DocumentStartEvent(
                start_mark, end_mark, explicit=True, version=version, tags=tags
            )  # type: Any
            self.states.append(self.parse_document_end)
            self.state = self.parse_document_content
        else:
            # Parse the end of the stream.
            token = self.scanner.get_token()
            event = StreamEndEvent(token.start_mark, token.end_mark, comment=token.comment)
            assert not self.states
            assert not self.marks
            self.state = None
        return event

    def parse_document_end(self):
        # type: () -> Any
        # Parse the document end.
        token = self.scanner.peek_token()
        start_mark = end_mark = token.start_mark
        explicit = False
        if self.scanner.check_token(DocumentEndToken):
            token = self.scanner.get_token()
            end_mark = token.end_mark
            explicit = True
        event = DocumentEndEvent(start_mark, end_mark, explicit=explicit)

        # Prepare the next state.
        if self.resolver.processing_version == (1, 1):
            self.state = self.parse_document_start
        else:
            self.state = self.parse_implicit_document_start

        return event
```
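After a document ends, `parse_document_end` loops back to `parse_document_start` for YAML 1.1 and to `parse_implicit_document_start` otherwise, so one stream can carry several documents. A sketch using the old-API `safe_load_all` helper (an assumption for this release line):

```python
# Sketch: `...` closes the first (implicit) document, `---` opens the second.
import ruamel.yaml

stream = u"""\
first: 1
...
---
second: 2
"""

print(list(ruamel.yaml.safe_load_all(stream)))
# expected: [{'first': 1}, {'second': 2}]
```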
```python
    def parse_document_content(self):
        # type: () -> Any
        if self.scanner.check_token(
            DirectiveToken, DocumentStartToken, DocumentEndToken, StreamEndToken
        ):
            event = self.process_empty_scalar(self.scanner.peek_token().start_mark)
            self.state = self.states.pop()
            return event
        else:
            return self.parse_block_node()

    def process_directives(self):
        # type: () -> Any
        self.yaml_version = None
        self.tag_handles = {}
        while self.scanner.check_token(DirectiveToken):
            token = self.scanner.get_token()
            if token.name == u'YAML':
                if self.yaml_version is not None:
                    raise ParserError(
                        None, None, 'found duplicate YAML directive', token.start_mark
                    )
                major, minor = token.value
                if major != 1:
                    raise ParserError(
                        None,
                        None,
                        'found incompatible YAML document (version 1.* is required)',
                        token.start_mark,
                    )
                self.yaml_version = token.value
            elif token.name == u'TAG':
                handle, prefix = token.value
                if handle in self.tag_handles:
                    raise ParserError(
                        None, None, 'duplicate tag handle %r' % utf8(handle), token.start_mark
                    )
                self.tag_handles[handle] = prefix
        if bool(self.tag_handles):
            value = self.yaml_version, self.tag_handles.copy()  # type: Any
        else:
            value = self.yaml_version, None
        for key in self.DEFAULT_TAGS:
            if key not in self.tag_handles:
                self.tag_handles[key] = self.DEFAULT_TAGS[key]
        return value
```
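`process_directives` is what fills `tag_handles`, which `transform_tag` later uses to expand a shorthand into a full tag. A sketch, again assuming the old-API `parse` helper; the `!e!` handle and `tag:example.com,2020:` prefix are made up for illustration:

```python
# Sketch: a %TAG directive registers a handle; the node's shorthand tag is
# expanded by transform_tag before the event is emitted.
import ruamel.yaml

doc = u"""\
%TAG !e! tag:example.com,2020:
---
- !e!widget {size: 3}
"""

for event in ruamel.yaml.parse(doc):
    if getattr(event, 'tag', None):
        print(event.tag)  # expected: tag:example.com,2020:widget
```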
```python
    # block_node_or_indentless_sequence ::= ALIAS
    #               | properties (block_content | indentless_block_sequence)?
    #               | block_content
    #               | indentless_block_sequence
    # block_node    ::= ALIAS
    #                   | properties block_content?
    #                   | block_content
    # flow_node     ::= ALIAS
    #                   | properties flow_content?
    #                   | flow_content
    # properties    ::= TAG ANCHOR? | ANCHOR TAG?
    # block_content     ::= block_collection | flow_collection | SCALAR
    # flow_content      ::= flow_collection | SCALAR
    # block_collection  ::= block_sequence | block_mapping
    # flow_collection   ::= flow_sequence | flow_mapping

    def parse_block_node(self):
        # type: () -> Any
        return self.parse_node(block=True)

    def parse_flow_node(self):
        # type: () -> Any
        return self.parse_node()

    def parse_block_node_or_indentless_sequence(self):
        # type: () -> Any
        return self.parse_node(block=True, indentless_sequence=True)

    def transform_tag(self, handle, suffix):
        # type: (Any, Any) -> Any
        return self.tag_handles[handle] + suffix

    def parse_node(self, block=False, indentless_sequence=False):
        # type: (bool, bool) -> Any
        if self.scanner.check_token(AliasToken):
            token = self.scanner.get_token()
            event = AliasEvent(token.value, token.start_mark, token.end_mark)  # type: Any
            self.state = self.states.pop()
            return event

        anchor = None
        tag = None
        start_mark = end_mark = tag_mark = None
        if self.scanner.check_token(AnchorToken):
            token = self.scanner.get_token()
            start_mark = token.start_mark
            end_mark = token.end_mark
            anchor = token.value
            if self.scanner.check_token(TagToken):
                token = self.scanner.get_token()
                tag_mark = token.start_mark
                end_mark = token.end_mark
                tag = token.value
        elif self.scanner.check_token(TagToken):
            token = self.scanner.get_token()
            start_mark = tag_mark = token.start_mark
            end_mark = token.end_mark
            tag = token.value
            if self.scanner.check_token(AnchorToken):
                token = self.scanner.get_token()
                start_mark = tag_mark = token.start_mark
                end_mark = token.end_mark
                anchor = token.value
        if tag is not None:
            handle, suffix = tag
            if handle is not None:
                if handle not in self.tag_handles:
                    raise ParserError(
                        'while parsing a node',
                        start_mark,
                        'found undefined tag handle %r' % utf8(handle),
                        tag_mark,
                    )
                tag = self.transform_tag(handle, suffix)
            else:
                tag = suffix
        # if tag == u'!':
        #     raise ParserError("while parsing a node", start_mark,
        #             "found non-specific tag '!'", tag_mark,
        #             "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag'
        #             and share your opinion.")
        if start_mark is None:
            start_mark = end_mark = self.scanner.peek_token().start_mark
        event = None
        implicit = tag is None or tag == u'!'
        if indentless_sequence and self.scanner.check_token(BlockEntryToken):
            comment = None
            pt = self.scanner.peek_token()
            if pt.comment and pt.comment[0]:
                comment = [pt.comment[0], []]
                pt.comment[0] = None
            end_mark = self.scanner.peek_token().end_mark
            event = SequenceStartEvent(
                anchor, tag, implicit, start_mark, end_mark, flow_style=False, comment=comment
            )
            self.state = self.parse_indentless_sequence_entry
            return event

        if self.scanner.check_token(ScalarToken):
            token = self.scanner.get_token()
            # self.scanner.peek_token_same_line_comment(token)
            end_mark = token.end_mark
            if (token.plain and tag is None) or tag == u'!':
                implicit = (True, False)
            elif tag is None:
                implicit = (False, True)
            else:
                implicit = (False, False)
            # nprint('se', token.value, token.comment)
            event = ScalarEvent(
                anchor,
                tag,
                implicit,
                token.value,
                start_mark,
                end_mark,
                style=token.style,
                comment=token.comment,
            )
            self.state = self.states.pop()
        elif self.scanner.check_token(FlowSequenceStartToken):
            pt = self.scanner.peek_token()
            end_mark = pt.end_mark
            event = SequenceStartEvent(
                anchor,
                tag,
                implicit,
                start_mark,
                end_mark,
                flow_style=True,
                comment=pt.comment,
            )
            self.state = self.parse_flow_sequence_first_entry
        elif self.scanner.check_token(FlowMappingStartToken):
            pt = self.scanner.peek_token()
            end_mark = pt.end_mark
            event = MappingStartEvent(
                anchor,
                tag,
                implicit,
                start_mark,
                end_mark,
                flow_style=True,
                comment=pt.comment,
            )
            self.state = self.parse_flow_mapping_first_key
        elif block and self.scanner.check_token(BlockSequenceStartToken):
            end_mark = self.scanner.peek_token().start_mark
            # should inserting the comment be dependent on the indentation?
            pt = self.scanner.peek_token()
            comment = pt.comment
            # nprint('pt0', type(pt))
            if comment is None or comment[1] is None:
                comment = pt.split_comment()
            # nprint('pt1', comment)
            event = SequenceStartEvent(
                anchor, tag, implicit, start_mark, end_mark, flow_style=False, comment=comment
            )
            self.state = self.parse_block_sequence_first_entry
        elif block and self.scanner.check_token(BlockMappingStartToken):
            end_mark = self.scanner.peek_token().start_mark
            comment = self.scanner.peek_token().comment
            event = MappingStartEvent(
                anchor, tag, implicit, start_mark, end_mark, flow_style=False, comment=comment
            )
            self.state = self.parse_block_mapping_first_key
        elif anchor is not None or tag is not None:
            # Empty scalars are allowed even if a tag or an anchor is specified.
            event = ScalarEvent(anchor, tag, (implicit, False), "", start_mark, end_mark)
            self.state = self.states.pop()
        else:
            if block:
                node = 'block'
            else:
                node = 'flow'
            token = self.scanner.peek_token()
            raise ParserError(
                'while parsing a %s node' % node,
                start_mark,
                'expected the node content, but found %r' % token.id,
                token.start_mark,
            )
        return event
```
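Note how `parse_node` implements `properties ::= TAG ANCHOR? | ANCHOR TAG?`: the anchor and tag may come in either order, and an anchored node can later be replayed by an alias. A small sketch using the old-API `safe_load` helper:

```python
# Sketch: anchor-then-tag and tag-then-anchor are both accepted; *id replays
# the node anchored by &id as an AliasEvent.
import ruamel.yaml

doc = u"""\
a: &id !!str hello
b: !!str &id2 hello
c: *id
"""
print(ruamel.yaml.safe_load(doc))
# expected: {'a': 'hello', 'b': 'hello', 'c': 'hello'}
```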
```python
    # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)*
    #                    BLOCK-END

    def parse_block_sequence_first_entry(self):
        # type: () -> Any
        token = self.scanner.get_token()
        # move any comment from start token
        # token.move_comment(self.scanner.peek_token())
        self.marks.append(token.start_mark)
        return self.parse_block_sequence_entry()

    def parse_block_sequence_entry(self):
        # type: () -> Any
        if self.scanner.check_token(BlockEntryToken):
            token = self.scanner.get_token()
            token.move_comment(self.scanner.peek_token())
            if not self.scanner.check_token(BlockEntryToken, BlockEndToken):
                self.states.append(self.parse_block_sequence_entry)
                return self.parse_block_node()
            else:
                self.state = self.parse_block_sequence_entry
                return self.process_empty_scalar(token.end_mark)
        if not self.scanner.check_token(BlockEndToken):
            token = self.scanner.peek_token()
            raise ParserError(
                'while parsing a block collection',
                self.marks[-1],
                'expected <block end>, but found %r' % token.id,
                token.start_mark,
            )
        token = self.scanner.get_token()  # BlockEndToken
        event = SequenceEndEvent(token.start_mark, token.end_mark, comment=token.comment)
        self.state = self.states.pop()
        self.marks.pop()
        return event
```
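The `block_node?` in the production is optional, which is why a bare `-` entry falls into the `process_empty_scalar` branch above. Sketch with the old-API `safe_load`:

```python
# Sketch: a BLOCK-ENTRY with no node yields an empty scalar, which the safe
# constructor resolves to None.
import ruamel.yaml

print(ruamel.yaml.safe_load(u"- a\n-\n- b"))
# expected: ['a', None, 'b']
```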
```python
    # indentless_sequence ::= (BLOCK-ENTRY block_node?)+

    # indentless_sequence?
    # sequence:
    # - entry
    # - nested

    def parse_indentless_sequence_entry(self):
        # type: () -> Any
        if self.scanner.check_token(BlockEntryToken):
            token = self.scanner.get_token()
            token.move_comment(self.scanner.peek_token())
            if not self.scanner.check_token(
                BlockEntryToken, KeyToken, ValueToken, BlockEndToken
            ):
                self.states.append(self.parse_indentless_sequence_entry)
                return self.parse_block_node()
            else:
                self.state = self.parse_indentless_sequence_entry
                return self.process_empty_scalar(token.end_mark)
        token = self.scanner.peek_token()
        event = SequenceEndEvent(token.start_mark, token.start_mark, comment=token.comment)
        self.state = self.states.pop()
        return event
```
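The comment above alludes to the case this method exists for: a sequence whose `-` entries sit at the same column as the mapping key, with no BLOCK-SEQUENCE-START/BLOCK-END pair around them. Sketch:

```python
# Sketch: an indentless sequence nested directly under a mapping key.
import ruamel.yaml

doc = u"""\
sequence:
- entry
- nested
"""
print(ruamel.yaml.safe_load(doc))
# expected: {'sequence': ['entry', 'nested']}
```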
```python
    # block_mapping ::= BLOCK-MAPPING-START
    #                   ((KEY block_node_or_indentless_sequence?)?
    #                   (VALUE block_node_or_indentless_sequence?)?)*
    #                   BLOCK-END

    def parse_block_mapping_first_key(self):
        # type: () -> Any
        token = self.scanner.get_token()
        self.marks.append(token.start_mark)
        return self.parse_block_mapping_key()

    def parse_block_mapping_key(self):
        # type: () -> Any
        if self.scanner.check_token(KeyToken):
            token = self.scanner.get_token()
            token.move_comment(self.scanner.peek_token())
            if not self.scanner.check_token(KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_block_mapping_value)
                return self.parse_block_node_or_indentless_sequence()
            else:
                self.state = self.parse_block_mapping_value
                return self.process_empty_scalar(token.end_mark)
        if self.resolver.processing_version > (1, 1) and self.scanner.check_token(ValueToken):
            self.state = self.parse_block_mapping_value
            return self.process_empty_scalar(self.scanner.peek_token().start_mark)
        if not self.scanner.check_token(BlockEndToken):
            token = self.scanner.peek_token()
            raise ParserError(
                'while parsing a block mapping',
                self.marks[-1],
                'expected <block end>, but found %r' % token.id,
                token.start_mark,
            )
        token = self.scanner.get_token()
        token.move_comment(self.scanner.peek_token())
        event = MappingEndEvent(token.start_mark, token.end_mark, comment=token.comment)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_block_mapping_value(self):
        # type: () -> Any
        if self.scanner.check_token(ValueToken):
            token = self.scanner.get_token()
            # the value token might have a post comment; move it to e.g. the block
            if self.scanner.check_token(ValueToken):
                token.move_comment(self.scanner.peek_token())
            else:
                if not self.scanner.check_token(KeyToken):
                    token.move_comment(self.scanner.peek_token(), empty=True)
                # else: empty value for this key cannot move token.comment
            if not self.scanner.check_token(KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_block_mapping_key)
                return self.parse_block_node_or_indentless_sequence()
            else:
                self.state = self.parse_block_mapping_key
                comment = token.comment
                if comment is None:
                    token = self.scanner.peek_token()
                    comment = token.comment
                    if comment:
                        token._comment = [None, comment[1]]
                        comment = [comment[0], None]
                return self.process_empty_scalar(token.end_mark, comment=comment)
        else:
            self.state = self.parse_block_mapping_key
            token = self.scanner.peek_token()
            return self.process_empty_scalar(token.start_mark)
```
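Both the key and the value in the production are optional, so a key with no value still produces an entry, via `process_empty_scalar`. Sketch:

```python
# Sketch: a key with no value node becomes an empty scalar, resolved to None
# by the safe constructor.
import ruamel.yaml

print(ruamel.yaml.safe_load(u"key:\nother: 1"))
# expected: {'key': None, 'other': 1}
```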
```python
    # flow_sequence     ::= FLOW-SEQUENCE-START
    #                       (flow_sequence_entry FLOW-ENTRY)*
    #                       flow_sequence_entry?
    #                       FLOW-SEQUENCE-END
    # flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
    #
    # Note that while production rules for both flow_sequence_entry and
    # flow_mapping_entry are equal, their interpretations are different.
    # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
    # generates an inline mapping (set syntax).

    def parse_flow_sequence_first_entry(self):
        # type: () -> Any
        token = self.scanner.get_token()
        self.marks.append(token.start_mark)
        return self.parse_flow_sequence_entry(first=True)

    def parse_flow_sequence_entry(self, first=False):
        # type: (bool) -> Any
        if not self.scanner.check_token(FlowSequenceEndToken):
            if not first:
                if self.scanner.check_token(FlowEntryToken):
                    self.scanner.get_token()
                else:
                    token = self.scanner.peek_token()
                    raise ParserError(
                        'while parsing a flow sequence',
                        self.marks[-1],
                        "expected ',' or ']', but got %r" % token.id,
                        token.start_mark,
                    )

            if self.scanner.check_token(KeyToken):
                token = self.scanner.peek_token()
                event = MappingStartEvent(
                    None, None, True, token.start_mark, token.end_mark, flow_style=True
                )  # type: Any
                self.state = self.parse_flow_sequence_entry_mapping_key
                return event
            elif not self.scanner.check_token(FlowSequenceEndToken):
                self.states.append(self.parse_flow_sequence_entry)
                return self.parse_flow_node()
        token = self.scanner.get_token()
        event = SequenceEndEvent(token.start_mark, token.end_mark, comment=token.comment)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_flow_sequence_entry_mapping_key(self):
        # type: () -> Any
        token = self.scanner.get_token()
        if not self.scanner.check_token(ValueToken, FlowEntryToken, FlowSequenceEndToken):
            self.states.append(self.parse_flow_sequence_entry_mapping_value)
            return self.parse_flow_node()
        else:
            self.state = self.parse_flow_sequence_entry_mapping_value
            return self.process_empty_scalar(token.end_mark)

    def parse_flow_sequence_entry_mapping_value(self):
        # type: () -> Any
        if self.scanner.check_token(ValueToken):
            token = self.scanner.get_token()
            if not self.scanner.check_token(FlowEntryToken, FlowSequenceEndToken):
                self.states.append(self.parse_flow_sequence_entry_mapping_end)
                return self.parse_flow_node()
            else:
                self.state = self.parse_flow_sequence_entry_mapping_end
                return self.process_empty_scalar(token.end_mark)
        else:
            self.state = self.parse_flow_sequence_entry_mapping_end
            token = self.scanner.peek_token()
            return self.process_empty_scalar(token.start_mark)

    def parse_flow_sequence_entry_mapping_end(self):
        # type: () -> Any
        self.state = self.parse_flow_sequence_entry
        token = self.scanner.peek_token()
        return MappingEndEvent(token.start_mark, token.start_mark)
```
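The inline-mapping interpretation mentioned in the comment block above is visible from plain YAML: a `key: value` pair inside a flow sequence becomes its own single-pair mapping. Sketch:

```python
# Sketch: `a: 1` inside a flow sequence produces MappingStart/Scalar/Scalar/
# MappingEnd around that one pair, i.e. an inline single-pair mapping.
import ruamel.yaml

print(ruamel.yaml.safe_load(u"[a: 1, b]"))
# expected: [{'a': 1}, 'b']
```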
```python
    # flow_mapping      ::= FLOW-MAPPING-START
    #                       (flow_mapping_entry FLOW-ENTRY)*
    #                       flow_mapping_entry?
    #                       FLOW-MAPPING-END
    # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?

    def parse_flow_mapping_first_key(self):
        # type: () -> Any
        token = self.scanner.get_token()
        self.marks.append(token.start_mark)
        return self.parse_flow_mapping_key(first=True)

    def parse_flow_mapping_key(self, first=False):
        # type: (Any) -> Any
        if not self.scanner.check_token(FlowMappingEndToken):
            if not first:
                if self.scanner.check_token(FlowEntryToken):
                    self.scanner.get_token()
                else:
                    token = self.scanner.peek_token()
                    raise ParserError(
                        'while parsing a flow mapping',
                        self.marks[-1],
                        "expected ',' or '}', but got %r" % token.id,
                        token.start_mark,
                    )
            if self.scanner.check_token(KeyToken):
                token = self.scanner.get_token()
                if not self.scanner.check_token(
                    ValueToken, FlowEntryToken, FlowMappingEndToken
                ):
                    self.states.append(self.parse_flow_mapping_value)
                    return self.parse_flow_node()
                else:
                    self.state = self.parse_flow_mapping_value
                    return self.process_empty_scalar(token.end_mark)
            elif self.resolver.processing_version > (1, 1) and self.scanner.check_token(
                ValueToken
            ):
                self.state = self.parse_flow_mapping_value
                return self.process_empty_scalar(self.scanner.peek_token().end_mark)
            elif not self.scanner.check_token(FlowMappingEndToken):
                self.states.append(self.parse_flow_mapping_empty_value)
                return self.parse_flow_node()
        token = self.scanner.get_token()
        event = MappingEndEvent(token.start_mark, token.end_mark, comment=token.comment)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_flow_mapping_value(self):
        # type: () -> Any
        if self.scanner.check_token(ValueToken):
            token = self.scanner.get_token()
            if not self.scanner.check_token(FlowEntryToken, FlowMappingEndToken):
                self.states.append(self.parse_flow_mapping_key)
                return self.parse_flow_node()
            else:
                self.state = self.parse_flow_mapping_key
                return self.process_empty_scalar(token.end_mark)
        else:
            self.state = self.parse_flow_mapping_key
            token = self.scanner.peek_token()
            return self.process_empty_scalar(token.start_mark)

    def parse_flow_mapping_empty_value(self):
        # type: () -> Any
        self.state = self.parse_flow_mapping_key
        return self.process_empty_scalar(self.scanner.peek_token().start_mark)

    def process_empty_scalar(self, mark, comment=None):
        # type: (Any, Any) -> Any
        return ScalarEvent(None, None, (True, False), "", mark, mark, comment=comment)
```
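In a flow mapping, an entry that is just a key (the `parse_flow_mapping_empty_value` path) also gets an empty scalar for its value. Sketch:

```python
# Sketch: `b` has no value node, so it maps to an empty scalar (None under
# the safe constructor).
import ruamel.yaml

print(ruamel.yaml.safe_load(u"{a: 1, b}"))
# expected: {'a': 1, 'b': None}
```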
```python

class RoundTripParser(Parser):
    """round-trip is a safe loader that wants to see the unmangled tag"""

    def transform_tag(self, handle, suffix):
        # type: (Any, Any) -> Any
        # return self.tag_handles[handle] + suffix
        if handle == '!!' and suffix in (
            u'null',
            u'bool',
            u'int',
            u'float',
            u'binary',
            u'timestamp',
            u'omap',
            u'pairs',
            u'set',
            u'str',
            u'seq',
            u'map',
        ):
            return Parser.transform_tag(self, handle, suffix)
        return handle + suffix
```
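So the round-trip parser expands only the core `!!` tags and leaves anything else as the literal `!!suffix` text, which the emitter can then write back verbatim. A sketch that pokes at this directly; bypassing `__init__` with `__new__` and setting `tag_handles` by hand is an illustration trick, not the supported API:

```python
# Sketch: core secondary tags are expanded, unknown ones kept unmangled.
from ruamel.yaml.parser import RoundTripParser

rtp = RoundTripParser.__new__(RoundTripParser)  # skip loader wiring
rtp.tag_handles = {u'!': u'!', u'!!': u'tag:yaml.org,2002:'}

print(rtp.transform_tag('!!', u'str'))     # expected: tag:yaml.org,2002:str
print(rtp.transform_tag('!!', u'custom'))  # expected: !!custom
```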
