1 
2 //          Copyright Ferdinand Majerech 2011-2014.
3 // Distributed under the Boost Software License, Version 1.0.
4 //    (See accompanying file LICENSE_1_0.txt or copy at
5 //          http://www.boost.org/LICENSE_1_0.txt)
6 
7 /**
8  * YAML parser.
9  * Code based on PyYAML: http://www.pyyaml.org
10  */
11 module dyaml.parser;
12 
13 
14 import std.algorithm;
15 import std.array;
16 import std.container;
17 import std.conv;
18 import std.exception;
19 import std.typecons;
20 
21 import dyaml.anchor;
22 import dyaml.event;
23 import dyaml.exception;
24 import dyaml.scanner;
25 import dyaml.style;
26 import dyaml.token;
27 import dyaml.tag;
28 import dyaml.tagdirective;
29 
30 
31 package:
32 /**
33  * The following YAML grammar is LL(1) and is parsed by a recursive descent
34  * parser.
35  *
36  * stream            ::= STREAM-START implicit_document? explicit_document* STREAM-END
37  * implicit_document ::= block_node DOCUMENT-END*
38  * explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
39  * block_node_or_indentless_sequence ::=
40  *                       ALIAS
41  *                       | properties (block_content | indentless_block_sequence)?
42  *                       | block_content
43  *                       | indentless_block_sequence
44  * block_node        ::= ALIAS
45  *                       | properties block_content?
46  *                       | block_content
47  * flow_node         ::= ALIAS
48  *                       | properties flow_content?
49  *                       | flow_content
50  * properties        ::= TAG ANCHOR? | ANCHOR TAG?
51  * block_content     ::= block_collection | flow_collection | SCALAR
52  * flow_content      ::= flow_collection | SCALAR
53  * block_collection  ::= block_sequence | block_mapping
54  * flow_collection   ::= flow_sequence | flow_mapping
55  * block_sequence    ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
56  * indentless_sequence   ::= (BLOCK-ENTRY block_node?)+
57  * block_mapping     ::= BLOCK-MAPPING_START
58  *                       ((KEY block_node_or_indentless_sequence?)?
59  *                       (VALUE block_node_or_indentless_sequence?)?)*
60  *                       BLOCK-END
61  * flow_sequence     ::= FLOW-SEQUENCE-START
62  *                       (flow_sequence_entry FLOW-ENTRY)*
63  *                       flow_sequence_entry?
64  *                       FLOW-SEQUENCE-END
65  * flow_sequence_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
66  * flow_mapping      ::= FLOW-MAPPING-START
67  *                       (flow_mapping_entry FLOW-ENTRY)*
68  *                       flow_mapping_entry?
69  *                       FLOW-MAPPING-END
70  * flow_mapping_entry    ::= flow_node | KEY flow_node? (VALUE flow_node?)?
71  *
72  * FIRST sets:
73  *
74  * stream: { STREAM-START }
75  * explicit_document: { DIRECTIVE DOCUMENT-START }
76  * implicit_document: FIRST(block_node)
77  * block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
78  * flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
79  * block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
80  * flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
81  * block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
82  * flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
83  * block_sequence: { BLOCK-SEQUENCE-START }
84  * block_mapping: { BLOCK-MAPPING-START }
85  * block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
 * indentless_sequence: { BLOCK-ENTRY }
88  * flow_sequence: { FLOW-SEQUENCE-START }
89  * flow_mapping: { FLOW-MAPPING-START }
90  * flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
91  * flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
92  */
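
// For illustration (not part of the formal grammar above), a small document such as
//
//   foo: [1, 2]
//
// is parsed into the event sequence: STREAM-START, DOCUMENT-START (implicit),
// MAPPING-START (block), SCALAR "foo", SEQUENCE-START (flow), SCALAR "1",
// SCALAR "2", SEQUENCE-END, MAPPING-END, DOCUMENT-END (implicit), STREAM-END.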
93 
94 
95 /**
96  * Marked exception thrown at parser errors.
97  *
98  * See_Also: MarkedYAMLException
99  */
100 class ParserException : MarkedYAMLException
101 {
102     mixin MarkedExceptionCtors;
103 }
104 
105 private alias ParserException Error;
106 
107 /// Generates events from tokens provided by a Scanner.
108 ///
/// While Parser receives tokens with non-const character slices, the events it
/// produces contain immutable strings, which are usually the same slices cast to
/// string. Parser is the last layer of D:YAML that may modify these slices.
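///
/// A minimal usage sketch (illustrative; obtaining the Scanner is internal to
/// D:YAML and assumed to be set up elsewhere):
/// --------------------
/// auto parser = new Parser(scanner);
/// while(parser.checkEvent())
/// {
///     immutable event = parser.getEvent();
///     // dispatch on event.id here
/// }
/// --------------------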
113 final class Parser
114 {
115     private:
116         ///Default tag handle shortcuts and replacements.
117         static TagDirective[] defaultTagDirectives_ = 
118             [TagDirective("!", "!"), TagDirective("!!", "tag:yaml.org,2002:")];
119 
120         ///Scanner providing YAML tokens.
121         Scanner scanner_;
122 
123         ///Event produced by the most recent state.
124         Event currentEvent_;
125 
126         ///YAML version string.
127         string YAMLVersion_ = null;
128         ///Tag handle shortcuts and replacements.
129         TagDirective[] tagDirectives_;
130 
131         ///Stack of states.
132         Array!(Event delegate()) states_;
133         ///Stack of marks used to keep track of extents of e.g. YAML collections.
134         Array!Mark marks_;
135 
136         ///Current state.
137         Event delegate() state_;
138 
139     public:
140         ///Construct a Parser using specified Scanner.
141         this(Scanner scanner) @trusted
142         {
143             state_ = &parseStreamStart;
144             scanner_ = scanner;
145             states_.reserve(32);
146             marks_.reserve(32);
147         }
148 
149         ///Destroy the parser.
150         @trusted ~this()
151         {
152             currentEvent_.destroy();
153             tagDirectives_.destroy();
154             tagDirectives_ = null;
155             states_.destroy();
156             marks_.destroy();
157         }
158 
159         /**
         * Check if the next event is one of the specified types.
161          *
162          * If no types are specified, checks if any events are left.
163          *
164          * Params:  ids = Event IDs to check for.
165          *
         * Returns: true if the next event is one of the specified types, or,
         *          if no types were specified, true if any events are left;
         *          false otherwise.
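         *
         * Example (an illustrative sketch; assumes the EventID.Scalar member
         * defined in dyaml.event):
         * --------------------
         * while(parser.checkEvent())
         * {
         *     if(parser.checkEvent(EventID.Scalar))
         *     {
         *         immutable scalar = parser.getEvent();
         *         // ... process the scalar event ...
         *     }
         *     else { parser.getEvent(); }
         * }
         * --------------------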
169          */
170         bool checkEvent(EventID[] ids...) @trusted
171         {
172             //Check if the next event is one of specified types.
173             if(currentEvent_.isNull && state_ !is null)
174             {
175                 currentEvent_ = state_();
176             }
177 
178             if(!currentEvent_.isNull)
179             {
180                 if(ids.length == 0){return true;}
181                 else
182                 {
183                     const nextId = currentEvent_.id;
184                     foreach(id; ids)
185                     {
186                         if(nextId == id){return true;}
187                     }
188                 }
189             }
190 
191             return false;
192         }
193 
194         /**
195          * Return the next event, but keep it in the queue.
196          *
197          * Must not be called if there are no events left.
198          */
199         immutable(Event) peekEvent() @trusted
200         {
201             if(currentEvent_.isNull && state_ !is null)
202             {
203                 currentEvent_ = state_();
204             }
205             if(!currentEvent_.isNull){return cast(immutable Event)currentEvent_;}
206             assert(false, "No event left to peek");
207         }
208 
209         /**
210          * Return the next event, removing it from the queue.
211          *
212          * Must not be called if there are no events left.
213          */
214         immutable(Event) getEvent() @trusted
215         {
216             //Get the next event and proceed further.
217             if(currentEvent_.isNull && state_ !is null)
218             {
219                 currentEvent_ = state_();
220             }
221 
222             if(!currentEvent_.isNull)
223             {
224                 immutable Event result = cast(immutable Event)currentEvent_;
225                 currentEvent_.id = EventID.Invalid;
226                 return result;
227             }
228             assert(false, "No event left to get");
229         }
230 
231     private:
232         ///Pop and return the newest state in states_.
233         Event delegate() popState() @trusted
234         {
235             enforce(states_.length > 0,
236                     new YAMLException("Parser: Need to pop state but no states left to pop"));
237             const result = states_.back;
238             states_.length = states_.length - 1;
239             return result;
240         }
241 
242         ///Pop and return the newest mark in marks_.
243         Mark popMark() @trusted
244         {
245             enforce(marks_.length > 0,
246                     new YAMLException("Parser: Need to pop mark but no marks left to pop"));
247             const result = marks_.back;
248             marks_.length = marks_.length - 1;
249             return result;
250         }
251 
252         /**
253          * stream    ::= STREAM-START implicit_document? explicit_document* STREAM-END
254          * implicit_document ::= block_node DOCUMENT-END*
255          * explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
256          */
257 
258         ///Parse stream start.
259         Event parseStreamStart() @safe
260         {
261             const token = scanner_.getToken();
262             state_ = &parseImplicitDocumentStart;
263             return streamStartEvent(token.startMark, token.endMark, token.encoding);
264         }
265 
        /// Parse an implicit document start; if an explicit document is detected, parse that instead.
267         Event parseImplicitDocumentStart() @trusted
268         {
269             // Parse an implicit document.
270             if(!scanner_.checkToken(TokenID.Directive, TokenID.DocumentStart,
271                                     TokenID.StreamEnd))
272             {
273                 tagDirectives_  = defaultTagDirectives_;
274                 const token = scanner_.peekToken();
275 
276                 states_ ~= &parseDocumentEnd;
277                 state_ = &parseBlockNode;
278 
279                 return documentStartEvent(token.startMark, token.endMark, false, null, null);
280             }
281             return parseDocumentStart();
282         }
283 
284         ///Parse explicit document start.
285         Event parseDocumentStart() @trusted
286         {
287             //Parse any extra document end indicators.
288             while(scanner_.checkToken(TokenID.DocumentEnd)){scanner_.getToken();}
289 
290             //Parse an explicit document.
291             if(!scanner_.checkToken(TokenID.StreamEnd))
292             {
293                 const startMark = scanner_.peekToken().startMark;
294 
295                 auto tagDirectives = processDirectives();
296                 enforce(scanner_.checkToken(TokenID.DocumentStart),
297                         new Error("Expected document start but found " ~
298                                   scanner_.peekToken().idString,
299                                   scanner_.peekToken().startMark));
300 
301                 const endMark = scanner_.getToken().endMark;
302                 states_ ~= &parseDocumentEnd;
303                 state_ = &parseDocumentContent;
304                 return documentStartEvent(startMark, endMark, true, YAMLVersion_, tagDirectives);
305             }
306             else
307             {
308                 //Parse the end of the stream.
309                 const token = scanner_.getToken();
310                 assert(states_.length == 0);
311                 assert(marks_.length == 0);
312                 state_ = null;
313                 return streamEndEvent(token.startMark, token.endMark);
314             }
315         }
316 
317         ///Parse document end (explicit or implicit).
318         Event parseDocumentEnd() @safe
319         {
320             Mark startMark = scanner_.peekToken().startMark;
321             const bool explicit = scanner_.checkToken(TokenID.DocumentEnd);
322             Mark endMark = explicit ? scanner_.getToken().endMark : startMark;
323 
324             state_ = &parseDocumentStart;
325 
326             return documentEndEvent(startMark, endMark, explicit);
327         }
328 
329         ///Parse document content.
330         Event parseDocumentContent() @safe
331         {
332             if(scanner_.checkToken(TokenID.Directive,   TokenID.DocumentStart,
333                                    TokenID.DocumentEnd, TokenID.StreamEnd))
334             {
335                 state_ = popState();
336                 return processEmptyScalar(scanner_.peekToken().startMark);
337             }
338             return parseBlockNode();
339         }
340 
341         /// Process directives at the beginning of a document.
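        ///
        /// For example, a `%TAG !e! tag:example.com,2000:app/` directive (handle and
        /// prefix are purely illustrative here) appends
        /// `TagDirective("!e!", "tag:example.com,2000:app/")` to tagDirectives_, in
        /// addition to the default `!` and `!!` handles.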
342         TagDirective[] processDirectives() @system
343         {
344             // Destroy version and tag handles from previous document.
345             YAMLVersion_ = null;
346             tagDirectives_.length = 0;
347 
348             // Process directives.
349             while(scanner_.checkToken(TokenID.Directive))
350             {
351                 const token = scanner_.getToken();
352                 const value = token.value;
353                 if(token.directive == DirectiveType.YAML)
354                 {
355                     enforce(YAMLVersion_ is null,
356                             new Error("Duplicate YAML directive", token.startMark));
                    // value is "<major>.<minor>"; only major version 1 is supported.
                    const major = value.split(".")[0];
                    enforce(major == "1",
359                             new Error("Incompatible document (version 1.x is required)",
360                                       token.startMark));
361                     YAMLVersion_ = cast(string)value;
362                 }
363                 else if(token.directive == DirectiveType.TAG)
364                 {
365                     auto handle = cast(string)value[0 .. token.valueDivider];
366 
367                     foreach(ref pair; tagDirectives_)
368                     {
                        // Ensure the handle was not already declared by a previous
                        // TAG directive in this document.
                        const h = pair.handle;
371                         enforce(h != handle, new Error("Duplicate tag handle: " ~ handle,
372                                                        token.startMark));
373                     }
374                     tagDirectives_ ~= 
375                         TagDirective(handle, cast(string)value[token.valueDivider .. $]);
376                 }
377                 // Any other directive type is ignored (only YAML and TAG are in YAML
378                 // 1.1/1.2, any other directives are "reserved")
379             }
380 
381             TagDirective[] value = tagDirectives_;
382 
383             //Add any default tag handles that haven't been overridden.
384             foreach(ref defaultPair; defaultTagDirectives_)
385             {
386                 bool found = false;
387                 foreach(ref pair; tagDirectives_) if(defaultPair.handle == pair.handle)
388                 {
389                     found = true;
390                     break;
391                 }
392                 if(!found) {tagDirectives_ ~= defaultPair; }
393             }
394 
395             return value;
396         }
397 
398         /**
399          * block_node_or_indentless_sequence ::= ALIAS
400          *               | properties (block_content | indentless_block_sequence)?
401          *               | block_content
402          *               | indentless_block_sequence
403          * block_node    ::= ALIAS
404          *                   | properties block_content?
405          *                   | block_content
406          * flow_node     ::= ALIAS
407          *                   | properties flow_content?
408          *                   | flow_content
409          * properties    ::= TAG ANCHOR? | ANCHOR TAG?
410          * block_content     ::= block_collection | flow_collection | SCALAR
411          * flow_content      ::= flow_collection | SCALAR
412          * block_collection  ::= block_sequence | block_mapping
413          * flow_collection   ::= flow_sequence | flow_mapping
414          */
415 
416         ///Parse a node.
417         Event parseNode(const Flag!"block" block,
418                         const Flag!"indentlessSequence" indentlessSequence = No.indentlessSequence)
419             @trusted
420         {
421             if(scanner_.checkToken(TokenID.Alias))
422             {
423                 const token = scanner_.getToken();
424                 state_ = popState();
425                 return aliasEvent(token.startMark, token.endMark, 
426                                   Anchor(cast(string)token.value));
427             }
428 
429             string anchor = null;
430             string tag = null;
431             Mark startMark, endMark, tagMark;
432             bool invalidMarks = true;
433             // The index in the tag string where tag handle ends and tag suffix starts.
434             uint tagHandleEnd;
435 
436             //Get anchor/tag if detected. Return false otherwise.
437             bool get(const TokenID id, const Flag!"first" first, ref string target)
438             {
439                 if(!scanner_.checkToken(id)){return false;}
440                 invalidMarks = false;
441                 const token = scanner_.getToken();
442                 if(first){startMark = token.startMark;}
443                 if(id == TokenID.Tag)
444                 {
445                     tagMark = token.startMark;
446                     tagHandleEnd = token.valueDivider;
447                 }
448                 endMark = token.endMark;
449                 target  = cast(string)token.value;
450                 return true;
451             }
452 
453             //Anchor and/or tag can be in any order.
454             if(get(TokenID.Anchor, Yes.first, anchor)){get(TokenID.Tag, No.first, tag);}
455             else if(get(TokenID.Tag, Yes.first, tag)) {get(TokenID.Anchor, No.first, anchor);}
456 
457             if(tag !is null){tag = processTag(tag, tagHandleEnd, startMark, tagMark);}
458 
459             if(invalidMarks)
460             {
461                 startMark = endMark = scanner_.peekToken().startMark;
462             }
463 
464             bool implicit = (tag is null || tag == "!");
465 
466             if(indentlessSequence && scanner_.checkToken(TokenID.BlockEntry))
467             {
468                 state_ = &parseIndentlessSequenceEntry;
469                 return sequenceStartEvent
470                     (startMark, scanner_.peekToken().endMark, Anchor(anchor),
471                      Tag(tag), implicit, CollectionStyle.Block);
472             }
473 
474             if(scanner_.checkToken(TokenID.Scalar))
475             {
476                 auto token = scanner_.getToken();
477                 auto value = token.style == ScalarStyle.DoubleQuoted
478                            ? handleDoubleQuotedScalarEscapes(token.value)
479                            : cast(string)token.value;
480 
481                 implicit = (token.style == ScalarStyle.Plain && tag is null) || tag == "!";
482                 bool implicit_2 = (!implicit) && tag is null;
483                 state_ = popState();
484                 return scalarEvent(startMark, token.endMark, Anchor(anchor), Tag(tag),
485                                    tuple(implicit, implicit_2), value, token.style);
486             }
487 
488             if(scanner_.checkToken(TokenID.FlowSequenceStart))
489             {
490                 endMark = scanner_.peekToken().endMark;
491                 state_ = &parseFlowSequenceEntry!(Yes.first);
492                 return sequenceStartEvent(startMark, endMark, Anchor(anchor), Tag(tag),
493                                           implicit, CollectionStyle.Flow);
494             }
495 
496             if(scanner_.checkToken(TokenID.FlowMappingStart))
497             {
498                 endMark = scanner_.peekToken().endMark;
499                 state_ = &parseFlowMappingKey!(Yes.first);
500                 return mappingStartEvent(startMark, endMark, Anchor(anchor), Tag(tag),
501                                          implicit, CollectionStyle.Flow);
502             }
503 
504             if(block && scanner_.checkToken(TokenID.BlockSequenceStart))
505             {
506                 endMark = scanner_.peekToken().endMark;
507                 state_ = &parseBlockSequenceEntry!(Yes.first);
508                 return sequenceStartEvent(startMark, endMark, Anchor(anchor), Tag(tag),
509                                           implicit, CollectionStyle.Block);
510             }
511 
512             if(block && scanner_.checkToken(TokenID.BlockMappingStart))
513             {
514                 endMark = scanner_.peekToken().endMark;
515                 state_ = &parseBlockMappingKey!(Yes.first);
516                 return mappingStartEvent(startMark, endMark, Anchor(anchor), Tag(tag),
517                                          implicit, CollectionStyle.Block);
518             }
519 
520             if(anchor != null || tag !is null)
521             {
522                 state_ = popState();
523 
                //PyYAML passes a tuple(implicit, false) as the second-to-last argument
                //here; the second bool is never used downstream, so we just pass false.
526 
527                 //Empty scalars are allowed even if a tag or an anchor is specified.
528                 return scalarEvent(startMark, endMark, Anchor(anchor), Tag(tag),
529                                    tuple(implicit, false) , "");
530             }
531 
532             const token = scanner_.peekToken();
533             throw new Error("While parsing a " ~ (block ? "block" : "flow") ~ " node",
534                             startMark, "expected node content, but found: "
535                             ~ token.idString, token.startMark);
536         }
537 
538         /// Handle escape sequences in a double quoted scalar.
539         ///
540         /// Moved here from scanner as it can't always be done in-place with slices.
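        ///
        /// For example (illustrative; the scanner has already stripped the quotes and
        /// validated the escapes), the value `a\tb\x41` becomes "a", a TAB, "b", "A":
        /// `\t` is a single-character escape and `\x41` is a two-digit hex escape.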
541         string handleDoubleQuotedScalarEscapes(char[] tokenValue)
542         {
543             string notInPlace;
544             bool inEscape = false;
545             import dyaml.nogcutil;
546             auto appender = appenderNoGC(cast(char[])tokenValue);
547             for(char[] oldValue = tokenValue; !oldValue.empty();)
548             {
549                 const dchar c = oldValue.front();
550                 oldValue.popFront();
551 
552                 if(!inEscape)
553                 {
554                     if(c != '\\')
555                     {
556                         if(notInPlace is null) { appender.putDChar(c); }
557                         else                   { notInPlace ~= c; }
558                         continue;
559                     }
560                     // Escape sequence starts with a '\'
561                     inEscape = true;
562                     continue;
563                 }
564 
565                 import dyaml.escapes;
566                 scope(exit) { inEscape = false; }
567 
568                 // 'Normal' escape sequence.
569                 if(dyaml.escapes.escapes.canFind(c))
570                 {
571                     if(notInPlace is null)
572                     {
                        // \L and \P can't be handled in place as they expand
                        // into multi-byte Unicode characters.
575                         if(c != 'L' && c != 'P')
576                         {
577                             appender.putDChar(dyaml.escapes.fromEscape(c));
578                             continue;
579                         }
                        // Need to duplicate as the result won't fit into
                        // token.value, which is the buffer the appender writes to.
582                         notInPlace = appender.data.dup;
583                         notInPlace ~= dyaml.escapes.fromEscape(c);
584                         continue;
585                     }
586                     notInPlace ~= dyaml.escapes.fromEscape(c);
587                     continue;
588                 }
589 
590                 // Unicode char written in hexadecimal in an escape sequence.
591                 if(dyaml.escapes.escapeHexCodeList.canFind(c))
592                 {
593                     // Scanner has already checked that the hex string is valid.
594 
595                     const hexLength = dyaml.escapes.escapeHexLength(c);
                    // Hex digits are always ASCII (1 byte each), so this slicing works.
597                     char[] hex = oldValue[0 .. hexLength];
598                     oldValue = oldValue[hexLength .. $];
599                     import std.ascii : isHexDigit;
600                     assert(!hex.canFind!(d => !d.isHexDigit),
601                             "Scanner must ensure the hex string is valid");
602 
603                     bool overflow;
604                     const decoded = cast(dchar)parseNoGC!int(hex, 16u, overflow);
605                     assert(!overflow, "Scanner must ensure there's no overflow");
606                     if(notInPlace is null) { appender.putDChar(decoded); }
607                     else                   { notInPlace ~= decoded; }
608                     continue;
609                 }
610 
611                 assert(false, "Scanner must handle unsupported escapes");
612             }
613 
614             return notInPlace is null ? cast(string)appender.data : notInPlace;
615         }
616 
617         /**
618          * Process a tag string retrieved from a tag token.
619          *
620          * Params:  tag       = Tag before processing.
621          *          handleEnd = Index in tag where tag handle ends and tag suffix
622          *                      starts.
623          *          startMark = Position of the node the tag belongs to.
624          *          tagMark   = Position of the tag.
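         *
         * Example: with the default tag directives, the tag `!!str`
         * (handleEnd == 2) splits into handle `!!` and suffix `str`, and
         * resolves to `tag:yaml.org,2002:str`.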
625          */
626         string processTag(const string tag, const uint handleEnd,
627                           const Mark startMark, const Mark tagMark)
628             const @trusted
629         {
630             const handle = tag[0 .. handleEnd];
631             const suffix = tag[handleEnd .. $];
632 
633             if(handle.length > 0)
634             {
635                 string replacement = null;
636                 foreach(ref pair; tagDirectives_)
637                 {
638                     if(pair.handle == handle)
639                     {
640                         replacement = pair.prefix;
641                         break;
642                     }
643                 }
644                 //handle must be in tagDirectives_
645                 enforce(replacement !is null,
646                         new Error("While parsing a node", startMark,
647                                   "found undefined tag handle: " ~ handle, tagMark));
648                 return replacement ~ suffix;
649             }
650             return suffix;
651         }
652 
653         ///Wrappers to parse nodes.
654         Event parseBlockNode() @safe {return parseNode(Yes.block);}
655         Event parseFlowNode() @safe {return parseNode(No.block);}
656         Event parseBlockNodeOrIndentlessSequence() @safe {return parseNode(Yes.block, Yes.indentlessSequence);}
657 
658         ///block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
659 
660         ///Parse an entry of a block sequence. If first is true, this is the first entry.
661         Event parseBlockSequenceEntry(Flag!"first" first)() @trusted
662         {
663             static if(first){marks_ ~= scanner_.getToken().startMark;}
664 
665             if(scanner_.checkToken(TokenID.BlockEntry))
666             {
667                 const token = scanner_.getToken();
668                 if(!scanner_.checkToken(TokenID.BlockEntry, TokenID.BlockEnd))
669                 {
670                     states_~= &parseBlockSequenceEntry!(No.first);
671                     return parseBlockNode();
672                 }
673 
674                 state_ = &parseBlockSequenceEntry!(No.first);
675                 return processEmptyScalar(token.endMark);
676             }
677 
678             if(!scanner_.checkToken(TokenID.BlockEnd))
679             {
680                 const token = scanner_.peekToken();
681                 throw new Error("While parsing a block collection", marks_.back,
682                                 "expected block end, but found " ~ token.idString,
683                                 token.startMark);
684             }
685 
686             state_ = popState();
687             popMark();
688             const token = scanner_.getToken();
689             return sequenceEndEvent(token.startMark, token.endMark);
690         }
691 
692         ///indentless_sequence ::= (BLOCK-ENTRY block_node?)+
693 
694         ///Parse an entry of an indentless sequence.
695         Event parseIndentlessSequenceEntry() @trusted
696         {
697             if(scanner_.checkToken(TokenID.BlockEntry))
698             {
699                 const token = scanner_.getToken();
700 
701                 if(!scanner_.checkToken(TokenID.BlockEntry, TokenID.Key,
702                                         TokenID.Value, TokenID.BlockEnd))
703                 {
704                     states_ ~= &parseIndentlessSequenceEntry;
705                     return parseBlockNode();
706                 }
707 
708                 state_ = &parseIndentlessSequenceEntry;
709                 return processEmptyScalar(token.endMark);
710             }
711 
712             state_ = popState();
713             const token = scanner_.peekToken();
714             return sequenceEndEvent(token.startMark, token.endMark);
715         }
716 
717         /**
718          * block_mapping     ::= BLOCK-MAPPING_START
719          *                       ((KEY block_node_or_indentless_sequence?)?
720          *                       (VALUE block_node_or_indentless_sequence?)?)*
721          *                       BLOCK-END
722          */
723 
724         ///Parse a key in a block mapping. If first is true, this is the first key.
725         Event parseBlockMappingKey(Flag!"first" first)() @trusted
726         {
727             static if(first){marks_ ~= scanner_.getToken().startMark;}
728 
729             if(scanner_.checkToken(TokenID.Key))
730             {
731                 const token = scanner_.getToken();
732 
733                 if(!scanner_.checkToken(TokenID.Key, TokenID.Value, TokenID.BlockEnd))
734                 {
735                     states_ ~= &parseBlockMappingValue;
736                     return parseBlockNodeOrIndentlessSequence();
737                 }
738 
739                 state_ = &parseBlockMappingValue;
740                 return processEmptyScalar(token.endMark);
741             }
742 
743             if(!scanner_.checkToken(TokenID.BlockEnd))
744             {
745                 const token = scanner_.peekToken();
746                 throw new Error("While parsing a block mapping", marks_.back,
747                                 "expected block end, but found: " ~ token.idString,
748                                 token.startMark);
749             }
750 
751             state_ = popState();
752             popMark();
753             const token = scanner_.getToken();
754             return mappingEndEvent(token.startMark, token.endMark);
755         }
756 
757         ///Parse a value in a block mapping.
758         Event parseBlockMappingValue() @trusted
759         {
760             if(scanner_.checkToken(TokenID.Value))
761             {
762                 const token = scanner_.getToken();
763 
764                 if(!scanner_.checkToken(TokenID.Key, TokenID.Value, TokenID.BlockEnd))
765                 {
766                     states_ ~= &parseBlockMappingKey!(No.first);
767                     return parseBlockNodeOrIndentlessSequence();
768                 }
769 
770                 state_ = &parseBlockMappingKey!(No.first);
771                 return processEmptyScalar(token.endMark);
772             }
773 
774             state_= &parseBlockMappingKey!(No.first);
775             return processEmptyScalar(scanner_.peekToken().startMark);
776         }
777 
778         /**
779          * flow_sequence     ::= FLOW-SEQUENCE-START
780          *                       (flow_sequence_entry FLOW-ENTRY)*
781          *                       flow_sequence_entry?
782          *                       FLOW-SEQUENCE-END
783          * flow_sequence_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
784          *
         * Note that while the production rules for flow_sequence_entry and
         * flow_mapping_entry are identical, their interpretations differ.
         * For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
         * generates an inline mapping (set syntax).
789          */
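        // For example, the flow sequence `[ a: 1, b ]` produces an inline
        // single-pair mapping for `a: 1` (wrapped in mapping start/end events),
        // followed by the plain scalar `b`.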
790 
791         ///Parse an entry in a flow sequence. If first is true, this is the first entry.
792         Event parseFlowSequenceEntry(Flag!"first" first)() @trusted
793         {
794             static if(first){marks_ ~= scanner_.getToken().startMark;}
795 
796             if(!scanner_.checkToken(TokenID.FlowSequenceEnd))
797             {
798                 static if(!first)
799                 {
800                     if(scanner_.checkToken(TokenID.FlowEntry))
801                     {
802                         scanner_.getToken();
803                     }
804                     else
805                     {
806                         const token = scanner_.peekToken();
807                         throw new Error("While parsing a flow sequence", marks_.back,
808                                         "expected ',' or ']', but got: " ~
809                                         token.idString, token.startMark);
810                     }
811                 }
812 
813                 if(scanner_.checkToken(TokenID.Key))
814                 {
815                     const token = scanner_.peekToken();
816                     state_ = &parseFlowSequenceEntryMappingKey;
817                     return mappingStartEvent(token.startMark, token.endMark,
818                                              Anchor(), Tag(), true, CollectionStyle.Flow);
819                 }
820                 else if(!scanner_.checkToken(TokenID.FlowSequenceEnd))
821                 {
822                     states_ ~= &parseFlowSequenceEntry!(No.first);
823                     return parseFlowNode();
824                 }
825             }
826 
827             const token = scanner_.getToken();
828             state_ = popState();
829             popMark();
830             return sequenceEndEvent(token.startMark, token.endMark);
831         }
832 
833         ///Parse a key in flow context.
834         Event parseFlowKey(in Event delegate() nextState) @trusted
835         {
836             const token = scanner_.getToken();
837 
838             if(!scanner_.checkToken(TokenID.Value, TokenID.FlowEntry,
839                                     TokenID.FlowSequenceEnd))
840             {
841                 states_ ~= nextState;
842                 return parseFlowNode();
843             }
844 
845             state_ = nextState;
846             return processEmptyScalar(token.endMark);
847         }
848 
849         ///Parse a mapping key in an entry in a flow sequence.
850         Event parseFlowSequenceEntryMappingKey() @safe
851         {
852             return parseFlowKey(&parseFlowSequenceEntryMappingValue);
853         }
854 
855         ///Parse a mapping value in a flow context.
856         Event parseFlowValue(TokenID checkId, in Event delegate() nextState)
857             @trusted
858         {
859             if(scanner_.checkToken(TokenID.Value))
860             {
861                 const token = scanner_.getToken();
862                 if(!scanner_.checkToken(TokenID.FlowEntry, checkId))
863                 {
864                     states_ ~= nextState;
865                     return parseFlowNode();
866                 }
867 
868                 state_ = nextState;
869                 return processEmptyScalar(token.endMark);
870             }
871 
872             state_ = nextState;
873             return processEmptyScalar(scanner_.peekToken().startMark);
874         }
875 
876         ///Parse a mapping value in an entry in a flow sequence.
877         Event parseFlowSequenceEntryMappingValue() @safe
878         {
879             return parseFlowValue(TokenID.FlowSequenceEnd,
880                                   &parseFlowSequenceEntryMappingEnd);
881         }
882 
883         ///Parse end of a mapping in a flow sequence entry.
884         Event parseFlowSequenceEntryMappingEnd() @safe
885         {
886             state_ = &parseFlowSequenceEntry!(No.first);
887             const token = scanner_.peekToken();
888             return mappingEndEvent(token.startMark, token.startMark);
889         }
890 
891         /**
892          * flow_mapping  ::= FLOW-MAPPING-START
893          *                   (flow_mapping_entry FLOW-ENTRY)*
894          *                   flow_mapping_entry?
895          *                   FLOW-MAPPING-END
896          * flow_mapping_entry    ::= flow_node | KEY flow_node? (VALUE flow_node?)?
897          */
898 
899         ///Parse a key in a flow mapping.
900         Event parseFlowMappingKey(Flag!"first" first)() @trusted
901         {
902             static if(first){marks_ ~= scanner_.getToken().startMark;}
903 
904             if(!scanner_.checkToken(TokenID.FlowMappingEnd))
905             {
906                 static if(!first)
907                 {
908                     if(scanner_.checkToken(TokenID.FlowEntry))
909                     {
910                         scanner_.getToken();
911                     }
912                     else
913                     {
914                         const token = scanner_.peekToken();
915                         throw new Error("While parsing a flow mapping", marks_.back,
916                                         "expected ',' or '}', but got: " ~
917                                         token.idString, token.startMark);
918                     }
919                 }
920 
921                 if(scanner_.checkToken(TokenID.Key))
922                 {
923                     return parseFlowKey(&parseFlowMappingValue);
924                 }
925 
926                 if(!scanner_.checkToken(TokenID.FlowMappingEnd))
927                 {
928                     states_ ~= &parseFlowMappingEmptyValue;
929                     return parseFlowNode();
930                 }
931             }
932 
933             const token = scanner_.getToken();
934             state_ = popState();
935             popMark();
936             return mappingEndEvent(token.startMark, token.endMark);
937         }
938 
939         ///Parse a value in a flow mapping.
940         Event parseFlowMappingValue()  @safe
941         {
942             return parseFlowValue(TokenID.FlowMappingEnd, &parseFlowMappingKey!(No.first));
943         }
944 
945         ///Parse an empty value in a flow mapping.
946         Event parseFlowMappingEmptyValue() @safe
947         {
948             state_ = &parseFlowMappingKey!(No.first);
949             return processEmptyScalar(scanner_.peekToken().startMark);
950         }
951 
952         ///Return an empty scalar.
953         Event processEmptyScalar(const Mark mark) @safe pure nothrow const @nogc
954         {
955             return scalarEvent(mark, mark, Anchor(), Tag(), tuple(true, false), "");
956         }
957 }