//
// Copyright (c) 2011-2019 Canonical Ltd
// Copyright (c) 2006-2010 Kirill Simonov
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
// of the Software, and to permit persons to whom the Software is furnished to do
// so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package yaml

import (
	"bytes"
)

// The parser implements the following grammar:
//
// stream               ::= STREAM-START implicit_document? explicit_document* STREAM-END
// implicit_document    ::= block_node DOCUMENT-END*
// explicit_document    ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
// block_node_or_indentless_sequence    ::=
//                          ALIAS
//                          | properties (block_content | indentless_block_sequence)?
//                          | block_content
//                          | indentless_block_sequence
// block_node           ::= ALIAS
//                          | properties block_content?
//                          | block_content
// flow_node            ::= ALIAS
//                          | properties flow_content?
//                          | flow_content
// properties           ::= TAG ANCHOR? | ANCHOR TAG?
// block_content        ::= block_collection | flow_collection | SCALAR
// flow_content         ::= flow_collection | SCALAR
// block_collection     ::= block_sequence | block_mapping
// flow_collection      ::= flow_sequence | flow_mapping
// block_sequence       ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
// indentless_sequence  ::= (BLOCK-ENTRY block_node?)+
// block_mapping        ::= BLOCK-MAPPING_START
//                          ((KEY block_node_or_indentless_sequence?)?
//                          (VALUE block_node_or_indentless_sequence?)?)*
//                          BLOCK-END
// flow_sequence        ::= FLOW-SEQUENCE-START
//                          (flow_sequence_entry FLOW-ENTRY)*
//                          flow_sequence_entry?
//                          FLOW-SEQUENCE-END
// flow_sequence_entry  ::= flow_node | KEY flow_node? (VALUE flow_node?)?
// flow_mapping         ::= FLOW-MAPPING-START
//                          (flow_mapping_entry FLOW-ENTRY)*
//                          flow_mapping_entry?
//                          FLOW-MAPPING-END
// flow_mapping_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
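//
// As a point of orientation only (this example is not part of the original
// source), a stream such as
//
//	%YAML 1.1
//	---
//	- item
//	...
//
// scans to STREAM-START, a DIRECTIVE, DOCUMENT-START, a block_node (the
// one-item sequence), DOCUMENT-END, and STREAM-END, matching the
// explicit_document production above.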

// Peek the next token in the token queue.
func peek_token(parser *yaml_parser_t) *yaml_token_t {
	if parser.token_available || yaml_parser_fetch_more_tokens(parser) {
		token := &parser.tokens[parser.tokens_head]
		yaml_parser_unfold_comments(parser, token)
		return token
	}
	return nil
}

// yaml_parser_unfold_comments walks through the comments queue and joins all
// comments behind the position of the provided token into the respective
// top-level comment slices in the parser.
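//
// Illustration (not from the original source): for a scalar scanned from
//
//	# head comment
//	key: value # line comment
//	# foot comment
//
// the scanner queues the surrounding comments and this function folds them
// into parser.head_comment, parser.line_comment, and parser.foot_comment so
// they can be attached to the event about to be produced.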
func yaml_parser_unfold_comments(parser *yaml_parser_t, token *yaml_token_t) {
	for parser.comments_head < len(parser.comments) && token.start_mark.index >= parser.comments[parser.comments_head].token_mark.index {
		comment := &parser.comments[parser.comments_head]
		if len(comment.head) > 0 {
			if token.typ == yaml_BLOCK_END_TOKEN {
				// No heads on ends, so keep comment.head for a follow up token.
				break
			}
			if len(parser.head_comment) > 0 {
				parser.head_comment = append(parser.head_comment, '\n')
			}
			parser.head_comment = append(parser.head_comment, comment.head...)
		}
		if len(comment.foot) > 0 {
			if len(parser.foot_comment) > 0 {
				parser.foot_comment = append(parser.foot_comment, '\n')
			}
			parser.foot_comment = append(parser.foot_comment, comment.foot...)
		}
		if len(comment.line) > 0 {
			if len(parser.line_comment) > 0 {
				parser.line_comment = append(parser.line_comment, '\n')
			}
			parser.line_comment = append(parser.line_comment, comment.line...)
		}
		*comment = yaml_comment_t{}
		parser.comments_head++
	}
}

// Remove the next token from the queue (must be called after peek_token).
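//
// Throughout this file the two functions are used together in the following
// pattern (shown for illustration only):
//
//	token := peek_token(parser)
//	if token == nil {
//		return false
//	}
//	// ... inspect token ...
//	skip_token(parser)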
func skip_token(parser *yaml_parser_t) {
	parser.token_available = false
	parser.tokens_parsed++
	parser.stream_end_produced = parser.tokens[parser.tokens_head].typ == yaml_STREAM_END_TOKEN
	parser.tokens_head++
}

// Get the next event.
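//
// A minimal event loop over this function might look like the sketch below;
// it is illustrative only and assumes the parser has already been initialized
// and given its input:
//
//	var event yaml_event_t
//	for {
//		if !yaml_parser_parse(&parser, &event) {
//			// Inspect parser.problem and parser.problem_mark on failure.
//			break
//		}
//		if event.typ == yaml_STREAM_END_EVENT {
//			break
//		}
//		// ... handle the event ...
//	}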
func yaml_parser_parse(parser *yaml_parser_t, event *yaml_event_t) bool {
	// Erase the event object.
	*event = yaml_event_t{}

	// No events after the end of the stream or error.
	if parser.stream_end_produced || parser.error != yaml_NO_ERROR || parser.state == yaml_PARSE_END_STATE {
		return true
	}

	// Generate the next event.
	return yaml_parser_state_machine(parser, event)
}

// Set parser error.
func yaml_parser_set_parser_error(parser *yaml_parser_t, problem string, problem_mark yaml_mark_t) bool {
	parser.error = yaml_PARSER_ERROR
	parser.problem = problem
	parser.problem_mark = problem_mark
	return false
}

func yaml_parser_set_parser_error_context(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string, problem_mark yaml_mark_t) bool {
	parser.error = yaml_PARSER_ERROR
	parser.context = context
	parser.context_mark = context_mark
	parser.problem = problem
	parser.problem_mark = problem_mark
	return false
}

// State dispatcher.
func yaml_parser_state_machine(parser *yaml_parser_t, event *yaml_event_t) bool {
	//trace("yaml_parser_state_machine", "state:", parser.state.String())

	switch parser.state {
	case yaml_PARSE_STREAM_START_STATE:
		return yaml_parser_parse_stream_start(parser, event)

	case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE:
		return yaml_parser_parse_document_start(parser, event, true)

	case yaml_PARSE_DOCUMENT_START_STATE:
		return yaml_parser_parse_document_start(parser, event, false)

	case yaml_PARSE_DOCUMENT_CONTENT_STATE:
		return yaml_parser_parse_document_content(parser, event)

	case yaml_PARSE_DOCUMENT_END_STATE:
		return yaml_parser_parse_document_end(parser, event)

	case yaml_PARSE_BLOCK_NODE_STATE:
		return yaml_parser_parse_node(parser, event, true, false)

	case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE:
		return yaml_parser_parse_node(parser, event, true, true)

	case yaml_PARSE_FLOW_NODE_STATE:
		return yaml_parser_parse_node(parser, event, false, false)

	case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE:
		return yaml_parser_parse_block_sequence_entry(parser, event, true)

	case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE:
		return yaml_parser_parse_block_sequence_entry(parser, event, false)

	case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE:
		return yaml_parser_parse_indentless_sequence_entry(parser, event)

	case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE:
		return yaml_parser_parse_block_mapping_key(parser, event, true)

	case yaml_PARSE_BLOCK_MAPPING_KEY_STATE:
		return yaml_parser_parse_block_mapping_key(parser, event, false)

	case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE:
		return yaml_parser_parse_block_mapping_value(parser, event)

	case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE:
		return yaml_parser_parse_flow_sequence_entry(parser, event, true)

	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE:
		return yaml_parser_parse_flow_sequence_entry(parser, event, false)

	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE:
		return yaml_parser_parse_flow_sequence_entry_mapping_key(parser, event)

	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE:
		return yaml_parser_parse_flow_sequence_entry_mapping_value(parser, event)

	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE:
		return yaml_parser_parse_flow_sequence_entry_mapping_end(parser, event)

	case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE:
		return yaml_parser_parse_flow_mapping_key(parser, event, true)

	case yaml_PARSE_FLOW_MAPPING_KEY_STATE:
		return yaml_parser_parse_flow_mapping_key(parser, event, false)

	case yaml_PARSE_FLOW_MAPPING_VALUE_STATE:
		return yaml_parser_parse_flow_mapping_value(parser, event, false)

	case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE:
		return yaml_parser_parse_flow_mapping_value(parser, event, true)

	default:
		panic("invalid parser state")
	}
}

// Parse the production:
// stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
//            ************
func yaml_parser_parse_stream_start(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if token.typ != yaml_STREAM_START_TOKEN {
		return yaml_parser_set_parser_error(parser, "did not find expected <stream-start>", token.start_mark)
	}
	parser.state = yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE
	*event = yaml_event_t{
		typ:        yaml_STREAM_START_EVENT,
		start_mark: token.start_mark,
		end_mark:   token.end_mark,
		encoding:   token.encoding,
	}
	skip_token(parser)
	return true
}

// Parse the productions:
// implicit_document ::= block_node DOCUMENT-END*
//                       *
// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
//                       *************************
func yaml_parser_parse_document_start(parser *yaml_parser_t, event *yaml_event_t, implicit bool) bool {

	token := peek_token(parser)
	if token == nil {
		return false
	}

	// Parse extra document end indicators.
	if !implicit {
		for token.typ == yaml_DOCUMENT_END_TOKEN {
			skip_token(parser)
			token = peek_token(parser)
			if token == nil {
				return false
			}
		}
	}

	if implicit && token.typ != yaml_VERSION_DIRECTIVE_TOKEN &&
		token.typ != yaml_TAG_DIRECTIVE_TOKEN &&
		token.typ != yaml_DOCUMENT_START_TOKEN &&
		token.typ != yaml_STREAM_END_TOKEN {
		// Parse an implicit document.
		if !yaml_parser_process_directives(parser, nil, nil) {
			return false
		}
		parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE)
		parser.state = yaml_PARSE_BLOCK_NODE_STATE

		var head_comment []byte
		if len(parser.head_comment) > 0 {
			// [Go] Scan the header comment backwards, and if an empty line is found, break
			// the header so the part before the last empty line goes into the
			// document header, while the bottom of it goes into a follow up event.
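			//
			// Illustration (not from the original source): for a header such as
			//
			//	# project-wide notes
			//
			//	# notes on the first node
			//
			// the part above the last empty line stays on the DOCUMENT-START
			// event and the remainder is left for the node that follows.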
			for i := len(parser.head_comment) - 1; i > 0; i-- {
				if parser.head_comment[i] == '\n' {
					if i == len(parser.head_comment)-1 {
						head_comment = parser.head_comment[:i]
						parser.head_comment = parser.head_comment[i+1:]
						break
					} else if parser.head_comment[i-1] == '\n' {
						head_comment = parser.head_comment[:i-1]
						parser.head_comment = parser.head_comment[i+1:]
						break
					}
				}
			}
		}

		*event = yaml_event_t{
			typ:        yaml_DOCUMENT_START_EVENT,
			start_mark: token.start_mark,
			end_mark:   token.end_mark,

			head_comment: head_comment,
		}

	} else if token.typ != yaml_STREAM_END_TOKEN {
		// Parse an explicit document.
		var version_directive *yaml_version_directive_t
		var tag_directives []yaml_tag_directive_t
		start_mark := token.start_mark
		if !yaml_parser_process_directives(parser, &version_directive, &tag_directives) {
			return false
		}
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_DOCUMENT_START_TOKEN {
			yaml_parser_set_parser_error(parser,
				"did not find expected <document start>", token.start_mark)
			return false
		}
		parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE)
		parser.state = yaml_PARSE_DOCUMENT_CONTENT_STATE
		end_mark := token.end_mark

		*event = yaml_event_t{
			typ:               yaml_DOCUMENT_START_EVENT,
			start_mark:        start_mark,
			end_mark:          end_mark,
			version_directive: version_directive,
			tag_directives:    tag_directives,
			implicit:          false,
		}
		skip_token(parser)

	} else {
		// Parse the stream end.
		parser.state = yaml_PARSE_END_STATE
		*event = yaml_event_t{
			typ:        yaml_STREAM_END_EVENT,
			start_mark: token.start_mark,
			end_mark:   token.end_mark,
		}
		skip_token(parser)
	}

	return true
}

// Parse the productions:
// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
//                                                 ***********
//
func yaml_parser_parse_document_content(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}

	if token.typ == yaml_VERSION_DIRECTIVE_TOKEN ||
		token.typ == yaml_TAG_DIRECTIVE_TOKEN ||
		token.typ == yaml_DOCUMENT_START_TOKEN ||
		token.typ == yaml_DOCUMENT_END_TOKEN ||
		token.typ == yaml_STREAM_END_TOKEN {
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]
		return yaml_parser_process_empty_scalar(parser, event,
			token.start_mark)
	}
	return yaml_parser_parse_node(parser, event, true, false)
}

// Parse the productions:
// implicit_document ::= block_node DOCUMENT-END*
//                                  *************
// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
//
func yaml_parser_parse_document_end(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}

	start_mark := token.start_mark
	end_mark := token.start_mark

	implicit := true
	if token.typ == yaml_DOCUMENT_END_TOKEN {
		end_mark = token.end_mark
		skip_token(parser)
		implicit = false
	}

	parser.tag_directives = parser.tag_directives[:0]

	parser.state = yaml_PARSE_DOCUMENT_START_STATE
	*event = yaml_event_t{
		typ:        yaml_DOCUMENT_END_EVENT,
		start_mark: start_mark,
		end_mark:   end_mark,
		implicit:   implicit,
	}
	yaml_parser_set_event_comments(parser, event)
	if len(event.head_comment) > 0 && len(event.foot_comment) == 0 {
		event.foot_comment = event.head_comment
		event.head_comment = nil
	}
	return true
}

func yaml_parser_set_event_comments(parser *yaml_parser_t, event *yaml_event_t) {
	event.head_comment = parser.head_comment
	event.line_comment = parser.line_comment
	event.foot_comment = parser.foot_comment
	parser.head_comment = nil
	parser.line_comment = nil
	parser.foot_comment = nil
	parser.tail_comment = nil
	parser.stem_comment = nil
}

// Parse the productions:
// block_node_or_indentless_sequence    ::=
//                          ALIAS
//                          *****
//                          | properties (block_content | indentless_block_sequence)?
//                            **********  *
//                          | block_content | indentless_block_sequence
//                            *
// block_node           ::= ALIAS
//                          *****
//                          | properties block_content?
//                            **********  *
//                          | block_content
//                            *
// flow_node            ::= ALIAS
//                          *****
//                          | properties flow_content?
//                            **********  *
//                          | flow_content
//                            *
// properties           ::= TAG ANCHOR? | ANCHOR TAG?
//                          *************************
// block_content        ::= block_collection | flow_collection | SCALAR
//                                                               ******
// flow_content         ::= flow_collection | SCALAR
//                                            ******
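//
// Illustrative only (not part of the original source): in
//
//	key: &anchor !!str value
//
// the properties production covers "&anchor !!str", and the SCALAR token
// "value" supplies the block_content.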
func yaml_parser_parse_node(parser *yaml_parser_t, event *yaml_event_t, block, indentless_sequence bool) bool {
	//defer trace("yaml_parser_parse_node", "block:", block, "indentless_sequence:", indentless_sequence)()

	token := peek_token(parser)
	if token == nil {
		return false
	}

	if token.typ == yaml_ALIAS_TOKEN {
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]
		*event = yaml_event_t{
			typ:        yaml_ALIAS_EVENT,
			start_mark: token.start_mark,
			end_mark:   token.end_mark,
			anchor:     token.value,
		}
		yaml_parser_set_event_comments(parser, event)
		skip_token(parser)
		return true
	}

	start_mark := token.start_mark
	end_mark := token.start_mark

	var tag_token bool
	var tag_handle, tag_suffix, anchor []byte
	var tag_mark yaml_mark_t
	if token.typ == yaml_ANCHOR_TOKEN {
		anchor = token.value
		start_mark = token.start_mark
		end_mark = token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ == yaml_TAG_TOKEN {
			tag_token = true
			tag_handle = token.value
			tag_suffix = token.suffix
			tag_mark = token.start_mark
			end_mark = token.end_mark
			skip_token(parser)
			token = peek_token(parser)
			if token == nil {
				return false
			}
		}
	} else if token.typ == yaml_TAG_TOKEN {
		tag_token = true
		tag_handle = token.value
		tag_suffix = token.suffix
		start_mark = token.start_mark
		tag_mark = token.start_mark
		end_mark = token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ == yaml_ANCHOR_TOKEN {
			anchor = token.value
			end_mark = token.end_mark
			skip_token(parser)
			token = peek_token(parser)
			if token == nil {
				return false
			}
		}
	}

	var tag []byte
	if tag_token {
		if len(tag_handle) == 0 {
			tag = tag_suffix
			tag_suffix = nil
		} else {
			for i := range parser.tag_directives {
				if bytes.Equal(parser.tag_directives[i].handle, tag_handle) {
					tag = append([]byte(nil), parser.tag_directives[i].prefix...)
					tag = append(tag, tag_suffix...)
					break
				}
			}
			if len(tag) == 0 {
				yaml_parser_set_parser_error_context(parser,
					"while parsing a node", start_mark,
					"found undefined tag handle", tag_mark)
				return false
			}
		}
	}

	implicit := len(tag) == 0
	if indentless_sequence && token.typ == yaml_BLOCK_ENTRY_TOKEN {
		end_mark = token.end_mark
		parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE
		*event = yaml_event_t{
			typ:        yaml_SEQUENCE_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE),
		}
		return true
	}
	if token.typ == yaml_SCALAR_TOKEN {
		var plain_implicit, quoted_implicit bool
		end_mark = token.end_mark
		if (len(tag) == 0 && token.style == yaml_PLAIN_SCALAR_STYLE) || (len(tag) == 1 && tag[0] == '!') {
			plain_implicit = true
		} else if len(tag) == 0 {
			quoted_implicit = true
		}
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]

		*event = yaml_event_t{
			typ:             yaml_SCALAR_EVENT,
			start_mark:      start_mark,
			end_mark:        end_mark,
			anchor:          anchor,
			tag:             tag,
			value:           token.value,
			implicit:        plain_implicit,
			quoted_implicit: quoted_implicit,
			style:           yaml_style_t(token.style),
		}
		yaml_parser_set_event_comments(parser, event)
		skip_token(parser)
		return true
	}
	if token.typ == yaml_FLOW_SEQUENCE_START_TOKEN {
		// [Go] Some of the events below can be merged as they differ only on style.
		end_mark = token.end_mark
		parser.state = yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE
		*event = yaml_event_t{
			typ:        yaml_SEQUENCE_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_FLOW_SEQUENCE_STYLE),
		}
		yaml_parser_set_event_comments(parser, event)
		return true
	}
	if token.typ == yaml_FLOW_MAPPING_START_TOKEN {
		end_mark = token.end_mark
		parser.state = yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE
		*event = yaml_event_t{
			typ:        yaml_MAPPING_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_FLOW_MAPPING_STYLE),
		}
		yaml_parser_set_event_comments(parser, event)
		return true
	}
	if block && token.typ == yaml_BLOCK_SEQUENCE_START_TOKEN {
		end_mark = token.end_mark
		parser.state = yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE
		*event = yaml_event_t{
			typ:        yaml_SEQUENCE_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE),
		}
		if parser.stem_comment != nil {
			event.head_comment = parser.stem_comment
			parser.stem_comment = nil
		}
		return true
	}
	if block && token.typ == yaml_BLOCK_MAPPING_START_TOKEN {
		end_mark = token.end_mark
		parser.state = yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE
		*event = yaml_event_t{
			typ:        yaml_MAPPING_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_BLOCK_MAPPING_STYLE),
		}
		if parser.stem_comment != nil {
			event.head_comment = parser.stem_comment
			parser.stem_comment = nil
		}
		return true
	}
	if len(anchor) > 0 || len(tag) > 0 {
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]

		*event = yaml_event_t{
			typ:             yaml_SCALAR_EVENT,
			start_mark:      start_mark,
			end_mark:        end_mark,
			anchor:          anchor,
			tag:             tag,
			implicit:        implicit,
			quoted_implicit: false,
			style:           yaml_style_t(yaml_PLAIN_SCALAR_STYLE),
		}
		return true
	}

	context := "while parsing a flow node"
	if block {
		context = "while parsing a block node"
	}
	yaml_parser_set_parser_error_context(parser, context, start_mark,
		"did not find expected node content", token.start_mark)
	return false
}

// Parse the productions:
// block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
//                    ********************  ***********  *            *********
//
func yaml_parser_parse_block_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
	if first {
		token := peek_token(parser)
		if token == nil {
			return false
		}
		parser.marks = append(parser.marks, token.start_mark)
		skip_token(parser)
	}

	token := peek_token(parser)
	if token == nil {
		return false
	}

	if token.typ == yaml_BLOCK_ENTRY_TOKEN {
		mark := token.end_mark
		prior_head_len := len(parser.head_comment)
		skip_token(parser)
		yaml_parser_split_stem_comment(parser, prior_head_len)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_BLOCK_ENTRY_TOKEN && token.typ != yaml_BLOCK_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE)
			return yaml_parser_parse_node(parser, event, true, false)
		} else {
			parser.state = yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE
			return yaml_parser_process_empty_scalar(parser, event, mark)
		}
	}
	if token.typ == yaml_BLOCK_END_TOKEN {
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]
		parser.marks = parser.marks[:len(parser.marks)-1]

		*event = yaml_event_t{
			typ:        yaml_SEQUENCE_END_EVENT,
			start_mark: token.start_mark,
			end_mark:   token.end_mark,
		}

		skip_token(parser)
		return true
	}

	context_mark := parser.marks[len(parser.marks)-1]
	parser.marks = parser.marks[:len(parser.marks)-1]
	return yaml_parser_set_parser_error_context(parser,
		"while parsing a block collection", context_mark,
		"did not find expected '-' indicator", token.start_mark)
}

// Parse the productions:
// indentless_sequence ::= (BLOCK-ENTRY block_node?)+
//                          ***********  *
func yaml_parser_parse_indentless_sequence_entry(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}

	if token.typ == yaml_BLOCK_ENTRY_TOKEN {
		mark := token.end_mark
		prior_head_len := len(parser.head_comment)
		skip_token(parser)
		yaml_parser_split_stem_comment(parser, prior_head_len)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_BLOCK_ENTRY_TOKEN &&
			token.typ != yaml_KEY_TOKEN &&
			token.typ != yaml_VALUE_TOKEN &&
			token.typ != yaml_BLOCK_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE)
			return yaml_parser_parse_node(parser, event, true, false)
		}
		parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE
		return yaml_parser_process_empty_scalar(parser, event, mark)
	}
	parser.state = parser.states[len(parser.states)-1]
	parser.states = parser.states[:len(parser.states)-1]

	*event = yaml_event_t{
		typ:        yaml_SEQUENCE_END_EVENT,
		start_mark: token.start_mark,
		end_mark:   token.start_mark, // [Go] Shouldn't this be token.end_mark?
	}
	return true
}

// Split stem comment from head comment.
//
// When a sequence or map is found under a sequence entry, the former head comment
// is assigned to the underlying sequence or map as a whole, not the individual
// sequence or map entry as would be expected otherwise. To handle this case the
// previous head comment is moved aside as the stem comment.
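//
// Illustrative only (not from the original source): in
//
//	# describes the nested mapping
//	- key: value
//
// the comment is carried as the stem comment and becomes the head comment of
// the inner mapping as a whole rather than of its first key.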
func yaml_parser_split_stem_comment(parser *yaml_parser_t, stem_len int) {
	if stem_len == 0 {
		return
	}

	token := peek_token(parser)
	if token == nil || token.typ != yaml_BLOCK_SEQUENCE_START_TOKEN && token.typ != yaml_BLOCK_MAPPING_START_TOKEN {
		return
	}

	parser.stem_comment = parser.head_comment[:stem_len]
	if len(parser.head_comment) == stem_len {
		parser.head_comment = nil
	} else {
		// Copy suffix to prevent very strange bugs if someone ever appends
		// further bytes to the prefix in the stem_comment slice above.
		parser.head_comment = append([]byte(nil), parser.head_comment[stem_len+1:]...)
	}
}

// Parse the productions:
// block_mapping ::= BLOCK-MAPPING_START
//                   *******************
//                   ((KEY block_node_or_indentless_sequence?)?
//                     ***  *
//                   (VALUE block_node_or_indentless_sequence?)?)*
//
//                   BLOCK-END
//                   *********
//
func yaml_parser_parse_block_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
	if first {
		token := peek_token(parser)
		if token == nil {
			return false
		}
		parser.marks = append(parser.marks, token.start_mark)
		skip_token(parser)
	}

	token := peek_token(parser)
	if token == nil {
		return false
	}

	// [Go] A tail comment was left from the prior mapping value processed. Emit an event
	// as it needs to be processed with that value and not the following key.
	if len(parser.tail_comment) > 0 {
		*event = yaml_event_t{
			typ:          yaml_TAIL_COMMENT_EVENT,
			start_mark:   token.start_mark,
			end_mark:     token.end_mark,
			foot_comment: parser.tail_comment,
		}
		parser.tail_comment = nil
		return true
	}

	if token.typ == yaml_KEY_TOKEN {
		mark := token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_KEY_TOKEN &&
			token.typ != yaml_VALUE_TOKEN &&
			token.typ != yaml_BLOCK_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_VALUE_STATE)
			return yaml_parser_parse_node(parser, event, true, true)
		} else {
			parser.state = yaml_PARSE_BLOCK_MAPPING_VALUE_STATE
			return yaml_parser_process_empty_scalar(parser, event, mark)
		}
	} else if token.typ == yaml_BLOCK_END_TOKEN {
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]
		parser.marks = parser.marks[:len(parser.marks)-1]
		*event = yaml_event_t{
			typ:        yaml_MAPPING_END_EVENT,
			start_mark: token.start_mark,
			end_mark:   token.end_mark,
		}
		yaml_parser_set_event_comments(parser, event)
		skip_token(parser)
		return true
	}

	context_mark := parser.marks[len(parser.marks)-1]
	parser.marks = parser.marks[:len(parser.marks)-1]
	return yaml_parser_set_parser_error_context(parser,
		"while parsing a block mapping", context_mark,
		"did not find expected key", token.start_mark)
}

// Parse the productions:
// block_mapping ::= BLOCK-MAPPING_START
//
//                   ((KEY block_node_or_indentless_sequence?)?
//
//                   (VALUE block_node_or_indentless_sequence?)?)*
//                    *****  *
//                   BLOCK-END
//
//
func yaml_parser_parse_block_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if token.typ == yaml_VALUE_TOKEN {
		mark := token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_KEY_TOKEN &&
			token.typ != yaml_VALUE_TOKEN &&
			token.typ != yaml_BLOCK_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_KEY_STATE)
			return yaml_parser_parse_node(parser, event, true, true)
		}
		parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE
		return yaml_parser_process_empty_scalar(parser, event, mark)
	}
	parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE
	return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
}

// Parse the productions:
// flow_sequence ::= FLOW-SEQUENCE-START
//                   *******************
//                   (flow_sequence_entry FLOW-ENTRY)*
//                    *                   **********
//                   flow_sequence_entry?
//                   *
//                   FLOW-SEQUENCE-END
//                   *****************
// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//                         *
//
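// For instance (illustrative, not from the original source), the flow sequence
//
//	[one, two: three, four]
//
// yields three entries; the middle one is the implicit single-pair mapping
// handled through the KEY branch below.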
func yaml_parser_parse_flow_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
	if first {
		token := peek_token(parser)
		if token == nil {
			return false
		}
		parser.marks = append(parser.marks, token.start_mark)
		skip_token(parser)
	}
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
		if !first {
			if token.typ == yaml_FLOW_ENTRY_TOKEN {
				skip_token(parser)
				token = peek_token(parser)
				if token == nil {
					return false
				}
			} else {
				context_mark := parser.marks[len(parser.marks)-1]
				parser.marks = parser.marks[:len(parser.marks)-1]
				return yaml_parser_set_parser_error_context(parser,
					"while parsing a flow sequence", context_mark,
					"did not find expected ',' or ']'", token.start_mark)
			}
		}

		if token.typ == yaml_KEY_TOKEN {
			parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE
			*event = yaml_event_t{
				typ:        yaml_MAPPING_START_EVENT,
				start_mark: token.start_mark,
				end_mark:   token.end_mark,
				implicit:   true,
				style:      yaml_style_t(yaml_FLOW_MAPPING_STYLE),
			}
			skip_token(parser)
			return true
		} else if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE)
			return yaml_parser_parse_node(parser, event, false, false)
		}
	}

	parser.state = parser.states[len(parser.states)-1]
	parser.states = parser.states[:len(parser.states)-1]
	parser.marks = parser.marks[:len(parser.marks)-1]

	*event = yaml_event_t{
		typ:        yaml_SEQUENCE_END_EVENT,
		start_mark: token.start_mark,
		end_mark:   token.end_mark,
	}
	yaml_parser_set_event_comments(parser, event)

	skip_token(parser)
	return true
}

//
// Parse the productions:
// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//                                     *** *
//
func yaml_parser_parse_flow_sequence_entry_mapping_key(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if token.typ != yaml_VALUE_TOKEN &&
		token.typ != yaml_FLOW_ENTRY_TOKEN &&
		token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
		parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE)
		return yaml_parser_parse_node(parser, event, false, false)
	}
	mark := token.end_mark
	skip_token(parser)
	parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE
	return yaml_parser_process_empty_scalar(parser, event, mark)
}

// Parse the productions:
// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//                                                     ***** *
//
func yaml_parser_parse_flow_sequence_entry_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if token.typ == yaml_VALUE_TOKEN {
		skip_token(parser)
		token := peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE)
			return yaml_parser_parse_node(parser, event, false, false)
		}
	}
	parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE
	return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
}

// Parse the productions:
// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//                                                                     *
//
func yaml_parser_parse_flow_sequence_entry_mapping_end(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE
	*event = yaml_event_t{
		typ:        yaml_MAPPING_END_EVENT,
		start_mark: token.start_mark,
		end_mark:   token.start_mark, // [Go] Shouldn't this be end_mark?
	}
	return true
}

// Parse the productions:
// flow_mapping ::= FLOW-MAPPING-START
//                  ******************
//                  (flow_mapping_entry FLOW-ENTRY)*
//                   *                  **********
//                  flow_mapping_entry?
//                  ******************
//                  FLOW-MAPPING-END
//                  ****************
// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//                        *           *** *
//
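// For instance (illustrative, not from the original source), in the flow mapping
//
//	{a: 1, b}
//
// the entry "b" carries no VALUE indicator, so its value is later produced by
// the empty-value state as an empty scalar.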
func yaml_parser_parse_flow_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
	if first {
		token := peek_token(parser)
		parser.marks = append(parser.marks, token.start_mark)
		skip_token(parser)
	}

	token := peek_token(parser)
	if token == nil {
		return false
	}

	if token.typ != yaml_FLOW_MAPPING_END_TOKEN {
		if !first {
			if token.typ == yaml_FLOW_ENTRY_TOKEN {
				skip_token(parser)
				token = peek_token(parser)
				if token == nil {
					return false
				}
			} else {
				context_mark := parser.marks[len(parser.marks)-1]
				parser.marks = parser.marks[:len(parser.marks)-1]
				return yaml_parser_set_parser_error_context(parser,
					"while parsing a flow mapping", context_mark,
					"did not find expected ',' or '}'", token.start_mark)
			}
		}

		if token.typ == yaml_KEY_TOKEN {
			skip_token(parser)
			token = peek_token(parser)
			if token == nil {
				return false
			}
			if token.typ != yaml_VALUE_TOKEN &&
				token.typ != yaml_FLOW_ENTRY_TOKEN &&
				token.typ != yaml_FLOW_MAPPING_END_TOKEN {
				parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_VALUE_STATE)
				return yaml_parser_parse_node(parser, event, false, false)
			} else {
				parser.state = yaml_PARSE_FLOW_MAPPING_VALUE_STATE
				return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
			}
		} else if token.typ != yaml_FLOW_MAPPING_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE)
			return yaml_parser_parse_node(parser, event, false, false)
		}
	}

	parser.state = parser.states[len(parser.states)-1]
	parser.states = parser.states[:len(parser.states)-1]
	parser.marks = parser.marks[:len(parser.marks)-1]
	*event = yaml_event_t{
		typ:        yaml_MAPPING_END_EVENT,
		start_mark: token.start_mark,
		end_mark:   token.end_mark,
	}
	yaml_parser_set_event_comments(parser, event)
	skip_token(parser)
	return true
}

// Parse the productions:
// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//                        *                           ***** *
//
func yaml_parser_parse_flow_mapping_value(parser *yaml_parser_t, event *yaml_event_t, empty bool) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if empty {
		parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE
		return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
	}
	if token.typ == yaml_VALUE_TOKEN {
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_MAPPING_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_KEY_STATE)
			return yaml_parser_parse_node(parser, event, false, false)
		}
	}
	parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE
	return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
}

// Generate an empty scalar event.
func yaml_parser_process_empty_scalar(parser *yaml_parser_t, event *yaml_event_t, mark yaml_mark_t) bool {
	*event = yaml_event_t{
		typ:        yaml_SCALAR_EVENT,
		start_mark: mark,
		end_mark:   mark,
		value:      nil, // Empty
		implicit:   true,
		style:      yaml_style_t(yaml_PLAIN_SCALAR_STYLE),
	}
	return true
}

var default_tag_directives = []yaml_tag_directive_t{
	{[]byte("!"), []byte("!")},
	{[]byte("!!"), []byte("tag:yaml.org,2002:")},
}

// Parse directives.
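//
// As an illustration (not part of the original source), this is the code that
// handles a document prologue such as
//
//	%YAML 1.1
//	%TAG !e! tag:example.com,2024:
//	---
//
// recording the version and the !e! handle before the document content starts.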
func yaml_parser_process_directives(parser *yaml_parser_t,
	version_directive_ref **yaml_version_directive_t,
	tag_directives_ref *[]yaml_tag_directive_t) bool {

	var version_directive *yaml_version_directive_t
	var tag_directives []yaml_tag_directive_t

	token := peek_token(parser)
	if token == nil {
		return false
	}

	for token.typ == yaml_VERSION_DIRECTIVE_TOKEN || token.typ == yaml_TAG_DIRECTIVE_TOKEN {
		if token.typ == yaml_VERSION_DIRECTIVE_TOKEN {
			if version_directive != nil {
				yaml_parser_set_parser_error(parser,
					"found duplicate %YAML directive", token.start_mark)
				return false
			}
			if token.major != 1 || token.minor != 1 {
				yaml_parser_set_parser_error(parser,
					"found incompatible YAML document", token.start_mark)
				return false
			}
			version_directive = &yaml_version_directive_t{
				major: token.major,
				minor: token.minor,
			}
		} else if token.typ == yaml_TAG_DIRECTIVE_TOKEN {
			value := yaml_tag_directive_t{
				handle: token.value,
				prefix: token.prefix,
			}
			if !yaml_parser_append_tag_directive(parser, value, false, token.start_mark) {
				return false
			}
			tag_directives = append(tag_directives, value)
		}

		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
	}

	for i := range default_tag_directives {
		if !yaml_parser_append_tag_directive(parser, default_tag_directives[i], true, token.start_mark) {
			return false
		}
	}

	if version_directive_ref != nil {
		*version_directive_ref = version_directive
	}
	if tag_directives_ref != nil {
		*tag_directives_ref = tag_directives
	}
	return true
}

// Append a tag directive to the directives stack.
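//
// Duplicate handles within a single document are rejected here; for example
// (illustrative only), the pair of directives below would trigger the
// "found duplicate %TAG directive" error:
//
//	%TAG !e! tag:example.com,2024:
//	%TAG !e! tag:example.org,2024: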
func yaml_parser_append_tag_directive(parser *yaml_parser_t, value yaml_tag_directive_t, allow_duplicates bool, mark yaml_mark_t) bool {
	for i := range parser.tag_directives {
		if bytes.Equal(value.handle, parser.tag_directives[i].handle) {
			if allow_duplicates {
				return true
			}
			return yaml_parser_set_parser_error(parser, "found duplicate %TAG directive", mark)
		}
	}

	// [Go] I suspect the copy is unnecessary. This was likely done
	// because there was no way to track ownership of the data.
	value_copy := yaml_tag_directive_t{
		handle: make([]byte, len(value.handle)),
		prefix: make([]byte, len(value.prefix)),
	}
	copy(value_copy.handle, value.handle)
	copy(value_copy.prefix, value.prefix)
	parser.tag_directives = append(parser.tag_directives, value_copy)
	return true
}