//
// Copyright (c) 2011-2019 Canonical Ltd
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package yaml

import (
	"encoding"
	"encoding/base64"
	"fmt"
	"io"
	"math"
	"reflect"
	"strconv"
	"time"
)

// ----------------------------------------------------------------------------
// Parser, produces a node tree out of a libyaml event stream.

type parser struct {
	parser   yaml_parser_t
	event    yaml_event_t
	doc      *Node
	anchors  map[string]*Node
	doneInit bool
	textless bool
}

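// newParser creates a parser that reads its input from the given byte slice.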
func newParser(b []byte) *parser {
	p := parser{}
	if !yaml_parser_initialize(&p.parser) {
		panic("failed to initialize YAML parser")
	}
	if len(b) == 0 {
		b = []byte{'\n'}
	}
	yaml_parser_set_input_string(&p.parser, b)
	return &p
}

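// newParserFromReader creates a parser that reads its input from an io.Reader.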
func newParserFromReader(r io.Reader) *parser {
	p := parser{}
	if !yaml_parser_initialize(&p.parser) {
		panic("failed to initialize YAML parser")
	}
	yaml_parser_set_input_reader(&p.parser, r)
	return &p
}

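// init consumes the stream start event once, preparing the parser for document parsing.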
func (p *parser) init() {
	if p.doneInit {
		return
	}
	p.anchors = make(map[string]*Node)
	p.expect(yaml_STREAM_START_EVENT)
	p.doneInit = true
}

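// destroy releases the resources held by the parser and any pending event.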
func (p *parser) destroy() {
	if p.event.typ != yaml_NO_EVENT {
		yaml_event_delete(&p.event)
	}
	yaml_parser_delete(&p.parser)
}

// expect consumes an event from the event stream and
// checks that it's of the expected type.
func (p *parser) expect(e yaml_event_type_t) {
	if p.event.typ == yaml_NO_EVENT {
		if !yaml_parser_parse(&p.parser, &p.event) {
			p.fail()
		}
	}
	if p.event.typ == yaml_STREAM_END_EVENT {
		failf("attempted to go past the end of stream; corrupted value?")
	}
	if p.event.typ != e {
		p.parser.problem = fmt.Sprintf("expected %s event but got %s", e, p.event.typ)
		p.fail()
	}
	yaml_event_delete(&p.event)
	p.event.typ = yaml_NO_EVENT
}

// peek peeks at the next event in the event stream,
// puts the results into p.event and returns the event type.
func (p *parser) peek() yaml_event_type_t {
	if p.event.typ != yaml_NO_EVENT {
		return p.event.typ
	}
	// It's a curious choice by the underlying API to generally return a
	// positive result on success, but in this case to return true in an
	// error scenario. This was the source of bugs in the past (issue #666).
	if !yaml_parser_parse(&p.parser, &p.event) || p.parser.error != yaml_NO_ERROR {
		p.fail()
	}
	return p.event.typ
}

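// fail reports the current parser problem via failf, prefixing it with the
// line it was found on when that is known.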
func (p *parser) fail() {
	var where string
	var line int
	if p.parser.context_mark.line != 0 {
		line = p.parser.context_mark.line
		// Scanner errors don't advance the line before returning the error.
		if p.parser.error == yaml_SCANNER_ERROR {
			line++
		}
	} else if p.parser.problem_mark.line != 0 {
		line = p.parser.problem_mark.line
		// Scanner errors don't advance the line before returning the error.
		if p.parser.error == yaml_SCANNER_ERROR {
			line++
		}
	}
	if line != 0 {
		where = "line " + strconv.Itoa(line) + ": "
	}
	var msg string
	if len(p.parser.problem) > 0 {
		msg = p.parser.problem
	} else {
		msg = "unknown problem parsing YAML content"
	}
	failf("%s%s", where, msg)
}

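// anchor records the node under its anchor name so later aliases can resolve to it.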
func (p *parser) anchor(n *Node, anchor []byte) {
	if anchor != nil {
		n.Anchor = string(anchor)
		p.anchors[n.Anchor] = n
	}
}

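// parse builds the node for the next event in the stream, dispatching on the event type.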
func (p *parser) parse() *Node {
	p.init()
	switch p.peek() {
	case yaml_SCALAR_EVENT:
		return p.scalar()
	case yaml_ALIAS_EVENT:
		return p.alias()
	case yaml_MAPPING_START_EVENT:
		return p.mapping()
	case yaml_SEQUENCE_START_EVENT:
		return p.sequence()
	case yaml_DOCUMENT_START_EVENT:
		return p.document()
	case yaml_STREAM_END_EVENT:
		// Happens when attempting to decode an empty buffer.
		return nil
	case yaml_TAIL_COMMENT_EVENT:
		panic("internal error: unexpected tail comment event (please report)")
	default:
		panic("internal error: attempted to parse unknown event (please report): " + p.event.typ.String())
	}
}

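// node creates a Node of the given kind, resolving its tag and recording the
// event's position and comments unless textless decoding is enabled.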
func (p *parser) node(kind Kind, defaultTag, tag, value string) *Node {
	var style Style
	if tag != "" && tag != "!" {
		tag = shortTag(tag)
		style = TaggedStyle
	} else if defaultTag != "" {
		tag = defaultTag
	} else if kind == ScalarNode {
		tag, _ = resolve("", value)
	}
	n := &Node{
		Kind:  kind,
		Tag:   tag,
		Value: value,
		Style: style,
	}
	if !p.textless {
		n.Line = p.event.start_mark.line + 1
		n.Column = p.event.start_mark.column + 1
		n.HeadComment = string(p.event.head_comment)
		n.LineComment = string(p.event.line_comment)
		n.FootComment = string(p.event.foot_comment)
	}
	return n
}

func (p *parser) parseChild(parent *Node) *Node {
	child := p.parse()
	parent.Content = append(parent.Content, child)
	return child
}

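// document parses a complete document into a DocumentNode with a single child.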
func (p *parser) document() *Node {
	n := p.node(DocumentNode, "", "", "")
	p.doc = n
	p.expect(yaml_DOCUMENT_START_EVENT)
	p.parseChild(n)
	if p.peek() == yaml_DOCUMENT_END_EVENT {
		n.FootComment = string(p.event.foot_comment)
	}
	p.expect(yaml_DOCUMENT_END_EVENT)
	return n
}

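// alias parses an alias event into an AliasNode pointing at the previously anchored node.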
func (p *parser) alias() *Node {
	n := p.node(AliasNode, "", "", string(p.event.anchor))
	n.Alias = p.anchors[n.Value]
	if n.Alias == nil {
		failf("unknown anchor '%s' referenced", n.Value)
	}
	p.expect(yaml_ALIAS_EVENT)
	return n
}

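// scalar parses a scalar event into a ScalarNode, carrying over its style, tag, and anchor.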
func (p *parser) scalar() *Node {
	var parsedStyle = p.event.scalar_style()
	var nodeStyle Style
	switch {
	case parsedStyle&yaml_DOUBLE_QUOTED_SCALAR_STYLE != 0:
		nodeStyle = DoubleQuotedStyle
	case parsedStyle&yaml_SINGLE_QUOTED_SCALAR_STYLE != 0:
		nodeStyle = SingleQuotedStyle
	case parsedStyle&yaml_LITERAL_SCALAR_STYLE != 0:
		nodeStyle = LiteralStyle
	case parsedStyle&yaml_FOLDED_SCALAR_STYLE != 0:
		nodeStyle = FoldedStyle
	}
	var nodeValue = string(p.event.value)
	var nodeTag = string(p.event.tag)
	var defaultTag string
	if nodeStyle == 0 {
		if nodeValue == "<<" {
			defaultTag = mergeTag
		}
	} else {
		defaultTag = strTag
	}
	n := p.node(ScalarNode, defaultTag, nodeTag, nodeValue)
	n.Style |= nodeStyle
	p.anchor(n, p.event.anchor)
	p.expect(yaml_SCALAR_EVENT)
	return n
}

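// sequence parses a sequence into a SequenceNode with one child per item.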
func (p *parser) sequence() *Node {
	n := p.node(SequenceNode, seqTag, string(p.event.tag), "")
	if p.event.sequence_style()&yaml_FLOW_SEQUENCE_STYLE != 0 {
		n.Style |= FlowStyle
	}
	p.anchor(n, p.event.anchor)
	p.expect(yaml_SEQUENCE_START_EVENT)
	for p.peek() != yaml_SEQUENCE_END_EVENT {
		p.parseChild(n)
	}
	n.LineComment = string(p.event.line_comment)
	n.FootComment = string(p.event.foot_comment)
	p.expect(yaml_SEQUENCE_END_EVENT)
	return n
}

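// mapping parses a mapping into a MappingNode with alternating key and value
// children, relocating foot comments to the entries they belong to.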
func (p *parser) mapping() *Node {
	n := p.node(MappingNode, mapTag, string(p.event.tag), "")
	block := true
	if p.event.mapping_style()&yaml_FLOW_MAPPING_STYLE != 0 {
		block = false
		n.Style |= FlowStyle
	}
	p.anchor(n, p.event.anchor)
	p.expect(yaml_MAPPING_START_EVENT)
	for p.peek() != yaml_MAPPING_END_EVENT {
		k := p.parseChild(n)
		if block && k.FootComment != "" {
			// Must be a foot comment for the prior value when being dedented.
			if len(n.Content) > 2 {
				n.Content[len(n.Content)-3].FootComment = k.FootComment
				k.FootComment = ""
			}
		}
		v := p.parseChild(n)
		if k.FootComment == "" && v.FootComment != "" {
			k.FootComment = v.FootComment
			v.FootComment = ""
		}
		if p.peek() == yaml_TAIL_COMMENT_EVENT {
			if k.FootComment == "" {
				k.FootComment = string(p.event.foot_comment)
			}
			p.expect(yaml_TAIL_COMMENT_EVENT)
		}
	}
	n.LineComment = string(p.event.line_comment)
	n.FootComment = string(p.event.foot_comment)
	if n.Style&FlowStyle == 0 && n.FootComment != "" && len(n.Content) > 1 {
		n.Content[len(n.Content)-2].FootComment = n.FootComment
		n.FootComment = ""
	}
	p.expect(yaml_MAPPING_END_EVENT)
	return n
}

// ----------------------------------------------------------------------------
// Decoder, unmarshals a node into a provided value.

type decoder struct {
	doc     *Node
	aliases map[*Node]bool
	terrors []string

	stringMapType  reflect.Type
	generalMapType reflect.Type

	knownFields bool
	uniqueKeys  bool
	decodeCount int
	aliasCount  int
	aliasDepth  int

	mergedFields map[interface{}]bool
}

var (
	nodeType       = reflect.TypeOf(Node{})
	durationType   = reflect.TypeOf(time.Duration(0))
	stringMapType  = reflect.TypeOf(map[string]interface{}{})
	generalMapType = reflect.TypeOf(map[interface{}]interface{}{})
	ifaceType      = generalMapType.Elem()
	timeType       = reflect.TypeOf(time.Time{})
	ptrTimeType    = reflect.TypeOf(&time.Time{})
)

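// newDecoder returns a decoder with the default map types and unique key checking enabled.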
func newDecoder() *decoder {
	d := &decoder{
		stringMapType:  stringMapType,
		generalMapType: generalMapType,
		uniqueKeys:     true,
	}
	d.aliases = make(map[*Node]bool)
	return d
}

func (d *decoder) terror(n *Node, tag string, out reflect.Value) {
	if n.Tag != "" {
		tag = n.Tag
	}
	value := n.Value
	if tag != seqTag && tag != mapTag {
		if len(value) > 10 {
			value = " `" + value[:7] + "...`"
		} else {
			value = " `" + value + "`"
		}
	}
	d.terrors = append(d.terrors, fmt.Sprintf("line %d: cannot unmarshal %s%s into %s", n.Line, shortTag(tag), value, out.Type()))
}

func (d *decoder) callUnmarshaler(n *Node, u Unmarshaler) (good bool) {
	err := u.UnmarshalYAML(n)
	if e, ok := err.(*TypeError); ok {
		d.terrors = append(d.terrors, e.Errors...)
		return false
	}
	if err != nil {
		fail(err)
	}
	return true
}

func (d *decoder) callObsoleteUnmarshaler(n *Node, u obsoleteUnmarshaler) (good bool) {
	terrlen := len(d.terrors)
	err := u.UnmarshalYAML(func(v interface{}) (err error) {
		defer handleErr(&err)
		d.unmarshal(n, reflect.ValueOf(v))
		if len(d.terrors) > terrlen {
			issues := d.terrors[terrlen:]
			d.terrors = d.terrors[:terrlen]
			return &TypeError{issues}
		}
		return nil
	})
	if e, ok := err.(*TypeError); ok {
		d.terrors = append(d.terrors, e.Errors...)
		return false
	}
	if err != nil {
		fail(err)
	}
	return true
}

// d.prepare initializes and dereferences pointers and calls UnmarshalYAML
// if a value is found to implement it.
// It returns the initialized and dereferenced out value, whether
// unmarshalling was already done by UnmarshalYAML, and if so whether
// its types unmarshalled appropriately.
//
// If n holds a null value, prepare returns before doing anything.
func (d *decoder) prepare(n *Node, out reflect.Value) (newout reflect.Value, unmarshaled, good bool) {
	if n.ShortTag() == nullTag {
		return out, false, false
	}
	again := true
	for again {
		again = false
		if out.Kind() == reflect.Ptr {
			if out.IsNil() {
				out.Set(reflect.New(out.Type().Elem()))
			}
			out = out.Elem()
			again = true
		}
		if out.CanAddr() {
			outi := out.Addr().Interface()
			if u, ok := outi.(Unmarshaler); ok {
				good = d.callUnmarshaler(n, u)
				return out, true, good
			}
			if u, ok := outi.(obsoleteUnmarshaler); ok {
				good = d.callObsoleteUnmarshaler(n, u)
				return out, true, good
			}
		}
	}
	return out, false, false
}

func (d *decoder) fieldByIndex(n *Node, v reflect.Value, index []int) (field reflect.Value) {
	if n.ShortTag() == nullTag {
		return reflect.Value{}
	}
	for _, num := range index {
		for {
			if v.Kind() == reflect.Ptr {
				if v.IsNil() {
					v.Set(reflect.New(v.Type().Elem()))
				}
				v = v.Elem()
				continue
			}
			break
		}
		v = v.Field(num)
	}
	return v
}

const (
	// 400,000 decode operations is ~500kb of dense object declarations, or
	// ~5kb of dense object declarations with 10000% alias expansion
	alias_ratio_range_low = 400000

	// 4,000,000 decode operations is ~5MB of dense object declarations, or
	// ~4.5MB of dense object declarations with 10% alias expansion
	alias_ratio_range_high = 4000000

	// alias_ratio_range is the range over which we scale allowed alias ratios
	alias_ratio_range = float64(alias_ratio_range_high - alias_ratio_range_low)
)

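// allowedAliasRatio returns the fraction of decode operations that may be
// driven by alias expansion for a document of the given size.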
func allowedAliasRatio(decodeCount int) float64 {
	switch {
	case decodeCount <= alias_ratio_range_low:
		// allow 99% to come from alias expansion for small-to-medium documents
		return 0.99
	case decodeCount >= alias_ratio_range_high:
		// allow 10% to come from alias expansion for very large documents
		return 0.10
	default:
		// scale smoothly from 99% down to 10% over the range.
		// this maps to 396,000 - 400,000 allowed alias-driven decodes over the range.
		// 400,000 decode operations is ~100MB of allocations in worst-case scenarios (single-item maps).
		return 0.99 - 0.89*(float64(decodeCount-alias_ratio_range_low)/alias_ratio_range)
	}
}

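// unmarshal decodes the node into out, guarding against excessive alias
// expansion and dispatching on the node kind.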
func (d *decoder) unmarshal(n *Node, out reflect.Value) (good bool) {
	d.decodeCount++
	if d.aliasDepth > 0 {
		d.aliasCount++
	}
	if d.aliasCount > 100 && d.decodeCount > 1000 && float64(d.aliasCount)/float64(d.decodeCount) > allowedAliasRatio(d.decodeCount) {
		failf("document contains excessive aliasing")
	}
	if out.Type() == nodeType {
		out.Set(reflect.ValueOf(n).Elem())
		return true
	}
	switch n.Kind {
	case DocumentNode:
		return d.document(n, out)
	case AliasNode:
		return d.alias(n, out)
	}
	out, unmarshaled, good := d.prepare(n, out)
	if unmarshaled {
		return good
	}
	switch n.Kind {
	case ScalarNode:
		good = d.scalar(n, out)
	case MappingNode:
		good = d.mapping(n, out)
	case SequenceNode:
		good = d.sequence(n, out)
	case 0:
		if n.IsZero() {
			return d.null(out)
		}
		fallthrough
	default:
		failf("cannot decode node with unknown kind %d", n.Kind)
	}
	return good
}

func (d *decoder) document(n *Node, out reflect.Value) (good bool) {
	if len(n.Content) == 1 {
		d.doc = n
		d.unmarshal(n.Content[0], out)
		return true
	}
	return false
}

func (d *decoder) alias(n *Node, out reflect.Value) (good bool) {
	if d.aliases[n] {
		// TODO this could actually be allowed in some circumstances.
		failf("anchor '%s' value contains itself", n.Value)
	}
	d.aliases[n] = true
	d.aliasDepth++
	good = d.unmarshal(n.Alias, out)
	d.aliasDepth--
	delete(d.aliases, n)
	return good
}

var zeroValue reflect.Value

func resetMap(out reflect.Value) {
	for _, k := range out.MapKeys() {
		out.SetMapIndex(k, zeroValue)
	}
}

func (d *decoder) null(out reflect.Value) bool {
	if out.CanAddr() {
		switch out.Kind() {
		case reflect.Interface, reflect.Ptr, reflect.Map, reflect.Slice:
			out.Set(reflect.Zero(out.Type()))
			return true
		}
	}
	return false
}

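// scalar decodes a scalar node into out, resolving its tag and converting the
// value to the output kind.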
func (d *decoder) scalar(n *Node, out reflect.Value) bool {
	var tag string
	var resolved interface{}
	if n.indicatedString() {
		tag = strTag
		resolved = n.Value
	} else {
		tag, resolved = resolve(n.Tag, n.Value)
		if tag == binaryTag {
			data, err := base64.StdEncoding.DecodeString(resolved.(string))
			if err != nil {
				failf("!!binary value contains invalid base64 data")
			}
			resolved = string(data)
		}
	}
	if resolved == nil {
		return d.null(out)
	}
	if resolvedv := reflect.ValueOf(resolved); out.Type() == resolvedv.Type() {
		// We've resolved to exactly the type we want, so use that.
		out.Set(resolvedv)
		return true
	}
	// Perhaps we can use the value as a TextUnmarshaler to
	// set its value.
	if out.CanAddr() {
		u, ok := out.Addr().Interface().(encoding.TextUnmarshaler)
		if ok {
			var text []byte
			if tag == binaryTag {
				text = []byte(resolved.(string))
			} else {
				// We let any value be unmarshaled into TextUnmarshaler.
				// That might be more lax than we'd like, but the
				// TextUnmarshaler itself should bowl out any dubious values.
				text = []byte(n.Value)
			}
			err := u.UnmarshalText(text)
			if err != nil {
				fail(err)
			}
			return true
		}
	}
	switch out.Kind() {
	case reflect.String:
		if tag == binaryTag {
			out.SetString(resolved.(string))
			return true
		}
		out.SetString(n.Value)
		return true
	case reflect.Interface:
		out.Set(reflect.ValueOf(resolved))
		return true
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		// This used to work in v2, but it's very unfriendly.
		isDuration := out.Type() == durationType

		switch resolved := resolved.(type) {
		case int:
			if !isDuration && !out.OverflowInt(int64(resolved)) {
				out.SetInt(int64(resolved))
				return true
			}
		case int64:
			if !isDuration && !out.OverflowInt(resolved) {
				out.SetInt(resolved)
				return true
			}
		case uint64:
			if !isDuration && resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
				out.SetInt(int64(resolved))
				return true
			}
		case float64:
			if !isDuration && resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
				out.SetInt(int64(resolved))
				return true
			}
		case string:
			if out.Type() == durationType {
				d, err := time.ParseDuration(resolved)
				if err == nil {
					out.SetInt(int64(d))
					return true
				}
			}
		}
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		switch resolved := resolved.(type) {
		case int:
			if resolved >= 0 && !out.OverflowUint(uint64(resolved)) {
				out.SetUint(uint64(resolved))
				return true
			}
		case int64:
			if resolved >= 0 && !out.OverflowUint(uint64(resolved)) {
				out.SetUint(uint64(resolved))
				return true
			}
		case uint64:
			if !out.OverflowUint(uint64(resolved)) {
				out.SetUint(uint64(resolved))
				return true
			}
		case float64:
			if resolved <= math.MaxUint64 && !out.OverflowUint(uint64(resolved)) {
				out.SetUint(uint64(resolved))
				return true
			}
		}
	case reflect.Bool:
		switch resolved := resolved.(type) {
		case bool:
			out.SetBool(resolved)
			return true
		case string:
			// This offers some compatibility with the 1.1 spec (https://yaml.org/type/bool.html).
			// It only works if explicitly attempting to unmarshal into a typed bool value.
			switch resolved {
			case "y", "Y", "yes", "Yes", "YES", "on", "On", "ON":
				out.SetBool(true)
				return true
			case "n", "N", "no", "No", "NO", "off", "Off", "OFF":
				out.SetBool(false)
				return true
			}
		}
	case reflect.Float32, reflect.Float64:
		switch resolved := resolved.(type) {
		case int:
			out.SetFloat(float64(resolved))
			return true
		case int64:
			out.SetFloat(float64(resolved))
			return true
		case uint64:
			out.SetFloat(float64(resolved))
			return true
		case float64:
			out.SetFloat(resolved)
			return true
		}
	case reflect.Struct:
		if resolvedv := reflect.ValueOf(resolved); out.Type() == resolvedv.Type() {
			out.Set(resolvedv)
			return true
		}
	case reflect.Ptr:
		panic("yaml internal error: please report the issue")
	}
	d.terror(n, tag, out)
	return false
}

func settableValueOf(i interface{}) reflect.Value {
	v := reflect.ValueOf(i)
	sv := reflect.New(v.Type()).Elem()
	sv.Set(v)
	return sv
}

func (d *decoder) sequence(n *Node, out reflect.Value) (good bool) {
	l := len(n.Content)

	var iface reflect.Value
	switch out.Kind() {
	case reflect.Slice:
		out.Set(reflect.MakeSlice(out.Type(), l, l))
	case reflect.Array:
		if l != out.Len() {
			failf("invalid array: want %d elements but got %d", out.Len(), l)
		}
	case reflect.Interface:
		// No type hints. Will have to use a generic sequence.
		iface = out
		out = settableValueOf(make([]interface{}, l))
	default:
		d.terror(n, seqTag, out)
		return false
	}
	et := out.Type().Elem()

	j := 0
	for i := 0; i < l; i++ {
		e := reflect.New(et).Elem()
		if ok := d.unmarshal(n.Content[i], e); ok {
			out.Index(j).Set(e)
			j++
		}
	}
	if out.Kind() != reflect.Array {
		out.Set(out.Slice(0, j))
	}
	if iface.IsValid() {
		iface.Set(out)
	}
	return true
}

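// mapping decodes a mapping node into a struct, map, or interface value,
// enforcing unique keys and handling merge keys.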
func (d *decoder) mapping(n *Node, out reflect.Value) (good bool) {
	l := len(n.Content)
	if d.uniqueKeys {
		nerrs := len(d.terrors)
		for i := 0; i < l; i += 2 {
			ni := n.Content[i]
			for j := i + 2; j < l; j += 2 {
				nj := n.Content[j]
				if ni.Kind == nj.Kind && ni.Value == nj.Value {
					d.terrors = append(d.terrors, fmt.Sprintf("line %d: mapping key %#v already defined at line %d", nj.Line, nj.Value, ni.Line))
				}
			}
		}
		if len(d.terrors) > nerrs {
			return false
		}
	}
	switch out.Kind() {
	case reflect.Struct:
		return d.mappingStruct(n, out)
	case reflect.Map:
		// okay
	case reflect.Interface:
		iface := out
		if isStringMap(n) {
			out = reflect.MakeMap(d.stringMapType)
		} else {
			out = reflect.MakeMap(d.generalMapType)
		}
		iface.Set(out)
	default:
		d.terror(n, mapTag, out)
		return false
	}

	outt := out.Type()
	kt := outt.Key()
	et := outt.Elem()

	stringMapType := d.stringMapType
	generalMapType := d.generalMapType
	if outt.Elem() == ifaceType {
		if outt.Key().Kind() == reflect.String {
			d.stringMapType = outt
		} else if outt.Key() == ifaceType {
			d.generalMapType = outt
		}
	}

	mergedFields := d.mergedFields
	d.mergedFields = nil

	var mergeNode *Node

	mapIsNew := false
	if out.IsNil() {
		out.Set(reflect.MakeMap(outt))
		mapIsNew = true
	}
	for i := 0; i < l; i += 2 {
		if isMerge(n.Content[i]) {
			mergeNode = n.Content[i+1]
			continue
		}
		k := reflect.New(kt).Elem()
		if d.unmarshal(n.Content[i], k) {
			if mergedFields != nil {
				ki := k.Interface()
				if mergedFields[ki] {
					continue
				}
				mergedFields[ki] = true
			}
			kkind := k.Kind()
			if kkind == reflect.Interface {
				kkind = k.Elem().Kind()
			}
			if kkind == reflect.Map || kkind == reflect.Slice {
				failf("invalid map key: %#v", k.Interface())
			}
			e := reflect.New(et).Elem()
			if d.unmarshal(n.Content[i+1], e) || n.Content[i+1].ShortTag() == nullTag && (mapIsNew || !out.MapIndex(k).IsValid()) {
				out.SetMapIndex(k, e)
			}
		}
	}

	d.mergedFields = mergedFields
	if mergeNode != nil {
		d.merge(n, mergeNode, out)
	}

	d.stringMapType = stringMapType
	d.generalMapType = generalMapType
	return true
}

func isStringMap(n *Node) bool {
	if n.Kind != MappingNode {
		return false
	}
	l := len(n.Content)
	for i := 0; i < l; i += 2 {
		shortTag := n.Content[i].ShortTag()
		if shortTag != strTag && shortTag != mergeTag {
			return false
		}
	}
	return true
}

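// mappingStruct decodes a mapping node into a struct value using its cached
// field information, respecting inline fields and inline maps.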
func (d *decoder) mappingStruct(n *Node, out reflect.Value) (good bool) {
	sinfo, err := getStructInfo(out.Type())
	if err != nil {
		panic(err)
	}

	var inlineMap reflect.Value
	var elemType reflect.Type
	if sinfo.InlineMap != -1 {
		inlineMap = out.Field(sinfo.InlineMap)
		elemType = inlineMap.Type().Elem()
	}

	for _, index := range sinfo.InlineUnmarshalers {
		field := d.fieldByIndex(n, out, index)
		d.prepare(n, field)
	}

	mergedFields := d.mergedFields
	d.mergedFields = nil
	var mergeNode *Node
	var doneFields []bool
	if d.uniqueKeys {
		doneFields = make([]bool, len(sinfo.FieldsList))
	}
	name := settableValueOf("")
	l := len(n.Content)
	for i := 0; i < l; i += 2 {
		ni := n.Content[i]
		if isMerge(ni) {
			mergeNode = n.Content[i+1]
			continue
		}
		if !d.unmarshal(ni, name) {
			continue
		}
		sname := name.String()
		if mergedFields != nil {
			if mergedFields[sname] {
				continue
			}
			mergedFields[sname] = true
		}
		if info, ok := sinfo.FieldsMap[sname]; ok {
			if d.uniqueKeys {
				if doneFields[info.Id] {
					d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s already set in type %s", ni.Line, name.String(), out.Type()))
					continue
				}
				doneFields[info.Id] = true
			}
			var field reflect.Value
			if info.Inline == nil {
				field = out.Field(info.Num)
			} else {
				field = d.fieldByIndex(n, out, info.Inline)
			}
			d.unmarshal(n.Content[i+1], field)
		} else if sinfo.InlineMap != -1 {
			if inlineMap.IsNil() {
				inlineMap.Set(reflect.MakeMap(inlineMap.Type()))
			}
			value := reflect.New(elemType).Elem()
			d.unmarshal(n.Content[i+1], value)
			inlineMap.SetMapIndex(name, value)
		} else if d.knownFields {
			d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s not found in type %s", ni.Line, name.String(), out.Type()))
		}
	}

	d.mergedFields = mergedFields
	if mergeNode != nil {
		d.merge(n, mergeNode, out)
	}
	return true
}

func failWantMap() {
	failf("map merge requires map or sequence of maps as the value")
}

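// merge expands a merge key ("<<") value into out, accepting a mapping, an
// alias to a mapping, or a sequence of either.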
func (d *decoder) merge(parent *Node, merge *Node, out reflect.Value) {
	mergedFields := d.mergedFields
	if mergedFields == nil {
		d.mergedFields = make(map[interface{}]bool)
		for i := 0; i < len(parent.Content); i += 2 {
			k := reflect.New(ifaceType).Elem()
			if d.unmarshal(parent.Content[i], k) {
				d.mergedFields[k.Interface()] = true
			}
		}
	}

	switch merge.Kind {
	case MappingNode:
		d.unmarshal(merge, out)
	case AliasNode:
		if merge.Alias != nil && merge.Alias.Kind != MappingNode {
			failWantMap()
		}
		d.unmarshal(merge, out)
	case SequenceNode:
		for i := 0; i < len(merge.Content); i++ {
			ni := merge.Content[i]
			if ni.Kind == AliasNode {
				if ni.Alias != nil && ni.Alias.Kind != MappingNode {
					failWantMap()
				}
			} else if ni.Kind != MappingNode {
				failWantMap()
			}
			d.unmarshal(ni, out)
		}
	default:
		failWantMap()
	}

	d.mergedFields = mergedFields
}

func isMerge(n *Node) bool {
	return n.Kind == ScalarNode && n.Value == "<<" && (n.Tag == "" || n.Tag == "!" || shortTag(n.Tag) == mergeTag)
}