Diffstat (limited to 'vendor/github.com/hashicorp/hcl2/hcl/json')
 vendor/github.com/hashicorp/hcl2/hcl/json/ast.go              | 121 +
 vendor/github.com/hashicorp/hcl2/hcl/json/didyoumean.go       |  33 +
 vendor/github.com/hashicorp/hcl2/hcl/json/doc.go              |   8 +
 vendor/github.com/hashicorp/hcl2/hcl/json/navigation.go       |  70 +
 vendor/github.com/hashicorp/hcl2/hcl/json/parser.go           | 491 +
 vendor/github.com/hashicorp/hcl2/hcl/json/peeker.go           |  25 +
 vendor/github.com/hashicorp/hcl2/hcl/json/public.go           |  94 +
 vendor/github.com/hashicorp/hcl2/hcl/json/scanner.go          | 293 +
 vendor/github.com/hashicorp/hcl2/hcl/json/spec.md             | 405 +
 vendor/github.com/hashicorp/hcl2/hcl/json/structure.go        | 616 +
 vendor/github.com/hashicorp/hcl2/hcl/json/tokentype_string.go |  29 +
 11 files changed, 2185 insertions(+), 0 deletions(-)
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/ast.go b/vendor/github.com/hashicorp/hcl2/hcl/json/ast.go
new file mode 100644
index 0000000..753bfa0
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/json/ast.go
@@ -0,0 +1,121 @@
1package json
2
3import (
4 "math/big"
5
6 "github.com/hashicorp/hcl2/hcl"
7)
8
9type node interface {
10 Range() hcl.Range
11 StartRange() hcl.Range
12}
13
14type objectVal struct {
15 Attrs []*objectAttr
16 SrcRange hcl.Range // range of the entire object, brace-to-brace
17 OpenRange hcl.Range // range of the opening brace
18 CloseRange hcl.Range // range of the closing brace
19}
20
21func (n *objectVal) Range() hcl.Range {
22 return n.SrcRange
23}
24
25func (n *objectVal) StartRange() hcl.Range {
26 return n.OpenRange
27}
28
29type objectAttr struct {
30 Name string
31 Value node
32 NameRange hcl.Range // range of the name string
33}
34
35func (n *objectAttr) Range() hcl.Range {
36 return n.NameRange
37}
38
39func (n *objectAttr) StartRange() hcl.Range {
40 return n.NameRange
41}
42
43type arrayVal struct {
44 Values []node
45 SrcRange hcl.Range // range of the entire object, bracket-to-bracket
46 OpenRange hcl.Range // range of the opening bracket
47}
48
49func (n *arrayVal) Range() hcl.Range {
50 return n.SrcRange
51}
52
53func (n *arrayVal) StartRange() hcl.Range {
54 return n.OpenRange
55}
56
57type booleanVal struct {
58 Value bool
59 SrcRange hcl.Range
60}
61
62func (n *booleanVal) Range() hcl.Range {
63 return n.SrcRange
64}
65
66func (n *booleanVal) StartRange() hcl.Range {
67 return n.SrcRange
68}
69
70type numberVal struct {
71 Value *big.Float
72 SrcRange hcl.Range
73}
74
75func (n *numberVal) Range() hcl.Range {
76 return n.SrcRange
77}
78
79func (n *numberVal) StartRange() hcl.Range {
80 return n.SrcRange
81}
82
83type stringVal struct {
84 Value string
85 SrcRange hcl.Range
86}
87
88func (n *stringVal) Range() hcl.Range {
89 return n.SrcRange
90}
91
92func (n *stringVal) StartRange() hcl.Range {
93 return n.SrcRange
94}
95
96type nullVal struct {
97 SrcRange hcl.Range
98}
99
100func (n *nullVal) Range() hcl.Range {
101 return n.SrcRange
102}
103
104func (n *nullVal) StartRange() hcl.Range {
105 return n.SrcRange
106}
107
108// invalidVal is used as a placeholder where a value is needed for a valid
109// parse tree but the input was invalid enough to prevent one from being
110// created.
111type invalidVal struct {
112 SrcRange hcl.Range
113}
114
115func (n invalidVal) Range() hcl.Range {
116 return n.SrcRange
117}
118
119func (n invalidVal) StartRange() hcl.Range {
120 return n.SrcRange
121}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/didyoumean.go b/vendor/github.com/hashicorp/hcl2/hcl/json/didyoumean.go
new file mode 100644
index 0000000..fbdd8bf
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/json/didyoumean.go
@@ -0,0 +1,33 @@
1package json
2
3import (
4 "github.com/agext/levenshtein"
5)
6
7var keywords = []string{"false", "true", "null"}
8
9// keywordSuggestion tries to find a valid JSON keyword that is close to the
10// given string and returns it if found. If no keyword is close enough, returns
11// the empty string.
12func keywordSuggestion(given string) string {
13 return nameSuggestion(given, keywords)
14}
15
16// nameSuggestion tries to find a name from the given slice of suggested names
17// that is close to the given name and returns it if found. If no suggestion
18// is close enough, returns the empty string.
19//
20// The suggestions are tried in order, so earlier suggestions take precedence
21// if the given string is similar to two or more suggestions.
22//
23// This function is intended to be used with a relatively-small number of
24// suggestions. It's not optimized for hundreds or thousands of them.
25func nameSuggestion(given string, suggestions []string) string {
26 for _, suggestion := range suggestions {
27 dist := levenshtein.Distance(given, suggestion, nil)
28 if dist < 3 { // threshold determined experimentally
29 return suggestion
30 }
31 }
32 return ""
33}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/doc.go b/vendor/github.com/hashicorp/hcl2/hcl/json/doc.go
new file mode 100644
index 0000000..4943f9b
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/json/doc.go
@@ -0,0 +1,8 @@
1// Package json is the JSON parser for HCL. It parses JSON files and returns
2// implementations of the core HCL structural interfaces in terms of the
3// JSON data inside.
4//
5// This is not a generic JSON parser. Instead, it deals with the mapping from
6// the JSON information model to the HCL information model, using a number
7// of hard-coded structural conventions.
8package json
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/navigation.go b/vendor/github.com/hashicorp/hcl2/hcl/json/navigation.go
new file mode 100644
index 0000000..bc8a97f
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/json/navigation.go
@@ -0,0 +1,70 @@
1package json
2
3import (
4 "fmt"
5 "strings"
6)
7
8type navigation struct {
9 root node
10}
11
12// Implementation of hcled.ContextString
13func (n navigation) ContextString(offset int) string {
14 steps := navigationStepsRev(n.root, offset)
15 if steps == nil {
16 return ""
17 }
18
19 // We built our slice backwards, so we'll reverse it in-place now.
20 half := len(steps) / 2 // integer division
21 for i := 0; i < half; i++ {
22 steps[i], steps[len(steps)-1-i] = steps[len(steps)-1-i], steps[i]
23 }
24
25 ret := strings.Join(steps, "")
26 if len(ret) > 0 && ret[0] == '.' {
27 ret = ret[1:]
28 }
29 return ret
30}
31
32func navigationStepsRev(v node, offset int) []string {
33 switch tv := v.(type) {
34 case *objectVal:
35 // Do any of our properties have an object that contains the target
36 // offset?
37 for _, attr := range tv.Attrs {
38 k := attr.Name
39 av := attr.Value
40
41 switch av.(type) {
42 case *objectVal, *arrayVal:
43 // okay
44 default:
45 continue
46 }
47
48 if av.Range().ContainsOffset(offset) {
49 return append(navigationStepsRev(av, offset), "."+k)
50 }
51 }
52 case *arrayVal:
53 // Do any of our elements contain the target offset?
54 for i, elem := range tv.Values {
55
56 switch elem.(type) {
57 case *objectVal, *arrayVal:
58 // okay
59 default:
60 continue
61 }
62
63 if elem.Range().ContainsOffset(offset) {
64 return append(navigationStepsRev(elem, offset), fmt.Sprintf("[%d]", i))
65 }
66 }
67 }
68
69 return nil
70}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/parser.go b/vendor/github.com/hashicorp/hcl2/hcl/json/parser.go
new file mode 100644
index 0000000..246fd1c
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/json/parser.go
@@ -0,0 +1,491 @@
1package json
2
3import (
4 "encoding/json"
5 "fmt"
6 "math/big"
7
8 "github.com/hashicorp/hcl2/hcl"
9)
10
11func parseFileContent(buf []byte, filename string) (node, hcl.Diagnostics) {
12 tokens := scan(buf, pos{
13 Filename: filename,
14 Pos: hcl.Pos{
15 Byte: 0,
16 Line: 1,
17 Column: 1,
18 },
19 })
20 p := newPeeker(tokens)
21 node, diags := parseValue(p)
22 if len(diags) == 0 && p.Peek().Type != tokenEOF {
23 diags = diags.Append(&hcl.Diagnostic{
24 Severity: hcl.DiagError,
25 Summary: "Extraneous data after value",
26 Detail: "Extra characters appear after the JSON value.",
27 Subject: p.Peek().Range.Ptr(),
28 })
29 }
30 return node, diags
31}
32
33func parseValue(p *peeker) (node, hcl.Diagnostics) {
34 tok := p.Peek()
35
36 wrapInvalid := func(n node, diags hcl.Diagnostics) (node, hcl.Diagnostics) {
37 if n != nil {
38 return n, diags
39 }
40 return invalidVal{tok.Range}, diags
41 }
42
43 switch tok.Type {
44 case tokenBraceO:
45 return wrapInvalid(parseObject(p))
46 case tokenBrackO:
47 return wrapInvalid(parseArray(p))
48 case tokenNumber:
49 return wrapInvalid(parseNumber(p))
50 case tokenString:
51 return wrapInvalid(parseString(p))
52 case tokenKeyword:
53 return wrapInvalid(parseKeyword(p))
54 case tokenBraceC:
55 return wrapInvalid(nil, hcl.Diagnostics{
56 {
57 Severity: hcl.DiagError,
58 Summary: "Missing attribute value",
59 Detail: "A JSON value must start with a brace, a bracket, a number, a string, or a keyword.",
60 Subject: &tok.Range,
61 },
62 })
63 case tokenBrackC:
64 return wrapInvalid(nil, hcl.Diagnostics{
65 {
66 Severity: hcl.DiagError,
67 Summary: "Missing array element value",
68 Detail: "A JSON value must start with a brace, a bracket, a number, a string, or a keyword.",
69 Subject: &tok.Range,
70 },
71 })
72 case tokenEOF:
73 return wrapInvalid(nil, hcl.Diagnostics{
74 {
75 Severity: hcl.DiagError,
76 Summary: "Missing value",
77 Detail: "The JSON data ends prematurely.",
78 Subject: &tok.Range,
79 },
80 })
81 default:
82 return wrapInvalid(nil, hcl.Diagnostics{
83 {
84 Severity: hcl.DiagError,
85 Summary: "Invalid start of value",
86 Detail: "A JSON value must start with a brace, a bracket, a number, a string, or a keyword.",
87 Subject: &tok.Range,
88 },
89 })
90 }
91}
92
93func tokenCanStartValue(tok token) bool {
94 switch tok.Type {
95 case tokenBraceO, tokenBrackO, tokenNumber, tokenString, tokenKeyword:
96 return true
97 default:
98 return false
99 }
100}
101
102func parseObject(p *peeker) (node, hcl.Diagnostics) {
103 var diags hcl.Diagnostics
104
105 open := p.Read()
106 attrs := []*objectAttr{}
107
108 // recover is used to shift the peeker to what seems to be the end of
109 // our object, so that when we encounter an error we leave the peeker
110 // at a reasonable point in the token stream to continue parsing.
111 recover := func(tok token) {
112 open := 1
113 for {
114 switch tok.Type {
115 case tokenBraceO:
116 open++
117 case tokenBraceC:
118 open--
119 if open <= 1 {
120 return
121 }
122 case tokenEOF:
123 // Ran out of source before we were able to recover,
124 // so we'll bail here and let the caller deal with it.
125 return
126 }
127 tok = p.Read()
128 }
129 }
130
131Token:
132 for {
133 if p.Peek().Type == tokenBraceC {
134 break Token
135 }
136
137 keyNode, keyDiags := parseValue(p)
138 diags = diags.Extend(keyDiags)
139 if keyNode == nil {
140 return nil, diags
141 }
142
143 keyStrNode, ok := keyNode.(*stringVal)
144 if !ok {
145 return nil, diags.Append(&hcl.Diagnostic{
146 Severity: hcl.DiagError,
147 Summary: "Invalid object attribute name",
148 Detail: "A JSON object attribute name must be a string",
149 Subject: keyNode.StartRange().Ptr(),
150 })
151 }
152
153 key := keyStrNode.Value
154
155 colon := p.Read()
156 if colon.Type != tokenColon {
157 recover(colon)
158
159 if colon.Type == tokenBraceC || colon.Type == tokenComma {
160 // Catch common mistake of using braces instead of brackets
161 // for an object.
162 return nil, diags.Append(&hcl.Diagnostic{
163 Severity: hcl.DiagError,
164 Summary: "Missing object value",
165 Detail: "A JSON object attribute must have a value, introduced by a colon.",
166 Subject: &colon.Range,
167 })
168 }
169
170 if colon.Type == tokenEquals {
171 // Possible confusion with native HCL syntax.
172 return nil, diags.Append(&hcl.Diagnostic{
173 Severity: hcl.DiagError,
174 Summary: "Missing attribute value colon",
175 Detail: "JSON uses a colon as its name/value delimiter, not an equals sign.",
176 Subject: &colon.Range,
177 })
178 }
179
180 return nil, diags.Append(&hcl.Diagnostic{
181 Severity: hcl.DiagError,
182 Summary: "Missing attribute value colon",
183 Detail: "A colon must appear between an object attribute's name and its value.",
184 Subject: &colon.Range,
185 })
186 }
187
188 valNode, valDiags := parseValue(p)
189 diags = diags.Extend(valDiags)
190 if valNode == nil {
191 return nil, diags
192 }
193
194 attrs = append(attrs, &objectAttr{
195 Name: key,
196 Value: valNode,
197 NameRange: keyStrNode.SrcRange,
198 })
199
200 switch p.Peek().Type {
201 case tokenComma:
202 comma := p.Read()
203 if p.Peek().Type == tokenBraceC {
204 // Special error message for this common mistake
205 return nil, diags.Append(&hcl.Diagnostic{
206 Severity: hcl.DiagError,
207 Summary: "Trailing comma in object",
208 Detail: "JSON does not permit a trailing comma after the final attribute in an object.",
209 Subject: &comma.Range,
210 })
211 }
212 continue Token
213 case tokenEOF:
214 return nil, diags.Append(&hcl.Diagnostic{
215 Severity: hcl.DiagError,
216 Summary: "Unclosed object",
217 Detail: "No closing brace was found for this JSON object.",
218 Subject: &open.Range,
219 })
220 case tokenBrackC:
221 // Consume the bracket anyway, so that we don't return with the peeker
222 // at a strange place.
223 p.Read()
224 return nil, diags.Append(&hcl.Diagnostic{
225 Severity: hcl.DiagError,
226 Summary: "Mismatched braces",
227 Detail: "A JSON object must be closed with a brace, not a bracket.",
228 Subject: p.Peek().Range.Ptr(),
229 })
230 case tokenBraceC:
231 break Token
232 default:
233 recover(p.Read())
234 return nil, diags.Append(&hcl.Diagnostic{
235 Severity: hcl.DiagError,
236 Summary: "Missing attribute separator comma",
237 Detail: "A comma must appear between each attribute declaration in an object.",
238 Subject: p.Peek().Range.Ptr(),
239 })
240 }
241
242 }
243
244 close := p.Read()
245 return &objectVal{
246 Attrs: attrs,
247 SrcRange: hcl.RangeBetween(open.Range, close.Range),
248 OpenRange: open.Range,
249 CloseRange: close.Range,
250 }, diags
251}
252
253func parseArray(p *peeker) (node, hcl.Diagnostics) {
254 var diags hcl.Diagnostics
255
256 open := p.Read()
257 vals := []node{}
258
259 // recover is used to shift the peeker to what seems to be the end of
260 // our array, so that when we encounter an error we leave the peeker
261 // at a reasonable point in the token stream to continue parsing.
262 recover := func(tok token) {
263 open := 1
264 for {
265 switch tok.Type {
266 case tokenBrackO:
267 open++
268 case tokenBrackC:
269 open--
270 if open <= 1 {
271 return
272 }
273 case tokenEOF:
274 // Ran out of source before we were able to recover,
275 // so we'll bail here and let the caller deal with it.
276 return
277 }
278 tok = p.Read()
279 }
280 }
281
282Token:
283 for {
284 if p.Peek().Type == tokenBrackC {
285 break Token
286 }
287
288 valNode, valDiags := parseValue(p)
289 diags = diags.Extend(valDiags)
290 if valNode == nil {
291 return nil, diags
292 }
293
294 vals = append(vals, valNode)
295
296 switch p.Peek().Type {
297 case tokenComma:
298 comma := p.Read()
299 if p.Peek().Type == tokenBrackC {
300 // Special error message for this common mistake
301 return nil, diags.Append(&hcl.Diagnostic{
302 Severity: hcl.DiagError,
303 Summary: "Trailing comma in array",
304 Detail: "JSON does not permit a trailing comma after the final attribute in an array.",
305 Subject: &comma.Range,
306 })
307 }
308 continue Token
309 case tokenColon:
310 recover(p.Read())
311 return nil, diags.Append(&hcl.Diagnostic{
312 Severity: hcl.DiagError,
313 Summary: "Invalid array value",
314 Detail: "A colon is not used to introduce values in a JSON array.",
315 Subject: p.Peek().Range.Ptr(),
316 })
317 case tokenEOF:
318 recover(p.Read())
319 return nil, diags.Append(&hcl.Diagnostic{
320 Severity: hcl.DiagError,
321 Summary: "Unclosed object",
322 Detail: "No closing bracket was found for this JSON array.",
323 Subject: &open.Range,
324 })
325 case tokenBraceC:
326 recover(p.Read())
327 return nil, diags.Append(&hcl.Diagnostic{
328 Severity: hcl.DiagError,
329 Summary: "Mismatched brackets",
330 Detail: "A JSON array must be closed with a bracket, not a brace.",
331 Subject: p.Peek().Range.Ptr(),
332 })
333 case tokenBrackC:
334 break Token
335 default:
336 recover(p.Read())
337 return nil, diags.Append(&hcl.Diagnostic{
338 Severity: hcl.DiagError,
339 Summary: "Missing attribute separator comma",
340 Detail: "A comma must appear between each value in an array.",
341 Subject: p.Peek().Range.Ptr(),
342 })
343 }
344
345 }
346
347 close := p.Read()
348 return &arrayVal{
349 Values: vals,
350 SrcRange: hcl.RangeBetween(open.Range, close.Range),
351 OpenRange: open.Range,
352 }, diags
353}
354
355func parseNumber(p *peeker) (node, hcl.Diagnostics) {
356 tok := p.Read()
357
358 // Use encoding/json to validate the number syntax.
359 // TODO: Do this more directly to produce better diagnostics.
360 var num json.Number
361 err := json.Unmarshal(tok.Bytes, &num)
362 if err != nil {
363 return nil, hcl.Diagnostics{
364 {
365 Severity: hcl.DiagError,
366 Summary: "Invalid JSON number",
367 Detail: fmt.Sprintf("There is a syntax error in the given JSON number."),
368 Subject: &tok.Range,
369 },
370 }
371 }
372
373 f, _, err := big.ParseFloat(string(num), 10, 512, big.ToNearestEven)
374 if err != nil {
375 // Should never happen if above passed, since JSON numbers are a subset
376 // of what big.Float can parse...
377 return nil, hcl.Diagnostics{
378 {
379 Severity: hcl.DiagError,
380 Summary: "Invalid JSON number",
381 Detail: fmt.Sprintf("There is a syntax error in the given JSON number."),
382 Subject: &tok.Range,
383 },
384 }
385 }
386
387 return &numberVal{
388 Value: f,
389 SrcRange: tok.Range,
390 }, nil
391}
392
393func parseString(p *peeker) (node, hcl.Diagnostics) {
394 tok := p.Read()
395 var str string
396 err := json.Unmarshal(tok.Bytes, &str)
397
398 if err != nil {
399 var errRange hcl.Range
400 if serr, ok := err.(*json.SyntaxError); ok {
401 errOfs := serr.Offset
402 errPos := tok.Range.Start
403 errPos.Byte += int(errOfs)
404
405 // TODO: Use the byte offset to properly count unicode
406 // characters for the column, and mark the whole of the
407 // character that was wrong as part of our range.
408 errPos.Column += int(errOfs)
409
410 errEndPos := errPos
411 errEndPos.Byte++
412 errEndPos.Column++
413
414 errRange = hcl.Range{
415 Filename: tok.Range.Filename,
416 Start: errPos,
417 End: errEndPos,
418 }
419 } else {
420 errRange = tok.Range
421 }
422
423 var contextRange *hcl.Range
424 if errRange != tok.Range {
425 contextRange = &tok.Range
426 }
427
428 // FIXME: Eventually we should parse strings directly here so
429 // we can produce a more useful error message in the face of things
430 // such as invalid escapes, etc.
431 return nil, hcl.Diagnostics{
432 {
433 Severity: hcl.DiagError,
434 Summary: "Invalid JSON string",
435 Detail: fmt.Sprintf("There is a syntax error in the given JSON string."),
436 Subject: &errRange,
437 Context: contextRange,
438 },
439 }
440 }
441
442 return &stringVal{
443 Value: str,
444 SrcRange: tok.Range,
445 }, nil
446}
447
448func parseKeyword(p *peeker) (node, hcl.Diagnostics) {
449 tok := p.Read()
450 s := string(tok.Bytes)
451
452 switch s {
453 case "true":
454 return &booleanVal{
455 Value: true,
456 SrcRange: tok.Range,
457 }, nil
458 case "false":
459 return &booleanVal{
460 Value: false,
461 SrcRange: tok.Range,
462 }, nil
463 case "null":
464 return &nullVal{
465 SrcRange: tok.Range,
466 }, nil
467 case "undefined", "NaN", "Infinity":
468 return nil, hcl.Diagnostics{
469 {
470 Severity: hcl.DiagError,
471 Summary: "Invalid JSON keyword",
472 Detail: fmt.Sprintf("The JavaScript identifier %q cannot be used in JSON.", s),
473 Subject: &tok.Range,
474 },
475 }
476 default:
477 var dym string
478 if suggest := keywordSuggestion(s); suggest != "" {
479 dym = fmt.Sprintf(" Did you mean %q?", suggest)
480 }
481
482 return nil, hcl.Diagnostics{
483 {
484 Severity: hcl.DiagError,
485 Summary: "Invalid JSON keyword",
486 Detail: fmt.Sprintf("%q is not a valid JSON keyword.%s", s, dym),
487 Subject: &tok.Range,
488 },
489 }
490 }
491}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/peeker.go b/vendor/github.com/hashicorp/hcl2/hcl/json/peeker.go
new file mode 100644
index 0000000..fc7bbf5
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/json/peeker.go
@@ -0,0 +1,25 @@
1package json
2
3type peeker struct {
4 tokens []token
5 pos int
6}
7
8func newPeeker(tokens []token) *peeker {
9 return &peeker{
10 tokens: tokens,
11 pos: 0,
12 }
13}
14
15func (p *peeker) Peek() token {
16 return p.tokens[p.pos]
17}
18
19func (p *peeker) Read() token {
20 ret := p.tokens[p.pos]
21 if ret.Type != tokenEOF {
22 p.pos++
23 }
24 return ret
25}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/public.go b/vendor/github.com/hashicorp/hcl2/hcl/json/public.go
new file mode 100644
index 0000000..2728aa1
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/json/public.go
@@ -0,0 +1,94 @@
1package json
2
3import (
4 "fmt"
5 "io/ioutil"
6 "os"
7
8 "github.com/hashicorp/hcl2/hcl"
9)
10
11// Parse attempts to parse the given buffer as JSON and, if successful, returns
12// a hcl.File for the HCL configuration represented by it.
13//
14// This is not a generic JSON parser. Instead, it deals only with the profile
15// of JSON used to express HCL configuration.
16//
17// The returned file is valid only if the returned diagnostics returns false
18// from its HasErrors method. If HasErrors returns true, the file represents
19// the subset of data that was able to be parsed, which may be none.
20func Parse(src []byte, filename string) (*hcl.File, hcl.Diagnostics) {
21 rootNode, diags := parseFileContent(src, filename)
22
23 switch rootNode.(type) {
24 case *objectVal, *arrayVal:
25 // okay
26 default:
27 diags = diags.Append(&hcl.Diagnostic{
28 Severity: hcl.DiagError,
29 Summary: "Root value must be object",
30 Detail: "The root value in a JSON-based configuration must be either a JSON object or a JSON array of objects.",
31 Subject: rootNode.StartRange().Ptr(),
32 })
33
34 // Since we've already produced an error message for this being
35 // invalid, we'll return an empty placeholder here so that trying to
36 // extract content from our root body won't produce a redundant
37 // error saying the same thing again in more general terms.
38 fakePos := hcl.Pos{
39 Byte: 0,
40 Line: 1,
41 Column: 1,
42 }
43 fakeRange := hcl.Range{
44 Filename: filename,
45 Start: fakePos,
46 End: fakePos,
47 }
48 rootNode = &objectVal{
49 Attrs: []*objectAttr{},
50 SrcRange: fakeRange,
51 OpenRange: fakeRange,
52 }
53 }
54
55 file := &hcl.File{
56 Body: &body{
57 val: rootNode,
58 },
59 Bytes: src,
60 Nav: navigation{rootNode},
61 }
62 return file, diags
63}
64
65// ParseFile is a convenience wrapper around Parse that first attempts to load
66// data from the given filename, passing the result to Parse if successful.
67//
68// If the file cannot be read, an error diagnostic with nil context is returned.
69func ParseFile(filename string) (*hcl.File, hcl.Diagnostics) {
70 f, err := os.Open(filename)
71 if err != nil {
72 return nil, hcl.Diagnostics{
73 {
74 Severity: hcl.DiagError,
75 Summary: "Failed to open file",
76 Detail: fmt.Sprintf("The file %q could not be opened.", filename),
77 },
78 }
79 }
80 defer f.Close()
81
82 src, err := ioutil.ReadAll(f)
83 if err != nil {
84 return nil, hcl.Diagnostics{
85 {
86 Severity: hcl.DiagError,
87 Summary: "Failed to read file",
88 Detail: fmt.Sprintf("The file %q was opened, but an error occurred while reading it.", filename),
89 },
90 }
91 }
92
93 return Parse(src, filename)
94}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/scanner.go b/vendor/github.com/hashicorp/hcl2/hcl/json/scanner.go
new file mode 100644
index 0000000..0a8378b
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/json/scanner.go
@@ -0,0 +1,293 @@
1package json
2
3import (
4 "fmt"
5
6 "github.com/apparentlymart/go-textseg/textseg"
7 "github.com/hashicorp/hcl2/hcl"
8)
9
10//go:generate stringer -type tokenType scanner.go
11type tokenType rune
12
13const (
14 tokenBraceO tokenType = '{'
15 tokenBraceC tokenType = '}'
16 tokenBrackO tokenType = '['
17 tokenBrackC tokenType = ']'
18 tokenComma tokenType = ','
19 tokenColon tokenType = ':'
20 tokenKeyword tokenType = 'K'
21 tokenString tokenType = 'S'
22 tokenNumber tokenType = 'N'
23 tokenEOF tokenType = '␄'
24 tokenInvalid tokenType = 0
25 tokenEquals tokenType = '=' // used only for reminding the user of JSON syntax
26)
27
28type token struct {
29 Type tokenType
30 Bytes []byte
31 Range hcl.Range
32}
33
34// scan returns the primary tokens for the given JSON buffer in sequence.
35//
36// The responsibility of this pass is to just mark the slices of the buffer
37// as being of various types. It is lax in how it interprets the multi-byte
38// token types keyword, string and number, preferring to capture erroneous
39// extra bytes that we presume the user intended to be part of the token
40// so that we can generate more helpful diagnostics in the parser.
41func scan(buf []byte, start pos) []token {
42 var tokens []token
43 p := start
44 for {
45 if len(buf) == 0 {
46 tokens = append(tokens, token{
47 Type: tokenEOF,
48 Bytes: nil,
49 Range: posRange(p, p),
50 })
51 return tokens
52 }
53
54 buf, p = skipWhitespace(buf, p)
55
56 if len(buf) == 0 {
57 tokens = append(tokens, token{
58 Type: tokenEOF,
59 Bytes: nil,
60 Range: posRange(p, p),
61 })
62 return tokens
63 }
64
65 start = p
66
67 first := buf[0]
68 switch {
69 case first == '{' || first == '}' || first == '[' || first == ']' || first == ',' || first == ':' || first == '=':
70 p.Pos.Column++
71 p.Pos.Byte++
72 tokens = append(tokens, token{
73 Type: tokenType(first),
74 Bytes: buf[0:1],
75 Range: posRange(start, p),
76 })
77 buf = buf[1:]
78 case first == '"':
79 var tokBuf []byte
80 tokBuf, buf, p = scanString(buf, p)
81 tokens = append(tokens, token{
82 Type: tokenString,
83 Bytes: tokBuf,
84 Range: posRange(start, p),
85 })
86 case byteCanStartNumber(first):
87 var tokBuf []byte
88 tokBuf, buf, p = scanNumber(buf, p)
89 tokens = append(tokens, token{
90 Type: tokenNumber,
91 Bytes: tokBuf,
92 Range: posRange(start, p),
93 })
94 case byteCanStartKeyword(first):
95 var tokBuf []byte
96 tokBuf, buf, p = scanKeyword(buf, p)
97 tokens = append(tokens, token{
98 Type: tokenKeyword,
99 Bytes: tokBuf,
100 Range: posRange(start, p),
101 })
102 default:
103 tokens = append(tokens, token{
104 Type: tokenInvalid,
105 Bytes: buf[:1],
106 Range: start.Range(1, 1),
107 })
108 // If we've encountered an invalid then we might as well stop
109 // scanning since the parser won't proceed beyond this point.
110 return tokens
111 }
112 }
113}
114
115func byteCanStartNumber(b byte) bool {
116 switch b {
117 // We are slightly more tolerant than JSON requires here since we
118 // expect the parser will make a stricter interpretation of the
119 // number bytes, but we specifically don't allow 'e' or 'E' here
120 // since we want the scanner to treat that as the start of an
121 // invalid keyword instead, to produce more intelligible error messages.
122 case '-', '+', '.', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
123 return true
124 default:
125 return false
126 }
127}
128
129func scanNumber(buf []byte, start pos) ([]byte, []byte, pos) {
130 // The scanner doesn't check that the sequence of digit-ish bytes is
131 // in a valid order. The parser must do this when decoding a number
132 // token.
133 var i int
134 p := start
135Byte:
136 for i = 0; i < len(buf); i++ {
137 switch buf[i] {
138 case '-', '+', '.', 'e', 'E', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
139 p.Pos.Byte++
140 p.Pos.Column++
141 default:
142 break Byte
143 }
144 }
145 return buf[:i], buf[i:], p
146}
147
148func byteCanStartKeyword(b byte) bool {
149 switch {
150 // We allow any sequence of alphabetical characters here, even though
151 // JSON is more constrained, so that we can collect what we presume
152 // the user intended to be a single keyword and then check its validity
153 // in the parser, where we can generate better diagnostics.
154 // So e.g. we want to be able to say:
155 // unrecognized keyword "True". Did you mean "true"?
156 case (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z'):
157 return true
158 default:
159 return false
160 }
161}
162
163func scanKeyword(buf []byte, start pos) ([]byte, []byte, pos) {
164 var i int
165 p := start
166Byte:
167 for i = 0; i < len(buf); i++ {
168 b := buf[i]
169 switch {
170 case (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || b == '_':
171 p.Pos.Byte++
172 p.Pos.Column++
173 default:
174 break Byte
175 }
176 }
177 return buf[:i], buf[i:], p
178}
179
180func scanString(buf []byte, start pos) ([]byte, []byte, pos) {
181 // The scanner doesn't validate correct use of escapes, etc. It pays
182 // attention to escapes only for the purpose of identifying the closing
183 // quote character. It's the parser's responsibility to do proper
184 // validation.
185 //
186 // The scanner also doesn't specifically detect unterminated string
187 // literals, though they can be identified in the parser by checking if
188 // the final byte in a string token is the double-quote character.
189
190 // Skip the opening quote symbol
191 i := 1
192 p := start
193 p.Pos.Byte++
194 p.Pos.Column++
195 escaping := false
196Byte:
197 for i < len(buf) {
198 b := buf[i]
199
200 switch {
201 case b == '\\':
202 escaping = !escaping
203 p.Pos.Byte++
204 p.Pos.Column++
205 i++
206 case b == '"':
207 p.Pos.Byte++
208 p.Pos.Column++
209 i++
210 if !escaping {
211 break Byte
212 }
213 escaping = false
214 case b < 32:
215 break Byte
216 default:
217 // Advance by one grapheme cluster, so that we consider each
218 // grapheme to be a "column".
219 // Ignoring error because this scanner cannot produce errors.
220 advance, _, _ := textseg.ScanGraphemeClusters(buf[i:], true)
221
222 p.Pos.Byte += advance
223 p.Pos.Column++
224 i += advance
225
226 escaping = false
227 }
228 }
229 return buf[:i], buf[i:], p
230}
231
232func skipWhitespace(buf []byte, start pos) ([]byte, pos) {
233 var i int
234 p := start
235Byte:
236 for i = 0; i < len(buf); i++ {
237 switch buf[i] {
238 case ' ':
239 p.Pos.Byte++
240 p.Pos.Column++
241 case '\n':
242 p.Pos.Byte++
243 p.Pos.Column = 1
244 p.Pos.Line++
245 case '\r':
246 // For the purpose of line/column counting we consider a
247 // carriage return to take up no space, assuming that it will
248 // be paired up with a newline (on Windows, for example) that
249 // will account for both of them.
250 p.Pos.Byte++
251 case '\t':
252 // We arbitrarily count a tab as if it were two spaces, because
253 // we need to choose _some_ number here. This means any system
254 // that renders code on-screen with markers must itself treat
255 // tabs as a pair of spaces for rendering purposes, or instead
256 // use the byte offset and back into its own column position.
257 p.Pos.Byte++
258 p.Pos.Column += 2
259 default:
260 break Byte
261 }
262 }
263 return buf[i:], p
264}
265
266type pos struct {
267 Filename string
268 Pos hcl.Pos
269}
270
271func (p *pos) Range(byteLen, charLen int) hcl.Range {
272 start := p.Pos
273 end := p.Pos
274 end.Byte += byteLen
275 end.Column += charLen
276 return hcl.Range{
277 Filename: p.Filename,
278 Start: start,
279 End: end,
280 }
281}
282
283func posRange(start, end pos) hcl.Range {
284 return hcl.Range{
285 Filename: start.Filename,
286 Start: start.Pos,
287 End: end.Pos,
288 }
289}
290
291func (t token) GoString() string {
292 return fmt.Sprintf("json.token{json.%s, []byte(%q), %#v}", t.Type, t.Bytes, t.Range)
293}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/spec.md b/vendor/github.com/hashicorp/hcl2/hcl/json/spec.md
new file mode 100644
index 0000000..9b33c7f
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/json/spec.md
@@ -0,0 +1,405 @@
1# HCL JSON Syntax Specification
2
3This is the specification for the JSON serialization for HCL. HCL is a system
4for defining configuration languages for applications. The HCL information
5model is designed to support multiple concrete syntaxes for configuration,
6and this JSON-based format complements [the native syntax](../hclsyntax/spec.md)
7by being easy to machine-generate, whereas the native syntax is oriented
8towards human authoring and maintenance.
9
10This syntax is defined in terms of JSON as defined in
11[RFC7159](https://tools.ietf.org/html/rfc7159). As such it inherits the JSON
12grammar as-is, and merely defines a specific methodology for interpreting
13JSON constructs into HCL structural elements and expressions.
14
15This mapping is defined such that valid JSON-serialized HCL input can be
16_produced_ using standard JSON implementations in various programming languages.
17_Parsing_ such JSON has some additional constraints beyond what is normally
18supported by JSON parsers, so a specialized parser may be required that
19is able to:
20
21* Preserve the relative ordering of properties defined in an object.
22* Preserve multiple definitions of the same property name.
23* Preserve numeric values to the precision required by the number type
24 in [the HCL syntax-agnostic information model](../spec.md).
25* Retain source location information for parsed tokens/constructs in order
26 to produce good error messages.
27
28## Structural Elements
29
30[The HCL syntax-agnostic information model](../spec.md) defines a _body_ as an
31abstract container for attribute definitions and child blocks. A body is
32represented in JSON as either a single JSON object or a JSON array of objects.
33
34Body processing is in terms of JSON object properties, visited in the order
35they appear in the input. Where a body is represented by a single JSON object,
36the properties of that object are visited in order. Where a body is
37represented by a JSON array, each of its elements is visited in order and
38each element has its properties visited in order. If any element of the array
39is not a JSON object then the input is erroneous.
40
41When a body is being processed in the _dynamic attributes_ mode, the allowance
42of a JSON array in the previous paragraph does not apply and instead a single
43JSON object is always required.
44
45As defined in the language-agnostic model, body processing is in terms
46of a schema which provides context for interpreting the body's content. For
47JSON bodies, the schema is crucial to allow differentiation of attribute
48definitions and block definitions, both of which are represented via object
49properties.
50
51The special property name `"//"`, when used in an object representing a HCL
52body, is parsed and ignored. A property with this name can be used to
53include human-readable comments. (This special property name is _not_
54processed in this way for any _other_ HCL constructs that are represented as
55JSON objects.)
56
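A minimal sketch of such a comment property, assuming a body that defines a hypothetical `foo` attribute:

```json
{
  "//": "This property is ignored; it exists only as a human-readable comment.",
  "foo": "bar baz"
}
```
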
57### Attributes
58
59Where the given schema describes an attribute with a given name, the object
60property with the matching name — if present — serves as the attribute's
61definition.
62
63When a body is being processed in the _dynamic attributes_ mode, each object
64property serves as an attribute definition for the attribute whose name
65matches the property name.
66
67The value of an attribute definition property is interpreted as an _expression_,
68as described in a later section.
69
70Given a schema that calls for an attribute named "foo", a JSON object like
71the following provides a definition for that attribute:
72
73```json
74{
75 "foo": "bar baz"
76}
77```
78
79### Blocks
80
81Where the given schema describes a block with a given type name, each object
82property with the matching name serves as a definition of zero or more blocks
83of that type.
84
85Processing of child blocks is in terms of nested JSON objects and arrays.
86If the schema defines one or more _labels_ for the block type, a nested JSON
87object or JSON array of objects is required for each labelling level. These
88are flattened to a single ordered sequence of object properties using the
89same algorithm as for body content as defined above. Each object property
90serves as a label value at the corresponding level.
91
92After any labelling levels, the next nested value is either a JSON object
93representing a single block body, or a JSON array of JSON objects that each
94represent a single block body. Use of an array accommodates the definition
95of multiple blocks that have identical type and labels.
96
97Given a schema that calls for a block type named "foo" with no labels, the
98following JSON objects are all valid definitions of zero or more blocks of this
99type:
100
101```json
102{
103 "foo": {
104 "child_attr": "baz"
105 }
106}
107```
108
109```json
110{
111 "foo": [
112 {
113 "child_attr": "baz"
114 },
115 {
116 "child_attr": "boz"
117 }
118 ]
119}
120```
121```json
122{
123 "foo": []
124}
125```
126
127The first of these defines a single child block of type "foo". The second
128defines _two_ such blocks. The final example shows a degenerate definition
129of zero blocks, though generators should prefer to omit the property entirely
130in this scenario.
131
132Given a schema that calls for a block type named "foo" with _two_ labels, the
133extra label levels must be represented as objects or arrays of objects as in
134the following examples:
135
136```json
137{
138 "foo": {
139 "bar": {
140 "baz": {
141 "child_attr": "baz"
142 },
143 "boz": {
144 "child_attr": "baz"
145 }
146 },
147 "boz": {
148 "baz": {
149 "child_attr": "baz"
150 }
151 }
152 }
153}
154```
155
156```json
157{
158 "foo": {
159 "bar": {
160 "baz": {
161 "child_attr": "baz"
162 },
163 "boz": {
164 "child_attr": "baz"
165 }
166 },
167 "boz": {
168 "baz": [
169 {
170 "child_attr": "baz"
171 },
172 {
173 "child_attr": "boz"
174 }
175 ]
176 }
177 }
178}
179```
180
181```json
182{
183 "foo": [
184 {
185 "bar": {
186 "baz": {
187 "child_attr": "baz"
188 },
189 "boz": {
190 "child_attr": "baz"
191 }
192 }
193 },
194 {
195 "bar": {
196 "baz": [
197 {
198 "child_attr": "baz"
199 },
200 {
201 "child_attr": "boz"
202 }
203 ]
204 }
205 }
206 ]
207}
208```
209
210```json
211{
212 "foo": {
213 "bar": {
214 "baz": {
215 "child_attr": "baz"
216 },
217 "boz": {
218 "child_attr": "baz"
219 }
220 },
221 "bar": {
222 "baz": [
223 {
224 "child_attr": "baz"
225 },
226 {
227 "child_attr": "boz"
228 }
229 ]
230 }
231 }
232}
233```
234
235Arrays can be introduced at either the label definition or block body
236definition levels to define multiple definitions of the same block type
237or labels while preserving order.
238
239A JSON HCL parser _must_ support duplicate definitions of the same property
240name within a single object, preserving all of them and the relative ordering
241between them. The array-based forms are also required so that JSON HCL
242configurations can be produced with JSON producing libraries that are not
243able to preserve property definition order and multiple definitions of
244the same property.
245
246## Expressions
247
248JSON lacks a native expression syntax, so the HCL JSON syntax instead defines
249a mapping for each of the JSON value types, including a special mapping for
250strings that allows optional use of arbitrary expressions.
251
252### Objects
253
254When interpreted as an expression, a JSON object represents a value of a HCL
255object type.
256
257Each property of the JSON object represents an attribute of the HCL object type.
258The property name string given in the JSON input is interpreted as a string
259expression as described below, and its result is converted to string as defined
260by the syntax-agnostic information model. If such a conversion is not possible,
261an error is produced and evaluation fails.
262
263An instance of the constructed object type is then created, whose values
264are interpreted by again recursively applying the mapping rules defined in
265this section to each of the property values.
266
267If any evaluated property name strings produce null values, an error is
268produced and evaluation fails. If any produce _unknown_ values, the _entire
269object's_ result is an unknown value of the dynamic pseudo-type, signalling
270that the type of the object cannot be determined.
271
272It is an error to define the same property name multiple times within a single
273JSON object interpreted as an expression. In full expression mode, this
274constraint applies to the name expression results after conversion to string,
275rather than the raw string that may contain interpolation expressions.
276
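As a brief sketch, assuming full expression mode and a hypothetical `example.suffix` variable, the following represents an object whose second attribute name is computed from a template:

```json
{
  "static_name": 1,
  "name_${example.suffix}": 2
}
```
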
277### Arrays
278
279When interpreted as an expression, a JSON array represents a value of a HCL
280tuple type.
281
282Each element of the JSON array represents an element of the HCL tuple type.
283The tuple type is constructed by enumerating the JSON array elements, creating
284for each an element whose type is the result of recursively applying the
285expression mapping rules. Correspondence is preserved between the array element
286indices and the tuple element indices.
287
288An instance of the constructed tuple type is then created, whose values are
289interpreted by again recursively applying the mapping rules defined in this
290section.
291
292### Numbers
293
294When interpreted as an expression, a JSON number represents a HCL number value.
295
296HCL numbers are arbitrary-precision decimal values, so a JSON HCL parser must
297be able to translate exactly the value given to a number of corresponding
298precision, within the constraints set by the HCL syntax-agnostic information
299model.
300
301In practice, off-the-shelf JSON serializers often do not support customizing the
302processing of numbers, and instead force processing as 32-bit or 64-bit
303floating point values.
304
305A _producer_ of JSON HCL that uses such a serializer can provide numeric values
306as JSON strings where they have precision too great for representation in the
307serializer's chosen numeric type in situations where the result will be
308converted to number (using the standard conversion rules) by a calling
309application.
310
311Alternatively, for expressions that are evaluated in full expression mode an
312embedded template interpolation can be used to faithfully represent a number,
313such as `"${1e150}"`, which will then be evaluated by the underlying HCL native
314syntax expression evaluator.
315
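A sketch of both workarounds, using hypothetical attribute names and assuming the calling application converts the first value to number using the standard conversion rules:

```json
{
  "precise_a": "3.14159265358979323846264338327950288",
  "precise_b": "${3.14159265358979323846264338327950288}"
}
```
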
316### Boolean Values
317
318The JSON boolean values `true` and `false`, when interpreted as expressions,
319represent the corresponding HCL boolean values.
320
321### The Null Value
322
323The JSON value `null`, when interpreted as an expression, represents a
324HCL null value of the dynamic pseudo-type.
325
326### Strings
327
328When interpreted as an expression, a JSON string may be interpreted in one of
329two ways depending on the evaluation mode.
330
331If evaluating in literal-only mode (as defined by the syntax-agnostic
332information model) the literal string is interpreted directly as a HCL string
333value, by directly using the exact sequence of unicode characters represented.
334Template interpolations and directives MUST NOT be processed in this mode,
335allowing any characters that appear as introduction sequences to pass through
336literally:
337
338```json
339"Hello world! Template sequences like ${ are not intepreted here."
340```
341
342When evaluating in full expression mode (again, as defined by the syntax-
343agnostic information model) the literal string is instead interpreted as a
344_standalone template_ in the HCL Native Syntax. The expression evaluation
345result is then the direct result of evaluating that template with the current
346variable scope and function table.
347
348```json
349"Hello, ${name}! Template sequences are interpreted in full expression mode."
350```
351
352In particular the _Template Interpolation Unwrapping_ requirement from the
353HCL native syntax specification must be implemented, allowing the use of
354single-interpolation templates to represent expressions that would not
355otherwise be representable in JSON, such as the following example where
356the result must be a number, rather than a string representation of a number:
357
358```json
359"${ a + b }"
360```
361
362## Static Analysis
363
364The HCL static analysis operations are implemented for JSON values that
365represent expressions, as described in the following sections.
366
367Due to the limited expressive power of the JSON syntax alone, the use of these
368static analysis functions, rather than normal expression evaluation, provides
369additional context for how a JSON value is to be interpreted, which means
370that a static analysis can result in a different interpretation of a given
371expression than normal evaluation would.
372
373### Static List
374
375An expression interpreted as a static list must be a JSON array. Each of the
376values in the array is interpreted as an expression and returned.
377
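For example, the following array could be analyzed as a static list of three element expressions (the `${example.a}` reference is a hypothetical placeholder):

```json
["${example.a}", 2, "three"]
```
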
378### Static Map
379
380An expression interpreted as a static map must be a JSON object. Each of the
381key/value pairs in the object is presented as a pair of expressions. Since
382object property names are always strings, evaluating the key expression with
383a non-`nil` evaluation context will evaluate any template sequences given
384in the property name.
385
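A sketch of a value that could be analyzed as a static map, where `${example.key}` stands in for a hypothetical template sequence in a property name:

```json
{
  "fixed_key": "first value",
  "${example.key}": "second value"
}
```
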
386### Static Call
387
388An expression interpreted as a static call must be a string. The content of
389the string is interpreted as a native syntax expression (not a _template_,
390unlike normal evaluation) and then the static call analysis is delegated to
391that expression.
392
393If the original expression is not a string or its contents cannot be parsed
394as a native syntax expression then static call analysis is not supported.
395
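As an illustration, a string such as the following could be analyzed as a static call to a function named `min` with three argument expressions (the function name is a hypothetical example):

```json
"min(1, 2, 3)"
```
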
396### Static Traversal
397
398An expression interpreted as a static traversal must be a string. The content
399of the string is interpreted as a native syntax expression (not a _template_,
400unlike normal evaluation) and then static traversal analysis is delegated
401to that expression.
402
403If the original expression is not a string or its contents cannot be parsed
404as a native syntax expression then static traversal analysis is not supported.
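
For illustration, a string such as the following could be analyzed as a static traversal with three steps (the names are hypothetical):

```json
"example.instance.id"
```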
405
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/structure.go b/vendor/github.com/hashicorp/hcl2/hcl/json/structure.go
new file mode 100644
index 0000000..28dcf52
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/json/structure.go
@@ -0,0 +1,616 @@
1package json
2
3import (
4 "fmt"
5
6 "github.com/hashicorp/hcl2/hcl"
7 "github.com/hashicorp/hcl2/hcl/hclsyntax"
8 "github.com/zclconf/go-cty/cty"
9 "github.com/zclconf/go-cty/cty/convert"
10)
11
12// body is the implementation of "Body" used for files processed with the JSON
13// parser.
14type body struct {
15 val node
16
17 // If non-nil, the keys of this map cause the corresponding attributes to
18 // be treated as non-existing. This is used when Body.PartialContent is
19 // called, to produce the "remaining content" Body.
20 hiddenAttrs map[string]struct{}
21}
22
23// expression is the implementation of "Expression" used for files processed
24// with the JSON parser.
25type expression struct {
26 src node
27}
28
29func (b *body) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostics) {
30 content, newBody, diags := b.PartialContent(schema)
31
32 hiddenAttrs := newBody.(*body).hiddenAttrs
33
34 var nameSuggestions []string
35 for _, attrS := range schema.Attributes {
36 if _, ok := hiddenAttrs[attrS.Name]; !ok {
37 // Only suggest an attribute name if we didn't use it already.
38 nameSuggestions = append(nameSuggestions, attrS.Name)
39 }
40 }
41 for _, blockS := range schema.Blocks {
42 // Blocks can appear multiple times, so we'll suggest their type
43 // names regardless of whether they've already been used.
44 nameSuggestions = append(nameSuggestions, blockS.Type)
45 }
46
47 jsonAttrs, attrDiags := b.collectDeepAttrs(b.val, nil)
48 diags = append(diags, attrDiags...)
49
50 for _, attr := range jsonAttrs {
51 k := attr.Name
52 if k == "//" {
53 // Ignore "//" keys in objects representing bodies, to allow
54 // their use as comments.
55 continue
56 }
57
58 if _, ok := hiddenAttrs[k]; !ok {
59 suggestion := nameSuggestion(k, nameSuggestions)
60 if suggestion != "" {
61 suggestion = fmt.Sprintf(" Did you mean %q?", suggestion)
62 }
63
64 diags = append(diags, &hcl.Diagnostic{
65 Severity: hcl.DiagError,
66 Summary: "Extraneous JSON object property",
67 Detail: fmt.Sprintf("No attribute or block type is named %q.%s", k, suggestion),
68 Subject: &attr.NameRange,
69 Context: attr.Range().Ptr(),
70 })
71 }
72 }
73
74 return content, diags
75}
76
77func (b *body) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) {
78 var diags hcl.Diagnostics
79
80 jsonAttrs, attrDiags := b.collectDeepAttrs(b.val, nil)
81 diags = append(diags, attrDiags...)
82
83 usedNames := map[string]struct{}{}
84 if b.hiddenAttrs != nil {
85 for k := range b.hiddenAttrs {
86 usedNames[k] = struct{}{}
87 }
88 }
89
90 content := &hcl.BodyContent{
91 Attributes: map[string]*hcl.Attribute{},
92 Blocks: nil,
93
94 MissingItemRange: b.MissingItemRange(),
95 }
96
97 // Create some more convenient data structures for our work below.
98 attrSchemas := map[string]hcl.AttributeSchema{}
99 blockSchemas := map[string]hcl.BlockHeaderSchema{}
100 for _, attrS := range schema.Attributes {
101 attrSchemas[attrS.Name] = attrS
102 }
103 for _, blockS := range schema.Blocks {
104 blockSchemas[blockS.Type] = blockS
105 }
106
107 for _, jsonAttr := range jsonAttrs {
108 attrName := jsonAttr.Name
109 if _, used := b.hiddenAttrs[attrName]; used {
110 continue
111 }
112
113 if attrS, defined := attrSchemas[attrName]; defined {
114 if existing, exists := content.Attributes[attrName]; exists {
115 diags = append(diags, &hcl.Diagnostic{
116 Severity: hcl.DiagError,
117 Summary: "Duplicate attribute definition",
118 Detail: fmt.Sprintf("The attribute %q was already defined at %s.", attrName, existing.Range),
119 Subject: &jsonAttr.NameRange,
120 Context: jsonAttr.Range().Ptr(),
121 })
122 continue
123 }
124
125 content.Attributes[attrS.Name] = &hcl.Attribute{
126 Name: attrS.Name,
127 Expr: &expression{src: jsonAttr.Value},
128 Range: hcl.RangeBetween(jsonAttr.NameRange, jsonAttr.Value.Range()),
129 NameRange: jsonAttr.NameRange,
130 }
131 usedNames[attrName] = struct{}{}
132
133 } else if blockS, defined := blockSchemas[attrName]; defined {
134 bv := jsonAttr.Value
135 blockDiags := b.unpackBlock(bv, blockS.Type, &jsonAttr.NameRange, blockS.LabelNames, nil, nil, &content.Blocks)
136 diags = append(diags, blockDiags...)
137 usedNames[attrName] = struct{}{}
138 }
139
140 // We ignore anything that isn't defined because that's the
141 // PartialContent contract. The Content method will catch leftovers.
142 }
143
144 // Make sure we got all the required attributes.
145 for _, attrS := range schema.Attributes {
146 if !attrS.Required {
147 continue
148 }
149 if _, defined := content.Attributes[attrS.Name]; !defined {
150 diags = append(diags, &hcl.Diagnostic{
151 Severity: hcl.DiagError,
152 Summary: "Missing required attribute",
153 Detail: fmt.Sprintf("The attribute %q is required, but no definition was found.", attrS.Name),
154 Subject: b.MissingItemRange().Ptr(),
155 })
156 }
157 }
158
159 unusedBody := &body{
160 val: b.val,
161 hiddenAttrs: usedNames,
162 }
163
164 return content, unusedBody, diags
165}
166
167// JustAttributes for JSON bodies interprets all properties of the wrapped
168// JSON object as attributes and returns them.
169func (b *body) JustAttributes() (hcl.Attributes, hcl.Diagnostics) {
170 var diags hcl.Diagnostics
171 attrs := make(map[string]*hcl.Attribute)
172
173 obj, ok := b.val.(*objectVal)
174 if !ok {
175 diags = append(diags, &hcl.Diagnostic{
176 Severity: hcl.DiagError,
177 Summary: "Incorrect JSON value type",
178 Detail: "A JSON object is required here, defining the attributes for this block.",
179 Subject: b.val.StartRange().Ptr(),
180 })
181 return attrs, diags
182 }
183
184 for _, jsonAttr := range obj.Attrs {
185 name := jsonAttr.Name
186 if name == "//" {
187 // Ignore "//" keys in objects representing bodies, to allow
188 // their use as comments.
189 continue
190 }
191
192 if _, hidden := b.hiddenAttrs[name]; hidden {
193 continue
194 }
195
196 if existing, exists := attrs[name]; exists {
197 diags = append(diags, &hcl.Diagnostic{
198 Severity: hcl.DiagError,
199 Summary: "Duplicate attribute definition",
200 Detail: fmt.Sprintf("The attribute %q was already defined at %s.", name, existing.Range),
201 Subject: &jsonAttr.NameRange,
202 })
203 continue
204 }
205
206 attrs[name] = &hcl.Attribute{
207 Name: name,
208 Expr: &expression{src: jsonAttr.Value},
209 Range: hcl.RangeBetween(jsonAttr.NameRange, jsonAttr.Value.Range()),
210 NameRange: jsonAttr.NameRange,
211 }
212 }
213
214 // No diagnostics possible here, since the parser already took care of
215 // finding duplicates and every JSON value can be a valid attribute value.
216 return attrs, diags
217}
218
219func (b *body) MissingItemRange() hcl.Range {
220 switch tv := b.val.(type) {
221 case *objectVal:
222 return tv.CloseRange
223 case *arrayVal:
224 return tv.OpenRange
225 default:
226 // Should not happen in correct operation, but might show up if the
227 // input is invalid and we are producing partial results.
228 return tv.StartRange()
229 }
230}
231
232func (b *body) unpackBlock(v node, typeName string, typeRange *hcl.Range, labelsLeft []string, labelsUsed []string, labelRanges []hcl.Range, blocks *hcl.Blocks) (diags hcl.Diagnostics) {
233 if len(labelsLeft) > 0 {
234 labelName := labelsLeft[0]
235 jsonAttrs, attrDiags := b.collectDeepAttrs(v, &labelName)
236 diags = append(diags, attrDiags...)
237
238 if len(jsonAttrs) == 0 {
239 diags = diags.Append(&hcl.Diagnostic{
240 Severity: hcl.DiagError,
241 Summary: "Missing block label",
242 Detail: fmt.Sprintf("At least one object property is required, whose name represents the %s block's %s.", typeName, labelName),
243 Subject: v.StartRange().Ptr(),
244 })
245 return
246 }
247 labelsUsed := append(labelsUsed, "")
248 labelRanges := append(labelRanges, hcl.Range{})
249 for _, p := range jsonAttrs {
250 pk := p.Name
251 labelsUsed[len(labelsUsed)-1] = pk
252 labelRanges[len(labelRanges)-1] = p.NameRange
253 diags = append(diags, b.unpackBlock(p.Value, typeName, typeRange, labelsLeft[1:], labelsUsed, labelRanges, blocks)...)
254 }
255 return
256 }
257
258 // By the time we get here, we've peeled off all the labels and we're ready
259 // to deal with the block's actual content.
260
261 // need to copy the label slices because their underlying arrays will
262 // continue to be mutated after we return.
263 labels := make([]string, len(labelsUsed))
264 copy(labels, labelsUsed)
265 labelR := make([]hcl.Range, len(labelRanges))
266 copy(labelR, labelRanges)
267
268 switch tv := v.(type) {
269 case *objectVal:
270 // Single instance of the block
271 *blocks = append(*blocks, &hcl.Block{
272 Type: typeName,
273 Labels: labels,
274 Body: &body{
275 val: tv,
276 },
277
278 DefRange: tv.OpenRange,
279 TypeRange: *typeRange,
280 LabelRanges: labelR,
281 })
282 case *arrayVal:
283 // Multiple instances of the block
284 for _, av := range tv.Values {
285 *blocks = append(*blocks, &hcl.Block{
286 Type: typeName,
287 Labels: labels,
288 Body: &body{
289 val: av, // might be mistyped; we'll find out when content is requested for this body
290 },
291
292 DefRange: tv.OpenRange,
293 TypeRange: *typeRange,
294 LabelRanges: labelR,
295 })
296 }
297 default:
298 diags = diags.Append(&hcl.Diagnostic{
299 Severity: hcl.DiagError,
300 Summary: "Incorrect JSON value type",
301 Detail: fmt.Sprintf("Either a JSON object or a JSON array is required, representing the contents of one or more %q blocks.", typeName),
302 Subject: v.StartRange().Ptr(),
303 })
304 }
305 return
306}
307
308// collectDeepAttrs takes either a single object or an array of objects and
309// flattens it into a list of object attributes, collecting attributes from
310// all of the objects in a given array.
311//
312// Ordering is preserved, so a list of objects that each have one property
313// will result in those properties being returned in the same order as the
314// objects appeared in the array.
315//
316// This is appropriate for use only for objects representing bodies or labels
317// within a block.
318//
319// The labelName argument, if non-null, is used to tailor returned error
320// messages to refer to block labels rather than attributes and child blocks.
321// It has no other effect.
322func (b *body) collectDeepAttrs(v node, labelName *string) ([]*objectAttr, hcl.Diagnostics) {
323 var diags hcl.Diagnostics
324 var attrs []*objectAttr
325
326 switch tv := v.(type) {
327
328 case *objectVal:
329 attrs = append(attrs, tv.Attrs...)
330
331 case *arrayVal:
332 for _, ev := range tv.Values {
333 switch tev := ev.(type) {
334 case *objectVal:
335 attrs = append(attrs, tev.Attrs...)
336 default:
337 if labelName != nil {
338 diags = append(diags, &hcl.Diagnostic{
339 Severity: hcl.DiagError,
340 Summary: "Incorrect JSON value type",
341 Detail: fmt.Sprintf("A JSON object is required here, to specify %s labels for this block.", *labelName),
342 Subject: ev.StartRange().Ptr(),
343 })
344 } else {
345 diags = append(diags, &hcl.Diagnostic{
346 Severity: hcl.DiagError,
347 Summary: "Incorrect JSON value type",
348 Detail: "A JSON object is required here, to define attributes and child blocks.",
349 Subject: ev.StartRange().Ptr(),
350 })
351 }
352 }
353 }
354
355 default:
356 if labelName != nil {
357 diags = append(diags, &hcl.Diagnostic{
358 Severity: hcl.DiagError,
359 Summary: "Incorrect JSON value type",
360 Detail: fmt.Sprintf("Either a JSON object or JSON array of objects is required here, to specify %s labels for this block.", *labelName),
361 Subject: v.StartRange().Ptr(),
362 })
363 } else {
364 diags = append(diags, &hcl.Diagnostic{
365 Severity: hcl.DiagError,
366 Summary: "Incorrect JSON value type",
367 Detail: "Either a JSON object or JSON array of objects is required here, to define attributes and child blocks.",
368 Subject: v.StartRange().Ptr(),
369 })
370 }
371 }
372
373 return attrs, diags
374}
375
376func (e *expression) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
377 switch v := e.src.(type) {
378 case *stringVal:
379 if ctx != nil {
380 // Parse string contents as a HCL native language expression.
381 // We only do this if we have a context, so passing a nil context
382 // is how the caller specifies that interpolations are not allowed
383 // and that the string should just be returned verbatim.
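// For example (hypothetical input, for illustration only): with a
// non-nil ctx the JSON string "Hello, ${name}!" is parsed and evaluated
// as a native-syntax template, whereas with a nil ctx the same string
// is returned verbatim by the plain return further below.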
384 templateSrc := v.Value
385 expr, diags := hclsyntax.ParseTemplate(
386 []byte(templateSrc),
387 v.SrcRange.Filename,
388
389 // This won't produce _exactly_ the right result, since
390 // the hclsyntax parser can't "see" any escapes we removed
391 // while parsing JSON, but it's better than nothing.
392 hcl.Pos{
393 Line: v.SrcRange.Start.Line,
394
395 // skip over the opening quote mark
396 Byte: v.SrcRange.Start.Byte + 1,
397 Column: v.SrcRange.Start.Column + 1,
398 },
399 )
400 if diags.HasErrors() {
401 return cty.DynamicVal, diags
402 }
403 val, evalDiags := expr.Value(ctx)
404 diags = append(diags, evalDiags...)
405 return val, diags
406 }
407
408 return cty.StringVal(v.Value), nil
409 case *numberVal:
410 return cty.NumberVal(v.Value), nil
411 case *booleanVal:
412 return cty.BoolVal(v.Value), nil
413 case *arrayVal:
414 vals := []cty.Value{}
415 for _, jsonVal := range v.Values {
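// Note that any diagnostics from evaluating the element expression are
// discarded here; only the resulting value is collected.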
416 val, _ := (&expression{src: jsonVal}).Value(ctx)
417 vals = append(vals, val)
418 }
419 return cty.TupleVal(vals), nil
420 case *objectVal:
421 var diags hcl.Diagnostics
422 attrs := map[string]cty.Value{}
423 attrRanges := map[string]hcl.Range{}
424 known := true
425 for _, jsonAttr := range v.Attrs {
426 // In this one context we allow keys to contain interpolation
427 // expressions too, assuming we're evaluating in interpolation
428 // mode. This achieves parity with the native syntax where
429 // object expressions can have dynamic keys, while block contents
430 // may not.
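// For example (illustrative only), a property written as
// {"${key_expr}": "v"} has its key evaluated as a template when a ctx
// is provided, in the same way as its value.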
431 name, nameDiags := (&expression{src: &stringVal{
432 Value: jsonAttr.Name,
433 SrcRange: jsonAttr.NameRange,
434 }}).Value(ctx)
435 val, valDiags := (&expression{src: jsonAttr.Value}).Value(ctx)
436 diags = append(diags, nameDiags...)
437 diags = append(diags, valDiags...)
438
439 var err error
440 name, err = convert.Convert(name, cty.String)
441 if err != nil {
442 diags = append(diags, &hcl.Diagnostic{
443 Severity: hcl.DiagError,
444 Summary: "Invalid object key expression",
445 Detail: fmt.Sprintf("Cannot use this expression as an object key: %s.", err),
446 Subject: &jsonAttr.NameRange,
447 })
448 continue
449 }
450 if name.IsNull() {
451 diags = append(diags, &hcl.Diagnostic{
452 Severity: hcl.DiagError,
453 Summary: "Invalid object key expression",
454 Detail: "Cannot use null value as an object key.",
455 Subject: &jsonAttr.NameRange,
456 })
457 continue
458 }
459 if !name.IsKnown() {
460 // This is a bit of a weird case, since our usual rules require
461 // us to tolerate unknowns and just represent the result as
462 // best we can, but if we don't know the key then we can't
463 // know the type of our object at all, and thus we must turn
464 // the whole thing into cty.DynamicVal. This is consistent with
465 // how this situation is handled in the native syntax.
466 // We'll keep iterating so we can collect other errors in
467 // subsequent attributes.
468 known = false
469 continue
470 }
471 nameStr := name.AsString()
472 if _, defined := attrs[nameStr]; defined {
473 diags = append(diags, &hcl.Diagnostic{
474 Severity: hcl.DiagError,
475 Summary: "Duplicate object attribute",
476 Detail: fmt.Sprintf("An attribute named %q was already defined at %s.", nameStr, attrRanges[nameStr]),
477 Subject: &jsonAttr.NameRange,
478 })
479 continue
480 }
481 attrs[nameStr] = val
482 attrRanges[nameStr] = jsonAttr.NameRange
483 }
484 if !known {
485 // We encountered an unknown key somewhere along the way, so
486 // we can't know what our type will eventually be.
487 return cty.DynamicVal, diags
488 }
489 return cty.ObjectVal(attrs), diags
490 default:
491 // Default to DynamicVal so that ASTs containing invalid nodes can
492 // still be partially-evaluated.
493 return cty.DynamicVal, nil
494 }
495}
496
497func (e *expression) Variables() []hcl.Traversal {
498 var vars []hcl.Traversal
499
500 switch v := e.src.(type) {
501 case *stringVal:
502 templateSrc := v.Value
503 expr, diags := hclsyntax.ParseTemplate(
504 []byte(templateSrc),
505 v.SrcRange.Filename,
506
507 // This won't produce _exactly_ the right result, since
508 // the hclsyntax parser can't "see" any escapes we removed
509 // while parsing JSON, but it's better than nothing.
510 hcl.Pos{
511 Line: v.SrcRange.Start.Line,
512
513 // skip over the opening quote mark
514 Byte: v.SrcRange.Start.Byte + 1,
515 Column: v.SrcRange.Start.Column + 1,
516 },
517 )
518 if diags.HasErrors() {
519 return vars
520 }
521 return expr.Variables()
522
523 case *arrayVal:
524 for _, jsonVal := range v.Values {
525 vars = append(vars, (&expression{src: jsonVal}).Variables()...)
526 }
527 case *objectVal:
528 for _, jsonAttr := range v.Attrs {
529 vars = append(vars, (&expression{src: jsonAttr.Value}).Variables()...)
530 }
531 }
532
533 return vars
534}
535
536func (e *expression) Range() hcl.Range {
537 return e.src.Range()
538}
539
540func (e *expression) StartRange() hcl.Range {
541 return e.src.StartRange()
542}
543
544// Implementation for hcl.AbsTraversalForExpr.
545func (e *expression) AsTraversal() hcl.Traversal {
546 // In JSON-based syntax a traversal is given as a string containing
547 // traversal syntax as defined by hclsyntax.ParseTraversalAbs.
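// For example (illustrative only), the JSON string "a.b[0]" yields a
// traversal of the root name "a", the attribute "b" and the index 0,
// while a non-string value or invalid traversal syntax yields nil.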
548
549 switch v := e.src.(type) {
550 case *stringVal:
551 traversal, diags := hclsyntax.ParseTraversalAbs([]byte(v.Value), v.SrcRange.Filename, v.SrcRange.Start)
552 if diags.HasErrors() {
553 return nil
554 }
555 return traversal
556 default:
557 return nil
558 }
559}
560
561// Implementation for hcl.ExprCall.
562func (e *expression) ExprCall() *hcl.StaticCall {
563 // In JSON-based syntax a static call is given as a string containing
564 // an expression in the native syntax that also supports ExprCall.
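// For example (illustrative only), the JSON string "min(1, 2)" is parsed
// as a native-syntax expression and returned as a static call to "min"
// with two arguments; any other value yields nil.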
565
566 switch v := e.src.(type) {
567 case *stringVal:
568 expr, diags := hclsyntax.ParseExpression([]byte(v.Value), v.SrcRange.Filename, v.SrcRange.Start)
569 if diags.HasErrors() {
570 return nil
571 }
572
573 call, diags := hcl.ExprCall(expr)
574 if diags.HasErrors() {
575 return nil
576 }
577
578 return call
579 default:
580 return nil
581 }
582}
583
584// Implementation for hcl.ExprList.
585func (e *expression) ExprList() []hcl.Expression {
586 switch v := e.src.(type) {
587 case *arrayVal:
588 ret := make([]hcl.Expression, len(v.Values))
589 for i, node := range v.Values {
590 ret[i] = &expression{src: node}
591 }
592 return ret
593 default:
594 return nil
595 }
596}
597
598// Implementation for hcl.ExprMap.
599func (e *expression) ExprMap() []hcl.KeyValuePair {
600 switch v := e.src.(type) {
601 case *objectVal:
602 ret := make([]hcl.KeyValuePair, len(v.Attrs))
603 for i, jsonAttr := range v.Attrs {
604 ret[i] = hcl.KeyValuePair{
605 Key: &expression{src: &stringVal{
606 Value: jsonAttr.Name,
607 SrcRange: jsonAttr.NameRange,
608 }},
609 Value: &expression{src: jsonAttr.Value},
610 }
611 }
612 return ret
613 default:
614 return nil
615 }
616}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/tokentype_string.go b/vendor/github.com/hashicorp/hcl2/hcl/json/tokentype_string.go
new file mode 100644
index 0000000..bbcce5b
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/json/tokentype_string.go
@@ -0,0 +1,29 @@
1// Code generated by "stringer -type tokenType scanner.go"; DO NOT EDIT.
2
3package json
4
5import "strconv"
6
7const _tokenType_name = "tokenInvalidtokenCommatokenColontokenEqualstokenKeywordtokenNumbertokenStringtokenBrackOtokenBrackCtokenBraceOtokenBraceCtokenEOF"
8
9var _tokenType_map = map[tokenType]string{
10 0: _tokenType_name[0:12],
11 44: _tokenType_name[12:22],
12 58: _tokenType_name[22:32],
13 61: _tokenType_name[32:43],
14 75: _tokenType_name[43:55],
15 78: _tokenType_name[55:66],
16 83: _tokenType_name[66:77],
17 91: _tokenType_name[77:88],
18 93: _tokenType_name[88:99],
19 123: _tokenType_name[99:110],
20 125: _tokenType_name[110:121],
21 9220: _tokenType_name[121:129],
22}
23
24func (i tokenType) String() string {
25 if str, ok := _tokenType_map[i]; ok {
26 return str
27 }
28 return "tokenType(" + strconv.FormatInt(int64(i), 10) + ")"
29}