1package hclsyntax
2
3import (
4 "bytes"
5 "fmt"
6 "strconv"
7 "unicode/utf8"
8
9 "github.com/apparentlymart/go-textseg/textseg"
10 "github.com/hashicorp/hcl2/hcl"
11 "github.com/zclconf/go-cty/cty"
12)
13
14type parser struct {
15 *peeker
16
17 // set to true if any recovery is attempted. The parser can use this
18 // to attempt to reduce error noise by suppressing "bad token" errors
19 // in recovery mode, assuming that the recovery heuristics have failed
20 // in this case and left the peeker in a wrong place.
21 recovery bool
22}
23
24func (p *parser) ParseBody(end TokenType) (*Body, hcl.Diagnostics) {
25 attrs := Attributes{}
26 blocks := Blocks{}
27 var diags hcl.Diagnostics
28
29 startRange := p.PrevRange()
30 var endRange hcl.Range
31
32Token:
33 for {
34 next := p.Peek()
35 if next.Type == end {
36 endRange = p.NextRange()
37 p.Read()
38 break Token
39 }
40
41 switch next.Type {
42 case TokenNewline:
43 p.Read()
44 continue
45 case TokenIdent:
46 item, itemDiags := p.ParseBodyItem()
47 diags = append(diags, itemDiags...)
48 switch titem := item.(type) {
49 case *Block:
50 blocks = append(blocks, titem)
51 case *Attribute:
52 if existing, exists := attrs[titem.Name]; exists {
53 diags = append(diags, &hcl.Diagnostic{
54 Severity: hcl.DiagError,
55 Summary: "Attribute redefined",
56 Detail: fmt.Sprintf(
107c1cdb 57 "The argument %q was already set at %s. Each argument may be set only once.",
15c0b25d
AP
58 titem.Name, existing.NameRange.String(),
59 ),
60 Subject: &titem.NameRange,
61 })
62 } else {
63 attrs[titem.Name] = titem
64 }
65 default:
66 // This should never happen for valid input, but may happen if a
67 // syntax error was detected in ParseBodyItem that prevented
68 // it from even producing a partially-broken item. In that
69 // case, it would've left at least one error in the diagnostics
70 // slice we already dealt with above.
71 //
72 // We'll assume ParseBodyItem attempted recovery to leave
73 // us in a reasonable position to try parsing the next item.
74 continue
75 }
76 default:
77 bad := p.Read()
78 if !p.recovery {
79 if bad.Type == TokenOQuote {
80 diags = append(diags, &hcl.Diagnostic{
81 Severity: hcl.DiagError,
82 Summary: "Invalid argument name",
83 Detail: "Argument names must not be quoted.",
84 Subject: &bad.Range,
85 })
86 } else {
87 diags = append(diags, &hcl.Diagnostic{
88 Severity: hcl.DiagError,
89 Summary: "Argument or block definition required",
90 Detail: "An argument or block definition is required here.",
91 Subject: &bad.Range,
92 })
93 }
94 }
95 endRange = p.PrevRange() // arbitrary, but somewhere inside the body means better diagnostics
96
97 p.recover(end) // attempt to recover to the token after the end of this body
98 break Token
99 }
100 }
101
102 return &Body{
103 Attributes: attrs,
104 Blocks: blocks,
105
106 SrcRange: hcl.RangeBetween(startRange, endRange),
107 EndRange: hcl.Range{
108 Filename: endRange.Filename,
109 Start: endRange.End,
110 End: endRange.End,
111 },
112 }, diags
113}
114
115func (p *parser) ParseBodyItem() (Node, hcl.Diagnostics) {
116 ident := p.Read()
117 if ident.Type != TokenIdent {
118 p.recoverAfterBodyItem()
119 return nil, hcl.Diagnostics{
120 {
121 Severity: hcl.DiagError,
122 Summary: "Argument or block definition required",
123 Detail: "An argument or block definition is required here.",
124 Subject: &ident.Range,
125 },
126 }
127 }
128
129 next := p.Peek()
130
131 switch next.Type {
132 case TokenEqual:
133 return p.finishParsingBodyAttribute(ident, false)
134 case TokenOQuote, TokenOBrace, TokenIdent:
135 return p.finishParsingBodyBlock(ident)
136 default:
137 p.recoverAfterBodyItem()
138 return nil, hcl.Diagnostics{
139 {
140 Severity: hcl.DiagError,
141 Summary: "Argument or block definition required",
142 Detail: "An argument or block definition is required here. To set an argument, use the equals sign \"=\" to introduce the argument value.",
143 Subject: &ident.Range,
144 },
145 }
146 }
147
148 return nil, nil
149}
150
151// parseSingleAttrBody is a weird variant of ParseBody that deals with the
152// body of a nested block containing only one attribute value all on a single
153// line, like foo { bar = baz } . It expects to find a single attribute item
154// immediately followed by the end token type with no intervening newlines.
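// For example, given the single-line form foo { bar = baz }, the caller has
// already consumed "foo" and "{", so this function reads "bar", the equals
// sign, and the "baz" expression, leaving the closing "}" for the caller.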
155func (p *parser) parseSingleAttrBody(end TokenType) (*Body, hcl.Diagnostics) {
156 ident := p.Read()
157 if ident.Type != TokenIdent {
158 p.recoverAfterBodyItem()
159 return nil, hcl.Diagnostics{
160 {
161 Severity: hcl.DiagError,
162 Summary: "Argument or block definition required",
163 Detail: "An argument or block definition is required here.",
164 Subject: &ident.Range,
165 },
166 }
167 }
168
169 var attr *Attribute
170 var diags hcl.Diagnostics
171
172 next := p.Peek()
173
174 switch next.Type {
175 case TokenEqual:
176 node, attrDiags := p.finishParsingBodyAttribute(ident, true)
177 diags = append(diags, attrDiags...)
178 attr = node.(*Attribute)
179 case TokenOQuote, TokenOBrace, TokenIdent:
180 p.recoverAfterBodyItem()
181 return nil, hcl.Diagnostics{
182 {
183 Severity: hcl.DiagError,
184 Summary: "Argument definition required",
185 Detail: fmt.Sprintf("A single-line block definition can contain only a single argument. If you meant to define argument %q, use an equals sign to assign it a value. To define a nested block, place it on a line of its own within its parent block.", ident.Bytes),
186 Subject: hcl.RangeBetween(ident.Range, next.Range).Ptr(),
187 },
188 }
189 default:
190 p.recoverAfterBodyItem()
191 return nil, hcl.Diagnostics{
192 {
193 Severity: hcl.DiagError,
194 Summary: "Argument or block definition required",
195 Detail: "An argument or block definition is required here. To set an argument, use the equals sign \"=\" to introduce the argument value.",
196 Subject: &ident.Range,
197 },
198 }
199 }
200
201 return &Body{
202 Attributes: Attributes{
203 string(ident.Bytes): attr,
204 },
205
206 SrcRange: attr.SrcRange,
207 EndRange: hcl.Range{
208 Filename: attr.SrcRange.Filename,
209 Start: attr.SrcRange.End,
210 End: attr.SrcRange.End,
211 },
212 }, diags
213
214}
215
216func (p *parser) finishParsingBodyAttribute(ident Token, singleLine bool) (Node, hcl.Diagnostics) {
217 eqTok := p.Read() // eat equals token
218 if eqTok.Type != TokenEqual {
219 // should never happen if caller behaves
220 panic("finishParsingBodyAttribute called with next not equals")
221 }
222
223 var endRange hcl.Range
224
225 expr, diags := p.ParseExpression()
226 if p.recovery && diags.HasErrors() {
227 // recovery within expressions tends to be tricky, so we've probably
228 // landed somewhere weird. We'll try to reset to the start of a body
229 // item so parsing can continue.
230 endRange = p.PrevRange()
231 p.recoverAfterBodyItem()
232 } else {
233 endRange = p.PrevRange()
234 if !singleLine {
235 end := p.Peek()
236 if end.Type != TokenNewline && end.Type != TokenEOF {
237 if !p.recovery {
238 summary := "Missing newline after argument"
239 detail := "An argument definition must end with a newline."
240
241 if end.Type == TokenComma {
242 summary = "Unexpected comma after argument"
243 detail = "Argument definitions must be separated by newlines, not commas. " + detail
244 }
245
246 diags = append(diags, &hcl.Diagnostic{
247 Severity: hcl.DiagError,
248 Summary: summary,
249 Detail: detail,
250 Subject: &end.Range,
251 Context: hcl.RangeBetween(ident.Range, end.Range).Ptr(),
252 })
253 }
254 endRange = p.PrevRange()
255 p.recoverAfterBodyItem()
256 } else {
257 endRange = p.PrevRange()
258 p.Read() // eat newline
259 }
260 }
261 }
262
263 return &Attribute{
264 Name: string(ident.Bytes),
265 Expr: expr,
266
267 SrcRange: hcl.RangeBetween(ident.Range, endRange),
268 NameRange: ident.Range,
269 EqualsRange: eqTok.Range,
270 }, diags
271}
272
273func (p *parser) finishParsingBodyBlock(ident Token) (Node, hcl.Diagnostics) {
274 var blockType = string(ident.Bytes)
275 var diags hcl.Diagnostics
276 var labels []string
277 var labelRanges []hcl.Range
278
279 var oBrace Token
280
281Token:
282 for {
283 tok := p.Peek()
284
285 switch tok.Type {
286
287 case TokenOBrace:
288 oBrace = p.Read()
289 break Token
290
291 case TokenOQuote:
292 label, labelRange, labelDiags := p.parseQuotedStringLiteral()
293 diags = append(diags, labelDiags...)
294 labels = append(labels, label)
295 labelRanges = append(labelRanges, labelRange)
296 // parseQuotedStringLiteral recovers up to the closing quote
297 // if it encounters problems, so we can continue looking for
298 // more labels and eventually the block body itself.
299
300 case TokenIdent:
301 tok = p.Read() // eat token
302 label, labelRange := string(tok.Bytes), tok.Range
303 labels = append(labels, label)
304 labelRanges = append(labelRanges, labelRange)
305
306 default:
307 switch tok.Type {
308 case TokenEqual:
309 diags = append(diags, &hcl.Diagnostic{
310 Severity: hcl.DiagError,
311 Summary: "Invalid block definition",
312 Detail: "The equals sign \"=\" indicates an argument definition, and must not be used when defining a block.",
313 Subject: &tok.Range,
314 Context: hcl.RangeBetween(ident.Range, tok.Range).Ptr(),
315 })
316 case TokenNewline:
317 diags = append(diags, &hcl.Diagnostic{
318 Severity: hcl.DiagError,
319 Summary: "Invalid block definition",
320 Detail: "A block definition must have block content delimited by \"{\" and \"}\", starting on the same line as the block header.",
321 Subject: &tok.Range,
322 Context: hcl.RangeBetween(ident.Range, tok.Range).Ptr(),
323 })
324 default:
325 if !p.recovery {
326 diags = append(diags, &hcl.Diagnostic{
327 Severity: hcl.DiagError,
328 Summary: "Invalid block definition",
329 Detail: "Either a quoted string block label or an opening brace (\"{\") is expected here.",
330 Subject: &tok.Range,
331 Context: hcl.RangeBetween(ident.Range, tok.Range).Ptr(),
332 })
333 }
334 }
335
336 p.recoverAfterBodyItem()
337
338 return &Block{
339 Type: blockType,
340 Labels: labels,
341 Body: &Body{
342 SrcRange: ident.Range,
343 EndRange: ident.Range,
344 },
345
346 TypeRange: ident.Range,
347 LabelRanges: labelRanges,
348 OpenBraceRange: ident.Range, // placeholder
349 CloseBraceRange: ident.Range, // placeholder
350 }, diags
351 }
352 }
353
354 // Once we fall out here, the peeker is pointed just after our opening
355 // brace, so we can begin our nested body parsing.
356 var body *Body
357 var bodyDiags hcl.Diagnostics
358 switch p.Peek().Type {
359 case TokenNewline, TokenEOF, TokenCBrace:
360 body, bodyDiags = p.ParseBody(TokenCBrace)
361 default:
362 // Special one-line, single-attribute block parsing mode.
363 body, bodyDiags = p.parseSingleAttrBody(TokenCBrace)
364 switch p.Peek().Type {
365 case TokenCBrace:
366 p.Read() // the happy path - just consume the closing brace
367 case TokenComma:
368 // User seems to be trying to use the object-constructor
369 // comma-separated style, which isn't permitted for blocks.
370 diags = append(diags, &hcl.Diagnostic{
371 Severity: hcl.DiagError,
372 Summary: "Invalid single-argument block definition",
373 Detail: "Single-line block syntax can include only one argument definition. To define multiple arguments, use the multi-line block syntax with one argument definition per line.",
374 Subject: p.Peek().Range.Ptr(),
375 })
376 p.recover(TokenCBrace)
377 case TokenNewline:
378 // We don't allow weird mixtures of single and multi-line syntax.
379 diags = append(diags, &hcl.Diagnostic{
380 Severity: hcl.DiagError,
381 Summary: "Invalid single-argument block definition",
382 Detail: "An argument definition on the same line as its containing block creates a single-line block definition, which must also be closed on the same line. Place the block's closing brace immediately after the argument definition.",
383 Subject: p.Peek().Range.Ptr(),
384 })
385 p.recover(TokenCBrace)
386 default:
387 // Some other weird thing is going on. Since we can't guess a likely
388 // user intent for this one, we'll skip it if we're already in
389 // recovery mode.
390 if !p.recovery {
391 diags = append(diags, &hcl.Diagnostic{
392 Severity: hcl.DiagError,
393 Summary: "Invalid single-argument block definition",
394 Detail: "A single-line block definition must end with a closing brace immediately after its single argument definition.",
395 Subject: p.Peek().Range.Ptr(),
396 })
397 }
398 p.recover(TokenCBrace)
399 }
400 }
401 diags = append(diags, bodyDiags...)
402 cBraceRange := p.PrevRange()
403
404 eol := p.Peek()
405 if eol.Type == TokenNewline || eol.Type == TokenEOF {
406 p.Read() // eat newline
407 } else {
408 if !p.recovery {
409 diags = append(diags, &hcl.Diagnostic{
410 Severity: hcl.DiagError,
411 Summary: "Missing newline after block definition",
412 Detail: "A block definition must end with a newline.",
413 Subject: &eol.Range,
414 Context: hcl.RangeBetween(ident.Range, eol.Range).Ptr(),
415 })
416 }
417 p.recoverAfterBodyItem()
418 }
419
420 // We must never produce a nil body, since the caller may attempt to
421 // do analysis of a partial result when there's an error, so we'll
422 // insert a placeholder if we otherwise failed to produce a valid
423 // body due to one of the syntax error paths above.
424 if body == nil && diags.HasErrors() {
425 body = &Body{
426 SrcRange: hcl.RangeBetween(oBrace.Range, cBraceRange),
427 EndRange: cBraceRange,
428 }
429 }
430
431 return &Block{
432 Type: blockType,
433 Labels: labels,
434 Body: body,
435
436 TypeRange: ident.Range,
437 LabelRanges: labelRanges,
438 OpenBraceRange: oBrace.Range,
439 CloseBraceRange: cBraceRange,
440 }, diags
441}
442
443func (p *parser) ParseExpression() (Expression, hcl.Diagnostics) {
444 return p.parseTernaryConditional()
445}
446
447func (p *parser) parseTernaryConditional() (Expression, hcl.Diagnostics) {
448 // The ternary conditional operator (.. ? .. : ..) behaves somewhat
449 // like a binary operator except that the "symbol" is itself
450 // an expression enclosed in two punctuation characters.
451 // The middle expression is parsed as if the ? and : symbols
452 // were parentheses. The "rhs" (the "false expression") is then
453 // treated right-associatively so it behaves similarly to the
454 // middle in terms of precedence.
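// For example, a ? b : c ? d : e groups as a ? b : (c ? d : e), since the
// false expression is parsed with a recursive call to ParseExpression.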
455
456 startRange := p.NextRange()
457 var condExpr, trueExpr, falseExpr Expression
458 var diags hcl.Diagnostics
459
460 condExpr, condDiags := p.parseBinaryOps(binaryOps)
461 diags = append(diags, condDiags...)
462 if p.recovery && condDiags.HasErrors() {
463 return condExpr, diags
464 }
465
466 questionMark := p.Peek()
467 if questionMark.Type != TokenQuestion {
468 return condExpr, diags
469 }
470
471 p.Read() // eat question mark
472
473 trueExpr, trueDiags := p.ParseExpression()
474 diags = append(diags, trueDiags...)
475 if p.recovery && trueDiags.HasErrors() {
476 return condExpr, diags
477 }
478
479 colon := p.Peek()
480 if colon.Type != TokenColon {
481 diags = append(diags, &hcl.Diagnostic{
482 Severity: hcl.DiagError,
483 Summary: "Missing false expression in conditional",
484 Detail: "The conditional operator (...?...:...) requires a false expression, delimited by a colon.",
485 Subject: &colon.Range,
486 Context: hcl.RangeBetween(startRange, colon.Range).Ptr(),
487 })
488 return condExpr, diags
489 }
490
491 p.Read() // eat colon
492
493 falseExpr, falseDiags := p.ParseExpression()
494 diags = append(diags, falseDiags...)
495 if p.recovery && falseDiags.HasErrors() {
496 return condExpr, diags
497 }
498
499 return &ConditionalExpr{
500 Condition: condExpr,
501 TrueResult: trueExpr,
502 FalseResult: falseExpr,
503
504 SrcRange: hcl.RangeBetween(startRange, falseExpr.Range()),
505 }, diags
506}
507
508// parseBinaryOps calls itself recursively to work through all of the
509// operator precedence groups, and then eventually calls parseExpressionTerm
510// for each operand.
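// For example, assuming the ops slice lists the loosest precedence level
// first, parsing "a + b * c" recurses into the tighter levels for each
// operand, so the result groups as a + (b * c).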
511func (p *parser) parseBinaryOps(ops []map[TokenType]*Operation) (Expression, hcl.Diagnostics) {
512 if len(ops) == 0 {
513 // We've run out of operators, so now we'll just try to parse a term.
514 return p.parseExpressionWithTraversals()
515 }
516
517 thisLevel := ops[0]
518 remaining := ops[1:]
519
520 var lhs, rhs Expression
521 var operation *Operation
522 var diags hcl.Diagnostics
523
524 // Parse a term that might be the first operand of a binary
525 // operation or it might just be a standalone term.
526 // We won't know until we've parsed it and can look ahead
527 // to see if there's an operator token for this level.
528 lhs, lhsDiags := p.parseBinaryOps(remaining)
529 diags = append(diags, lhsDiags...)
530 if p.recovery && lhsDiags.HasErrors() {
531 return lhs, diags
532 }
533
534 // We'll keep eating up operators until we run out, so that operators
535 // with the same precedence will combine in a left-associative manner:
536 // a+b+c => (a+b)+c, not a+(b+c)
537 //
538 // Should we later want to have right-associative operators, a way
539 // to achieve that would be to call back up to ParseExpression here
540 // instead of iteratively parsing only the remaining operators.
541 for {
542 next := p.Peek()
543 var newOp *Operation
544 var ok bool
545 if newOp, ok = thisLevel[next.Type]; !ok {
546 break
547 }
548
549 // Are we extending an expression started on the previous iteration?
550 if operation != nil {
551 lhs = &BinaryOpExpr{
552 LHS: lhs,
553 Op: operation,
554 RHS: rhs,
555
556 SrcRange: hcl.RangeBetween(lhs.Range(), rhs.Range()),
557 }
558 }
559
560 operation = newOp
561 p.Read() // eat operator token
562 var rhsDiags hcl.Diagnostics
563 rhs, rhsDiags = p.parseBinaryOps(remaining)
564 diags = append(diags, rhsDiags...)
565 if p.recovery && rhsDiags.HasErrors() {
566 return lhs, diags
567 }
568 }
569
570 if operation == nil {
571 return lhs, diags
572 }
573
574 return &BinaryOpExpr{
575 LHS: lhs,
576 Op: operation,
577 RHS: rhs,
578
579 SrcRange: hcl.RangeBetween(lhs.Range(), rhs.Range()),
580 }, diags
581}
582
583func (p *parser) parseExpressionWithTraversals() (Expression, hcl.Diagnostics) {
584 term, diags := p.parseExpressionTerm()
585 ret, moreDiags := p.parseExpressionTraversals(term)
586 diags = append(diags, moreDiags...)
587 return ret, diags
588}
589
590func (p *parser) parseExpressionTraversals(from Expression) (Expression, hcl.Diagnostics) {
591 var diags hcl.Diagnostics
592 ret := from
593
594Traversal:
595 for {
596 next := p.Peek()
597
598 switch next.Type {
599 case TokenDot:
600 // Attribute access or splat
601 dot := p.Read()
602 attrTok := p.Peek()
603
604 switch attrTok.Type {
605 case TokenIdent:
606 attrTok = p.Read() // eat token
607 name := string(attrTok.Bytes)
608 rng := hcl.RangeBetween(dot.Range, attrTok.Range)
609 step := hcl.TraverseAttr{
610 Name: name,
611 SrcRange: rng,
612 }
613
614 ret = makeRelativeTraversal(ret, step, rng)
615
616 case TokenNumberLit:
617 // This is a weird form we inherited from HIL, allowing numbers
618 // to be used as attributes as a weird way of writing [n].
619 // This was never actually a first-class thing in HIL, but
620 // HIL tolerated sequences like .0. in its variable names and
621 // calling applications like Terraform exploited that to
622 // introduce indexing syntax where none existed.
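// For example, foo.0 is interpreted the same way as foo[0].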
623 numTok := p.Read() // eat token
624 attrTok = numTok
625
626 // This syntax is ambiguous if multiple indices are used in
627 // succession, like foo.0.1.baz: that actually parses as
628 // a fractional number 0.1. Since we're only supporting this
629 // syntax for compatibility with legacy Terraform
630 // configurations, and Terraform does not tend to have lists
631 // of lists, we'll choose to reject that here with a helpful
632 // error message, rather than failing later because the index
633 // isn't a whole number.
634 if dotIdx := bytes.IndexByte(numTok.Bytes, '.'); dotIdx >= 0 {
635 first := numTok.Bytes[:dotIdx]
636 second := numTok.Bytes[dotIdx+1:]
637 diags = append(diags, &hcl.Diagnostic{
638 Severity: hcl.DiagError,
639 Summary: "Invalid legacy index syntax",
640 Detail: fmt.Sprintf("When using the legacy index syntax, chaining two indexes together is not permitted. Use the proper index syntax instead, like [%s][%s].", first, second),
641 Subject: &attrTok.Range,
642 })
643 rng := hcl.RangeBetween(dot.Range, numTok.Range)
644 step := hcl.TraverseIndex{
645 Key: cty.DynamicVal,
646 SrcRange: rng,
647 }
648 ret = makeRelativeTraversal(ret, step, rng)
649 break
650 }
651
652 numVal, numDiags := p.numberLitValue(numTok)
653 diags = append(diags, numDiags...)
654
655 rng := hcl.RangeBetween(dot.Range, numTok.Range)
656 step := hcl.TraverseIndex{
657 Key: numVal,
658 SrcRange: rng,
659 }
660
661 ret = makeRelativeTraversal(ret, step, rng)
662
663 case TokenStar:
664 // "Attribute-only" splat expression.
665 // (This is a kinda weird construct inherited from HIL, which
666 // behaves a bit like a [*] splat except that it is only able
667 // to do attribute traversals into each of its elements,
668 // whereas foo[*] can support _any_ traversal.)
669 marker := p.Read() // eat star
670 trav := make(hcl.Traversal, 0, 1)
671 var firstRange, lastRange hcl.Range
672 firstRange = p.NextRange()
673 for p.Peek().Type == TokenDot {
674 dot := p.Read()
675
676 if p.Peek().Type == TokenNumberLit {
677 // Continuing the "weird stuff inherited from HIL"
678 // theme, we also allow numbers as attribute names
679 // inside splats and interpret them as indexing
680 // into a list, for expressions like:
681 // foo.bar.*.baz.0.foo
682 numTok := p.Read()
683
684 // Weird special case if the user writes something
685 // like foo.bar.*.baz.0.0.foo, where 0.0 parses
686 // as a number.
687 if dotIdx := bytes.IndexByte(numTok.Bytes, '.'); dotIdx >= 0 {
688 first := numTok.Bytes[:dotIdx]
689 second := numTok.Bytes[dotIdx+1:]
690 diags = append(diags, &hcl.Diagnostic{
691 Severity: hcl.DiagError,
692 Summary: "Invalid legacy index syntax",
693 Detail: fmt.Sprintf("When using the legacy index syntax, chaining two indexes together is not permitted. Use the proper index syntax with a full splat expression [*] instead, like [%s][%s].", first, second),
694 Subject: &attrTok.Range,
695 })
696 trav = append(trav, hcl.TraverseIndex{
697 Key: cty.DynamicVal,
698 SrcRange: hcl.RangeBetween(dot.Range, numTok.Range),
699 })
700 lastRange = numTok.Range
701 continue
702 }
703
704 numVal, numDiags := p.numberLitValue(numTok)
705 diags = append(diags, numDiags...)
706 trav = append(trav, hcl.TraverseIndex{
707 Key: numVal,
708 SrcRange: hcl.RangeBetween(dot.Range, numTok.Range),
709 })
710 lastRange = numTok.Range
711 continue
712 }
713
714 if p.Peek().Type != TokenIdent {
715 if !p.recovery {
716 if p.Peek().Type == TokenStar {
717 diags = append(diags, &hcl.Diagnostic{
718 Severity: hcl.DiagError,
719 Summary: "Nested splat expression not allowed",
720 Detail: "A splat expression (*) cannot be used inside another attribute-only splat expression.",
721 Subject: p.Peek().Range.Ptr(),
722 })
723 } else {
724 diags = append(diags, &hcl.Diagnostic{
725 Severity: hcl.DiagError,
726 Summary: "Invalid attribute name",
727 Detail: "An attribute name is required after a dot.",
728 Subject: &attrTok.Range,
729 })
730 }
731 }
732 p.setRecovery()
733 continue Traversal
734 }
735
736 attrTok := p.Read()
737 trav = append(trav, hcl.TraverseAttr{
738 Name: string(attrTok.Bytes),
739 SrcRange: hcl.RangeBetween(dot.Range, attrTok.Range),
740 })
741 lastRange = attrTok.Range
742 }
743
744 itemExpr := &AnonSymbolExpr{
745 SrcRange: hcl.RangeBetween(dot.Range, marker.Range),
746 }
747 var travExpr Expression
748 if len(trav) == 0 {
749 travExpr = itemExpr
750 } else {
751 travExpr = &RelativeTraversalExpr{
752 Source: itemExpr,
753 Traversal: trav,
754 SrcRange: hcl.RangeBetween(firstRange, lastRange),
755 }
756 }
757
758 ret = &SplatExpr{
759 Source: ret,
760 Each: travExpr,
761 Item: itemExpr,
762
763 SrcRange: hcl.RangeBetween(dot.Range, lastRange),
764 MarkerRange: hcl.RangeBetween(dot.Range, marker.Range),
765 }
766
767 default:
768 diags = append(diags, &hcl.Diagnostic{
769 Severity: hcl.DiagError,
770 Summary: "Invalid attribute name",
771 Detail: "An attribute name is required after a dot.",
772 Subject: &attrTok.Range,
773 })
774 // This leaves the peeker in a bad place, so following items
775 // will probably be misparsed until we hit something that
776 // allows us to re-sync.
777 //
778 // We will probably need to do something better here eventually
779 // in order to support autocomplete triggered by typing a
780 // period.
781 p.setRecovery()
782 }
783
784 case TokenOBrack:
785 // Indexing of a collection.
786 // This may or may not be a hcl.Traverser, depending on whether
787 // the key value is something constant.
788
789 open := p.Read()
790 switch p.Peek().Type {
791 case TokenStar:
792 // This is a full splat expression, like foo[*], which consumes
793 // the rest of the traversal steps after it using a recursive
794 // call to this function.
795 p.Read() // consume star
796 close := p.Read()
797 if close.Type != TokenCBrack && !p.recovery {
798 diags = append(diags, &hcl.Diagnostic{
799 Severity: hcl.DiagError,
800 Summary: "Missing close bracket on splat index",
801 Detail: "The star for a full splat operator must be immediately followed by a closing bracket (\"]\").",
802 Subject: &close.Range,
803 })
804 close = p.recover(TokenCBrack)
805 }
806 // Splat expressions use a special "anonymous symbol" as a
807 // placeholder in an expression to be evaluated once for each
808 // item in the source expression.
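// For example, in foo[*].bar the anonymous symbol stands in for each
// element of foo, and the .bar traversal parsed below is applied to it.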
809 itemExpr := &AnonSymbolExpr{
810 SrcRange: hcl.RangeBetween(open.Range, close.Range),
811 }
812 // Now we'll recursively call this same function to eat any
813 // remaining traversal steps against the anonymous symbol.
814 travExpr, nestedDiags := p.parseExpressionTraversals(itemExpr)
815 diags = append(diags, nestedDiags...)
816
817 ret = &SplatExpr{
818 Source: ret,
819 Each: travExpr,
820 Item: itemExpr,
821
822 SrcRange: hcl.RangeBetween(open.Range, travExpr.Range()),
823 MarkerRange: hcl.RangeBetween(open.Range, close.Range),
824 }
825
826 default:
827
828 var close Token
829 p.PushIncludeNewlines(false) // arbitrary newlines allowed in brackets
830 keyExpr, keyDiags := p.ParseExpression()
831 diags = append(diags, keyDiags...)
832 if p.recovery && keyDiags.HasErrors() {
833 close = p.recover(TokenCBrack)
834 } else {
835 close = p.Read()
836 if close.Type != TokenCBrack && !p.recovery {
837 diags = append(diags, &hcl.Diagnostic{
838 Severity: hcl.DiagError,
839 Summary: "Missing close bracket on index",
840 Detail: "The index operator must end with a closing bracket (\"]\").",
841 Subject: &close.Range,
842 })
843 close = p.recover(TokenCBrack)
844 }
845 }
846 p.PopIncludeNewlines()
847
848 if lit, isLit := keyExpr.(*LiteralValueExpr); isLit {
849 litKey, _ := lit.Value(nil)
850 rng := hcl.RangeBetween(open.Range, close.Range)
851 step := hcl.TraverseIndex{
852 Key: litKey,
853 SrcRange: rng,
854 }
855 ret = makeRelativeTraversal(ret, step, rng)
856 } else {
857 rng := hcl.RangeBetween(open.Range, close.Range)
858 ret = &IndexExpr{
859 Collection: ret,
860 Key: keyExpr,
861
862 SrcRange: rng,
863 OpenRange: open.Range,
864 }
865 }
866 }
867
868 default:
869 break Traversal
870 }
871 }
872
873 return ret, diags
874}
875
876// makeRelativeTraversal takes an expression and a traverser and returns
877// a traversal expression that combines the two. If the given expression
878// is already a traversal, it is extended in place (mutating it) and
879// returned. If it isn't, a new RelativeTraversalExpr is created and returned.
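// For example, while parsing a.b.c the ".c" step is appended to the
// ScopeTraversalExpr already built for a.b, rather than wrapping it in a
// new RelativeTraversalExpr.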
880func makeRelativeTraversal(expr Expression, next hcl.Traverser, rng hcl.Range) Expression {
881 switch texpr := expr.(type) {
882 case *ScopeTraversalExpr:
883 texpr.Traversal = append(texpr.Traversal, next)
884 texpr.SrcRange = hcl.RangeBetween(texpr.SrcRange, rng)
885 return texpr
886 case *RelativeTraversalExpr:
887 texpr.Traversal = append(texpr.Traversal, next)
888 texpr.SrcRange = hcl.RangeBetween(texpr.SrcRange, rng)
889 return texpr
890 default:
891 return &RelativeTraversalExpr{
892 Source: expr,
893 Traversal: hcl.Traversal{next},
894 SrcRange: rng,
895 }
896 }
897}
898
899func (p *parser) parseExpressionTerm() (Expression, hcl.Diagnostics) {
900 start := p.Peek()
901
902 switch start.Type {
903 case TokenOParen:
904 p.Read() // eat open paren
905
906 p.PushIncludeNewlines(false)
907
908 expr, diags := p.ParseExpression()
909 if diags.HasErrors() {
910 // attempt to place the peeker after our closing paren
911 // before we return, so that the next parser has some
912 // chance of finding a valid expression.
913 p.recover(TokenCParen)
914 p.PopIncludeNewlines()
915 return expr, diags
916 }
917
918 close := p.Peek()
919 if close.Type != TokenCParen {
920 diags = append(diags, &hcl.Diagnostic{
921 Severity: hcl.DiagError,
922 Summary: "Unbalanced parentheses",
923 Detail: "Expected a closing parenthesis to terminate the expression.",
924 Subject: &close.Range,
925 Context: hcl.RangeBetween(start.Range, close.Range).Ptr(),
926 })
927 p.setRecovery()
928 }
929
930 p.Read() // eat closing paren
931 p.PopIncludeNewlines()
932
933 return expr, diags
934
935 case TokenNumberLit:
936 tok := p.Read() // eat number token
937
938 numVal, diags := p.numberLitValue(tok)
939 return &LiteralValueExpr{
940 Val: numVal,
941 SrcRange: tok.Range,
942 }, diags
943
944 case TokenIdent:
945 tok := p.Read() // eat identifier token
946
947 if p.Peek().Type == TokenOParen {
948 return p.finishParsingFunctionCall(tok)
949 }
950
951 name := string(tok.Bytes)
952 switch name {
953 case "true":
954 return &LiteralValueExpr{
955 Val: cty.True,
956 SrcRange: tok.Range,
957 }, nil
958 case "false":
959 return &LiteralValueExpr{
960 Val: cty.False,
961 SrcRange: tok.Range,
962 }, nil
963 case "null":
964 return &LiteralValueExpr{
965 Val: cty.NullVal(cty.DynamicPseudoType),
966 SrcRange: tok.Range,
967 }, nil
968 default:
969 return &ScopeTraversalExpr{
970 Traversal: hcl.Traversal{
971 hcl.TraverseRoot{
972 Name: name,
973 SrcRange: tok.Range,
974 },
975 },
976 SrcRange: tok.Range,
977 }, nil
978 }
979
980 case TokenOQuote, TokenOHeredoc:
981 open := p.Read() // eat opening marker
982 closer := p.oppositeBracket(open.Type)
983 exprs, passthru, _, diags := p.parseTemplateInner(closer, tokenOpensFlushHeredoc(open))
984
985 closeRange := p.PrevRange()
986
987 if passthru {
988 if len(exprs) != 1 {
989 panic("passthru set with len(exprs) != 1")
990 }
991 return &TemplateWrapExpr{
992 Wrapped: exprs[0],
993 SrcRange: hcl.RangeBetween(open.Range, closeRange),
994 }, diags
995 }
996
997 return &TemplateExpr{
998 Parts: exprs,
999 SrcRange: hcl.RangeBetween(open.Range, closeRange),
1000 }, diags
1001
1002 case TokenMinus:
1003 tok := p.Read() // eat minus token
1004
1005 // Important to use parseExpressionWithTraversals rather than parseExpression
1006 // here, otherwise we can capture a following binary expression into
1007 // our negation.
1008 // e.g. -46+5 should parse as (-46)+5, not -(46+5)
1009 operand, diags := p.parseExpressionWithTraversals()
1010 return &UnaryOpExpr{
1011 Op: OpNegate,
1012 Val: operand,
1013
1014 SrcRange: hcl.RangeBetween(tok.Range, operand.Range()),
1015 SymbolRange: tok.Range,
1016 }, diags
1017
1018 case TokenBang:
1019 tok := p.Read() // eat bang token
1020
1021 // Important to use parseExpressionWithTraversals rather than parseExpression
1022 // here, otherwise we can capture a following binary expression into
1023 // our negation.
1024 operand, diags := p.parseExpressionWithTraversals()
1025 return &UnaryOpExpr{
1026 Op: OpLogicalNot,
1027 Val: operand,
1028
1029 SrcRange: hcl.RangeBetween(tok.Range, operand.Range()),
1030 SymbolRange: tok.Range,
1031 }, diags
1032
1033 case TokenOBrack:
1034 return p.parseTupleCons()
1035
1036 case TokenOBrace:
1037 return p.parseObjectCons()
1038
1039 default:
1040 var diags hcl.Diagnostics
1041 if !p.recovery {
1042 diags = append(diags, &hcl.Diagnostic{
1043 Severity: hcl.DiagError,
1044 Summary: "Invalid expression",
1045 Detail: "Expected the start of an expression, but found an invalid expression token.",
1046 Subject: &start.Range,
1047 })
1048 }
1049 p.setRecovery()
1050
1051 // Return a placeholder so that the AST is still structurally sound
1052 // even in the presence of parse errors.
1053 return &LiteralValueExpr{
1054 Val: cty.DynamicVal,
1055 SrcRange: start.Range,
1056 }, diags
1057 }
1058}
1059
1060func (p *parser) numberLitValue(tok Token) (cty.Value, hcl.Diagnostics) {
1061 // cty.ParseNumberVal gives the same behavior as converting a
1062 // string to a number, ensuring we always interpret decimal numbers in
1063 // the same way.
1064 numVal, err := cty.ParseNumberVal(string(tok.Bytes))
1065 if err != nil {
1066 ret := cty.UnknownVal(cty.Number)
1067 return ret, hcl.Diagnostics{
1068 {
1069 Severity: hcl.DiagError,
1070 Summary: "Invalid number literal",
1071 // FIXME: not a very good error message, but convert only
1072 // gives us "a number is required", so not much help either.
1073 Detail: "Failed to recognize the value of this number literal.",
1074 Subject: &tok.Range,
1075 },
1076 }
1077 }
1078 return numVal, nil
1079}
1080
1081// finishParsingFunctionCall parses a function call assuming that the function
1082// name was already read, and so the peeker should be pointing at the opening
1083// parenthesis after the name.
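// For example, having already read the identifier min, this parses the
// remaining "(1, 2, 3)" into a FunctionCallExpr; a trailing ellipsis, as in
// min(nums...), sets ExpandFinal on the result.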
1084func (p *parser) finishParsingFunctionCall(name Token) (Expression, hcl.Diagnostics) {
1085 openTok := p.Read()
1086 if openTok.Type != TokenOParen {
1087 // should never happen if callers behave
1088 panic("finishParsingFunctionCall called with non-parenthesis as next token")
1089 }
1090
1091 var args []Expression
1092 var diags hcl.Diagnostics
1093 var expandFinal bool
1094 var closeTok Token
1095
1096 // Arbitrary newlines are allowed inside the function call parentheses.
1097 p.PushIncludeNewlines(false)
1098
1099Token:
1100 for {
1101 tok := p.Peek()
1102
1103 if tok.Type == TokenCParen {
1104 closeTok = p.Read() // eat closing paren
1105 break Token
1106 }
1107
1108 arg, argDiags := p.ParseExpression()
1109 args = append(args, arg)
1110 diags = append(diags, argDiags...)
1111 if p.recovery && argDiags.HasErrors() {
1112 // if there was a parse error in the argument then we've
1113 // probably been left in a weird place in the token stream,
1114 // so we'll bail out with a partial argument list.
1115 p.recover(TokenCParen)
1116 break Token
1117 }
1118
1119 sep := p.Read()
1120 if sep.Type == TokenCParen {
1121 closeTok = sep
1122 break Token
1123 }
1124
1125 if sep.Type == TokenEllipsis {
1126 expandFinal = true
1127
1128 if p.Peek().Type != TokenCParen {
1129 if !p.recovery {
1130 diags = append(diags, &hcl.Diagnostic{
1131 Severity: hcl.DiagError,
1132 Summary: "Missing closing parenthesis",
1133 Detail: "An expanded function argument (with ...) must be immediately followed by closing parentheses.",
1134 Subject: &sep.Range,
1135 Context: hcl.RangeBetween(name.Range, sep.Range).Ptr(),
1136 })
1137 }
1138 closeTok = p.recover(TokenCParen)
1139 } else {
1140 closeTok = p.Read() // eat closing paren
1141 }
1142 break Token
1143 }
1144
1145 if sep.Type != TokenComma {
1146 diags = append(diags, &hcl.Diagnostic{
1147 Severity: hcl.DiagError,
1148 Summary: "Missing argument separator",
1149 Detail: "A comma is required to separate each function argument from the next.",
1150 Subject: &sep.Range,
1151 Context: hcl.RangeBetween(name.Range, sep.Range).Ptr(),
1152 })
1153 closeTok = p.recover(TokenCParen)
1154 break Token
1155 }
1156
1157 if p.Peek().Type == TokenCParen {
1158 // A trailing comma after the last argument gets us in here.
1159 closeTok = p.Read() // eat closing paren
1160 break Token
1161 }
1162
1163 }
1164
1165 p.PopIncludeNewlines()
1166
1167 return &FunctionCallExpr{
1168 Name: string(name.Bytes),
1169 Args: args,
1170
1171 ExpandFinal: expandFinal,
1172
1173 NameRange: name.Range,
1174 OpenParenRange: openTok.Range,
1175 CloseParenRange: closeTok.Range,
1176 }, diags
1177}
1178
1179func (p *parser) parseTupleCons() (Expression, hcl.Diagnostics) {
1180 open := p.Read()
1181 if open.Type != TokenOBrack {
1182 // Should never happen if callers are behaving
1183 panic("parseTupleCons called without peeker pointing to open bracket")
1184 }
1185
1186 p.PushIncludeNewlines(false)
1187 defer p.PopIncludeNewlines()
1188
1189 if forKeyword.TokenMatches(p.Peek()) {
1190 return p.finishParsingForExpr(open)
1191 }
1192
1193 var close Token
1194
1195 var diags hcl.Diagnostics
1196 var exprs []Expression
1197
1198 for {
1199 next := p.Peek()
1200 if next.Type == TokenCBrack {
1201 close = p.Read() // eat closer
1202 break
1203 }
1204
1205 expr, exprDiags := p.ParseExpression()
1206 exprs = append(exprs, expr)
1207 diags = append(diags, exprDiags...)
1208
1209 if p.recovery && exprDiags.HasErrors() {
1210 // If expression parsing failed then we are probably in a strange
1211 // place in the token stream, so we'll bail out and try to reset
1212 // to after our closing bracket to allow parsing to continue.
1213 close = p.recover(TokenCBrack)
1214 break
1215 }
1216
1217 next = p.Peek()
1218 if next.Type == TokenCBrack {
1219 close = p.Read() // eat closer
1220 break
1221 }
1222
1223 if next.Type != TokenComma {
1224 if !p.recovery {
1225 diags = append(diags, &hcl.Diagnostic{
1226 Severity: hcl.DiagError,
1227 Summary: "Missing item separator",
1228 Detail: "Expected a comma to mark the beginning of the next item.",
1229 Subject: &next.Range,
1230 Context: hcl.RangeBetween(open.Range, next.Range).Ptr(),
1231 })
1232 }
1233 close = p.recover(TokenCBrack)
1234 break
1235 }
1236
1237 p.Read() // eat comma
1238
1239 }
1240
1241 return &TupleConsExpr{
1242 Exprs: exprs,
1243
1244 SrcRange: hcl.RangeBetween(open.Range, close.Range),
1245 OpenRange: open.Range,
1246 }, diags
1247}
1248
1249func (p *parser) parseObjectCons() (Expression, hcl.Diagnostics) {
1250 open := p.Read()
1251 if open.Type != TokenOBrace {
1252 // Should never happen if callers are behaving
1253 panic("parseObjectCons called without peeker pointing to open brace")
1254 }
1255
1256 // We must temporarily stop looking at newlines here while we check for
1257 // a "for" keyword, since for expressions are _not_ newline-sensitive,
1258 // even though object constructors are.
1259 p.PushIncludeNewlines(false)
1260 isFor := forKeyword.TokenMatches(p.Peek())
1261 p.PopIncludeNewlines()
1262 if isFor {
1263 return p.finishParsingForExpr(open)
1264 }
1265
1266 p.PushIncludeNewlines(true)
1267 defer p.PopIncludeNewlines()
1268
1269 var close Token
1270
1271 var diags hcl.Diagnostics
1272 var items []ObjectConsItem
1273
1274 for {
1275 next := p.Peek()
1276 if next.Type == TokenNewline {
1277 p.Read() // eat newline
1278 continue
1279 }
1280
1281 if next.Type == TokenCBrace {
1282 close = p.Read() // eat closer
1283 break
1284 }
1285
1286 var key Expression
1287 var keyDiags hcl.Diagnostics
1288 key, keyDiags = p.ParseExpression()
1289 diags = append(diags, keyDiags...)
1290
1291 if p.recovery && keyDiags.HasErrors() {
1292 // If expression parsing failed then we are probably in a strange
1293 // place in the token stream, so we'll bail out and try to reset
1294 // to after our closing brace to allow parsing to continue.
1295 close = p.recover(TokenCBrace)
1296 break
1297 }
1298
1299 // We wrap up the key expression in a special wrapper that deals
1300 // with our special case that naked identifiers as object keys
1301 // are interpreted as literal strings.
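// For example, in {foo = "bar"} the key foo is treated as the literal
// string "foo" rather than as a reference to a variable named foo.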
1302 key = &ObjectConsKeyExpr{Wrapped: key}
1303
1304 next = p.Peek()
1305 if next.Type != TokenEqual && next.Type != TokenColon {
1306 if !p.recovery {
1307 switch next.Type {
1308 case TokenNewline, TokenComma:
1309 diags = append(diags, &hcl.Diagnostic{
1310 Severity: hcl.DiagError,
1311 Summary: "Missing attribute value",
1312 Detail: "Expected an attribute value, introduced by an equals sign (\"=\").",
1313 Subject: &next.Range,
1314 Context: hcl.RangeBetween(open.Range, next.Range).Ptr(),
1315 })
1316 case TokenIdent:
1317 // Although this might just be a plain old missing equals
1318 // sign before a reference, one way to get here is to try
1319 // to write an attribute name containing a period followed
1320 // by a digit, which was valid in HCL1, like this:
1321 // foo1.2_bar = "baz"
1322 // We can't know exactly what the user intended here, but
1323 // we'll augment our message with an extra hint in this case
1324 // in case it is helpful.
1325 diags = append(diags, &hcl.Diagnostic{
1326 Severity: hcl.DiagError,
1327 Summary: "Missing key/value separator",
1328 Detail: "Expected an equals sign (\"=\") to mark the beginning of the attribute value. If you intended to given an attribute name containing periods or spaces, write the name in quotes to create a string literal.",
1329 Subject: &next.Range,
1330 Context: hcl.RangeBetween(open.Range, next.Range).Ptr(),
1331 })
1332 default:
1333 diags = append(diags, &hcl.Diagnostic{
1334 Severity: hcl.DiagError,
1335 Summary: "Missing key/value separator",
1336 Detail: "Expected an equals sign (\"=\") to mark the beginning of the attribute value.",
1337 Subject: &next.Range,
1338 Context: hcl.RangeBetween(open.Range, next.Range).Ptr(),
1339 })
1340 }
1341 }
1342 close = p.recover(TokenCBrace)
1343 break
1344 }
1345
1346 p.Read() // eat equals sign or colon
1347
1348 value, valueDiags := p.ParseExpression()
1349 diags = append(diags, valueDiags...)
1350
1351 if p.recovery && valueDiags.HasErrors() {
1352 // If expression parsing failed then we are probably in a strange
1353 // place in the token stream, so we'll bail out and try to reset
1354 // to after our closing brace to allow parsing to continue.
1355 close = p.recover(TokenCBrace)
1356 break
1357 }
1358
1359 items = append(items, ObjectConsItem{
1360 KeyExpr: key,
1361 ValueExpr: value,
1362 })
1363
1364 next = p.Peek()
1365 if next.Type == TokenCBrace {
1366 close = p.Read() // eat closer
1367 break
1368 }
1369
1370 if next.Type != TokenComma && next.Type != TokenNewline {
1371 if !p.recovery {
1372 diags = append(diags, &hcl.Diagnostic{
1373 Severity: hcl.DiagError,
1374 Summary: "Missing attribute separator",
1375 Detail: "Expected a newline or comma to mark the beginning of the next attribute.",
1376 Subject: &next.Range,
1377 Context: hcl.RangeBetween(open.Range, next.Range).Ptr(),
1378 })
1379 }
1380 close = p.recover(TokenCBrace)
1381 break
1382 }
1383
1384 p.Read() // eat comma or newline
1385
1386 }
1387
1388 return &ObjectConsExpr{
1389 Items: items,
1390
1391 SrcRange: hcl.RangeBetween(open.Range, close.Range),
1392 OpenRange: open.Range,
1393 }, diags
1394}
1395
1396func (p *parser) finishParsingForExpr(open Token) (Expression, hcl.Diagnostics) {
1397 p.PushIncludeNewlines(false)
1398 defer p.PopIncludeNewlines()
1399 introducer := p.Read()
1400 if !forKeyword.TokenMatches(introducer) {
1401 // Should never happen if callers are behaving
1402 panic("finishParsingForExpr called without peeker pointing to 'for' identifier")
1403 }
1404
1405 var makeObj bool
1406 var closeType TokenType
1407 switch open.Type {
1408 case TokenOBrace:
1409 makeObj = true
1410 closeType = TokenCBrace
1411 case TokenOBrack:
1412 makeObj = false // making a tuple
1413 closeType = TokenCBrack
1414 default:
1415 // Should never happen if callers are behaving
1416 panic("finishParsingForExpr called with invalid open token")
1417 }
1418
1419 var diags hcl.Diagnostics
1420 var keyName, valName string
1421
1422 if p.Peek().Type != TokenIdent {
1423 if !p.recovery {
1424 diags = append(diags, &hcl.Diagnostic{
1425 Severity: hcl.DiagError,
1426 Summary: "Invalid 'for' expression",
1427 Detail: "For expression requires variable name after 'for'.",
1428 Subject: p.Peek().Range.Ptr(),
1429 Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(),
1430 })
1431 }
1432 close := p.recover(closeType)
1433 return &LiteralValueExpr{
1434 Val: cty.DynamicVal,
1435 SrcRange: hcl.RangeBetween(open.Range, close.Range),
1436 }, diags
1437 }
1438
1439 valName = string(p.Read().Bytes)
1440
1441 if p.Peek().Type == TokenComma {
1442 // What we just read was actually the key, then.
1443 keyName = valName
1444 p.Read() // eat comma
1445
1446 if p.Peek().Type != TokenIdent {
1447 if !p.recovery {
1448 diags = append(diags, &hcl.Diagnostic{
1449 Severity: hcl.DiagError,
1450 Summary: "Invalid 'for' expression",
1451 Detail: "For expression requires value variable name after comma.",
1452 Subject: p.Peek().Range.Ptr(),
1453 Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(),
1454 })
1455 }
1456 close := p.recover(closeType)
1457 return &LiteralValueExpr{
1458 Val: cty.DynamicVal,
1459 SrcRange: hcl.RangeBetween(open.Range, close.Range),
1460 }, diags
1461 }
1462
1463 valName = string(p.Read().Bytes)
1464 }
1465
1466 if !inKeyword.TokenMatches(p.Peek()) {
1467 if !p.recovery {
1468 diags = append(diags, &hcl.Diagnostic{
1469 Severity: hcl.DiagError,
1470 Summary: "Invalid 'for' expression",
1471 Detail: "For expression requires the 'in' keyword after its name declarations.",
1472 Subject: p.Peek().Range.Ptr(),
1473 Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(),
1474 })
1475 }
1476 close := p.recover(closeType)
1477 return &LiteralValueExpr{
1478 Val: cty.DynamicVal,
1479 SrcRange: hcl.RangeBetween(open.Range, close.Range),
1480 }, diags
1481 }
1482 p.Read() // eat 'in' keyword
1483
1484 collExpr, collDiags := p.ParseExpression()
1485 diags = append(diags, collDiags...)
1486 if p.recovery && collDiags.HasErrors() {
1487 close := p.recover(closeType)
1488 return &LiteralValueExpr{
1489 Val: cty.DynamicVal,
1490 SrcRange: hcl.RangeBetween(open.Range, close.Range),
1491 }, diags
1492 }
1493
1494 if p.Peek().Type != TokenColon {
1495 if !p.recovery {
1496 diags = append(diags, &hcl.Diagnostic{
1497 Severity: hcl.DiagError,
1498 Summary: "Invalid 'for' expression",
1499 Detail: "For expression requires a colon after the collection expression.",
1500 Subject: p.Peek().Range.Ptr(),
1501 Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(),
1502 })
1503 }
1504 close := p.recover(closeType)
1505 return &LiteralValueExpr{
1506 Val: cty.DynamicVal,
1507 SrcRange: hcl.RangeBetween(open.Range, close.Range),
1508 }, diags
1509 }
1510 p.Read() // eat colon
1511
1512 var keyExpr, valExpr Expression
1513 var keyDiags, valDiags hcl.Diagnostics
1514 valExpr, valDiags = p.ParseExpression()
1515 if p.Peek().Type == TokenFatArrow {
1516 // What we just parsed was actually keyExpr
1517 p.Read() // eat the fat arrow
1518 keyExpr, keyDiags = valExpr, valDiags
1519
1520 valExpr, valDiags = p.ParseExpression()
1521 }
1522 diags = append(diags, keyDiags...)
1523 diags = append(diags, valDiags...)
1524 if p.recovery && (keyDiags.HasErrors() || valDiags.HasErrors()) {
1525 close := p.recover(closeType)
1526 return &LiteralValueExpr{
1527 Val: cty.DynamicVal,
1528 SrcRange: hcl.RangeBetween(open.Range, close.Range),
1529 }, diags
1530 }
1531
1532 group := false
1533 var ellipsis Token
1534 if p.Peek().Type == TokenEllipsis {
1535 ellipsis = p.Read()
1536 group = true
1537 }
1538
1539 var condExpr Expression
1540 var condDiags hcl.Diagnostics
1541 if ifKeyword.TokenMatches(p.Peek()) {
1542 p.Read() // eat "if"
1543 condExpr, condDiags = p.ParseExpression()
1544 diags = append(diags, condDiags...)
1545 if p.recovery && condDiags.HasErrors() {
1546 close := p.recover(p.oppositeBracket(open.Type))
1547 return &LiteralValueExpr{
1548 Val: cty.DynamicVal,
1549 SrcRange: hcl.RangeBetween(open.Range, close.Range),
1550 }, diags
1551 }
1552 }
1553
1554 var close Token
1555 if p.Peek().Type == closeType {
1556 close = p.Read()
1557 } else {
1558 if !p.recovery {
1559 diags = append(diags, &hcl.Diagnostic{
1560 Severity: hcl.DiagError,
1561 Summary: "Invalid 'for' expression",
1562 Detail: "Extra characters after the end of the 'for' expression.",
1563 Subject: p.Peek().Range.Ptr(),
1564 Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(),
1565 })
1566 }
1567 close = p.recover(closeType)
1568 }
1569
1570 if !makeObj {
1571 if keyExpr != nil {
1572 diags = append(diags, &hcl.Diagnostic{
1573 Severity: hcl.DiagError,
1574 Summary: "Invalid 'for' expression",
1575 Detail: "Key expression is not valid when building a tuple.",
1576 Subject: keyExpr.Range().Ptr(),
1577 Context: hcl.RangeBetween(open.Range, close.Range).Ptr(),
1578 })
1579 }
1580
1581 if group {
1582 diags = append(diags, &hcl.Diagnostic{
1583 Severity: hcl.DiagError,
1584 Summary: "Invalid 'for' expression",
1585 Detail: "Grouping ellipsis (...) cannot be used when building a tuple.",
1586 Subject: &ellipsis.Range,
1587 Context: hcl.RangeBetween(open.Range, close.Range).Ptr(),
1588 })
1589 }
1590 } else {
1591 if keyExpr == nil {
1592 diags = append(diags, &hcl.Diagnostic{
1593 Severity: hcl.DiagError,
1594 Summary: "Invalid 'for' expression",
1595 Detail: "Key expression is required when building an object.",
1596 Subject: valExpr.Range().Ptr(),
1597 Context: hcl.RangeBetween(open.Range, close.Range).Ptr(),
1598 })
1599 }
1600 }
1601
1602 return &ForExpr{
1603 KeyVar: keyName,
1604 ValVar: valName,
1605 CollExpr: collExpr,
1606 KeyExpr: keyExpr,
1607 ValExpr: valExpr,
1608 CondExpr: condExpr,
1609 Group: group,
1610
1611 SrcRange: hcl.RangeBetween(open.Range, close.Range),
1612 OpenRange: open.Range,
1613 CloseRange: close.Range,
1614 }, diags
1615}
1616
1617// parseQuotedStringLiteral is a helper for parsing quoted strings that
1618// aren't allowed to contain any interpolations, such as block labels.
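// For example, the block label "example" is accepted here, while a label
// containing a template sequence such as "${var.name}" is rejected with an
// error diagnostic below.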
1619func (p *parser) parseQuotedStringLiteral() (string, hcl.Range, hcl.Diagnostics) {
1620 oQuote := p.Read()
1621 if oQuote.Type != TokenOQuote {
1622 return "", oQuote.Range, hcl.Diagnostics{
1623 {
1624 Severity: hcl.DiagError,
1625 Summary: "Invalid string literal",
1626 Detail: "A quoted string is required here.",
1627 Subject: &oQuote.Range,
1628 },
1629 }
1630 }
1631
1632 var diags hcl.Diagnostics
1633 ret := &bytes.Buffer{}
1634 var cQuote Token
1635
1636Token:
1637 for {
1638 tok := p.Read()
1639 switch tok.Type {
1640
1641 case TokenCQuote:
1642 cQuote = tok
1643 break Token
1644
1645 case TokenQuotedLit:
1646 s, sDiags := p.decodeStringLit(tok)
1647 diags = append(diags, sDiags...)
1648 ret.WriteString(s)
1649
1650 case TokenTemplateControl, TokenTemplateInterp:
1651 which := "$"
1652 if tok.Type == TokenTemplateControl {
107c1cdb 1653 which = "%"
15c0b25d
AP
1654 }
1655
1656 diags = append(diags, &hcl.Diagnostic{
1657 Severity: hcl.DiagError,
1658 Summary: "Invalid string literal",
1659 Detail: fmt.Sprintf(
1660 "Template sequences are not allowed in this string. To include a literal %q, double it (as \"%s%s\") to escape it.",
1661 which, which, which,
1662 ),
1663 Subject: &tok.Range,
1664 Context: hcl.RangeBetween(oQuote.Range, tok.Range).Ptr(),
1665 })
1666
1667 // Now that we're returning an error callers won't attempt to use
1668 // the result for any real operations, but they might try to use
1669 // the partial AST for other analyses, so we'll leave a marker
1670 // to indicate that there was something invalid in the string to
1671 // help avoid misinterpretation of the partial result
1672 ret.WriteString(which)
1673 ret.WriteString("{ ... }")
1674
1675 p.recover(TokenTemplateSeqEnd) // we'll try to keep parsing after the sequence ends
1676
1677 case TokenEOF:
1678 diags = append(diags, &hcl.Diagnostic{
1679 Severity: hcl.DiagError,
1680 Summary: "Unterminated string literal",
1681 Detail: "Unable to find the closing quote mark before the end of the file.",
1682 Subject: &tok.Range,
1683 Context: hcl.RangeBetween(oQuote.Range, tok.Range).Ptr(),
1684 })
1685 break Token
1686
1687 default:
1688 // Should never happen, as long as the scanner is behaving itself
1689 diags = append(diags, &hcl.Diagnostic{
1690 Severity: hcl.DiagError,
1691 Summary: "Invalid string literal",
1692 Detail: "This item is not valid in a string literal.",
1693 Subject: &tok.Range,
1694 Context: hcl.RangeBetween(oQuote.Range, tok.Range).Ptr(),
1695 })
1696 p.recover(TokenCQuote)
1697 break Token
1698
1699 }
1700
1701 }
1702
1703 return ret.String(), hcl.RangeBetween(oQuote.Range, cQuote.Range), diags
1704}
1705
1706// decodeStringLit processes the given token, which must be either a
1707// TokenQuotedLit or a TokenStringLit, returning the string resulting from
1708// resolving any escape sequences.
1709//
1710// If any error diagnostics are returned, the returned string may be incomplete
1711// or otherwise invalid.
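// For example, the escape sequence \n in a quoted literal decodes to a real
// newline, and the sequences "$${" and "%%{" decode to "${" and "%{"
// respectively so they are not treated as template introducers.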
1712func (p *parser) decodeStringLit(tok Token) (string, hcl.Diagnostics) {
1713 var quoted bool
1714 switch tok.Type {
1715 case TokenQuotedLit:
1716 quoted = true
1717 case TokenStringLit:
1718 quoted = false
1719 default:
1720 panic("decodeQuotedLit can only be used with TokenStringLit and TokenQuotedLit tokens")
1721 }
1722 var diags hcl.Diagnostics
1723
1724 ret := make([]byte, 0, len(tok.Bytes))
1725 slices := scanStringLit(tok.Bytes, quoted)
1726
1727 // We will mutate rng constantly as we walk through our token slices below.
1728 // Any diagnostics must take a copy of this rng rather than simply pointing
1729 // to it, e.g. by using rng.Ptr() rather than &rng.
1730 rng := tok.Range
1731 rng.End = rng.Start
1732
1733Slices:
1734 for _, slice := range slices {
1735 if len(slice) == 0 {
1736 continue
1737 }
1738
1739 // Advance the start of our range to where the previous token ended
1740 rng.Start = rng.End
1741
1742 // Advance the end of our range to after our token.
1743 b := slice
1744 for len(b) > 0 {
1745 adv, ch, _ := textseg.ScanGraphemeClusters(b, true)
1746 rng.End.Byte += adv
1747 switch ch[0] {
1748 case '\r', '\n':
1749 rng.End.Line++
1750 rng.End.Column = 1
1751 default:
1752 rng.End.Column++
1753 }
1754 b = b[adv:]
1755 }
1756
1757 TokenType:
1758 switch slice[0] {
1759 case '\\':
1760 if !quoted {
1761 // If we're not in quoted mode then just treat this token as
1762 // normal. (Slices can still start with backslash even if we're
1763 // not specifically looking for backslash sequences.)
1764 break TokenType
1765 }
1766 if len(slice) < 2 {
1767 diags = append(diags, &hcl.Diagnostic{
1768 Severity: hcl.DiagError,
1769 Summary: "Invalid escape sequence",
1770 Detail: "Backslash must be followed by an escape sequence selector character.",
1771 Subject: rng.Ptr(),
1772 })
1773 break TokenType
1774 }
1775
1776 switch slice[1] {
1777
1778 case 'n':
1779 ret = append(ret, '\n')
1780 continue Slices
1781 case 'r':
1782 ret = append(ret, '\r')
1783 continue Slices
1784 case 't':
1785 ret = append(ret, '\t')
1786 continue Slices
1787 case '"':
1788 ret = append(ret, '"')
1789 continue Slices
1790 case '\\':
1791 ret = append(ret, '\\')
1792 continue Slices
1793 case 'u', 'U':
1794 if slice[1] == 'u' && len(slice) != 6 {
1795 diags = append(diags, &hcl.Diagnostic{
1796 Severity: hcl.DiagError,
1797 Summary: "Invalid escape sequence",
1798 Detail: "The \\u escape sequence must be followed by four hexadecimal digits.",
1799 Subject: rng.Ptr(),
1800 })
1801 break TokenType
1802 } else if slice[1] == 'U' && len(slice) != 10 {
1803 diags = append(diags, &hcl.Diagnostic{
1804 Severity: hcl.DiagError,
1805 Summary: "Invalid escape sequence",
1806 Detail: "The \\U escape sequence must be followed by eight hexadecimal digits.",
1807 Subject: rng.Ptr(),
1808 })
1809 break TokenType
1810 }
1811
1812 numHex := string(slice[2:])
1813 num, err := strconv.ParseUint(numHex, 16, 32)
1814 if err != nil {
1815 // Should never happen because the scanner won't match
1816 // a sequence of digits that isn't valid.
1817 panic(err)
1818 }
1819
1820 r := rune(num)
1821 l := utf8.RuneLen(r)
1822 if l == -1 {
1823 diags = append(diags, &hcl.Diagnostic{
1824 Severity: hcl.DiagError,
1825 Summary: "Invalid escape sequence",
1826 Detail: fmt.Sprintf("Cannot encode character U+%04x in UTF-8.", num),
1827 Subject: rng.Ptr(),
1828 })
1829 break TokenType
1830 }
1831 for i := 0; i < l; i++ {
1832 ret = append(ret, 0)
1833 }
1834 rb := ret[len(ret)-l:]
1835 utf8.EncodeRune(rb, r)
1836
1837 continue Slices
1838
1839 default:
1840 diags = append(diags, &hcl.Diagnostic{
1841 Severity: hcl.DiagError,
1842 Summary: "Invalid escape sequence",
1843 Detail: fmt.Sprintf("The symbol %q is not a valid escape sequence selector.", slice[1:]),
1844 Subject: rng.Ptr(),
1845 })
1846 ret = append(ret, slice[1:]...)
1847 continue Slices
1848 }
1849
1850 case '$', '%':
1851 if len(slice) != 3 {
1852 // Not long enough to be our escape sequence, so it's literal.
1853 break TokenType
1854 }
1855
1856 if slice[1] == slice[0] && slice[2] == '{' {
1857 ret = append(ret, slice[0])
1858 ret = append(ret, '{')
1859 continue Slices
1860 }
1861
1862 break TokenType
1863 }
1864
1865 // If we fall out here or break out of here from the switch above
1866 // then this slice is just a literal.
1867 ret = append(ret, slice...)
1868 }
1869
1870 return string(ret), diags
1871}
1872
1873// setRecovery turns on recovery mode without actually doing any recovery.
1874// This can be used when a parser knowingly leaves the peeker in a useless
1875// place and wants to suppress errors that might result from that decision.
1876func (p *parser) setRecovery() {
1877 p.recovery = true
1878}
1879
1880// recover seeks forward in the token stream until it finds TokenType "end",
1881// then returns with the peeker pointed at the following token.
1882//
1883// If the given token type is a bracketer, this function will additionally
1884// count nested instances of the brackets to try to leave the peeker at
1885// the end of the _current_ instance of that bracketer, skipping over any
1886// nested instances. This is a best-effort operation and may have
1887// unpredictable results on input with bad bracketer nesting.
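// For example, recover(TokenCBrack) called while the peeker is inside
// "[a, [b, c], d" skips over the nested "[b, c]" pair and stops just after
// the bracket that closes the outer sequence, or at EOF if it is unclosed.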
1888func (p *parser) recover(end TokenType) Token {
1889 start := p.oppositeBracket(end)
1890 p.recovery = true
1891
1892 nest := 0
1893 for {
1894 tok := p.Read()
1895 ty := tok.Type
1896 if end == TokenTemplateSeqEnd && ty == TokenTemplateControl {
1897 // normalize so that our matching behavior can work, since
1898 // TokenTemplateControl/TokenTemplateInterp are asymmetrical
1899 // with TokenTemplateSeqEnd and thus we need to count both
1900 // openers if that's the closer we're looking for.
1901 ty = TokenTemplateInterp
1902 }
1903
1904 switch ty {
1905 case start:
1906 nest++
1907 case end:
1908 if nest < 1 {
1909 return tok
1910 }
1911
1912 nest--
1913 case TokenEOF:
1914 return tok
1915 }
1916 }
1917}
1918
1919// recoverOver seeks forward in the token stream until it finds a block
1920// starting with TokenType "start", then finds the corresponding end token,
1921// leaving the peeker pointed at the token after that end token.
1922//
1923// The given token type _must_ be a bracketer. For example, if the given
1924// start token is TokenOBrace then the parser will be left at the _end_ of
1925// the next brace-delimited block encountered, or at EOF if no such block
1926// is found or it is unclosed.
1927func (p *parser) recoverOver(start TokenType) {
1928 end := p.oppositeBracket(start)
1929
1930 // find the opening bracket first
1931Token:
1932 for {
1933 tok := p.Read()
1934 switch tok.Type {
1935 case start, TokenEOF:
1936 break Token
1937 }
1938 }
1939
1940 // Now use our existing recover function to locate the _end_ of the
1941 // container we've found.
1942 p.recover(end)
1943}
1944
1945func (p *parser) recoverAfterBodyItem() {
1946 p.recovery = true
1947 var open []TokenType
1948
1949Token:
1950 for {
1951 tok := p.Read()
1952
1953 switch tok.Type {
1954
1955 case TokenNewline:
1956 if len(open) == 0 {
1957 break Token
1958 }
1959
1960 case TokenEOF:
1961 break Token
1962
1963 case TokenOBrace, TokenOBrack, TokenOParen, TokenOQuote, TokenOHeredoc, TokenTemplateInterp, TokenTemplateControl:
1964 open = append(open, tok.Type)
1965
1966 case TokenCBrace, TokenCBrack, TokenCParen, TokenCQuote, TokenCHeredoc:
1967 opener := p.oppositeBracket(tok.Type)
1968 for len(open) > 0 && open[len(open)-1] != opener {
1969 open = open[:len(open)-1]
1970 }
1971 if len(open) > 0 {
1972 open = open[:len(open)-1]
1973 }
1974
1975 case TokenTemplateSeqEnd:
1976 for len(open) > 0 && open[len(open)-1] != TokenTemplateInterp && open[len(open)-1] != TokenTemplateControl {
1977 open = open[:len(open)-1]
1978 }
1979 if len(open) > 0 {
1980 open = open[:len(open)-1]
1981 }
1982
1983 }
1984 }
1985}
1986
1987// oppositeBracket finds the bracket that opposes the given bracketer, or
1988// NilToken if the given token isn't a bracketer.
1989//
1990// "Bracketer", for the sake of this function, is one end of a matching
1991// open/close set of tokens that establish a bracketing context.
1992func (p *parser) oppositeBracket(ty TokenType) TokenType {
1993 switch ty {
1994
1995 case TokenOBrace:
1996 return TokenCBrace
1997 case TokenOBrack:
1998 return TokenCBrack
1999 case TokenOParen:
2000 return TokenCParen
2001 case TokenOQuote:
2002 return TokenCQuote
2003 case TokenOHeredoc:
2004 return TokenCHeredoc
2005
2006 case TokenCBrace:
2007 return TokenOBrace
2008 case TokenCBrack:
2009 return TokenOBrack
2010 case TokenCParen:
2011 return TokenOParen
2012 case TokenCQuote:
2013 return TokenOQuote
2014 case TokenCHeredoc:
2015 return TokenOHeredoc
2016
2017 case TokenTemplateControl:
2018 return TokenTemplateSeqEnd
2019 case TokenTemplateInterp:
2020 return TokenTemplateSeqEnd
2021 case TokenTemplateSeqEnd:
2022 // This is ambiguous, but we return Interp here because that's
2023 // what's assumed by the "recover" method.
2024 return TokenTemplateInterp
2025
2026 default:
2027 return TokenNil
2028 }
2029}
2030
2031func errPlaceholderExpr(rng hcl.Range) Expression {
2032 return &LiteralValueExpr{
2033 Val: cty.DynamicVal,
2034 SrcRange: rng,
2035 }
2036}