package hclsyntax

import (
	"fmt"
	"strings"
	"unicode"

	"github.com/apparentlymart/go-textseg/textseg"
	"github.com/hashicorp/hcl2/hcl"
	"github.com/zclconf/go-cty/cty"
)
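
// ParseTemplate parses the parser's token stream as a standalone template
// body, expecting it to be terminated by an end-of-file token.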
func (p *parser) ParseTemplate() (Expression, hcl.Diagnostics) {
	return p.parseTemplate(TokenEOF, false)
}
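
// parseTemplate parses template parts until the given end token type. If the
// template consists of exactly one interpolation sequence and nothing else,
// the result is a TemplateWrapExpr around that single expression; otherwise
// the parts are returned as a TemplateExpr.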
func (p *parser) parseTemplate(end TokenType, flushHeredoc bool) (Expression, hcl.Diagnostics) {
	exprs, passthru, rng, diags := p.parseTemplateInner(end, flushHeredoc)

	if passthru {
		if len(exprs) != 1 {
			panic("passthru set with len(exprs) != 1")
		}
		return &TemplateWrapExpr{
			Wrapped:  exprs[0],
			SrcRange: rng,
		}, diags
	}

	return &TemplateExpr{
		Parts:    exprs,
		SrcRange: rng,
	}, diags
}
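
// parseTemplateInner raises the raw token stream to template tokens, applies
// the flush heredoc trimming rule when requested, and then assembles the
// tokens into expressions. The boolean result reports whether the template is
// a single interpolation sequence, which callers may treat as "passthrough".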
func (p *parser) parseTemplateInner(end TokenType, flushHeredoc bool) ([]Expression, bool, hcl.Range, hcl.Diagnostics) {
	parts, diags := p.parseTemplateParts(end)
	if flushHeredoc {
		flushHeredocTemplateParts(parts) // Trim off leading spaces on lines per the flush heredoc spec
	}
	tp := templateParser{
		Tokens:   parts.Tokens,
		SrcRange: parts.SrcRange,
	}
	exprs, exprsDiags := tp.parseRoot()
	diags = append(diags, exprsDiags...)

	passthru := false
	if len(parts.Tokens) == 2 { // one real token and one synthetic "end" token
		if _, isInterp := parts.Tokens[0].(*templateInterpToken); isInterp {
			passthru = true
		}
	}

	return exprs, passthru, parts.SrcRange, diags
}
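
// templateParser walks a flat sequence of template tokens, as produced by
// parseTemplateParts, and builds the corresponding expression tree.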
type templateParser struct {
	Tokens   []templateToken
	SrcRange hcl.Range

	pos int
}
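
// parseRoot parses tokens until the synthetic end token, producing one
// expression per template part.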
func (p *templateParser) parseRoot() ([]Expression, hcl.Diagnostics) {
	var exprs []Expression
	var diags hcl.Diagnostics

	for {
		next := p.Peek()
		if _, isEnd := next.(*templateEndToken); isEnd {
			break
		}

		expr, exprDiags := p.parseExpr()
		diags = append(diags, exprDiags...)
		exprs = append(exprs, expr)
	}

	return exprs, diags
}
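
// parseExpr converts the next template token into an expression, delegating
// to parseIf and parseFor when it encounters a control directive opener.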
func (p *templateParser) parseExpr() (Expression, hcl.Diagnostics) {
	next := p.Peek()
	switch tok := next.(type) {

	case *templateLiteralToken:
		p.Read() // eat literal
		return &LiteralValueExpr{
			Val:      cty.StringVal(tok.Val),
			SrcRange: tok.SrcRange,
		}, nil

	case *templateInterpToken:
		p.Read() // eat interp
		return tok.Expr, nil

	case *templateIfToken:
		return p.parseIf()

	case *templateForToken:
		return p.parseFor()

	case *templateEndToken:
		p.Read() // eat erroneous token
		return errPlaceholderExpr(tok.SrcRange), hcl.Diagnostics{
			{
				// This is a particularly unhelpful diagnostic, so callers
				// should attempt to pre-empt it and produce a more helpful
				// diagnostic that is context-aware.
				Severity: hcl.DiagError,
				Summary:  "Unexpected end of template",
				Detail:   "The control directives within this template are unbalanced.",
				Subject:  &tok.SrcRange,
			},
		}

	case *templateEndCtrlToken:
		p.Read() // eat erroneous token
		return errPlaceholderExpr(tok.SrcRange), hcl.Diagnostics{
			{
				Severity: hcl.DiagError,
				Summary:  fmt.Sprintf("Unexpected %s directive", tok.Name()),
				Detail:   "The control directives within this template are unbalanced.",
				Subject:  &tok.SrcRange,
			},
		}

	default:
		// should never happen, because above should be exhaustive
		panic(fmt.Sprintf("unhandled template token type %T", next))
	}
}
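
// parseIf handles an "if" directive and its optional "else" clause, consuming
// tokens until the matching "endif". The result is a ConditionalExpr whose
// true and false branches are single-part templates, with empty clauses
// defaulting to the empty string. For example, the template
// `%{ if cond }yes%{ else }no%{ endif }` becomes, roughly,
// a ConditionalExpr over cond with "yes" and "no" templates as its results.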
func (p *templateParser) parseIf() (Expression, hcl.Diagnostics) {
	open := p.Read()
	openIf, isIf := open.(*templateIfToken)
	if !isIf {
		// should never happen if caller is behaving
		panic("parseIf called with peeker not pointing at if token")
	}

	var ifExprs, elseExprs []Expression
	var diags hcl.Diagnostics
	var endifRange hcl.Range

	currentExprs := &ifExprs
Token:
	for {
		next := p.Peek()
		if end, isEnd := next.(*templateEndToken); isEnd {
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Unexpected end of template",
				Detail: fmt.Sprintf(
					"The if directive at %s is missing its corresponding endif directive.",
					openIf.SrcRange,
				),
				Subject: &end.SrcRange,
			})
			return errPlaceholderExpr(end.SrcRange), diags
		}
		if end, isCtrlEnd := next.(*templateEndCtrlToken); isCtrlEnd {
			p.Read() // eat end directive

			switch end.Type {

			case templateElse:
				if currentExprs == &ifExprs {
					currentExprs = &elseExprs
					continue Token
				}

				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Unexpected else directive",
					Detail: fmt.Sprintf(
						"Already in the else clause for the if started at %s.",
						openIf.SrcRange,
					),
					Subject: &end.SrcRange,
				})

			case templateEndIf:
				endifRange = end.SrcRange
				break Token

			default:
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  fmt.Sprintf("Unexpected %s directive", end.Name()),
					Detail: fmt.Sprintf(
						"Expecting an endif directive for the if started at %s.",
						openIf.SrcRange,
					),
					Subject: &end.SrcRange,
				})
			}

			return errPlaceholderExpr(end.SrcRange), diags
		}

		expr, exprDiags := p.parseExpr()
		diags = append(diags, exprDiags...)
		*currentExprs = append(*currentExprs, expr)
	}

	if len(ifExprs) == 0 {
		ifExprs = append(ifExprs, &LiteralValueExpr{
			Val: cty.StringVal(""),
			SrcRange: hcl.Range{
				Filename: openIf.SrcRange.Filename,
				Start:    openIf.SrcRange.End,
				End:      openIf.SrcRange.End,
			},
		})
	}
	if len(elseExprs) == 0 {
		elseExprs = append(elseExprs, &LiteralValueExpr{
			Val: cty.StringVal(""),
			SrcRange: hcl.Range{
				Filename: endifRange.Filename,
				Start:    endifRange.Start,
				End:      endifRange.Start,
			},
		})
	}

	trueExpr := &TemplateExpr{
		Parts:    ifExprs,
		SrcRange: hcl.RangeBetween(ifExprs[0].Range(), ifExprs[len(ifExprs)-1].Range()),
	}
	falseExpr := &TemplateExpr{
		Parts:    elseExprs,
		SrcRange: hcl.RangeBetween(elseExprs[0].Range(), elseExprs[len(elseExprs)-1].Range()),
	}

	return &ConditionalExpr{
		Condition:   openIf.CondExpr,
		TrueResult:  trueExpr,
		FalseResult: falseExpr,

		SrcRange: hcl.RangeBetween(openIf.SrcRange, endifRange),
	}, diags
}
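
// parseFor handles a "for" directive, consuming tokens until the matching
// "endfor". The directive body becomes the value expression of a ForExpr,
// and the whole thing is wrapped in a TemplateJoinExpr so the per-iteration
// results are concatenated into a single string.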
func (p *templateParser) parseFor() (Expression, hcl.Diagnostics) {
	open := p.Read()
	openFor, isFor := open.(*templateForToken)
	if !isFor {
		// should never happen if caller is behaving
		panic("parseFor called with peeker not pointing at for token")
	}

	var contentExprs []Expression
	var diags hcl.Diagnostics
	var endforRange hcl.Range

Token:
	for {
		next := p.Peek()
		if end, isEnd := next.(*templateEndToken); isEnd {
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Unexpected end of template",
				Detail: fmt.Sprintf(
					"The for directive at %s is missing its corresponding endfor directive.",
					openFor.SrcRange,
				),
				Subject: &end.SrcRange,
			})
			return errPlaceholderExpr(end.SrcRange), diags
		}
		if end, isCtrlEnd := next.(*templateEndCtrlToken); isCtrlEnd {
			p.Read() // eat end directive

			switch end.Type {

			case templateElse:
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Unexpected else directive",
					Detail:   "An else clause is not expected for a for directive.",
					Subject:  &end.SrcRange,
				})

			case templateEndFor:
				endforRange = end.SrcRange
				break Token

			default:
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  fmt.Sprintf("Unexpected %s directive", end.Name()),
					Detail: fmt.Sprintf(
						"Expecting an endfor directive corresponding to the for directive at %s.",
						openFor.SrcRange,
					),
					Subject: &end.SrcRange,
				})
			}

			return errPlaceholderExpr(end.SrcRange), diags
		}

		expr, exprDiags := p.parseExpr()
		diags = append(diags, exprDiags...)
		contentExprs = append(contentExprs, expr)
	}

	if len(contentExprs) == 0 {
		contentExprs = append(contentExprs, &LiteralValueExpr{
			Val: cty.StringVal(""),
			SrcRange: hcl.Range{
				Filename: openFor.SrcRange.Filename,
				Start:    openFor.SrcRange.End,
				End:      openFor.SrcRange.End,
			},
		})
	}

	contentExpr := &TemplateExpr{
		Parts:    contentExprs,
		SrcRange: hcl.RangeBetween(contentExprs[0].Range(), contentExprs[len(contentExprs)-1].Range()),
	}

	forExpr := &ForExpr{
		KeyVar: openFor.KeyVar,
		ValVar: openFor.ValVar,

		CollExpr: openFor.CollExpr,
		ValExpr:  contentExpr,

		SrcRange:   hcl.RangeBetween(openFor.SrcRange, endforRange),
		OpenRange:  openFor.SrcRange,
		CloseRange: endforRange,
	}

	return &TemplateJoinExpr{
		Tuple: forExpr,
	}, diags
}
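
// Peek returns the template token at the current position without consuming it.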
func (p *templateParser) Peek() templateToken {
	return p.Tokens[p.pos]
}
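
// Read returns the token at the current position and advances past it. The
// end token is never consumed, so repeated reads at the end of the sequence
// keep returning it.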
func (p *templateParser) Read() templateToken {
	ret := p.Peek()
	if _, end := ret.(*templateEndToken); !end {
		p.pos++
	}
	return ret
}

// parseTemplateParts produces a flat sequence of "template tokens", which are
// either literal values (with any "trimming" already applied), interpolation
// sequences, or control flow markers.
//
// A further pass is required on the result to turn it into an AST.
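//
// Trimming is driven by the "~" modifier: an opener such as ${~ or %{~ strips
// trailing whitespace from the preceding literal, and a ~} closer causes
// leading whitespace to be stripped from the following literal.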
func (p *parser) parseTemplateParts(end TokenType) (*templateParts, hcl.Diagnostics) {
	var parts []templateToken
	var diags hcl.Diagnostics

	startRange := p.NextRange()
	ltrimNext := false
	nextCanTrimPrev := false
	var endRange hcl.Range

Token:
	for {
		next := p.Read()
		if next.Type == end {
			// all done!
			endRange = next.Range
			break
		}

		ltrim := ltrimNext
		ltrimNext = false
		canTrimPrev := nextCanTrimPrev
		nextCanTrimPrev = false

		switch next.Type {
		case TokenStringLit, TokenQuotedLit:
			str, strDiags := p.decodeStringLit(next)
			diags = append(diags, strDiags...)

			if ltrim {
				str = strings.TrimLeftFunc(str, unicode.IsSpace)
			}

			parts = append(parts, &templateLiteralToken{
				Val:      str,
				SrcRange: next.Range,
			})
			nextCanTrimPrev = true

		case TokenTemplateInterp:
			// if the opener is ${~ then we want to eat any trailing whitespace
			// in the preceding literal token, assuming it is indeed a literal
			// token.
			if canTrimPrev && len(next.Bytes) == 3 && next.Bytes[2] == '~' && len(parts) > 0 {
				prevExpr := parts[len(parts)-1]
				if lexpr, ok := prevExpr.(*templateLiteralToken); ok {
					lexpr.Val = strings.TrimRightFunc(lexpr.Val, unicode.IsSpace)
				}
			}

			p.PushIncludeNewlines(false)
			expr, exprDiags := p.ParseExpression()
			diags = append(diags, exprDiags...)
			close := p.Peek()
			if close.Type != TokenTemplateSeqEnd {
				if !p.recovery {
					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  "Extra characters after interpolation expression",
						Detail:   "Expected a closing brace to end the interpolation expression, but found extra characters.",
						Subject:  &close.Range,
						Context:  hcl.RangeBetween(startRange, close.Range).Ptr(),
					})
				}
				p.recover(TokenTemplateSeqEnd)
			} else {
				p.Read() // eat closing brace

				// If the closer is ~} then we want to eat any leading
				// whitespace on the next token, if it turns out to be a
				// literal token.
				if len(close.Bytes) == 2 && close.Bytes[0] == '~' {
					ltrimNext = true
				}
			}
			p.PopIncludeNewlines()
			parts = append(parts, &templateInterpToken{
				Expr:     expr,
				SrcRange: hcl.RangeBetween(next.Range, close.Range),
			})

		case TokenTemplateControl:
			// if the opener is %{~ then we want to eat any trailing whitespace
			// in the preceding literal token, assuming it is indeed a literal
			// token.
			if canTrimPrev && len(next.Bytes) == 3 && next.Bytes[2] == '~' && len(parts) > 0 {
				prevExpr := parts[len(parts)-1]
				if lexpr, ok := prevExpr.(*templateLiteralToken); ok {
					lexpr.Val = strings.TrimRightFunc(lexpr.Val, unicode.IsSpace)
				}
			}
			p.PushIncludeNewlines(false)

			kw := p.Peek()
			if kw.Type != TokenIdent {
				if !p.recovery {
					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  "Invalid template directive",
						Detail:   "A template directive keyword (\"if\", \"for\", etc) is expected at the beginning of a %{ sequence.",
						Subject:  &kw.Range,
						Context:  hcl.RangeBetween(next.Range, kw.Range).Ptr(),
					})
				}
				p.recover(TokenTemplateSeqEnd)
				p.PopIncludeNewlines()
				continue Token
			}
			p.Read() // eat keyword token

			switch {

			case ifKeyword.TokenMatches(kw):
				condExpr, exprDiags := p.ParseExpression()
				diags = append(diags, exprDiags...)
				parts = append(parts, &templateIfToken{
					CondExpr: condExpr,
					SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
				})

			case elseKeyword.TokenMatches(kw):
				parts = append(parts, &templateEndCtrlToken{
					Type:     templateElse,
					SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
				})

			case endifKeyword.TokenMatches(kw):
				parts = append(parts, &templateEndCtrlToken{
					Type:     templateEndIf,
					SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
				})

			case forKeyword.TokenMatches(kw):
				var keyName, valName string
				if p.Peek().Type != TokenIdent {
					if !p.recovery {
						diags = append(diags, &hcl.Diagnostic{
							Severity: hcl.DiagError,
							Summary:  "Invalid 'for' directive",
							Detail:   "For directive requires variable name after 'for'.",
							Subject:  p.Peek().Range.Ptr(),
						})
					}
					p.recover(TokenTemplateSeqEnd)
					p.PopIncludeNewlines()
					continue Token
				}

				valName = string(p.Read().Bytes)

				if p.Peek().Type == TokenComma {
					// What we just read was actually the key, then.
					keyName = valName
					p.Read() // eat comma

					if p.Peek().Type != TokenIdent {
						if !p.recovery {
							diags = append(diags, &hcl.Diagnostic{
								Severity: hcl.DiagError,
								Summary:  "Invalid 'for' directive",
								Detail:   "For directive requires value variable name after comma.",
								Subject:  p.Peek().Range.Ptr(),
							})
						}
						p.recover(TokenTemplateSeqEnd)
						p.PopIncludeNewlines()
						continue Token
					}

					valName = string(p.Read().Bytes)
				}

				if !inKeyword.TokenMatches(p.Peek()) {
					if !p.recovery {
						diags = append(diags, &hcl.Diagnostic{
							Severity: hcl.DiagError,
							Summary:  "Invalid 'for' directive",
							Detail:   "For directive requires 'in' keyword after names.",
							Subject:  p.Peek().Range.Ptr(),
						})
					}
					p.recover(TokenTemplateSeqEnd)
					p.PopIncludeNewlines()
					continue Token
				}
				p.Read() // eat 'in' keyword

				collExpr, collDiags := p.ParseExpression()
				diags = append(diags, collDiags...)
				parts = append(parts, &templateForToken{
					KeyVar:   keyName,
					ValVar:   valName,
					CollExpr: collExpr,

					SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
				})

			case endforKeyword.TokenMatches(kw):
				parts = append(parts, &templateEndCtrlToken{
					Type:     templateEndFor,
					SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
				})

			default:
				if !p.recovery {
					suggestions := []string{"if", "for", "else", "endif", "endfor"}
					given := string(kw.Bytes)
					suggestion := nameSuggestion(given, suggestions)
					if suggestion != "" {
						suggestion = fmt.Sprintf(" Did you mean %q?", suggestion)
					}

					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  "Invalid template control keyword",
						Detail:   fmt.Sprintf("%q is not a valid template control keyword.%s", given, suggestion),
						Subject:  &kw.Range,
						Context:  hcl.RangeBetween(next.Range, kw.Range).Ptr(),
					})
				}
				p.recover(TokenTemplateSeqEnd)
				p.PopIncludeNewlines()
				continue Token

			}

			close := p.Peek()
			if close.Type != TokenTemplateSeqEnd {
				if !p.recovery {
					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  fmt.Sprintf("Extra characters in %s marker", kw.Bytes),
						Detail:   "Expected a closing brace to end the sequence, but found extra characters.",
						Subject:  &close.Range,
						Context:  hcl.RangeBetween(startRange, close.Range).Ptr(),
					})
				}
				p.recover(TokenTemplateSeqEnd)
			} else {
				p.Read() // eat closing brace

				// If the closer is ~} then we want to eat any leading
				// whitespace on the next token, if it turns out to be a
				// literal token.
				if len(close.Bytes) == 2 && close.Bytes[0] == '~' {
					ltrimNext = true
				}
			}
			p.PopIncludeNewlines()

		default:
			if !p.recovery {
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Unterminated template string",
					Detail:   "No closing marker was found for the string.",
					Subject:  &next.Range,
					Context:  hcl.RangeBetween(startRange, next.Range).Ptr(),
				})
			}
			final := p.recover(end)
			endRange = final.Range
			break Token
		}
	}

	if len(parts) == 0 {
		// If a sequence has no content, we'll treat it as if it had an
		// empty string in it because that's what the user probably means
		// if they write "" in configuration.
		parts = append(parts, &templateLiteralToken{
			Val: "",
			SrcRange: hcl.Range{
				// Range is the zero-character span immediately after the
				// opening quote.
				Filename: startRange.Filename,
				Start:    startRange.End,
				End:      startRange.End,
			},
		})
	}

	// Always end with an end token, so the parser can produce diagnostics
	// about unclosed items with proper position information.
	parts = append(parts, &templateEndToken{
		SrcRange: endRange,
	})

	ret := &templateParts{
		Tokens:   parts,
		SrcRange: hcl.RangeBetween(startRange, endRange),
	}

	return ret, diags
}

// flushHeredocTemplateParts modifies in-place the line-leading literal strings
// to apply the flush heredoc processing rule: find the line with the smallest
// number of whitespace characters as prefix and then trim that number of
// characters from all of the lines.
//
// This rule is applied to static tokens rather than to the rendered result,
// so interpolating a string with leading whitespace cannot affect the chosen
// prefix length.
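//
// For example, a heredoc body whose lines begin with two and four spaces
// respectively ("  foo\n    bar\n") has a smallest prefix of two spaces, so
// trimming produces "foo\n  bar\n".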
func flushHeredocTemplateParts(parts *templateParts) {
	if len(parts.Tokens) == 0 {
		// Nothing to do
		return
	}

	const maxInt = int((^uint(0)) >> 1)

	minSpaces := maxInt
	newline := true
	var adjust []*templateLiteralToken
	for _, ttok := range parts.Tokens {
		if newline {
			newline = false
			var spaces int
			if lit, ok := ttok.(*templateLiteralToken); ok {
				orig := lit.Val
				trimmed := strings.TrimLeftFunc(orig, unicode.IsSpace)
				// If a token is entirely spaces and ends with a newline
				// then it's a "blank line" and thus not considered for
				// space-prefix-counting purposes.
				if len(trimmed) == 0 && strings.HasSuffix(orig, "\n") {
					spaces = maxInt
				} else {
					spaceBytes := len(lit.Val) - len(trimmed)
					spaces, _ = textseg.TokenCount([]byte(orig[:spaceBytes]), textseg.ScanGraphemeClusters)
					adjust = append(adjust, lit)
				}
			} else if _, ok := ttok.(*templateEndToken); ok {
				break // don't process the end token since it never has spaces before it
			}
			if spaces < minSpaces {
				minSpaces = spaces
			}
		}
		if lit, ok := ttok.(*templateLiteralToken); ok {
			if strings.HasSuffix(lit.Val, "\n") {
				newline = true // The following token, if any, begins a new line
			}
		}
	}

	for _, lit := range adjust {
		// Since we want to count space _characters_ rather than space _bytes_,
		// we can't just do a straightforward slice operation here and instead
		// need to hunt for the split point with a scanner.
		valBytes := []byte(lit.Val)
		spaceByteCount := 0
		for i := 0; i < minSpaces; i++ {
			adv, _, _ := textseg.ScanGraphemeClusters(valBytes, true)
			spaceByteCount += adv
			valBytes = valBytes[adv:]
		}
		lit.Val = lit.Val[spaceByteCount:]
		lit.SrcRange.Start.Column += minSpaces
		lit.SrcRange.Start.Byte += spaceByteCount
	}
}
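
// templateParts is the flat token sequence produced by parseTemplateParts,
// along with the source range of the template as a whole.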
type templateParts struct {
	Tokens   []templateToken
	SrcRange hcl.Range
}

// templateToken is a higher-level token that represents a single atom within
// the template language. Our template parsing first raises the raw token
// stream to a sequence of templateToken, and then transforms the result into
// an expression tree.
type templateToken interface {
	templateToken() templateToken
}

type templateLiteralToken struct {
	Val      string
	SrcRange hcl.Range
	isTemplateToken
}

type templateInterpToken struct {
	Expr     Expression
	SrcRange hcl.Range
	isTemplateToken
}

type templateIfToken struct {
	CondExpr Expression
	SrcRange hcl.Range
	isTemplateToken
}

type templateForToken struct {
	KeyVar   string // empty if ignoring key
	ValVar   string
	CollExpr Expression
	SrcRange hcl.Range
	isTemplateToken
}

type templateEndCtrlType int

const (
	templateEndIf templateEndCtrlType = iota
	templateElse
	templateEndFor
)

type templateEndCtrlToken struct {
	Type     templateEndCtrlType
	SrcRange hcl.Range
	isTemplateToken
}

func (t *templateEndCtrlToken) Name() string {
	switch t.Type {
	case templateEndIf:
		return "endif"
	case templateElse:
		return "else"
	case templateEndFor:
		return "endfor"
	default:
		// should never happen
		panic("invalid templateEndCtrlType")
	}
}

type templateEndToken struct {
	SrcRange hcl.Range
	isTemplateToken
}
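
// isTemplateToken is embedded in each concrete token type as a zero-size
// marker that satisfies the templateToken interface.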
type isTemplateToken [0]int

func (t isTemplateToken) templateToken() templateToken {
	return t
}