]> git.immae.eu Git - github/fretlink/terraform-provider-statuscake.git/blob - vendor/github.com/aws/aws-sdk-go/internal/ini/ini_parser.go
Upgrade to 0.12
[github/fretlink/terraform-provider-statuscake.git] / vendor / github.com / aws / aws-sdk-go / internal / ini / ini_parser.go
1 package ini
2
3 import (
4 "fmt"
5 "io"
6 )
7
// State enums for the parse table. Each constant names a parser state
// that parseTable transitions into, annotated with the grammar
// production it corresponds to.
const (
	// InvalidState is the zero value; reaching it is a parse error.
	InvalidState = iota
	// stmt -> value stmt'
	StatementState
	// stmt' -> MarkComplete | op stmt
	StatementPrimeState
	// value -> number | string | boolean | quoted_string
	ValueState
	// section -> [ section'
	OpenScopeState
	// section' -> value section_close
	SectionState
	// section_close -> ]
	CloseScopeState
	// SkipState will skip (NL WS)+
	SkipState
	// SkipTokenState will skip any token and push the previous
	// state onto the stack.
	SkipTokenState
	// comment -> # comment' | ; comment'
	// comment' -> MarkComplete | value
	CommentState
	// MarkComplete state will complete statements and move that
	// to the completed AST list
	MarkCompleteState
	// TerminalState signifies that the tokens have been fully parsed
	TerminalState
)
37
38 // parseTable is a state machine to dictate the grammar above.
39 var parseTable = map[ASTKind]map[TokenType]int{
40 ASTKindStart: map[TokenType]int{
41 TokenLit: StatementState,
42 TokenSep: OpenScopeState,
43 TokenWS: SkipTokenState,
44 TokenNL: SkipTokenState,
45 TokenComment: CommentState,
46 TokenNone: TerminalState,
47 },
48 ASTKindCommentStatement: map[TokenType]int{
49 TokenLit: StatementState,
50 TokenSep: OpenScopeState,
51 TokenWS: SkipTokenState,
52 TokenNL: SkipTokenState,
53 TokenComment: CommentState,
54 TokenNone: MarkCompleteState,
55 },
56 ASTKindExpr: map[TokenType]int{
57 TokenOp: StatementPrimeState,
58 TokenLit: ValueState,
59 TokenSep: OpenScopeState,
60 TokenWS: ValueState,
61 TokenNL: SkipState,
62 TokenComment: CommentState,
63 TokenNone: MarkCompleteState,
64 },
65 ASTKindEqualExpr: map[TokenType]int{
66 TokenLit: ValueState,
67 TokenWS: SkipTokenState,
68 TokenNL: SkipState,
69 },
70 ASTKindStatement: map[TokenType]int{
71 TokenLit: SectionState,
72 TokenSep: CloseScopeState,
73 TokenWS: SkipTokenState,
74 TokenNL: SkipTokenState,
75 TokenComment: CommentState,
76 TokenNone: MarkCompleteState,
77 },
78 ASTKindExprStatement: map[TokenType]int{
79 TokenLit: ValueState,
80 TokenSep: OpenScopeState,
81 TokenOp: ValueState,
82 TokenWS: ValueState,
83 TokenNL: MarkCompleteState,
84 TokenComment: CommentState,
85 TokenNone: TerminalState,
86 TokenComma: SkipState,
87 },
88 ASTKindSectionStatement: map[TokenType]int{
89 TokenLit: SectionState,
90 TokenOp: SectionState,
91 TokenSep: CloseScopeState,
92 TokenWS: SectionState,
93 TokenNL: SkipTokenState,
94 },
95 ASTKindCompletedSectionStatement: map[TokenType]int{
96 TokenWS: SkipTokenState,
97 TokenNL: SkipTokenState,
98 TokenLit: StatementState,
99 TokenSep: OpenScopeState,
100 TokenComment: CommentState,
101 TokenNone: MarkCompleteState,
102 },
103 ASTKindSkipStatement: map[TokenType]int{
104 TokenLit: StatementState,
105 TokenSep: OpenScopeState,
106 TokenWS: SkipTokenState,
107 TokenNL: SkipTokenState,
108 TokenComment: CommentState,
109 TokenNone: TerminalState,
110 },
111 }
112
113 // ParseAST will parse input from an io.Reader using
114 // an LL(1) parser.
115 func ParseAST(r io.Reader) ([]AST, error) {
116 lexer := iniLexer{}
117 tokens, err := lexer.Tokenize(r)
118 if err != nil {
119 return []AST{}, err
120 }
121
122 return parse(tokens)
123 }
124
125 // ParseASTBytes will parse input from a byte slice using
126 // an LL(1) parser.
127 func ParseASTBytes(b []byte) ([]AST, error) {
128 lexer := iniLexer{}
129 tokens, err := lexer.tokenize(b)
130 if err != nil {
131 return []AST{}, err
132 }
133
134 return parse(tokens)
135 }
136
// parse runs a table-driven LL(1) parse over tokens, reducing completed
// statements into the AST list returned by the parse stack. Each loop
// iteration pops the top stack node, peeks at the next token (or
// emptyToken once input is exhausted), and transitions via parseTable.
// An unparseable sequence yields a ParseError.
func parse(tokens []Token) ([]AST, error) {
	start := Start
	stack := newParseStack(3, len(tokens))

	stack.Push(start)
	// skipper suppresses tokens while in a skip state, e.g. after an
	// invalid statement, until a recovery point is reached.
	s := newSkipper()

loop:
	for stack.Len() > 0 {
		k := stack.Pop()

		var tok Token
		if len(tokens) == 0 {
			// this occurs when all the tokens have been processed
			// but reduction of what's left on the stack needs to
			// occur.
			tok = emptyToken
		} else {
			tok = tokens[0]
		}

		// Missing table entries resolve to InvalidState (zero value),
		// which is caught by the switch's default case below.
		step := parseTable[k.Kind][tok.Type()]
		if s.ShouldSkip(tok) {
			// being in a skip state with no tokens will break out of
			// the parse loop since there is nothing left to process.
			if len(tokens) == 0 {
				break loop
			}

			step = SkipTokenState
		}

		switch step {
		case TerminalState:
			// Finished parsing. Push what should be the last
			// statement to the stack. If there is anything left
			// on the stack, an error in parsing has occurred.
			if k.Kind != ASTKindStart {
				stack.MarkComplete(k)
			}
			break loop
		case SkipTokenState:
			// When skipping a token, the previous state was popped off the stack.
			// To maintain the correct state, the previous state will be pushed
			// onto the stack.
			stack.Push(k)
		case StatementState:
			// Beginning of a new statement: complete whatever came
			// before and start an expression from the current token.
			if k.Kind != ASTKindStart {
				stack.MarkComplete(k)
			}
			expr := newExpression(tok)
			stack.Push(expr)
		case StatementPrimeState:
			// stmt' -> op stmt; any non-operator token means the
			// statement was already complete.
			if tok.Type() != TokenOp {
				stack.MarkComplete(k)
				continue
			}

			if k.Kind != ASTKindExpr {
				return nil, NewParseError(
					fmt.Sprintf("invalid expression: expected Expr type, but found %T type", k),
				)
			}

			k = trimSpaces(k)
			expr := newEqualExpr(k, tok)
			stack.Push(expr)
		case ValueState:
			// ValueState requires the previous state to either be an equal expression
			// or an expression statement.
			//
			// This grammar occurs when the RHS is a number, word, or quoted string.
			// equal_expr -> lit op equal_expr'
			// equal_expr' -> number | string | quoted_string
			// quoted_string -> " quoted_string'
			// quoted_string' -> string quoted_string_end
			// quoted_string_end -> "
			//
			// otherwise
			// expr_stmt -> equal_expr (expr_stmt')*
			// expr_stmt' -> ws S | op S | MarkComplete
			// S -> equal_expr' expr_stmt'
			switch k.Kind {
			case ASTKindEqualExpr:
				// assigning a value to some key
				k.AppendChild(newExpression(tok))
				stack.Push(newExprStatement(k))
			case ASTKindExpr:
				// growing an in-progress literal: accumulate raw bytes.
				k.Root.raw = append(k.Root.raw, tok.Raw()...)
				stack.Push(k)
			case ASTKindExprStatement:
				// append the token's raw value onto the statement's
				// right-hand side (its last child).
				root := k.GetRoot()
				children := root.GetChildren()
				if len(children) == 0 {
					return nil, NewParseError(
						fmt.Sprintf("invalid expression: AST contains no children %s", k.Kind),
					)
				}

				rhs := children[len(children)-1]

				// quoted strings are kept verbatim; anything appended to
				// an unquoted RHS demotes it to a plain string value.
				if rhs.Root.ValueType != QuotedStringType {
					rhs.Root.ValueType = StringType
					rhs.Root.raw = append(rhs.Root.raw, tok.Raw()...)

				}

				children[len(children)-1] = rhs
				k.SetChildren(children)

				stack.Push(k)
			}
		case OpenScopeState:
			if !runeCompare(tok.Raw(), openBrace) {
				return nil, NewParseError("expected '['")
			}

			stmt := newStatement()
			stack.Push(stmt)
		case CloseScopeState:
			if !runeCompare(tok.Raw(), closeBrace) {
				return nil, NewParseError("expected ']'")
			}

			k = trimSpaces(k)
			stack.Push(newCompletedSectionStatement(k))
		case SectionState:
			var stmt AST

			switch k.Kind {
			case ASTKindStatement:
				// If there are multiple literals inside of a scope declaration,
				// then the current token's raw value will be appended to the Name.
				//
				// This handles cases like [ profile default ]
				//
				// k will represent a SectionStatement with the children representing
				// the label of the section
				stmt = newSectionStatement(tok)
			case ASTKindSectionStatement:
				k.Root.raw = append(k.Root.raw, tok.Raw()...)
				stmt = k
			default:
				return nil, NewParseError(
					fmt.Sprintf("invalid statement: expected statement: %v", k.Kind),
				)
			}

			stack.Push(stmt)
		case MarkCompleteState:
			if k.Kind != ASTKindStart {
				stack.MarkComplete(k)
			}

			// keep the stack primed with the start symbol so the loop
			// condition (stack.Len() > 0) continues to hold.
			if stack.Len() == 0 {
				stack.Push(start)
			}
		case SkipState:
			stack.Push(newSkipStatement(k))
			s.Skip()
		case CommentState:
			// comments complete the preceding statement (unless we are
			// at the start symbol) and begin a comment statement.
			if k.Kind == ASTKindStart {
				stack.Push(k)
			} else {
				stack.MarkComplete(k)
			}

			stmt := newCommentStatement(tok)
			stack.Push(stmt)
		default:
			return nil, NewParseError(fmt.Sprintf("invalid state with ASTKind %v and TokenType %v", k, tok))
		}

		// consume the token that was just peeked at, if any remain.
		if len(tokens) > 0 {
			tokens = tokens[1:]
		}
	}

	// this occurs when a statement has not been completed
	if stack.top > 1 {
		return nil, NewParseError(fmt.Sprintf("incomplete expression: %v", stack.container))
	}

	// returns a sublist which excludes the start symbol
	return stack.List(), nil
}
323
324 // trimSpaces will trim spaces on the left and right hand side of
325 // the literal.
326 func trimSpaces(k AST) AST {
327 // trim left hand side of spaces
328 for i := 0; i < len(k.Root.raw); i++ {
329 if !isWhitespace(k.Root.raw[i]) {
330 break
331 }
332
333 k.Root.raw = k.Root.raw[1:]
334 i--
335 }
336
337 // trim right hand side of spaces
338 for i := len(k.Root.raw) - 1; i >= 0; i-- {
339 if !isWhitespace(k.Root.raw[i]) {
340 break
341 }
342
343 k.Root.raw = k.Root.raw[:len(k.Root.raw)-1]
344 }
345
346 return k
347 }