package ini

import (
	"bytes"
	"io"
	"io/ioutil"

	"github.com/aws/aws-sdk-go/aws/awserr"
)

const (
	// ErrCodeUnableToReadFile is used when a file fails to be
	// opened or read.
	ErrCodeUnableToReadFile = "FailedRead"
)

// TokenType represents the various token types.
type TokenType int

func (t TokenType) String() string {
	switch t {
	case TokenNone:
		return "none"
	case TokenLit:
		return "literal"
	case TokenSep:
		return "sep"
	case TokenOp:
		return "op"
	case TokenWS:
		return "ws"
	case TokenNL:
		return "newline"
	case TokenComment:
		return "comment"
	case TokenComma:
		return "comma"
	default:
		return ""
	}
}

// TokenType enums
const (
	TokenNone = TokenType(iota)
	TokenLit
	TokenSep
	TokenComma
	TokenOp
	TokenWS
	TokenNL
	TokenComment
)

// iniLexer is a stateless lexer over ini source input.
type iniLexer struct{}

// Tokenize will return a list of tokens during lexical analysis of the
// io.Reader.
func (l *iniLexer) Tokenize(r io.Reader) ([]Token, error) {
	b, err := ioutil.ReadAll(r)
	if err != nil {
		return nil, awserr.New(ErrCodeUnableToReadFile, "unable to read file", err)
	}

	return l.tokenize(b)
}
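
// A minimal usage sketch (an illustration, not part of the original SDK
// source; the helper name is hypothetical, strings.NewReader would need to
// be imported, and the caller must live in this package since iniLexer is
// unexported):
//
//	func tokensFromString(s string) ([]Token, error) {
//		l := iniLexer{}
//		return l.Tokenize(strings.NewReader(s))
//	}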

func (l *iniLexer) tokenize(b []byte) ([]Token, error) {
	runes := bytes.Runes(b)
	var err error
	n := 0
	tokenAmount := countTokens(runes)
	tokens := make([]Token, tokenAmount)
	count := 0

	for len(runes) > 0 && count < tokenAmount {
		switch {
		case isWhitespace(runes[0]):
			tokens[count], n, err = newWSToken(runes)
		case isComma(runes[0]):
			tokens[count], n = newCommaToken(), 1
		case isComment(runes):
			tokens[count], n, err = newCommentToken(runes)
		case isNewline(runes):
			tokens[count], n, err = newNewlineToken(runes)
		case isSep(runes):
			tokens[count], n, err = newSepToken(runes)
		case isOp(runes):
			tokens[count], n, err = newOpToken(runes)
		default:
			tokens[count], n, err = newLitToken(runes)
		}

		if err != nil {
			return nil, err
		}

		count++

		runes = runes[n:]
	}

	return tokens[:count], nil
}
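
// As a rough illustration (a sketch; the exact rune spans are determined by
// the token constructors such as newLitToken and newOpToken), an input like
// "name = value\n" would lex to approximately:
//
//	literal("name"), ws(" "), op("="), ws(" "), literal("value"), newline("\n")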
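// countTokens performs a dry-run pass over the input so tokenize can size
// its token slice up front. It returns one more than the number of tokens
// counted (a spare slot); tokenize trims the slice to the actual count.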
func countTokens(runes []rune) int {
	count, n := 0, 0
	var err error

	for len(runes) > 0 {
		switch {
		case isWhitespace(runes[0]):
			_, n, err = newWSToken(runes)
		case isComma(runes[0]):
			_, n = newCommaToken(), 1
		case isComment(runes):
			_, n, err = newCommentToken(runes)
		case isNewline(runes):
			_, n, err = newNewlineToken(runes)
		case isSep(runes):
			_, n, err = newSepToken(runes)
		case isOp(runes):
			_, n, err = newOpToken(runes)
		default:
			_, n, err = newLitToken(runes)
		}

		if err != nil {
			return 0
		}

		count++
		runes = runes[n:]
	}

	return count + 1
}

// Token indicates metadata about a given value.
type Token struct {
	t         TokenType
	ValueType ValueType
	base      int
	raw       []rune
}

// emptyValue is a zero Value placeholder.
var emptyValue = Value{}

func newToken(t TokenType, raw []rune, v ValueType) Token {
	return Token{
		t:         t,
		raw:       raw,
		ValueType: v,
	}
}

// Raw returns the raw runes that were consumed.
func (tok Token) Raw() []rune {
	return tok.raw
}

// Type returns the token type.
func (tok Token) Type() TokenType {
	return tok.t
}