Diffstat (limited to 'vendor/github.com/hashicorp/hcl/json/parser')
-rw-r--r--  vendor/github.com/hashicorp/hcl/json/parser/flatten.go  117
-rw-r--r--  vendor/github.com/hashicorp/hcl/json/parser/parser.go   313
2 files changed, 430 insertions, 0 deletions
diff --git a/vendor/github.com/hashicorp/hcl/json/parser/flatten.go b/vendor/github.com/hashicorp/hcl/json/parser/flatten.go
new file mode 100644
index 0000000..f652d6f
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/json/parser/flatten.go
@@ -0,0 +1,117 @@
package parser

import "github.com/hashicorp/hcl/hcl/ast"

// flattenObjects takes an AST node, walks it, and flattens nested objects into HCL-style multi-key items
func flattenObjects(node ast.Node) {
	ast.Walk(node, func(n ast.Node) (ast.Node, bool) {
		// We only care about lists, because this is what we modify
		list, ok := n.(*ast.ObjectList)
		if !ok {
			return n, true
		}

		// Rebuild the item list
		items := make([]*ast.ObjectItem, 0, len(list.Items))
		frontier := make([]*ast.ObjectItem, len(list.Items))
		copy(frontier, list.Items)
		for len(frontier) > 0 {
			// Pop the current item
			n := len(frontier)
			item := frontier[n-1]
			frontier = frontier[:n-1]

			switch v := item.Val.(type) {
			case *ast.ObjectType:
				items, frontier = flattenObjectType(v, item, items, frontier)
			case *ast.ListType:
				items, frontier = flattenListType(v, item, items, frontier)
			default:
				items = append(items, item)
			}
		}

		// Reverse the list since the frontier model runs things backwards
		for i := len(items)/2 - 1; i >= 0; i-- {
			opp := len(items) - 1 - i
			items[i], items[opp] = items[opp], items[i]
		}

		// Done! Set the original items
		list.Items = items
		return n, true
	})
}

func flattenListType(
	ot *ast.ListType,
	item *ast.ObjectItem,
	items []*ast.ObjectItem,
	frontier []*ast.ObjectItem) ([]*ast.ObjectItem, []*ast.ObjectItem) {
	// If the list is empty, keep the original list
	if len(ot.List) == 0 {
		items = append(items, item)
		return items, frontier
	}

	// All the elements of this list must also be objects!
	for _, subitem := range ot.List {
		if _, ok := subitem.(*ast.ObjectType); !ok {
			items = append(items, item)
			return items, frontier
		}
	}

	// Great! We have a match; go through all the items and flatten
	for _, elem := range ot.List {
		// Add it to the frontier so that we can recurse
		frontier = append(frontier, &ast.ObjectItem{
			Keys:        item.Keys,
			Assign:      item.Assign,
			Val:         elem,
			LeadComment: item.LeadComment,
			LineComment: item.LineComment,
		})
	}

	return items, frontier
}

func flattenObjectType(
	ot *ast.ObjectType,
	item *ast.ObjectItem,
	items []*ast.ObjectItem,
	frontier []*ast.ObjectItem) ([]*ast.ObjectItem, []*ast.ObjectItem) {
	// If the list has no items we do not have to flatten anything
	if ot.List.Items == nil {
		items = append(items, item)
		return items, frontier
	}

	// All the elements of this object must also be objects!
	for _, subitem := range ot.List.Items {
		if _, ok := subitem.Val.(*ast.ObjectType); !ok {
			items = append(items, item)
			return items, frontier
		}
	}

	// Great! We have a match; go through all the items and flatten
	for _, subitem := range ot.List.Items {
		// Copy the new key
		keys := make([]*ast.ObjectKey, len(item.Keys)+len(subitem.Keys))
		copy(keys, item.Keys)
		copy(keys[len(item.Keys):], subitem.Keys)

		// Add it to the frontier so that we can recurse
		frontier = append(frontier, &ast.ObjectItem{
			Keys:        keys,
			Assign:      item.Assign,
			Val:         subitem.Val,
			LeadComment: item.LeadComment,
			LineComment: item.LineComment,
		})
	}

	return items, frontier
}
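flattenObjects is what makes JSON input line up with HCL's native AST shape: nested JSON objects collapse into a single ObjectItem whose Keys slice carries the full key path. The sketch below is not part of the vendored code; it drives the public Parse entry point defined in parser.go below (which runs flattenObjects before returning) and prints each item's key path. The sample JSON and the expected output are illustrative.

package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl/hcl/ast"
	"github.com/hashicorp/hcl/json/parser"
)

func main() {
	// Two levels of JSON nesting around a single leaf object.
	src := []byte(`{"variable": {"region": {"default": "us-east-1"}}}`)

	f, err := parser.Parse(src)
	if err != nil {
		log.Fatal(err)
	}

	// Parse runs flattenObjects on the result, so the nested objects above
	// collapse into one ObjectItem whose Keys hold the full path.
	list, ok := f.Node.(*ast.ObjectList)
	if !ok {
		log.Fatalf("unexpected root node %T", f.Node)
	}
	for _, item := range list.Items {
		for _, key := range item.Keys {
			fmt.Printf("%s ", key.Token.Text)
		}
		fmt.Println()
	}
	// Expected to print one line carrying both keys, e.g.: "variable" "region"
}

Without the flattening pass the same input would remain a chain of nested single-key objects; with it, the result roughly matches what the native HCL parser produces for `variable "region" { default = "us-east-1" }`.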
diff --git a/vendor/github.com/hashicorp/hcl/json/parser/parser.go b/vendor/github.com/hashicorp/hcl/json/parser/parser.go
new file mode 100644
index 0000000..125a5f0
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/json/parser/parser.go
@@ -0,0 +1,313 @@
package parser

import (
	"errors"
	"fmt"

	"github.com/hashicorp/hcl/hcl/ast"
	hcltoken "github.com/hashicorp/hcl/hcl/token"
	"github.com/hashicorp/hcl/json/scanner"
	"github.com/hashicorp/hcl/json/token"
)

type Parser struct {
	sc *scanner.Scanner

	// Last read token
	tok       token.Token
	commaPrev token.Token

	enableTrace bool
	indent      int
	n           int // buffer size (max = 1)
}

func newParser(src []byte) *Parser {
	return &Parser{
		sc: scanner.New(src),
	}
}

// Parse parses the given source and returns the abstract syntax tree.
func Parse(src []byte) (*ast.File, error) {
	p := newParser(src)
	return p.Parse()
}

var errEofToken = errors.New("EOF token found")

// Parse parses the source and returns the abstract syntax tree.
func (p *Parser) Parse() (*ast.File, error) {
	f := &ast.File{}
	var err, scerr error
	p.sc.Error = func(pos token.Pos, msg string) {
		scerr = fmt.Errorf("%s: %s", pos, msg)
	}

	// The root must be an object in JSON
	object, err := p.object()
	if scerr != nil {
		return nil, scerr
	}
	if err != nil {
		return nil, err
	}

	// We make our final node an object list so it is more HCL-compatible
	f.Node = object.List

	// Flatten it, which finds patterns and turns them into more HCL-like
	// AST trees.
	flattenObjects(f.Node)

	return f, nil
}

func (p *Parser) objectList() (*ast.ObjectList, error) {
	defer un(trace(p, "ParseObjectList"))
	node := &ast.ObjectList{}

	for {
		n, err := p.objectItem()
		if err == errEofToken {
			break // we are finished
		}

		// we don't return a nil node, because the caller might want to use the
		// already collected items.
		if err != nil {
			return node, err
		}

		node.Add(n)

		// Check for a follow-up comma. If it isn't a comma, then we're done
		if tok := p.scan(); tok.Type != token.COMMA {
			break
		}
	}

	return node, nil
}

// objectItem parses a single object item
func (p *Parser) objectItem() (*ast.ObjectItem, error) {
	defer un(trace(p, "ParseObjectItem"))

	keys, err := p.objectKey()
	if err != nil {
		return nil, err
	}

	o := &ast.ObjectItem{
		Keys: keys,
	}

	switch p.tok.Type {
	case token.COLON:
		pos := p.tok.Pos
		o.Assign = hcltoken.Pos{
			Filename: pos.Filename,
			Offset:   pos.Offset,
			Line:     pos.Line,
			Column:   pos.Column,
		}

		o.Val, err = p.objectValue()
		if err != nil {
			return nil, err
		}
	}

	return o, nil
}

// objectKey parses an object key and returns an ObjectKey AST
func (p *Parser) objectKey() ([]*ast.ObjectKey, error) {
	keyCount := 0
	keys := make([]*ast.ObjectKey, 0)

	for {
		tok := p.scan()
		switch tok.Type {
		case token.EOF:
			return nil, errEofToken
		case token.STRING:
			keyCount++
			keys = append(keys, &ast.ObjectKey{
				Token: p.tok.HCLToken(),
			})
		case token.COLON:
			// If we have a zero key count it means that we never got
			// an object key, i.e. `{ :`. This is a syntax error.
			if keyCount == 0 {
				return nil, fmt.Errorf("expected: STRING got: %s", p.tok.Type)
			}

			// Done
			return keys, nil
		case token.ILLEGAL:
			return nil, errors.New("illegal")
		default:
			return nil, fmt.Errorf("expected: STRING got: %s", p.tok.Type)
		}
	}
}

// objectValue parses any type of value, such as a number, bool, string,
// object, or list.
func (p *Parser) objectValue() (ast.Node, error) {
	defer un(trace(p, "ParseObjectValue"))
	tok := p.scan()

	switch tok.Type {
	case token.NUMBER, token.FLOAT, token.BOOL, token.NULL, token.STRING:
		return p.literalType()
	case token.LBRACE:
		return p.objectType()
	case token.LBRACK:
		return p.listType()
	case token.EOF:
		return nil, errEofToken
	}

	return nil, fmt.Errorf("Expected object value, got unknown token: %+v", tok)
}

// object parses the top-level JSON object; the root of a JSON document
// must be an object.
func (p *Parser) object() (*ast.ObjectType, error) {
	defer un(trace(p, "ParseType"))
	tok := p.scan()

	switch tok.Type {
	case token.LBRACE:
		return p.objectType()
	case token.EOF:
		return nil, errEofToken
	}

	return nil, fmt.Errorf("Expected object, got unknown token: %+v", tok)
}

// objectType parses an object type and returns an ObjectType AST
func (p *Parser) objectType() (*ast.ObjectType, error) {
	defer un(trace(p, "ParseObjectType"))

	// we assume that the currently scanned token is a LBRACE
	o := &ast.ObjectType{}

	l, err := p.objectList()

	// If we hit RBRACE, we are good to go (it means we parsed all items). If
	// it's not an RBRACE, it's a syntax error and we just return it.
	if err != nil && p.tok.Type != token.RBRACE {
		return nil, err
	}

	o.List = l
	return o, nil
}

// listType parses a list type and returns a ListType AST
func (p *Parser) listType() (*ast.ListType, error) {
	defer un(trace(p, "ParseListType"))

	// we assume that the currently scanned token is a LBRACK
	l := &ast.ListType{}

	for {
		tok := p.scan()
		switch tok.Type {
		case token.NUMBER, token.FLOAT, token.STRING:
			node, err := p.literalType()
			if err != nil {
				return nil, err
			}

			l.Add(node)
		case token.COMMA:
			continue
		case token.LBRACE:
			node, err := p.objectType()
			if err != nil {
				return nil, err
			}

			l.Add(node)
		case token.BOOL:
			// TODO(arslan) should we support? not supported by HCL yet
		case token.LBRACK:
			// TODO(arslan) should we support nested lists? Even though it's
			// written in README of HCL, it's not a part of the grammar
			// (not defined in parse.y)
		case token.RBRACK:
			// finished
			return l, nil
		default:
			return nil, fmt.Errorf("unexpected token while parsing list: %s", tok.Type)
		}

	}
}

// literalType parses a literal type and returns a LiteralType AST
func (p *Parser) literalType() (*ast.LiteralType, error) {
	defer un(trace(p, "ParseLiteral"))

	return &ast.LiteralType{
		Token: p.tok.HCLToken(),
	}, nil
}

// scan returns the next token from the underlying scanner. If a token has
// been unscanned then read that instead.
func (p *Parser) scan() token.Token {
	// If we have a token on the buffer, then return it.
	if p.n != 0 {
		p.n = 0
		return p.tok
	}

	p.tok = p.sc.Scan()
	return p.tok
}

// unscan pushes the previously read token back onto the buffer.
func (p *Parser) unscan() {
	p.n = 1
}

// ----------------------------------------------------------------------------
// Parsing support

func (p *Parser) printTrace(a ...interface{}) {
	if !p.enableTrace {
		return
	}

	const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . "
	const n = len(dots)
	fmt.Printf("%5d:%3d: ", p.tok.Pos.Line, p.tok.Pos.Column)

	i := 2 * p.indent
	for i > n {
		fmt.Print(dots)
		i -= n
	}
	// i <= n
	fmt.Print(dots[0:i])
	fmt.Println(a...)
}

func trace(p *Parser, msg string) *Parser {
	p.printTrace(msg, "(")
	p.indent++
	return p
}

// Usage pattern: defer un(trace(p, "..."))
func un(p *Parser) {
	p.indent--
	p.printTrace(")")
}
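For a sense of how this JSON front end plugs into the rest of the library, the sketch below hands the parsed result to HCL's printer to render the input in native HCL syntax. The hcl/hcl/printer package is not part of this diff, so the Fprint call is an assumption about the surrounding library; treat this as an illustration of the shared AST rather than vendored code.

package main

import (
	"log"
	"os"

	// printer is a sibling package in the same library; it is not part of
	// this diff, so using it here is an assumption.
	"github.com/hashicorp/hcl/hcl/printer"
	"github.com/hashicorp/hcl/json/parser"
)

func main() {
	src := []byte(`{"service": {"web": {"image": "nginx", "replicas": 3}}}`)

	// The root of a JSON document must be an object; Parse returns an
	// *ast.File whose Node is the flattened ObjectList.
	f, err := parser.Parse(src)
	if err != nil {
		log.Fatalf("parse: %s", err)
	}

	// Render the JSON-derived AST with HCL's native syntax.
	if err := printer.Fprint(os.Stdout, f.Node); err != nil {
		log.Fatalf("print: %s", err)
	}
}

Because both syntaxes produce the same hcl/ast node types, downstream consumers such as the decoder do not need to know whether their input started life as HCL or JSON.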