diff options
author | appilon <apilon@hashicorp.com> | 2019-02-27 16:43:31 -0500 |
---|---|---|
committer | GitHub <noreply@github.com> | 2019-02-27 16:43:31 -0500 |
commit | 844b5a68d8af4791755b8f0ad293cc99f5959183 (patch) | |
tree | 255c250a5c9d4801c74092d33b7337d8c14438ff /vendor/github.com/hashicorp/hcl2 | |
parent | 303b299eeb6b06e939e35905e4b34cb410dd9dc3 (diff) | |
parent | 15c0b25d011f37e7c20aeca9eaf461f78285b8d9 (diff) | |
download | terraform-provider-statuscake-844b5a68d8af4791755b8f0ad293cc99f5959183.tar.gz terraform-provider-statuscake-844b5a68d8af4791755b8f0ad293cc99f5959183.tar.zst terraform-provider-statuscake-844b5a68d8af4791755b8f0ad293cc99f5959183.zip |
Merge pull request #27 from terraform-providers/go-modules-2019-02-22
[MODULES] Switch to Go Modules
Diffstat (limited to 'vendor/github.com/hashicorp/hcl2')
73 files changed, 22768 insertions, 0 deletions
diff --git a/vendor/github.com/hashicorp/hcl2/LICENSE b/vendor/github.com/hashicorp/hcl2/LICENSE new file mode 100644 index 0000000..82b4de9 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/LICENSE | |||
@@ -0,0 +1,353 @@ | |||
1 | Mozilla Public License, version 2.0 | ||
2 | |||
3 | 1. Definitions | ||
4 | |||
5 | 1.1. “Contributor” | ||
6 | |||
7 | means each individual or legal entity that creates, contributes to the | ||
8 | creation of, or owns Covered Software. | ||
9 | |||
10 | 1.2. “Contributor Version” | ||
11 | |||
12 | means the combination of the Contributions of others (if any) used by a | ||
13 | Contributor and that particular Contributor’s Contribution. | ||
14 | |||
15 | 1.3. “Contribution” | ||
16 | |||
17 | means Covered Software of a particular Contributor. | ||
18 | |||
19 | 1.4. “Covered Software” | ||
20 | |||
21 | means Source Code Form to which the initial Contributor has attached the | ||
22 | notice in Exhibit A, the Executable Form of such Source Code Form, and | ||
23 | Modifications of such Source Code Form, in each case including portions | ||
24 | thereof. | ||
25 | |||
26 | 1.5. “Incompatible With Secondary Licenses” | ||
27 | means | ||
28 | |||
29 | a. that the initial Contributor has attached the notice described in | ||
30 | Exhibit B to the Covered Software; or | ||
31 | |||
32 | b. that the Covered Software was made available under the terms of version | ||
33 | 1.1 or earlier of the License, but not also under the terms of a | ||
34 | Secondary License. | ||
35 | |||
36 | 1.6. “Executable Form” | ||
37 | |||
38 | means any form of the work other than Source Code Form. | ||
39 | |||
40 | 1.7. “Larger Work” | ||
41 | |||
42 | means a work that combines Covered Software with other material, in a separate | ||
43 | file or files, that is not Covered Software. | ||
44 | |||
45 | 1.8. “License” | ||
46 | |||
47 | means this document. | ||
48 | |||
49 | 1.9. “Licensable” | ||
50 | |||
51 | means having the right to grant, to the maximum extent possible, whether at the | ||
52 | time of the initial grant or subsequently, any and all of the rights conveyed by | ||
53 | this License. | ||
54 | |||
55 | 1.10. “Modifications” | ||
56 | |||
57 | means any of the following: | ||
58 | |||
59 | a. any file in Source Code Form that results from an addition to, deletion | ||
60 | from, or modification of the contents of Covered Software; or | ||
61 | |||
62 | b. any new file in Source Code Form that contains any Covered Software. | ||
63 | |||
64 | 1.11. “Patent Claims” of a Contributor | ||
65 | |||
66 | means any patent claim(s), including without limitation, method, process, | ||
67 | and apparatus claims, in any patent Licensable by such Contributor that | ||
68 | would be infringed, but for the grant of the License, by the making, | ||
69 | using, selling, offering for sale, having made, import, or transfer of | ||
70 | either its Contributions or its Contributor Version. | ||
71 | |||
72 | 1.12. “Secondary License” | ||
73 | |||
74 | means either the GNU General Public License, Version 2.0, the GNU Lesser | ||
75 | General Public License, Version 2.1, the GNU Affero General Public | ||
76 | License, Version 3.0, or any later versions of those licenses. | ||
77 | |||
78 | 1.13. “Source Code Form” | ||
79 | |||
80 | means the form of the work preferred for making modifications. | ||
81 | |||
82 | 1.14. “You” (or “Your”) | ||
83 | |||
84 | means an individual or a legal entity exercising rights under this | ||
85 | License. For legal entities, “You” includes any entity that controls, is | ||
86 | controlled by, or is under common control with You. For purposes of this | ||
87 | definition, “control” means (a) the power, direct or indirect, to cause | ||
88 | the direction or management of such entity, whether by contract or | ||
89 | otherwise, or (b) ownership of more than fifty percent (50%) of the | ||
90 | outstanding shares or beneficial ownership of such entity. | ||
91 | |||
92 | |||
93 | 2. License Grants and Conditions | ||
94 | |||
95 | 2.1. Grants | ||
96 | |||
97 | Each Contributor hereby grants You a world-wide, royalty-free, | ||
98 | non-exclusive license: | ||
99 | |||
100 | a. under intellectual property rights (other than patent or trademark) | ||
101 | Licensable by such Contributor to use, reproduce, make available, | ||
102 | modify, display, perform, distribute, and otherwise exploit its | ||
103 | Contributions, either on an unmodified basis, with Modifications, or as | ||
104 | part of a Larger Work; and | ||
105 | |||
106 | b. under Patent Claims of such Contributor to make, use, sell, offer for | ||
107 | sale, have made, import, and otherwise transfer either its Contributions | ||
108 | or its Contributor Version. | ||
109 | |||
110 | 2.2. Effective Date | ||
111 | |||
112 | The licenses granted in Section 2.1 with respect to any Contribution become | ||
113 | effective for each Contribution on the date the Contributor first distributes | ||
114 | such Contribution. | ||
115 | |||
116 | 2.3. Limitations on Grant Scope | ||
117 | |||
118 | The licenses granted in this Section 2 are the only rights granted under this | ||
119 | License. No additional rights or licenses will be implied from the distribution | ||
120 | or licensing of Covered Software under this License. Notwithstanding Section | ||
121 | 2.1(b) above, no patent license is granted by a Contributor: | ||
122 | |||
123 | a. for any code that a Contributor has removed from Covered Software; or | ||
124 | |||
125 | b. for infringements caused by: (i) Your and any other third party’s | ||
126 | modifications of Covered Software, or (ii) the combination of its | ||
127 | Contributions with other software (except as part of its Contributor | ||
128 | Version); or | ||
129 | |||
130 | c. under Patent Claims infringed by Covered Software in the absence of its | ||
131 | Contributions. | ||
132 | |||
133 | This License does not grant any rights in the trademarks, service marks, or | ||
134 | logos of any Contributor (except as may be necessary to comply with the | ||
135 | notice requirements in Section 3.4). | ||
136 | |||
137 | 2.4. Subsequent Licenses | ||
138 | |||
139 | No Contributor makes additional grants as a result of Your choice to | ||
140 | distribute the Covered Software under a subsequent version of this License | ||
141 | (see Section 10.2) or under the terms of a Secondary License (if permitted | ||
142 | under the terms of Section 3.3). | ||
143 | |||
144 | 2.5. Representation | ||
145 | |||
146 | Each Contributor represents that the Contributor believes its Contributions | ||
147 | are its original creation(s) or it has sufficient rights to grant the | ||
148 | rights to its Contributions conveyed by this License. | ||
149 | |||
150 | 2.6. Fair Use | ||
151 | |||
152 | This License is not intended to limit any rights You have under applicable | ||
153 | copyright doctrines of fair use, fair dealing, or other equivalents. | ||
154 | |||
155 | 2.7. Conditions | ||
156 | |||
157 | Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in | ||
158 | Section 2.1. | ||
159 | |||
160 | |||
161 | 3. Responsibilities | ||
162 | |||
163 | 3.1. Distribution of Source Form | ||
164 | |||
165 | All distribution of Covered Software in Source Code Form, including any | ||
166 | Modifications that You create or to which You contribute, must be under the | ||
167 | terms of this License. You must inform recipients that the Source Code Form | ||
168 | of the Covered Software is governed by the terms of this License, and how | ||
169 | they can obtain a copy of this License. You may not attempt to alter or | ||
170 | restrict the recipients’ rights in the Source Code Form. | ||
171 | |||
172 | 3.2. Distribution of Executable Form | ||
173 | |||
174 | If You distribute Covered Software in Executable Form then: | ||
175 | |||
176 | a. such Covered Software must also be made available in Source Code Form, | ||
177 | as described in Section 3.1, and You must inform recipients of the | ||
178 | Executable Form how they can obtain a copy of such Source Code Form by | ||
179 | reasonable means in a timely manner, at a charge no more than the cost | ||
180 | of distribution to the recipient; and | ||
181 | |||
182 | b. You may distribute such Executable Form under the terms of this License, | ||
183 | or sublicense it under different terms, provided that the license for | ||
184 | the Executable Form does not attempt to limit or alter the recipients’ | ||
185 | rights in the Source Code Form under this License. | ||
186 | |||
187 | 3.3. Distribution of a Larger Work | ||
188 | |||
189 | You may create and distribute a Larger Work under terms of Your choice, | ||
190 | provided that You also comply with the requirements of this License for the | ||
191 | Covered Software. If the Larger Work is a combination of Covered Software | ||
192 | with a work governed by one or more Secondary Licenses, and the Covered | ||
193 | Software is not Incompatible With Secondary Licenses, this License permits | ||
194 | You to additionally distribute such Covered Software under the terms of | ||
195 | such Secondary License(s), so that the recipient of the Larger Work may, at | ||
196 | their option, further distribute the Covered Software under the terms of | ||
197 | either this License or such Secondary License(s). | ||
198 | |||
199 | 3.4. Notices | ||
200 | |||
201 | You may not remove or alter the substance of any license notices (including | ||
202 | copyright notices, patent notices, disclaimers of warranty, or limitations | ||
203 | of liability) contained within the Source Code Form of the Covered | ||
204 | Software, except that You may alter any license notices to the extent | ||
205 | required to remedy known factual inaccuracies. | ||
206 | |||
207 | 3.5. Application of Additional Terms | ||
208 | |||
209 | You may choose to offer, and to charge a fee for, warranty, support, | ||
210 | indemnity or liability obligations to one or more recipients of Covered | ||
211 | Software. However, You may do so only on Your own behalf, and not on behalf | ||
212 | of any Contributor. You must make it absolutely clear that any such | ||
213 | warranty, support, indemnity, or liability obligation is offered by You | ||
214 | alone, and You hereby agree to indemnify every Contributor for any | ||
215 | liability incurred by such Contributor as a result of warranty, support, | ||
216 | indemnity or liability terms You offer. You may include additional | ||
217 | disclaimers of warranty and limitations of liability specific to any | ||
218 | jurisdiction. | ||
219 | |||
220 | 4. Inability to Comply Due to Statute or Regulation | ||
221 | |||
222 | If it is impossible for You to comply with any of the terms of this License | ||
223 | with respect to some or all of the Covered Software due to statute, judicial | ||
224 | order, or regulation then You must: (a) comply with the terms of this License | ||
225 | to the maximum extent possible; and (b) describe the limitations and the code | ||
226 | they affect. Such description must be placed in a text file included with all | ||
227 | distributions of the Covered Software under this License. Except to the | ||
228 | extent prohibited by statute or regulation, such description must be | ||
229 | sufficiently detailed for a recipient of ordinary skill to be able to | ||
230 | understand it. | ||
231 | |||
232 | 5. Termination | ||
233 | |||
234 | 5.1. The rights granted under this License will terminate automatically if You | ||
235 | fail to comply with any of its terms. However, if You become compliant, | ||
236 | then the rights granted under this License from a particular Contributor | ||
237 | are reinstated (a) provisionally, unless and until such Contributor | ||
238 | explicitly and finally terminates Your grants, and (b) on an ongoing basis, | ||
239 | if such Contributor fails to notify You of the non-compliance by some | ||
240 | reasonable means prior to 60 days after You have come back into compliance. | ||
241 | Moreover, Your grants from a particular Contributor are reinstated on an | ||
242 | ongoing basis if such Contributor notifies You of the non-compliance by | ||
243 | some reasonable means, this is the first time You have received notice of | ||
244 | non-compliance with this License from such Contributor, and You become | ||
245 | compliant prior to 30 days after Your receipt of the notice. | ||
246 | |||
247 | 5.2. If You initiate litigation against any entity by asserting a patent | ||
248 | infringement claim (excluding declaratory judgment actions, counter-claims, | ||
249 | and cross-claims) alleging that a Contributor Version directly or | ||
250 | indirectly infringes any patent, then the rights granted to You by any and | ||
251 | all Contributors for the Covered Software under Section 2.1 of this License | ||
252 | shall terminate. | ||
253 | |||
254 | 5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user | ||
255 | license agreements (excluding distributors and resellers) which have been | ||
256 | validly granted by You or Your distributors under this License prior to | ||
257 | termination shall survive termination. | ||
258 | |||
259 | 6. Disclaimer of Warranty | ||
260 | |||
261 | Covered Software is provided under this License on an “as is” basis, without | ||
262 | warranty of any kind, either expressed, implied, or statutory, including, | ||
263 | without limitation, warranties that the Covered Software is free of defects, | ||
264 | merchantable, fit for a particular purpose or non-infringing. The entire | ||
265 | risk as to the quality and performance of the Covered Software is with You. | ||
266 | Should any Covered Software prove defective in any respect, You (not any | ||
267 | Contributor) assume the cost of any necessary servicing, repair, or | ||
268 | correction. This disclaimer of warranty constitutes an essential part of this | ||
269 | License. No use of any Covered Software is authorized under this License | ||
270 | except under this disclaimer. | ||
271 | |||
272 | 7. Limitation of Liability | ||
273 | |||
274 | Under no circumstances and under no legal theory, whether tort (including | ||
275 | negligence), contract, or otherwise, shall any Contributor, or anyone who | ||
276 | distributes Covered Software as permitted above, be liable to You for any | ||
277 | direct, indirect, special, incidental, or consequential damages of any | ||
278 | character including, without limitation, damages for lost profits, loss of | ||
279 | goodwill, work stoppage, computer failure or malfunction, or any and all | ||
280 | other commercial damages or losses, even if such party shall have been | ||
281 | informed of the possibility of such damages. This limitation of liability | ||
282 | shall not apply to liability for death or personal injury resulting from such | ||
283 | party’s negligence to the extent applicable law prohibits such limitation. | ||
284 | Some jurisdictions do not allow the exclusion or limitation of incidental or | ||
285 | consequential damages, so this exclusion and limitation may not apply to You. | ||
286 | |||
287 | 8. Litigation | ||
288 | |||
289 | Any litigation relating to this License may be brought only in the courts of | ||
290 | a jurisdiction where the defendant maintains its principal place of business | ||
291 | and such litigation shall be governed by laws of that jurisdiction, without | ||
292 | reference to its conflict-of-law provisions. Nothing in this Section shall | ||
293 | prevent a party’s ability to bring cross-claims or counter-claims. | ||
294 | |||
295 | 9. Miscellaneous | ||
296 | |||
297 | This License represents the complete agreement concerning the subject matter | ||
298 | hereof. If any provision of this License is held to be unenforceable, such | ||
299 | provision shall be reformed only to the extent necessary to make it | ||
300 | enforceable. Any law or regulation which provides that the language of a | ||
301 | contract shall be construed against the drafter shall not be used to construe | ||
302 | this License against a Contributor. | ||
303 | |||
304 | |||
305 | 10. Versions of the License | ||
306 | |||
307 | 10.1. New Versions | ||
308 | |||
309 | Mozilla Foundation is the license steward. Except as provided in Section | ||
310 | 10.3, no one other than the license steward has the right to modify or | ||
311 | publish new versions of this License. Each version will be given a | ||
312 | distinguishing version number. | ||
313 | |||
314 | 10.2. Effect of New Versions | ||
315 | |||
316 | You may distribute the Covered Software under the terms of the version of | ||
317 | the License under which You originally received the Covered Software, or | ||
318 | under the terms of any subsequent version published by the license | ||
319 | steward. | ||
320 | |||
321 | 10.3. Modified Versions | ||
322 | |||
323 | If you create software not governed by this License, and you want to | ||
324 | create a new license for such software, you may create and use a modified | ||
325 | version of this License if you rename the license and remove any | ||
326 | references to the name of the license steward (except to note that such | ||
327 | modified license differs from this License). | ||
328 | |||
329 | 10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses | ||
330 | If You choose to distribute Source Code Form that is Incompatible With | ||
331 | Secondary Licenses under the terms of this version of the License, the | ||
332 | notice described in Exhibit B of this License must be attached. | ||
333 | |||
334 | Exhibit A - Source Code Form License Notice | ||
335 | |||
336 | This Source Code Form is subject to the | ||
337 | terms of the Mozilla Public License, v. | ||
338 | 2.0. If a copy of the MPL was not | ||
339 | distributed with this file, You can | ||
340 | obtain one at | ||
341 | http://mozilla.org/MPL/2.0/. | ||
342 | |||
343 | If it is not possible or desirable to put the notice in a particular file, then | ||
344 | You may include the notice in a location (such as a LICENSE file in a relevant | ||
345 | directory) where a recipient would be likely to look for such a notice. | ||
346 | |||
347 | You may add additional accurate notices of copyright ownership. | ||
348 | |||
349 | Exhibit B - “Incompatible With Secondary Licenses” Notice | ||
350 | |||
351 | This Source Code Form is “Incompatible | ||
352 | With Secondary Licenses”, as defined by | ||
353 | the Mozilla Public License, v. 2.0. | ||
diff --git a/vendor/github.com/hashicorp/hcl2/gohcl/decode.go b/vendor/github.com/hashicorp/hcl2/gohcl/decode.go new file mode 100644 index 0000000..3a149a8 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/gohcl/decode.go | |||
@@ -0,0 +1,304 @@ | |||
1 | package gohcl | ||
2 | |||
3 | import ( | ||
4 | "fmt" | ||
5 | "reflect" | ||
6 | |||
7 | "github.com/zclconf/go-cty/cty" | ||
8 | |||
9 | "github.com/hashicorp/hcl2/hcl" | ||
10 | "github.com/zclconf/go-cty/cty/convert" | ||
11 | "github.com/zclconf/go-cty/cty/gocty" | ||
12 | ) | ||
13 | |||
14 | // DecodeBody extracts the configuration within the given body into the given | ||
15 | // value. This value must be a non-nil pointer to either a struct or | ||
16 | // a map, where in the former case the configuration will be decoded using | ||
17 | // struct tags and in the latter case only attributes are allowed and their | ||
18 | // values are decoded into the map. | ||
19 | // | ||
20 | // The given EvalContext is used to resolve any variables or functions in | ||
21 | // expressions encountered while decoding. This may be nil to require only | ||
22 | // constant values, for simple applications that do not support variables or | ||
23 | // functions. | ||
24 | // | ||
25 | // The returned diagnostics should be inspected with its HasErrors method to | ||
26 | // determine if the populated value is valid and complete. If error diagnostics | ||
27 | // are returned then the given value may have been partially-populated but | ||
28 | // may still be accessed by a careful caller for static analysis and editor | ||
29 | // integration use-cases. | ||
30 | func DecodeBody(body hcl.Body, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics { | ||
31 | rv := reflect.ValueOf(val) | ||
32 | if rv.Kind() != reflect.Ptr { | ||
33 | panic(fmt.Sprintf("target value must be a pointer, not %s", rv.Type().String())) | ||
34 | } | ||
35 | |||
36 | return decodeBodyToValue(body, ctx, rv.Elem()) | ||
37 | } | ||
38 | |||
39 | func decodeBodyToValue(body hcl.Body, ctx *hcl.EvalContext, val reflect.Value) hcl.Diagnostics { | ||
40 | et := val.Type() | ||
41 | switch et.Kind() { | ||
42 | case reflect.Struct: | ||
43 | return decodeBodyToStruct(body, ctx, val) | ||
44 | case reflect.Map: | ||
45 | return decodeBodyToMap(body, ctx, val) | ||
46 | default: | ||
47 | panic(fmt.Sprintf("target value must be pointer to struct or map, not %s", et.String())) | ||
48 | } | ||
49 | } | ||
50 | |||
// decodeBodyToStruct decodes body into the struct value val using the schema
// implied by val's `hcl:` struct tags. It proceeds in three phases: the
// "remain" field (if any), then tagged attributes, then tagged blocks.
// Diagnostics from all phases are accumulated and returned together.
func decodeBodyToStruct(body hcl.Body, ctx *hcl.EvalContext, val reflect.Value) hcl.Diagnostics {
	schema, partial := ImpliedBodySchema(val.Interface())

	var content *hcl.BodyContent
	var leftovers hcl.Body
	var diags hcl.Diagnostics
	// A schema is "partial" when the struct has a remain field; in that case
	// unmatched body content is captured in leftovers instead of diagnosed.
	if partial {
		content, leftovers, diags = body.PartialContent(schema)
	} else {
		content, diags = body.Content(schema)
	}
	if content == nil {
		return diags
	}

	tags := getFieldTags(val.Type())

	// Phase 1: assign the leftover body to the remain field, either raw
	// (hcl.Body), as attributes (hcl.Attributes), or by recursive decoding.
	if tags.Remain != nil {
		fieldIdx := *tags.Remain
		field := val.Type().Field(fieldIdx)
		fieldV := val.Field(fieldIdx)
		switch {
		case bodyType.AssignableTo(field.Type):
			fieldV.Set(reflect.ValueOf(leftovers))
		case attrsType.AssignableTo(field.Type):
			attrs, attrsDiags := leftovers.JustAttributes()
			if len(attrsDiags) > 0 {
				diags = append(diags, attrsDiags...)
			}
			fieldV.Set(reflect.ValueOf(attrs))
		default:
			diags = append(diags, decodeBodyToValue(leftovers, ctx, fieldV)...)
		}
	}

	// Phase 2: decode each tagged attribute into its field.
	for name, fieldIdx := range tags.Attributes {
		attr := content.Attributes[name]
		field := val.Type().Field(fieldIdx)
		fieldV := val.Field(fieldIdx)

		if attr == nil {
			if !exprType.AssignableTo(field.Type) {
				// Optional attribute absent: leave the field at its zero value.
				continue
			}

			// As a special case, if the target is of type hcl.Expression then
			// we'll assign an actual expression that evaluates to a cty null,
			// so the caller can deal with it within the cty realm rather
			// than within the Go realm.
			synthExpr := hcl.StaticExpr(cty.NullVal(cty.DynamicPseudoType), body.MissingItemRange())
			fieldV.Set(reflect.ValueOf(synthExpr))
			continue
		}

		// Fields may capture the raw attribute, the raw expression, or a
		// gocty-decoded native Go value, chosen by assignability.
		switch {
		case attrType.AssignableTo(field.Type):
			fieldV.Set(reflect.ValueOf(attr))
		case exprType.AssignableTo(field.Type):
			fieldV.Set(reflect.ValueOf(attr.Expr))
		default:
			diags = append(diags, DecodeExpression(
				attr.Expr, ctx, fieldV.Addr().Interface(),
			)...)
		}
	}

	blocksByType := content.Blocks.ByType()

	// Phase 3: decode blocks. Field type shape controls cardinality:
	// slice => zero or more, pointer => zero or one, plain => exactly one.
	for typeName, fieldIdx := range tags.Blocks {
		blocks := blocksByType[typeName]
		field := val.Type().Field(fieldIdx)

		ty := field.Type
		isSlice := false
		isPtr := false
		if ty.Kind() == reflect.Slice {
			isSlice = true
			ty = ty.Elem()
		}
		if ty.Kind() == reflect.Ptr {
			isPtr = true
			ty = ty.Elem()
		}

		// Multiple blocks for a non-slice field is a user error; the second
		// occurrence is flagged and the whole field is skipped.
		if len(blocks) > 1 && !isSlice {
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  fmt.Sprintf("Duplicate %s block", typeName),
				Detail: fmt.Sprintf(
					"Only one %s block is allowed. Another was defined at %s.",
					typeName, blocks[0].DefRange.String(),
				),
				Subject: &blocks[1].DefRange,
			})
			continue
		}

		if len(blocks) == 0 {
			if isSlice || isPtr {
				// Optional (slice/pointer) field: explicitly reset to zero.
				val.Field(fieldIdx).Set(reflect.Zero(field.Type))
			} else {
				// Plain struct field means the block is required.
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  fmt.Sprintf("Missing %s block", typeName),
					Detail:   fmt.Sprintf("A %s block is required.", typeName),
					Subject:  body.MissingItemRange().Ptr(),
				})
			}
			continue
		}

		switch {

		case isSlice:
			elemType := ty
			if isPtr {
				elemType = reflect.PtrTo(ty)
			}
			sli := reflect.MakeSlice(reflect.SliceOf(elemType), len(blocks), len(blocks))

			for i, block := range blocks {
				if isPtr {
					// Decode into a fresh value, then store its pointer.
					v := reflect.New(ty)
					diags = append(diags, decodeBlockToValue(block, ctx, v.Elem())...)
					sli.Index(i).Set(v)
				} else {
					// Decode directly into the pre-allocated slice element.
					diags = append(diags, decodeBlockToValue(block, ctx, sli.Index(i))...)
				}
			}

			val.Field(fieldIdx).Set(sli)

		default:
			// Exactly one block (guaranteed by the checks above).
			block := blocks[0]
			if isPtr {
				v := reflect.New(ty)
				diags = append(diags, decodeBlockToValue(block, ctx, v.Elem())...)
				val.Field(fieldIdx).Set(v)
			} else {
				diags = append(diags, decodeBlockToValue(block, ctx, val.Field(fieldIdx))...)
			}

		}

	}

	return diags
}
199 | |||
200 | func decodeBodyToMap(body hcl.Body, ctx *hcl.EvalContext, v reflect.Value) hcl.Diagnostics { | ||
201 | attrs, diags := body.JustAttributes() | ||
202 | if attrs == nil { | ||
203 | return diags | ||
204 | } | ||
205 | |||
206 | mv := reflect.MakeMap(v.Type()) | ||
207 | |||
208 | for k, attr := range attrs { | ||
209 | switch { | ||
210 | case attrType.AssignableTo(v.Type().Elem()): | ||
211 | mv.SetMapIndex(reflect.ValueOf(k), reflect.ValueOf(attr)) | ||
212 | case exprType.AssignableTo(v.Type().Elem()): | ||
213 | mv.SetMapIndex(reflect.ValueOf(k), reflect.ValueOf(attr.Expr)) | ||
214 | default: | ||
215 | ev := reflect.New(v.Type().Elem()) | ||
216 | diags = append(diags, DecodeExpression(attr.Expr, ctx, ev.Interface())...) | ||
217 | mv.SetMapIndex(reflect.ValueOf(k), ev.Elem()) | ||
218 | } | ||
219 | } | ||
220 | |||
221 | v.Set(mv) | ||
222 | |||
223 | return diags | ||
224 | } | ||
225 | |||
// decodeBlockToValue decodes a single block into v. If v's type can directly
// hold the raw *hcl.Block, hcl.Body, or hcl.Attributes then the raw value is
// assigned; otherwise the block body is decoded recursively and any block
// labels are copied into the label-tagged fields of the target struct.
//
// NOTE(review): in the raw-assignment cases v.Elem() is used, which assumes v
// is a pointer-like value in those branches; in the default case v itself is
// the (addressable) struct value — confirm against callers in
// decodeBodyToStruct.
func decodeBlockToValue(block *hcl.Block, ctx *hcl.EvalContext, v reflect.Value) hcl.Diagnostics {
	var diags hcl.Diagnostics

	ty := v.Type()

	switch {
	case blockType.AssignableTo(ty):
		v.Elem().Set(reflect.ValueOf(block))
	case bodyType.AssignableTo(ty):
		v.Elem().Set(reflect.ValueOf(block.Body))
	case attrsType.AssignableTo(ty):
		attrs, attrsDiags := block.Body.JustAttributes()
		if len(attrsDiags) > 0 {
			diags = append(diags, attrsDiags...)
		}
		v.Elem().Set(reflect.ValueOf(attrs))
	default:
		// Recursive decode of the block body into the target struct/map.
		diags = append(diags, decodeBodyToValue(block.Body, ctx, v)...)

		// Copy block labels into fields tagged `hcl:"...,label"`, matched
		// positionally (label i goes to the i-th label-tagged field).
		if len(block.Labels) > 0 {
			blockTags := getFieldTags(ty)
			for li, lv := range block.Labels {
				lfieldIdx := blockTags.Labels[li].FieldIndex
				v.Field(lfieldIdx).Set(reflect.ValueOf(lv))
			}
		}

	}

	return diags
}
257 | |||
258 | // DecodeExpression extracts the value of the given expression into the given | ||
259 | // value. This value must be something that gocty is able to decode into, | ||
260 | // since the final decoding is delegated to that package. | ||
261 | // | ||
262 | // The given EvalContext is used to resolve any variables or functions in | ||
263 | // expressions encountered while decoding. This may be nil to require only | ||
264 | // constant values, for simple applications that do not support variables or | ||
265 | // functions. | ||
266 | // | ||
267 | // The returned diagnostics should be inspected with its HasErrors method to | ||
268 | // determine if the populated value is valid and complete. If error diagnostics | ||
269 | // are returned then the given value may have been partially-populated but | ||
270 | // may still be accessed by a careful caller for static analysis and editor | ||
271 | // integration use-cases. | ||
272 | func DecodeExpression(expr hcl.Expression, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics { | ||
273 | srcVal, diags := expr.Value(ctx) | ||
274 | |||
275 | convTy, err := gocty.ImpliedType(val) | ||
276 | if err != nil { | ||
277 | panic(fmt.Sprintf("unsuitable DecodeExpression target: %s", err)) | ||
278 | } | ||
279 | |||
280 | srcVal, err = convert.Convert(srcVal, convTy) | ||
281 | if err != nil { | ||
282 | diags = append(diags, &hcl.Diagnostic{ | ||
283 | Severity: hcl.DiagError, | ||
284 | Summary: "Unsuitable value type", | ||
285 | Detail: fmt.Sprintf("Unsuitable value: %s", err.Error()), | ||
286 | Subject: expr.StartRange().Ptr(), | ||
287 | Context: expr.Range().Ptr(), | ||
288 | }) | ||
289 | return diags | ||
290 | } | ||
291 | |||
292 | err = gocty.FromCtyValue(srcVal, val) | ||
293 | if err != nil { | ||
294 | diags = append(diags, &hcl.Diagnostic{ | ||
295 | Severity: hcl.DiagError, | ||
296 | Summary: "Unsuitable value type", | ||
297 | Detail: fmt.Sprintf("Unsuitable value: %s", err.Error()), | ||
298 | Subject: expr.StartRange().Ptr(), | ||
299 | Context: expr.Range().Ptr(), | ||
300 | }) | ||
301 | } | ||
302 | |||
303 | return diags | ||
304 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/gohcl/doc.go b/vendor/github.com/hashicorp/hcl2/gohcl/doc.go new file mode 100644 index 0000000..8500214 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/gohcl/doc.go | |||
@@ -0,0 +1,49 @@ | |||
1 | // Package gohcl allows decoding HCL configurations into Go data structures. | ||
2 | // | ||
3 | // It provides a convenient and concise way of describing the schema for | ||
4 | // configuration and then accessing the resulting data via native Go | ||
5 | // types. | ||
6 | // | ||
7 | // A struct field tag scheme is used, similar to other decoding and | ||
8 | // unmarshalling libraries. The tags are formatted as in the following example: | ||
9 | // | ||
10 | // ThingType string `hcl:"thing_type,attr"` | ||
11 | // | ||
12 | // Within each tag there are two comma-separated tokens. The first is the | ||
13 | // name of the corresponding construct in configuration, while the second | ||
14 | // is a keyword giving the kind of construct expected. The following | ||
15 | // kind keywords are supported: | ||
16 | // | ||
17 | // attr (the default) indicates that the value is to be populated from an attribute | ||
18 | // block indicates that the value is to be populated from a block | ||
19 | // label indicates that the value is to be populated from a block label | ||
20 | // remain indicates that the value is to be populated from the remaining body after populating other fields | ||
21 | // | ||
22 | // "attr" fields may either be of type *hcl.Expression, in which case the raw | ||
23 | // expression is assigned, or of any type accepted by gocty, in which case | ||
24 | // gocty will be used to assign the value to a native Go type. | ||
25 | // | ||
26 | // "block" fields may be of type *hcl.Block or hcl.Body, in which case the | ||
27 | // corresponding raw value is assigned, or may be a struct that recursively | ||
28 | // uses the same tags. Block fields may also be slices of any of these types, | ||
29 | // in which case multiple blocks of the corresponding type are decoded into | ||
30 | // the slice. | ||
31 | // | ||
32 | // "label" fields are considered only in a struct used as the type of a field | ||
33 | // marked as "block", and are used sequentially to capture the labels of | ||
34 | // the blocks being decoded. In this case, the name token is used only as | ||
35 | // an identifier for the label in diagnostic messages. | ||
36 | // | ||
37 | // "remain" can be placed on a single field that may be either of type | ||
38 | // hcl.Body or hcl.Attributes, in which case any remaining body content is | ||
39 | // placed into this field for delayed processing. If no "remain" field is | ||
40 | // present then any attributes or blocks not matched by another valid tag | ||
41 | // will cause an error diagnostic. | ||
42 | // | ||
43 | // Broadly-speaking this package deals with two types of error. The first is | ||
44 | // errors in the configuration itself, which are returned as diagnostics | ||
45 | // written with the configuration author as the target audience. The second | ||
46 | // is bugs in the calling program, such as invalid struct tags, which are | ||
47 | // surfaced via panics since there can be no useful runtime handling of such | ||
48 | // errors and they should certainly not be returned to the user as diagnostics. | ||
49 | package gohcl | ||
diff --git a/vendor/github.com/hashicorp/hcl2/gohcl/schema.go b/vendor/github.com/hashicorp/hcl2/gohcl/schema.go new file mode 100644 index 0000000..88164cb --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/gohcl/schema.go | |||
@@ -0,0 +1,174 @@ | |||
1 | package gohcl | ||
2 | |||
3 | import ( | ||
4 | "fmt" | ||
5 | "reflect" | ||
6 | "sort" | ||
7 | "strings" | ||
8 | |||
9 | "github.com/hashicorp/hcl2/hcl" | ||
10 | ) | ||
11 | |||
// ImpliedBodySchema produces a hcl.BodySchema derived from the type of the
// given value, which must be a struct value or a pointer to one. If an
// inappropriate value is passed, this function will panic.
//
// The second return argument indicates whether the given struct includes
// a "remain" field, and thus the returned schema is non-exhaustive.
//
// This uses the tags on the fields of the struct to discover how each
// field's value should be expressed within configuration. If an invalid
// mapping is attempted, this function will panic.
func ImpliedBodySchema(val interface{}) (schema *hcl.BodySchema, partial bool) {
	ty := reflect.TypeOf(val)

	// Accept either a struct or a pointer to one.
	if ty.Kind() == reflect.Ptr {
		ty = ty.Elem()
	}

	if ty.Kind() != reflect.Struct {
		panic(fmt.Sprintf("given value must be struct, not %T", val))
	}

	var attrSchemas []hcl.AttributeSchema
	var blockSchemas []hcl.BlockHeaderSchema

	tags := getFieldTags(ty)

	// Sort the attribute names so the resulting schema is deterministic
	// regardless of map iteration order.
	attrNames := make([]string, 0, len(tags.Attributes))
	for n := range tags.Attributes {
		attrNames = append(attrNames, n)
	}
	sort.Strings(attrNames)
	for _, n := range attrNames {
		idx := tags.Attributes[n]
		optional := tags.Optional[n]
		field := ty.Field(idx)

		var required bool

		switch {
		case field.Type.AssignableTo(exprType):
			// If we're decoding to hcl.Expression then absence can be
			// indicated via a null value, so we don't specify that
			// the field is required during decoding.
			required = false
		case field.Type.Kind() != reflect.Ptr && !optional:
			// Non-pointer fields without the ",optional" tag have no way
			// to represent absence, so the attribute must be present.
			required = true
		default:
			required = false
		}

		attrSchemas = append(attrSchemas, hcl.AttributeSchema{
			Name:     n,
			Required: required,
		})
	}

	// Likewise, sort block type names for a deterministic schema.
	blockNames := make([]string, 0, len(tags.Blocks))
	for n := range tags.Blocks {
		blockNames = append(blockNames, n)
	}
	sort.Strings(blockNames)
	for _, n := range blockNames {
		idx := tags.Blocks[n]
		field := ty.Field(idx)
		fty := field.Type
		// Unwrap slice-of-struct and pointer-to-struct forms to find the
		// struct type whose tags describe the block's labels.
		if fty.Kind() == reflect.Slice {
			fty = fty.Elem()
		}
		if fty.Kind() == reflect.Ptr {
			fty = fty.Elem()
		}
		if fty.Kind() != reflect.Struct {
			panic(fmt.Sprintf(
				"hcl 'block' tag kind cannot be applied to %s field %s: struct required", field.Type.String(), field.Name,
			))
		}
		ftags := getFieldTags(fty)
		var labelNames []string
		if len(ftags.Labels) > 0 {
			labelNames = make([]string, len(ftags.Labels))
			for i, l := range ftags.Labels {
				labelNames[i] = l.Name
			}
		}

		blockSchemas = append(blockSchemas, hcl.BlockHeaderSchema{
			Type:       n,
			LabelNames: labelNames,
		})
	}

	// The presence of a "remain" field means the schema does not need to
	// describe the body exhaustively.
	partial = tags.Remain != nil
	schema = &hcl.BodySchema{
		Attributes: attrSchemas,
		Blocks:     blockSchemas,
	}
	return schema, partial
}
110 | |||
111 | type fieldTags struct { | ||
112 | Attributes map[string]int | ||
113 | Blocks map[string]int | ||
114 | Labels []labelField | ||
115 | Remain *int | ||
116 | Optional map[string]bool | ||
117 | } | ||
118 | |||
119 | type labelField struct { | ||
120 | FieldIndex int | ||
121 | Name string | ||
122 | } | ||
123 | |||
124 | func getFieldTags(ty reflect.Type) *fieldTags { | ||
125 | ret := &fieldTags{ | ||
126 | Attributes: map[string]int{}, | ||
127 | Blocks: map[string]int{}, | ||
128 | Optional: map[string]bool{}, | ||
129 | } | ||
130 | |||
131 | ct := ty.NumField() | ||
132 | for i := 0; i < ct; i++ { | ||
133 | field := ty.Field(i) | ||
134 | tag := field.Tag.Get("hcl") | ||
135 | if tag == "" { | ||
136 | continue | ||
137 | } | ||
138 | |||
139 | comma := strings.Index(tag, ",") | ||
140 | var name, kind string | ||
141 | if comma != -1 { | ||
142 | name = tag[:comma] | ||
143 | kind = tag[comma+1:] | ||
144 | } else { | ||
145 | name = tag | ||
146 | kind = "attr" | ||
147 | } | ||
148 | |||
149 | switch kind { | ||
150 | case "attr": | ||
151 | ret.Attributes[name] = i | ||
152 | case "block": | ||
153 | ret.Blocks[name] = i | ||
154 | case "label": | ||
155 | ret.Labels = append(ret.Labels, labelField{ | ||
156 | FieldIndex: i, | ||
157 | Name: name, | ||
158 | }) | ||
159 | case "remain": | ||
160 | if ret.Remain != nil { | ||
161 | panic("only one 'remain' tag is permitted") | ||
162 | } | ||
163 | idx := i // copy, because this loop will continue assigning to i | ||
164 | ret.Remain = &idx | ||
165 | case "optional": | ||
166 | ret.Attributes[name] = i | ||
167 | ret.Optional[name] = true | ||
168 | default: | ||
169 | panic(fmt.Sprintf("invalid hcl field tag kind %q on %s %q", kind, field.Type.String(), field.Name)) | ||
170 | } | ||
171 | } | ||
172 | |||
173 | return ret | ||
174 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/gohcl/types.go b/vendor/github.com/hashicorp/hcl2/gohcl/types.go new file mode 100644 index 0000000..a94f275 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/gohcl/types.go | |||
@@ -0,0 +1,16 @@ | |||
1 | package gohcl | ||
2 | |||
3 | import ( | ||
4 | "reflect" | ||
5 | |||
6 | "github.com/hashicorp/hcl2/hcl" | ||
7 | ) | ||
8 | |||
// victimExpr and victimBody exist only so that we can take the address of
// an interface-typed variable below and recover the reflect.Type of the
// interface itself (rather than of some concrete implementation of it).
var victimExpr hcl.Expression
var victimBody hcl.Body

// Reflection types for the hcl types that decoding treats specially.
// exprType/bodyType are interface types; the others are concrete.
var exprType = reflect.TypeOf(&victimExpr).Elem()
var bodyType = reflect.TypeOf(&victimBody).Elem()
var blockType = reflect.TypeOf((*hcl.Block)(nil))
var attrType = reflect.TypeOf((*hcl.Attribute)(nil))
var attrsType = reflect.TypeOf(hcl.Attributes(nil))
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/diagnostic.go b/vendor/github.com/hashicorp/hcl2/hcl/diagnostic.go new file mode 100644 index 0000000..6ecf744 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/diagnostic.go | |||
@@ -0,0 +1,103 @@ | |||
1 | package hcl | ||
2 | |||
3 | import ( | ||
4 | "fmt" | ||
5 | ) | ||
6 | |||
// DiagnosticSeverity represents the severity of a diagnostic.
type DiagnosticSeverity int

const (
	// DiagInvalid is the invalid zero value of DiagnosticSeverity.
	DiagInvalid DiagnosticSeverity = iota

	// DiagError indicates that the problem reported by a diagnostic prevents
	// further progress in parsing and/or evaluating the subject.
	DiagError

	// DiagWarning indicates that the problem reported by a diagnostic warrants
	// user attention but does not prevent further progress. It is most
	// commonly used for showing deprecation notices.
	DiagWarning
)

// Diagnostic represents information to be presented to a user about an
// error or anomaly in parsing or evaluating configuration.
type Diagnostic struct {
	Severity DiagnosticSeverity

	// Summary and Detail contain the English-language description of the
	// problem. Summary is a terse description of the general problem and
	// Detail is a more elaborate, often-multi-sentence description of
	// the problem and what might be done to solve it.
	Summary string
	Detail  string

	// Subject is the source range most directly associated with the
	// problem; Context may optionally give a wider enclosing range.
	// Either may be nil.
	Subject *Range
	Context *Range
}

// Diagnostics is a list of Diagnostic instances.
type Diagnostics []*Diagnostic
41 | |||
42 | // error implementation, so that diagnostics can be returned via APIs | ||
43 | // that normally deal in vanilla Go errors. | ||
44 | // | ||
45 | // This presents only minimal context about the error, for compatibility | ||
46 | // with usual expectations about how errors will present as strings. | ||
47 | func (d *Diagnostic) Error() string { | ||
48 | return fmt.Sprintf("%s: %s; %s", d.Subject, d.Summary, d.Detail) | ||
49 | } | ||
50 | |||
51 | // error implementation, so that sets of diagnostics can be returned via | ||
52 | // APIs that normally deal in vanilla Go errors. | ||
53 | func (d Diagnostics) Error() string { | ||
54 | count := len(d) | ||
55 | switch { | ||
56 | case count == 0: | ||
57 | return "no diagnostics" | ||
58 | case count == 1: | ||
59 | return d[0].Error() | ||
60 | default: | ||
61 | return fmt.Sprintf("%s, and %d other diagnostic(s)", d[0].Error(), count-1) | ||
62 | } | ||
63 | } | ||
64 | |||
65 | // Append appends a new error to a Diagnostics and return the whole Diagnostics. | ||
66 | // | ||
67 | // This is provided as a convenience for returning from a function that | ||
68 | // collects and then returns a set of diagnostics: | ||
69 | // | ||
70 | // return nil, diags.Append(&hcl.Diagnostic{ ... }) | ||
71 | // | ||
72 | // Note that this modifies the array underlying the diagnostics slice, so | ||
73 | // must be used carefully within a single codepath. It is incorrect (and rude) | ||
74 | // to extend a diagnostics created by a different subsystem. | ||
75 | func (d Diagnostics) Append(diag *Diagnostic) Diagnostics { | ||
76 | return append(d, diag) | ||
77 | } | ||
78 | |||
79 | // Extend concatenates the given Diagnostics with the receiver and returns | ||
80 | // the whole new Diagnostics. | ||
81 | // | ||
82 | // This is similar to Append but accepts multiple diagnostics to add. It has | ||
83 | // all the same caveats and constraints. | ||
84 | func (d Diagnostics) Extend(diags Diagnostics) Diagnostics { | ||
85 | return append(d, diags...) | ||
86 | } | ||
87 | |||
88 | // HasErrors returns true if the receiver contains any diagnostics of | ||
89 | // severity DiagError. | ||
90 | func (d Diagnostics) HasErrors() bool { | ||
91 | for _, diag := range d { | ||
92 | if diag.Severity == DiagError { | ||
93 | return true | ||
94 | } | ||
95 | } | ||
96 | return false | ||
97 | } | ||
98 | |||
// A DiagnosticWriter emits diagnostics somehow.
type DiagnosticWriter interface {
	// WriteDiagnostic emits a single diagnostic.
	WriteDiagnostic(*Diagnostic) error
	// WriteDiagnostics emits each of the given diagnostics in order.
	WriteDiagnostics(Diagnostics) error
}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/diagnostic_text.go b/vendor/github.com/hashicorp/hcl2/hcl/diagnostic_text.go new file mode 100644 index 0000000..dfa473a --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/diagnostic_text.go | |||
@@ -0,0 +1,168 @@ | |||
1 | package hcl | ||
2 | |||
3 | import ( | ||
4 | "bufio" | ||
5 | "errors" | ||
6 | "fmt" | ||
7 | "io" | ||
8 | |||
9 | wordwrap "github.com/mitchellh/go-wordwrap" | ||
10 | ) | ||
11 | |||
// diagnosticTextWriter is the DiagnosticWriter implementation returned by
// NewDiagnosticTextWriter.
type diagnosticTextWriter struct {
	files map[string]*File // source files by filename, used for code snippets
	wr    io.Writer        // destination for the formatted text
	width uint             // wrap width in columns; zero disables wrapping
	color bool             // whether to emit VT100 color escape sequences
}

// NewDiagnosticTextWriter creates a DiagnosticWriter that writes diagnostics
// to the given writer as formatted text.
//
// It is designed to produce text appropriate to print in a monospaced font
// in a terminal of a particular width, or optionally with no width limit.
//
// The given width may be zero to disable word-wrapping of the detail text
// and truncation of source code snippets.
//
// If color is set to true, the output will include VT100 escape sequences to
// color-code the severity indicators. It is suggested to turn this off if
// the target writer is not a terminal.
func NewDiagnosticTextWriter(wr io.Writer, files map[string]*File, width uint, color bool) DiagnosticWriter {
	return &diagnosticTextWriter{
		files: files,
		wr:    wr,
		width: width,
		color: color,
	}
}
39 | |||
// WriteDiagnostic formats the given diagnostic as human-oriented text and
// writes it to the writer this diagnosticTextWriter was constructed with.
// The output consists of a severity/summary header, then (when the
// relevant file was provided at construction) an annotated source snippet,
// and finally the optionally word-wrapped detail text.
func (w *diagnosticTextWriter) WriteDiagnostic(diag *Diagnostic) error {
	if diag == nil {
		return errors.New("nil diagnostic")
	}

	// Escape sequences stay empty (no-ops in output) when color is off.
	var colorCode, highlightCode, resetCode string
	if w.color {
		switch diag.Severity {
		case DiagError:
			colorCode = "\x1b[31m"
		case DiagWarning:
			colorCode = "\x1b[33m"
		}
		resetCode = "\x1b[0m"
		highlightCode = "\x1b[1;4m"
	}

	var severityStr string
	switch diag.Severity {
	case DiagError:
		severityStr = "Error"
	case DiagWarning:
		severityStr = "Warning"
	default:
		// should never happen
		severityStr = "???????"
	}

	fmt.Fprintf(w.wr, "%s%s%s: %s\n\n", colorCode, severityStr, resetCode, diag.Summary)

	if diag.Subject != nil {
		// snipRange is the span of source we will print; highlightRange is
		// the (sub)span to emphasize within it.
		snipRange := *diag.Subject
		highlightRange := snipRange
		if diag.Context != nil {
			// Show enough of the source code to include both the subject
			// and context ranges, which overlap in all reasonable
			// situations.
			snipRange = RangeOver(snipRange, *diag.Context)
		}
		// We can't illustrate an empty range, so we'll turn such ranges into
		// single-character ranges, which might not be totally valid (may point
		// off the end of a line, or off the end of the file) but are good
		// enough for the bounds checks we do below.
		if snipRange.Empty() {
			snipRange.End.Byte++
			snipRange.End.Column++
		}
		if highlightRange.Empty() {
			highlightRange.End.Byte++
			highlightRange.End.Column++
		}

		file := w.files[diag.Subject.Filename]
		if file == nil || file.Bytes == nil {
			// The caller didn't provide this file's source, so we can only
			// name the location, not quote it.
			fmt.Fprintf(w.wr, " on %s line %d:\n (source code not available)\n\n", diag.Subject.Filename, diag.Subject.Start.Line)
		} else {

			// Ask the file's navigation metadata (if any) to describe the
			// construct containing the subject, e.g. a block name.
			var contextLine string
			if diag.Subject != nil {
				contextLine = contextString(file, diag.Subject.Start.Byte)
				if contextLine != "" {
					contextLine = ", in " + contextLine
				}
			}

			fmt.Fprintf(w.wr, " on %s line %d%s:\n", diag.Subject.Filename, diag.Subject.Start.Line, contextLine)

			// Scan the file line-by-line and print only the lines that
			// overlap the snippet range, highlighting the subject span.
			src := file.Bytes
			sc := NewRangeScanner(src, diag.Subject.Filename, bufio.ScanLines)

			for sc.Scan() {
				lineRange := sc.Range()
				if !lineRange.Overlaps(snipRange) {
					continue
				}

				beforeRange, highlightedRange, afterRange := lineRange.PartitionAround(highlightRange)
				if highlightedRange.Empty() {
					// Nothing on this line to emphasize; print it plainly.
					fmt.Fprintf(w.wr, "%4d: %s\n", lineRange.Start.Line, sc.Bytes())
				} else {
					before := beforeRange.SliceBytes(src)
					highlighted := highlightedRange.SliceBytes(src)
					after := afterRange.SliceBytes(src)
					fmt.Fprintf(
						w.wr, "%4d: %s%s%s%s%s\n",
						lineRange.Start.Line,
						before,
						highlightCode, highlighted, resetCode,
						after,
					)
				}

			}

			w.wr.Write([]byte{'\n'})
		}
	}

	if diag.Detail != "" {
		detail := diag.Detail
		if w.width != 0 {
			detail = wordwrap.WrapString(detail, w.width)
		}
		fmt.Fprintf(w.wr, "%s\n\n", detail)
	}

	return nil
}
148 | |||
149 | func (w *diagnosticTextWriter) WriteDiagnostics(diags Diagnostics) error { | ||
150 | for _, diag := range diags { | ||
151 | err := w.WriteDiagnostic(diag) | ||
152 | if err != nil { | ||
153 | return err | ||
154 | } | ||
155 | } | ||
156 | return nil | ||
157 | } | ||
158 | |||
159 | func contextString(file *File, offset int) string { | ||
160 | type contextStringer interface { | ||
161 | ContextString(offset int) string | ||
162 | } | ||
163 | |||
164 | if cser, ok := file.Nav.(contextStringer); ok { | ||
165 | return cser.ContextString(offset) | ||
166 | } | ||
167 | return "" | ||
168 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/didyoumean.go b/vendor/github.com/hashicorp/hcl2/hcl/didyoumean.go new file mode 100644 index 0000000..c128334 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/didyoumean.go | |||
@@ -0,0 +1,24 @@ | |||
1 | package hcl | ||
2 | |||
3 | import ( | ||
4 | "github.com/agext/levenshtein" | ||
5 | ) | ||
6 | |||
7 | // nameSuggestion tries to find a name from the given slice of suggested names | ||
8 | // that is close to the given name and returns it if found. If no suggestion | ||
9 | // is close enough, returns the empty string. | ||
10 | // | ||
11 | // The suggestions are tried in order, so earlier suggestions take precedence | ||
12 | // if the given string is similar to two or more suggestions. | ||
13 | // | ||
14 | // This function is intended to be used with a relatively-small number of | ||
15 | // suggestions. It's not optimized for hundreds or thousands of them. | ||
16 | func nameSuggestion(given string, suggestions []string) string { | ||
17 | for _, suggestion := range suggestions { | ||
18 | dist := levenshtein.Distance(given, suggestion, nil) | ||
19 | if dist < 3 { // threshold determined experimentally | ||
20 | return suggestion | ||
21 | } | ||
22 | } | ||
23 | return "" | ||
24 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/doc.go b/vendor/github.com/hashicorp/hcl2/hcl/doc.go new file mode 100644 index 0000000..01318c9 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/doc.go | |||
@@ -0,0 +1 @@ | |||
package hcl | |||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/eval_context.go b/vendor/github.com/hashicorp/hcl2/hcl/eval_context.go new file mode 100644 index 0000000..915910a --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/eval_context.go | |||
@@ -0,0 +1,25 @@ | |||
1 | package hcl | ||
2 | |||
3 | import ( | ||
4 | "github.com/zclconf/go-cty/cty" | ||
5 | "github.com/zclconf/go-cty/cty/function" | ||
6 | ) | ||
7 | |||
// An EvalContext provides the variables and functions that should be used
// to evaluate an expression.
type EvalContext struct {
	Variables map[string]cty.Value
	Functions map[string]function.Function
	parent    *EvalContext // enclosing scope, if any; see NewChild
}

// NewChild returns a new EvalContext that is a child of the receiver.
// The child starts with no variables or functions of its own.
func (ctx *EvalContext) NewChild() *EvalContext {
	return &EvalContext{parent: ctx}
}

// Parent returns the parent of the receiver, or nil if the receiver has
// no parent.
func (ctx *EvalContext) Parent() *EvalContext {
	return ctx.parent
}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/expr_call.go b/vendor/github.com/hashicorp/hcl2/hcl/expr_call.go new file mode 100644 index 0000000..6963fba --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/expr_call.go | |||
@@ -0,0 +1,46 @@ | |||
1 | package hcl | ||
2 | |||
3 | // ExprCall tests if the given expression is a function call and, | ||
4 | // if so, extracts the function name and the expressions that represent | ||
5 | // the arguments. If the given expression is not statically a function call, | ||
6 | // error diagnostics are returned. | ||
7 | // | ||
8 | // A particular Expression implementation can support this function by | ||
9 | // offering a method called ExprCall that takes no arguments and returns | ||
10 | // *StaticCall. This method should return nil if a static call cannot | ||
11 | // be extracted. Alternatively, an implementation can support | ||
12 | // UnwrapExpression to delegate handling of this function to a wrapped | ||
13 | // Expression object. | ||
14 | func ExprCall(expr Expression) (*StaticCall, Diagnostics) { | ||
15 | type exprCall interface { | ||
16 | ExprCall() *StaticCall | ||
17 | } | ||
18 | |||
19 | physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool { | ||
20 | _, supported := expr.(exprCall) | ||
21 | return supported | ||
22 | }) | ||
23 | |||
24 | if exC, supported := physExpr.(exprCall); supported { | ||
25 | if call := exC.ExprCall(); call != nil { | ||
26 | return call, nil | ||
27 | } | ||
28 | } | ||
29 | return nil, Diagnostics{ | ||
30 | &Diagnostic{ | ||
31 | Severity: DiagError, | ||
32 | Summary: "Invalid expression", | ||
33 | Detail: "A static function call is required.", | ||
34 | Subject: expr.StartRange().Ptr(), | ||
35 | }, | ||
36 | } | ||
37 | } | ||
38 | |||
// StaticCall represents a function call that was extracted statically from
// an expression using ExprCall.
type StaticCall struct {
	Name      string       // name of the function being called
	NameRange Range        // source range of the function name
	Arguments []Expression // one expression per call argument
	ArgsRange Range        // source range covering the arguments
}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/expr_list.go b/vendor/github.com/hashicorp/hcl2/hcl/expr_list.go new file mode 100644 index 0000000..d05cca0 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/expr_list.go | |||
@@ -0,0 +1,37 @@ | |||
1 | package hcl | ||
2 | |||
3 | // ExprList tests if the given expression is a static list construct and, | ||
4 | // if so, extracts the expressions that represent the list elements. | ||
5 | // If the given expression is not a static list, error diagnostics are | ||
6 | // returned. | ||
7 | // | ||
8 | // A particular Expression implementation can support this function by | ||
9 | // offering a method called ExprList that takes no arguments and returns | ||
10 | // []Expression. This method should return nil if a static list cannot | ||
11 | // be extracted. Alternatively, an implementation can support | ||
12 | // UnwrapExpression to delegate handling of this function to a wrapped | ||
13 | // Expression object. | ||
14 | func ExprList(expr Expression) ([]Expression, Diagnostics) { | ||
15 | type exprList interface { | ||
16 | ExprList() []Expression | ||
17 | } | ||
18 | |||
19 | physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool { | ||
20 | _, supported := expr.(exprList) | ||
21 | return supported | ||
22 | }) | ||
23 | |||
24 | if exL, supported := physExpr.(exprList); supported { | ||
25 | if list := exL.ExprList(); list != nil { | ||
26 | return list, nil | ||
27 | } | ||
28 | } | ||
29 | return nil, Diagnostics{ | ||
30 | &Diagnostic{ | ||
31 | Severity: DiagError, | ||
32 | Summary: "Invalid expression", | ||
33 | Detail: "A static list expression is required.", | ||
34 | Subject: expr.StartRange().Ptr(), | ||
35 | }, | ||
36 | } | ||
37 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/expr_map.go b/vendor/github.com/hashicorp/hcl2/hcl/expr_map.go new file mode 100644 index 0000000..96d1ce4 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/expr_map.go | |||
@@ -0,0 +1,44 @@ | |||
1 | package hcl | ||
2 | |||
3 | // ExprMap tests if the given expression is a static map construct and, | ||
4 | // if so, extracts the expressions that represent the map elements. | ||
5 | // If the given expression is not a static map, error diagnostics are | ||
6 | // returned. | ||
7 | // | ||
8 | // A particular Expression implementation can support this function by | ||
9 | // offering a method called ExprMap that takes no arguments and returns | ||
10 | // []KeyValuePair. This method should return nil if a static map cannot | ||
11 | // be extracted. Alternatively, an implementation can support | ||
12 | // UnwrapExpression to delegate handling of this function to a wrapped | ||
13 | // Expression object. | ||
14 | func ExprMap(expr Expression) ([]KeyValuePair, Diagnostics) { | ||
15 | type exprMap interface { | ||
16 | ExprMap() []KeyValuePair | ||
17 | } | ||
18 | |||
19 | physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool { | ||
20 | _, supported := expr.(exprMap) | ||
21 | return supported | ||
22 | }) | ||
23 | |||
24 | if exM, supported := physExpr.(exprMap); supported { | ||
25 | if pairs := exM.ExprMap(); pairs != nil { | ||
26 | return pairs, nil | ||
27 | } | ||
28 | } | ||
29 | return nil, Diagnostics{ | ||
30 | &Diagnostic{ | ||
31 | Severity: DiagError, | ||
32 | Summary: "Invalid expression", | ||
33 | Detail: "A static map expression is required.", | ||
34 | Subject: expr.StartRange().Ptr(), | ||
35 | }, | ||
36 | } | ||
37 | } | ||
38 | |||
// KeyValuePair represents a pair of expressions that serve as a single item
// within a map or object definition construct.
type KeyValuePair struct {
	Key   Expression // expression producing the item's key
	Value Expression // expression producing the item's value
}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/expr_unwrap.go b/vendor/github.com/hashicorp/hcl2/hcl/expr_unwrap.go new file mode 100644 index 0000000..6d5d205 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/expr_unwrap.go | |||
@@ -0,0 +1,68 @@ | |||
1 | package hcl | ||
2 | |||
3 | type unwrapExpression interface { | ||
4 | UnwrapExpression() Expression | ||
5 | } | ||
6 | |||
7 | // UnwrapExpression removes any "wrapper" expressions from the given expression, | ||
8 | // to recover the representation of the physical expression given in source | ||
9 | // code. | ||
10 | // | ||
11 | // Sometimes wrapping expressions are used to modify expression behavior, e.g. | ||
12 | // in extensions that need to make some local variables available to certain | ||
13 | // sub-trees of the configuration. This can make it difficult to reliably | ||
14 | // type-assert on the physical AST types used by the underlying syntax. | ||
15 | // | ||
16 | // Unwrapping an expression may modify its behavior by stripping away any | ||
17 | // additional constraints or capabilities being applied to the Value and | ||
18 | // Variables methods, so this function should generally only be used prior | ||
19 | // to operations that concern themselves with the static syntax of the input | ||
20 | // configuration, and not with the effective value of the expression. | ||
21 | // | ||
22 | // Wrapper expression types must support unwrapping by implementing a method | ||
23 | // called UnwrapExpression that takes no arguments and returns the embedded | ||
24 | // Expression. Implementations of this method should peel away only one level | ||
25 | // of wrapping, if multiple are present. This method may return nil to | ||
26 | // indicate _dynamically_ that no wrapped expression is available, for | ||
27 | // expression types that might only behave as wrappers in certain cases. | ||
28 | func UnwrapExpression(expr Expression) Expression { | ||
29 | for { | ||
30 | unwrap, wrapped := expr.(unwrapExpression) | ||
31 | if !wrapped { | ||
32 | return expr | ||
33 | } | ||
34 | innerExpr := unwrap.UnwrapExpression() | ||
35 | if innerExpr == nil { | ||
36 | return expr | ||
37 | } | ||
38 | expr = innerExpr | ||
39 | } | ||
40 | } | ||
41 | |||
42 | // UnwrapExpressionUntil is similar to UnwrapExpression except it gives the | ||
43 | // caller an opportunity to test each level of unwrapping to see each a | ||
44 | // particular expression is accepted. | ||
45 | // | ||
46 | // This could be used, for example, to unwrap until a particular other | ||
47 | // interface is satisfied, regardless of wrap wrapping level it is satisfied | ||
48 | // at. | ||
49 | // | ||
50 | // The given callback function must return false to continue wrapping, or | ||
51 | // true to accept and return the proposed expression given. If the callback | ||
52 | // function rejects even the final, physical expression then the result of | ||
53 | // this function is nil. | ||
54 | func UnwrapExpressionUntil(expr Expression, until func(Expression) bool) Expression { | ||
55 | for { | ||
56 | if until(expr) { | ||
57 | return expr | ||
58 | } | ||
59 | unwrap, wrapped := expr.(unwrapExpression) | ||
60 | if !wrapped { | ||
61 | return nil | ||
62 | } | ||
63 | expr = unwrap.UnwrapExpression() | ||
64 | if expr == nil { | ||
65 | return nil | ||
66 | } | ||
67 | } | ||
68 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/didyoumean.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/didyoumean.go new file mode 100644 index 0000000..ccc1c0a --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/didyoumean.go | |||
@@ -0,0 +1,24 @@ | |||
1 | package hclsyntax | ||
2 | |||
3 | import ( | ||
4 | "github.com/agext/levenshtein" | ||
5 | ) | ||
6 | |||
7 | // nameSuggestion tries to find a name from the given slice of suggested names | ||
8 | // that is close to the given name and returns it if found. If no suggestion | ||
9 | // is close enough, returns the empty string. | ||
10 | // | ||
11 | // The suggestions are tried in order, so earlier suggestions take precedence | ||
12 | // if the given string is similar to two or more suggestions. | ||
13 | // | ||
14 | // This function is intended to be used with a relatively-small number of | ||
15 | // suggestions. It's not optimized for hundreds or thousands of them. | ||
16 | func nameSuggestion(given string, suggestions []string) string { | ||
17 | for _, suggestion := range suggestions { | ||
18 | dist := levenshtein.Distance(given, suggestion, nil) | ||
19 | if dist < 3 { // threshold determined experimentally | ||
20 | return suggestion | ||
21 | } | ||
22 | } | ||
23 | return "" | ||
24 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/doc.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/doc.go new file mode 100644 index 0000000..617bc29 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/doc.go | |||
@@ -0,0 +1,7 @@ | |||
1 | // Package hclsyntax contains the parser, AST, etc for HCL's native language, | ||
2 | // as opposed to the JSON variant. | ||
3 | // | ||
4 | // In normal use applications should rarely depend on this package directly, | ||
5 | // instead preferring the higher-level interface of the main hcl package and | ||
6 | // its companion package hclparse. | ||
7 | package hclsyntax | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression.go new file mode 100644 index 0000000..cfc7cd9 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression.go | |||
@@ -0,0 +1,1275 @@ | |||
1 | package hclsyntax | ||
2 | |||
3 | import ( | ||
4 | "fmt" | ||
5 | |||
6 | "github.com/hashicorp/hcl2/hcl" | ||
7 | "github.com/zclconf/go-cty/cty" | ||
8 | "github.com/zclconf/go-cty/cty/convert" | ||
9 | "github.com/zclconf/go-cty/cty/function" | ||
10 | ) | ||
11 | |||
// Expression is the abstract type for nodes that behave as HCL expressions.
type Expression interface {
	Node

	// The hcl.Expression methods are duplicated here, rather than simply
	// embedded, because both Node and hcl.Expression have a Range method
	// and so they conflict.

	// Value evaluates the expression within the given evaluation context.
	Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics)
	// Variables returns the traversals of all variables the expression
	// refers to, directly or via child expressions.
	Variables() []hcl.Traversal
	// StartRange returns the source range of the beginning of the
	// expression, which may be smaller than the full Range.
	StartRange() hcl.Range
}
24 | |||
// Assert at compile time that Expression implements hcl.Expression.
var assertExprImplExpr hcl.Expression = Expression(nil)
27 | |||
// LiteralValueExpr is an expression that just always returns a given value.
type LiteralValueExpr struct {
	Val      cty.Value // the constant value to return
	SrcRange hcl.Range // the source range the literal was parsed from
}

func (e *LiteralValueExpr) walkChildNodes(w internalWalkFunc) {
	// Literal values have no child nodes
}

// Value returns the stored constant value; it never produces diagnostics.
func (e *LiteralValueExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
	return e.Val, nil
}

func (e *LiteralValueExpr) Range() hcl.Range {
	return e.SrcRange
}

// StartRange is the same as Range for a literal, since it has no
// sub-expressions.
func (e *LiteralValueExpr) StartRange() hcl.Range {
	return e.SrcRange
}
49 | |||
50 | // Implementation for hcl.AbsTraversalForExpr. | ||
51 | func (e *LiteralValueExpr) AsTraversal() hcl.Traversal { | ||
52 | // This one's a little weird: the contract for AsTraversal is to interpret | ||
53 | // an expression as if it were traversal syntax, and traversal syntax | ||
54 | // doesn't have the special keywords "null", "true", and "false" so these | ||
55 | // are expected to be treated like variables in that case. | ||
56 | // Since our parser already turned them into LiteralValueExpr by the time | ||
57 | // we get here, we need to undo this and infer the name that would've | ||
58 | // originally led to our value. | ||
59 | // We don't do anything for any other values, since they don't overlap | ||
60 | // with traversal roots. | ||
61 | |||
62 | if e.Val.IsNull() { | ||
63 | // In practice the parser only generates null values of the dynamic | ||
64 | // pseudo-type for literals, so we can safely assume that any null | ||
65 | // was orignally the keyword "null". | ||
66 | return hcl.Traversal{ | ||
67 | hcl.TraverseRoot{ | ||
68 | Name: "null", | ||
69 | SrcRange: e.SrcRange, | ||
70 | }, | ||
71 | } | ||
72 | } | ||
73 | |||
74 | switch e.Val { | ||
75 | case cty.True: | ||
76 | return hcl.Traversal{ | ||
77 | hcl.TraverseRoot{ | ||
78 | Name: "true", | ||
79 | SrcRange: e.SrcRange, | ||
80 | }, | ||
81 | } | ||
82 | case cty.False: | ||
83 | return hcl.Traversal{ | ||
84 | hcl.TraverseRoot{ | ||
85 | Name: "false", | ||
86 | SrcRange: e.SrcRange, | ||
87 | }, | ||
88 | } | ||
89 | default: | ||
90 | // No traversal is possible for any other value. | ||
91 | return nil | ||
92 | } | ||
93 | } | ||
94 | |||
// ScopeTraversalExpr is an Expression that retrieves a value from the scope
// using a traversal.
type ScopeTraversalExpr struct {
	Traversal hcl.Traversal // the absolute traversal to resolve in the scope
	SrcRange  hcl.Range
}

func (e *ScopeTraversalExpr) walkChildNodes(w internalWalkFunc) {
	// Scope traversals have no child nodes
}

// Value resolves the traversal against the variables of the given context
// (and its ancestors).
func (e *ScopeTraversalExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
	return e.Traversal.TraverseAbs(ctx)
}

func (e *ScopeTraversalExpr) Range() hcl.Range {
	return e.SrcRange
}

func (e *ScopeTraversalExpr) StartRange() hcl.Range {
	return e.SrcRange
}

// Implementation for hcl.AbsTraversalForExpr.
func (e *ScopeTraversalExpr) AsTraversal() hcl.Traversal {
	return e.Traversal
}
122 | |||
// RelativeTraversalExpr is an Expression that retrieves a value from another
// value using a _relative_ traversal.
type RelativeTraversalExpr struct {
	Source    Expression    // expression producing the value to traverse from
	Traversal hcl.Traversal // the relative traversal to apply to Source's result
	SrcRange  hcl.Range
}
130 | |||
131 | func (e *RelativeTraversalExpr) walkChildNodes(w internalWalkFunc) { | ||
132 | // Scope traversals have no child nodes | ||
133 | } | ||
134 | |||
135 | func (e *RelativeTraversalExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { | ||
136 | src, diags := e.Source.Value(ctx) | ||
137 | ret, travDiags := e.Traversal.TraverseRel(src) | ||
138 | diags = append(diags, travDiags...) | ||
139 | return ret, diags | ||
140 | } | ||
141 | |||
func (e *RelativeTraversalExpr) Range() hcl.Range {
	return e.SrcRange
}

// StartRange is the same as Range here, covering the whole traversal.
func (e *RelativeTraversalExpr) StartRange() hcl.Range {
	return e.SrcRange
}
149 | |||
150 | // Implementation for hcl.AbsTraversalForExpr. | ||
151 | func (e *RelativeTraversalExpr) AsTraversal() hcl.Traversal { | ||
152 | // We can produce a traversal only if our source can. | ||
153 | st, diags := hcl.AbsTraversalForExpr(e.Source) | ||
154 | if diags.HasErrors() { | ||
155 | return nil | ||
156 | } | ||
157 | |||
158 | ret := make(hcl.Traversal, len(st)+len(e.Traversal)) | ||
159 | copy(ret, st) | ||
160 | copy(ret[len(st):], e.Traversal) | ||
161 | return ret | ||
162 | } | ||
163 | |||
// FunctionCallExpr is an Expression that calls a function from the EvalContext
// and returns its result.
type FunctionCallExpr struct {
	Name string       // name used to look up the function in the EvalContext
	Args []Expression // argument expressions, in call order

	// If true, the final argument should be a tuple, list or set which will
	// expand to be one argument per element.
	ExpandFinal bool

	NameRange       hcl.Range
	OpenParenRange  hcl.Range
	CloseParenRange hcl.Range
}
178 | |||
179 | func (e *FunctionCallExpr) walkChildNodes(w internalWalkFunc) { | ||
180 | for i, arg := range e.Args { | ||
181 | e.Args[i] = w(arg).(Expression) | ||
182 | } | ||
183 | } | ||
184 | |||
185 | func (e *FunctionCallExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { | ||
186 | var diags hcl.Diagnostics | ||
187 | |||
188 | var f function.Function | ||
189 | exists := false | ||
190 | hasNonNilMap := false | ||
191 | thisCtx := ctx | ||
192 | for thisCtx != nil { | ||
193 | if thisCtx.Functions == nil { | ||
194 | thisCtx = thisCtx.Parent() | ||
195 | continue | ||
196 | } | ||
197 | hasNonNilMap = true | ||
198 | f, exists = thisCtx.Functions[e.Name] | ||
199 | if exists { | ||
200 | break | ||
201 | } | ||
202 | thisCtx = thisCtx.Parent() | ||
203 | } | ||
204 | |||
205 | if !exists { | ||
206 | if !hasNonNilMap { | ||
207 | return cty.DynamicVal, hcl.Diagnostics{ | ||
208 | { | ||
209 | Severity: hcl.DiagError, | ||
210 | Summary: "Function calls not allowed", | ||
211 | Detail: "Functions may not be called here.", | ||
212 | Subject: e.Range().Ptr(), | ||
213 | }, | ||
214 | } | ||
215 | } | ||
216 | |||
217 | avail := make([]string, 0, len(ctx.Functions)) | ||
218 | for name := range ctx.Functions { | ||
219 | avail = append(avail, name) | ||
220 | } | ||
221 | suggestion := nameSuggestion(e.Name, avail) | ||
222 | if suggestion != "" { | ||
223 | suggestion = fmt.Sprintf(" Did you mean %q?", suggestion) | ||
224 | } | ||
225 | |||
226 | return cty.DynamicVal, hcl.Diagnostics{ | ||
227 | { | ||
228 | Severity: hcl.DiagError, | ||
229 | Summary: "Call to unknown function", | ||
230 | Detail: fmt.Sprintf("There is no function named %q.%s", e.Name, suggestion), | ||
231 | Subject: &e.NameRange, | ||
232 | Context: e.Range().Ptr(), | ||
233 | }, | ||
234 | } | ||
235 | } | ||
236 | |||
237 | params := f.Params() | ||
238 | varParam := f.VarParam() | ||
239 | |||
240 | args := e.Args | ||
241 | if e.ExpandFinal { | ||
242 | if len(args) < 1 { | ||
243 | // should never happen if the parser is behaving | ||
244 | panic("ExpandFinal set on function call with no arguments") | ||
245 | } | ||
246 | expandExpr := args[len(args)-1] | ||
247 | expandVal, expandDiags := expandExpr.Value(ctx) | ||
248 | diags = append(diags, expandDiags...) | ||
249 | if expandDiags.HasErrors() { | ||
250 | return cty.DynamicVal, diags | ||
251 | } | ||
252 | |||
253 | switch { | ||
254 | case expandVal.Type().IsTupleType() || expandVal.Type().IsListType() || expandVal.Type().IsSetType(): | ||
255 | if expandVal.IsNull() { | ||
256 | diags = append(diags, &hcl.Diagnostic{ | ||
257 | Severity: hcl.DiagError, | ||
258 | Summary: "Invalid expanding argument value", | ||
259 | Detail: "The expanding argument (indicated by ...) must not be null.", | ||
260 | Context: expandExpr.Range().Ptr(), | ||
261 | Subject: e.Range().Ptr(), | ||
262 | }) | ||
263 | return cty.DynamicVal, diags | ||
264 | } | ||
265 | if !expandVal.IsKnown() { | ||
266 | return cty.DynamicVal, diags | ||
267 | } | ||
268 | |||
269 | newArgs := make([]Expression, 0, (len(args)-1)+expandVal.LengthInt()) | ||
270 | newArgs = append(newArgs, args[:len(args)-1]...) | ||
271 | it := expandVal.ElementIterator() | ||
272 | for it.Next() { | ||
273 | _, val := it.Element() | ||
274 | newArgs = append(newArgs, &LiteralValueExpr{ | ||
275 | Val: val, | ||
276 | SrcRange: expandExpr.Range(), | ||
277 | }) | ||
278 | } | ||
279 | args = newArgs | ||
280 | default: | ||
281 | diags = append(diags, &hcl.Diagnostic{ | ||
282 | Severity: hcl.DiagError, | ||
283 | Summary: "Invalid expanding argument value", | ||
284 | Detail: "The expanding argument (indicated by ...) must be of a tuple, list, or set type.", | ||
285 | Context: expandExpr.Range().Ptr(), | ||
286 | Subject: e.Range().Ptr(), | ||
287 | }) | ||
288 | return cty.DynamicVal, diags | ||
289 | } | ||
290 | } | ||
291 | |||
292 | if len(args) < len(params) { | ||
293 | missing := params[len(args)] | ||
294 | qual := "" | ||
295 | if varParam != nil { | ||
296 | qual = " at least" | ||
297 | } | ||
298 | return cty.DynamicVal, hcl.Diagnostics{ | ||
299 | { | ||
300 | Severity: hcl.DiagError, | ||
301 | Summary: "Not enough function arguments", | ||
302 | Detail: fmt.Sprintf( | ||
303 | "Function %q expects%s %d argument(s). Missing value for %q.", | ||
304 | e.Name, qual, len(params), missing.Name, | ||
305 | ), | ||
306 | Subject: &e.CloseParenRange, | ||
307 | Context: e.Range().Ptr(), | ||
308 | }, | ||
309 | } | ||
310 | } | ||
311 | |||
312 | if varParam == nil && len(args) > len(params) { | ||
313 | return cty.DynamicVal, hcl.Diagnostics{ | ||
314 | { | ||
315 | Severity: hcl.DiagError, | ||
316 | Summary: "Too many function arguments", | ||
317 | Detail: fmt.Sprintf( | ||
318 | "Function %q expects only %d argument(s).", | ||
319 | e.Name, len(params), | ||
320 | ), | ||
321 | Subject: args[len(params)].StartRange().Ptr(), | ||
322 | Context: e.Range().Ptr(), | ||
323 | }, | ||
324 | } | ||
325 | } | ||
326 | |||
327 | argVals := make([]cty.Value, len(args)) | ||
328 | |||
329 | for i, argExpr := range args { | ||
330 | var param *function.Parameter | ||
331 | if i < len(params) { | ||
332 | param = ¶ms[i] | ||
333 | } else { | ||
334 | param = varParam | ||
335 | } | ||
336 | |||
337 | val, argDiags := argExpr.Value(ctx) | ||
338 | if len(argDiags) > 0 { | ||
339 | diags = append(diags, argDiags...) | ||
340 | } | ||
341 | |||
342 | // Try to convert our value to the parameter type | ||
343 | val, err := convert.Convert(val, param.Type) | ||
344 | if err != nil { | ||
345 | diags = append(diags, &hcl.Diagnostic{ | ||
346 | Severity: hcl.DiagError, | ||
347 | Summary: "Invalid function argument", | ||
348 | Detail: fmt.Sprintf( | ||
349 | "Invalid value for %q parameter: %s.", | ||
350 | param.Name, err, | ||
351 | ), | ||
352 | Subject: argExpr.StartRange().Ptr(), | ||
353 | Context: e.Range().Ptr(), | ||
354 | }) | ||
355 | } | ||
356 | |||
357 | argVals[i] = val | ||
358 | } | ||
359 | |||
360 | if diags.HasErrors() { | ||
361 | // Don't try to execute the function if we already have errors with | ||
362 | // the arguments, because the result will probably be a confusing | ||
363 | // error message. | ||
364 | return cty.DynamicVal, diags | ||
365 | } | ||
366 | |||
367 | resultVal, err := f.Call(argVals) | ||
368 | if err != nil { | ||
369 | switch terr := err.(type) { | ||
370 | case function.ArgError: | ||
371 | i := terr.Index | ||
372 | var param *function.Parameter | ||
373 | if i < len(params) { | ||
374 | param = ¶ms[i] | ||
375 | } else { | ||
376 | param = varParam | ||
377 | } | ||
378 | argExpr := e.Args[i] | ||
379 | |||
380 | // TODO: we should also unpick a PathError here and show the | ||
381 | // path to the deep value where the error was detected. | ||
382 | diags = append(diags, &hcl.Diagnostic{ | ||
383 | Severity: hcl.DiagError, | ||
384 | Summary: "Invalid function argument", | ||
385 | Detail: fmt.Sprintf( | ||
386 | "Invalid value for %q parameter: %s.", | ||
387 | param.Name, err, | ||
388 | ), | ||
389 | Subject: argExpr.StartRange().Ptr(), | ||
390 | Context: e.Range().Ptr(), | ||
391 | }) | ||
392 | |||
393 | default: | ||
394 | diags = append(diags, &hcl.Diagnostic{ | ||
395 | Severity: hcl.DiagError, | ||
396 | Summary: "Error in function call", | ||
397 | Detail: fmt.Sprintf( | ||
398 | "Call to function %q failed: %s.", | ||
399 | e.Name, err, | ||
400 | ), | ||
401 | Subject: e.StartRange().Ptr(), | ||
402 | Context: e.Range().Ptr(), | ||
403 | }) | ||
404 | } | ||
405 | |||
406 | return cty.DynamicVal, diags | ||
407 | } | ||
408 | |||
409 | return resultVal, diags | ||
410 | } | ||
411 | |||
// Range covers the whole call, from the function name to the closing paren.
func (e *FunctionCallExpr) Range() hcl.Range {
	return hcl.RangeBetween(e.NameRange, e.CloseParenRange)
}

// StartRange covers just the function name and the opening paren.
func (e *FunctionCallExpr) StartRange() hcl.Range {
	return hcl.RangeBetween(e.NameRange, e.OpenParenRange)
}
419 | |||
420 | // Implementation for hcl.ExprCall. | ||
421 | func (e *FunctionCallExpr) ExprCall() *hcl.StaticCall { | ||
422 | ret := &hcl.StaticCall{ | ||
423 | Name: e.Name, | ||
424 | NameRange: e.NameRange, | ||
425 | Arguments: make([]hcl.Expression, len(e.Args)), | ||
426 | ArgsRange: hcl.RangeBetween(e.OpenParenRange, e.CloseParenRange), | ||
427 | } | ||
428 | // Need to convert our own Expression objects into hcl.Expression. | ||
429 | for i, arg := range e.Args { | ||
430 | ret.Arguments[i] = arg | ||
431 | } | ||
432 | return ret | ||
433 | } | ||
434 | |||
// ConditionalExpr is an Expression representing a conditional (ternary)
// expression, selecting between two result expressions based on a condition.
type ConditionalExpr struct {
	Condition   Expression // must evaluate to (or convert to) a bool
	TrueResult  Expression // result when the condition is true
	FalseResult Expression // result when the condition is false

	SrcRange hcl.Range
}

func (e *ConditionalExpr) walkChildNodes(w internalWalkFunc) {
	e.Condition = w(e.Condition).(Expression)
	e.TrueResult = w(e.TrueResult).(Expression)
	e.FalseResult = w(e.FalseResult).(Expression)
}
448 | |||
449 | func (e *ConditionalExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { | ||
450 | trueResult, trueDiags := e.TrueResult.Value(ctx) | ||
451 | falseResult, falseDiags := e.FalseResult.Value(ctx) | ||
452 | var diags hcl.Diagnostics | ||
453 | |||
454 | // Try to find a type that both results can be converted to. | ||
455 | resultType, convs := convert.UnifyUnsafe([]cty.Type{trueResult.Type(), falseResult.Type()}) | ||
456 | if resultType == cty.NilType { | ||
457 | return cty.DynamicVal, hcl.Diagnostics{ | ||
458 | { | ||
459 | Severity: hcl.DiagError, | ||
460 | Summary: "Inconsistent conditional result types", | ||
461 | Detail: fmt.Sprintf( | ||
462 | // FIXME: Need a helper function for showing natural-language type diffs, | ||
463 | // since this will generate some useless messages in some cases, like | ||
464 | // "These expressions are object and object respectively" if the | ||
465 | // object types don't exactly match. | ||
466 | "The true and false result expressions must have consistent types. The given expressions are %s and %s, respectively.", | ||
467 | trueResult.Type(), falseResult.Type(), | ||
468 | ), | ||
469 | Subject: hcl.RangeBetween(e.TrueResult.Range(), e.FalseResult.Range()).Ptr(), | ||
470 | Context: &e.SrcRange, | ||
471 | }, | ||
472 | } | ||
473 | } | ||
474 | |||
475 | condResult, condDiags := e.Condition.Value(ctx) | ||
476 | diags = append(diags, condDiags...) | ||
477 | if condResult.IsNull() { | ||
478 | diags = append(diags, &hcl.Diagnostic{ | ||
479 | Severity: hcl.DiagError, | ||
480 | Summary: "Null condition", | ||
481 | Detail: "The condition value is null. Conditions must either be true or false.", | ||
482 | Subject: e.Condition.Range().Ptr(), | ||
483 | Context: &e.SrcRange, | ||
484 | }) | ||
485 | return cty.UnknownVal(resultType), diags | ||
486 | } | ||
487 | if !condResult.IsKnown() { | ||
488 | return cty.UnknownVal(resultType), diags | ||
489 | } | ||
490 | condResult, err := convert.Convert(condResult, cty.Bool) | ||
491 | if err != nil { | ||
492 | diags = append(diags, &hcl.Diagnostic{ | ||
493 | Severity: hcl.DiagError, | ||
494 | Summary: "Incorrect condition type", | ||
495 | Detail: fmt.Sprintf("The condition expression must be of type bool."), | ||
496 | Subject: e.Condition.Range().Ptr(), | ||
497 | Context: &e.SrcRange, | ||
498 | }) | ||
499 | return cty.UnknownVal(resultType), diags | ||
500 | } | ||
501 | |||
502 | if condResult.True() { | ||
503 | diags = append(diags, trueDiags...) | ||
504 | if convs[0] != nil { | ||
505 | var err error | ||
506 | trueResult, err = convs[0](trueResult) | ||
507 | if err != nil { | ||
508 | // Unsafe conversion failed with the concrete result value | ||
509 | diags = append(diags, &hcl.Diagnostic{ | ||
510 | Severity: hcl.DiagError, | ||
511 | Summary: "Inconsistent conditional result types", | ||
512 | Detail: fmt.Sprintf( | ||
513 | "The true result value has the wrong type: %s.", | ||
514 | err.Error(), | ||
515 | ), | ||
516 | Subject: e.TrueResult.Range().Ptr(), | ||
517 | Context: &e.SrcRange, | ||
518 | }) | ||
519 | trueResult = cty.UnknownVal(resultType) | ||
520 | } | ||
521 | } | ||
522 | return trueResult, diags | ||
523 | } else { | ||
524 | diags = append(diags, falseDiags...) | ||
525 | if convs[1] != nil { | ||
526 | var err error | ||
527 | falseResult, err = convs[1](falseResult) | ||
528 | if err != nil { | ||
529 | // Unsafe conversion failed with the concrete result value | ||
530 | diags = append(diags, &hcl.Diagnostic{ | ||
531 | Severity: hcl.DiagError, | ||
532 | Summary: "Inconsistent conditional result types", | ||
533 | Detail: fmt.Sprintf( | ||
534 | "The false result value has the wrong type: %s.", | ||
535 | err.Error(), | ||
536 | ), | ||
537 | Subject: e.TrueResult.Range().Ptr(), | ||
538 | Context: &e.SrcRange, | ||
539 | }) | ||
540 | falseResult = cty.UnknownVal(resultType) | ||
541 | } | ||
542 | } | ||
543 | return falseResult, diags | ||
544 | } | ||
545 | } | ||
546 | |||
func (e *ConditionalExpr) Range() hcl.Range {
	return e.SrcRange
}

// StartRange is the start of the condition expression, since that is what
// appears first in source.
func (e *ConditionalExpr) StartRange() hcl.Range {
	return e.Condition.StartRange()
}
554 | |||
// IndexExpr is an Expression that retrieves an element from a collection
// value using a key, e.g. coll[key].
type IndexExpr struct {
	Collection Expression // expression producing the collection to index
	Key        Expression // expression producing the index key

	SrcRange hcl.Range
	OpenRange hcl.Range
}

func (e *IndexExpr) walkChildNodes(w internalWalkFunc) {
	e.Collection = w(e.Collection).(Expression)
	e.Key = w(e.Key).(Expression)
}
567 | |||
568 | func (e *IndexExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { | ||
569 | var diags hcl.Diagnostics | ||
570 | coll, collDiags := e.Collection.Value(ctx) | ||
571 | key, keyDiags := e.Key.Value(ctx) | ||
572 | diags = append(diags, collDiags...) | ||
573 | diags = append(diags, keyDiags...) | ||
574 | |||
575 | return hcl.Index(coll, key, &e.SrcRange) | ||
576 | } | ||
577 | |||
func (e *IndexExpr) Range() hcl.Range {
	return e.SrcRange
}

// StartRange is the opening bracket of the index operation.
func (e *IndexExpr) StartRange() hcl.Range {
	return e.OpenRange
}
585 | |||
// TupleConsExpr is an Expression that constructs a tuple value from a
// sequence of element expressions, e.g. [a, b, c].
type TupleConsExpr struct {
	Exprs []Expression // the element expressions, in order

	SrcRange hcl.Range
	OpenRange hcl.Range
}

func (e *TupleConsExpr) walkChildNodes(w internalWalkFunc) {
	for i, expr := range e.Exprs {
		e.Exprs[i] = w(expr).(Expression)
	}
}
598 | |||
599 | func (e *TupleConsExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { | ||
600 | var vals []cty.Value | ||
601 | var diags hcl.Diagnostics | ||
602 | |||
603 | vals = make([]cty.Value, len(e.Exprs)) | ||
604 | for i, expr := range e.Exprs { | ||
605 | val, valDiags := expr.Value(ctx) | ||
606 | vals[i] = val | ||
607 | diags = append(diags, valDiags...) | ||
608 | } | ||
609 | |||
610 | return cty.TupleVal(vals), diags | ||
611 | } | ||
612 | |||
func (e *TupleConsExpr) Range() hcl.Range {
	return e.SrcRange
}

// StartRange is the opening bracket of the tuple constructor.
func (e *TupleConsExpr) StartRange() hcl.Range {
	return e.OpenRange
}
620 | |||
621 | // Implementation for hcl.ExprList | ||
622 | func (e *TupleConsExpr) ExprList() []hcl.Expression { | ||
623 | ret := make([]hcl.Expression, len(e.Exprs)) | ||
624 | for i, expr := range e.Exprs { | ||
625 | ret[i] = expr | ||
626 | } | ||
627 | return ret | ||
628 | } | ||
629 | |||
// ObjectConsExpr is an Expression that constructs an object value from a
// sequence of key/value expression pairs, e.g. {k = v, ...}.
type ObjectConsExpr struct {
	Items []ObjectConsItem // the key/value pairs, in source order

	SrcRange hcl.Range
	OpenRange hcl.Range
}

// ObjectConsItem is one key/value pair within an ObjectConsExpr.
type ObjectConsItem struct {
	KeyExpr   Expression
	ValueExpr Expression
}

func (e *ObjectConsExpr) walkChildNodes(w internalWalkFunc) {
	for i, item := range e.Items {
		e.Items[i].KeyExpr = w(item.KeyExpr).(Expression)
		e.Items[i].ValueExpr = w(item.ValueExpr).(Expression)
	}
}
648 | |||
// Value evaluates every item's key and value expressions and assembles an
// object value. If any key cannot be fully resolved, the result is
// cty.DynamicVal since the complete set of attribute names is unknowable.
func (e *ObjectConsExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
	var vals map[string]cty.Value
	var diags hcl.Diagnostics

	// known will be set to false if we fail to produce any of our keys,
	// either because they are actually unknown or if the evaluation produces
	// errors. In all of these cases we must return DynamicPseudoType because
	// we're unable to know the full set of keys our object has, and thus
	// we can't produce a complete value of the intended type.
	//
	// We still evaluate all of the item keys and values to make sure that we
	// get as complete as possible a set of diagnostics.
	known := true

	vals = make(map[string]cty.Value, len(e.Items))
	for _, item := range e.Items {
		key, keyDiags := item.KeyExpr.Value(ctx)
		diags = append(diags, keyDiags...)

		val, valDiags := item.ValueExpr.Value(ctx)
		diags = append(diags, valDiags...)

		if keyDiags.HasErrors() {
			known = false
			continue
		}

		if key.IsNull() {
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Null value as key",
				Detail:   "Can't use a null value as a key.",
				// NOTE(review): this points at the value expression even
				// though the key is at fault — confirm whether KeyExpr was
				// intended here.
				Subject: item.ValueExpr.Range().Ptr(),
			})
			known = false
			continue
		}

		// Keys are normalized to strings, since object attribute names are
		// always strings.
		var err error
		key, err = convert.Convert(key, cty.String)
		if err != nil {
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Incorrect key type",
				Detail:   fmt.Sprintf("Can't use this value as a key: %s.", err.Error()),
				// NOTE(review): as above, the key expression seems the more
				// natural subject for this diagnostic.
				Subject: item.ValueExpr.Range().Ptr(),
			})
			known = false
			continue
		}

		if !key.IsKnown() {
			known = false
			continue
		}

		keyStr := key.AsString()

		vals[keyStr] = val
	}

	if !known {
		return cty.DynamicVal, diags
	}

	return cty.ObjectVal(vals), diags
}
716 | |||
func (e *ObjectConsExpr) Range() hcl.Range {
	return e.SrcRange
}

// StartRange is the opening brace of the object constructor.
func (e *ObjectConsExpr) StartRange() hcl.Range {
	return e.OpenRange
}
724 | |||
725 | // Implementation for hcl.ExprMap | ||
726 | func (e *ObjectConsExpr) ExprMap() []hcl.KeyValuePair { | ||
727 | ret := make([]hcl.KeyValuePair, len(e.Items)) | ||
728 | for i, item := range e.Items { | ||
729 | ret[i] = hcl.KeyValuePair{ | ||
730 | Key: item.KeyExpr, | ||
731 | Value: item.ValueExpr, | ||
732 | } | ||
733 | } | ||
734 | return ret | ||
735 | } | ||
736 | |||
// ObjectConsKeyExpr is a special wrapper used only for ObjectConsExpr keys,
// which deals with the special case that a naked identifier in that position
// must be interpreted as a literal string rather than evaluated directly.
type ObjectConsKeyExpr struct {
	Wrapped Expression // the underlying key expression
}

// literalName returns the keyword/identifier name to use as a literal key,
// or the empty string if the wrapped expression must be evaluated instead.
func (e *ObjectConsKeyExpr) literalName() string {
	// This is our logic for deciding whether to behave like a literal string.
	// We lean on our AbsTraversalForExpr implementation here, which already
	// deals with some awkward cases like the expression being the result
	// of the keywords "null", "true" and "false" which we'd want to interpret
	// as keys here too.
	return hcl.ExprAsKeyword(e.Wrapped)
}
752 | |||
func (e *ObjectConsKeyExpr) walkChildNodes(w internalWalkFunc) {
	// We only treat our wrapped expression as a real expression if we're
	// not going to interpret it as a literal.
	if e.literalName() == "" {
		e.Wrapped = w(e.Wrapped).(Expression)
	}
}
760 | |||
761 | func (e *ObjectConsKeyExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { | ||
762 | if ln := e.literalName(); ln != "" { | ||
763 | return cty.StringVal(ln), nil | ||
764 | } | ||
765 | return e.Wrapped.Value(ctx) | ||
766 | } | ||
767 | |||
// Range delegates to the wrapped expression, since this wrapper adds no
// source text of its own.
func (e *ObjectConsKeyExpr) Range() hcl.Range {
	return e.Wrapped.Range()
}

func (e *ObjectConsKeyExpr) StartRange() hcl.Range {
	return e.Wrapped.StartRange()
}
775 | |||
776 | // Implementation for hcl.AbsTraversalForExpr. | ||
777 | func (e *ObjectConsKeyExpr) AsTraversal() hcl.Traversal { | ||
778 | // We can produce a traversal only if our wrappee can. | ||
779 | st, diags := hcl.AbsTraversalForExpr(e.Wrapped) | ||
780 | if diags.HasErrors() { | ||
781 | return nil | ||
782 | } | ||
783 | |||
784 | return st | ||
785 | } | ||
786 | |||
// UnwrapExpression returns the wrapped key expression, allowing callers to
// see through this wrapper.
func (e *ObjectConsKeyExpr) UnwrapExpression() Expression {
	return e.Wrapped
}
790 | |||
// ForExpr represents iteration constructs:
//
//     tuple = [for i, v in list: upper(v) if i > 2]
//     object = {for k, v in map: k => upper(v)}
//     object_of_tuples = {for v in list: v.key => v...}
type ForExpr struct {
	KeyVar string // empty if ignoring the key
	ValVar string

	// CollExpr is the expression producing the collection to iterate over.
	CollExpr Expression

	KeyExpr  Expression // nil when producing a tuple
	ValExpr  Expression
	CondExpr Expression // nil if no "if" clause is present

	Group bool // set if the ellipsis is used on the value in an object for

	SrcRange   hcl.Range
	OpenRange  hcl.Range
	CloseRange hcl.Range
}
812 | |||
// Value evaluates the "for" expression, producing an object when KeyExpr is
// non-nil or a tuple when it is nil. Each element of the source collection is
// visited with the iteration variables bound in a child EvalContext, the
// optional "if" clause (CondExpr) filters elements, and — for objects — the
// ellipsis/grouping mode (e.Group) collects duplicate keys into tuples.
// Unknown inputs yield cty.DynamicVal rather than an error.
func (e *ForExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
	var diags hcl.Diagnostics

	collVal, collDiags := e.CollExpr.Value(ctx)
	diags = append(diags, collDiags...)

	// The source collection must be non-null, iterable and known before we
	// can produce any elements; otherwise we return DynamicVal (with an
	// error diagnostic for the null/non-iterable cases).
	if collVal.IsNull() {
		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Iteration over null value",
			Detail:   "A null value cannot be used as the collection in a 'for' expression.",
			Subject:  e.CollExpr.Range().Ptr(),
			Context:  &e.SrcRange,
		})
		return cty.DynamicVal, diags
	}
	if collVal.Type() == cty.DynamicPseudoType {
		return cty.DynamicVal, diags
	}
	if !collVal.CanIterateElements() {
		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Iteration over non-iterable value",
			Detail: fmt.Sprintf(
				"A value of type %s cannot be used as the collection in a 'for' expression.",
				collVal.Type().FriendlyName(),
			),
			Subject: e.CollExpr.Range().Ptr(),
			Context: &e.SrcRange,
		})
		return cty.DynamicVal, diags
	}
	if !collVal.IsKnown() {
		return cty.DynamicVal, diags
	}

	// Iteration variables are bound in a child context so that they shadow,
	// rather than clobber, any same-named variables in the caller's scope.
	childCtx := ctx.NewChild()
	childCtx.Variables = map[string]cty.Value{}

	// Before we start we'll do an early check to see if any CondExpr we've
	// been given is of the wrong type. This isn't 100% reliable (it may
	// be DynamicVal until real values are given) but it should catch some
	// straightforward cases and prevent a barrage of repeated errors.
	if e.CondExpr != nil {
		if e.KeyVar != "" {
			childCtx.Variables[e.KeyVar] = cty.DynamicVal
		}
		childCtx.Variables[e.ValVar] = cty.DynamicVal

		result, condDiags := e.CondExpr.Value(childCtx)
		diags = append(diags, condDiags...)
		if result.IsNull() {
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Condition is null",
				Detail:   "The value of the 'if' clause must not be null.",
				Subject:  e.CondExpr.Range().Ptr(),
				Context:  &e.SrcRange,
			})
			return cty.DynamicVal, diags
		}
		_, err := convert.Convert(result, cty.Bool)
		if err != nil {
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Invalid 'for' condition",
				Detail:   fmt.Sprintf("The 'if' clause value is invalid: %s.", err.Error()),
				Subject:  e.CondExpr.Range().Ptr(),
				Context:  &e.SrcRange,
			})
			return cty.DynamicVal, diags
		}
		if condDiags.HasErrors() {
			return cty.DynamicVal, diags
		}
	}

	if e.KeyExpr != nil {
		// Producing an object
		var vals map[string]cty.Value
		var groupVals map[string][]cty.Value
		if e.Group {
			groupVals = map[string][]cty.Value{}
		} else {
			vals = map[string]cty.Value{}
		}

		it := collVal.ElementIterator()

		// "known" goes false when any element's condition, key or value is
		// unknown or invalid; the first such problem gets a diagnostic but we
		// keep iterating to collect diagnostics for later elements, then
		// return DynamicVal at the end.
		known := true
		for it.Next() {
			k, v := it.Element()
			if e.KeyVar != "" {
				childCtx.Variables[e.KeyVar] = k
			}
			childCtx.Variables[e.ValVar] = v

			if e.CondExpr != nil {
				includeRaw, condDiags := e.CondExpr.Value(childCtx)
				diags = append(diags, condDiags...)
				if includeRaw.IsNull() {
					if known {
						diags = append(diags, &hcl.Diagnostic{
							Severity: hcl.DiagError,
							Summary:  "Condition is null",
							Detail:   "The value of the 'if' clause must not be null.",
							Subject:  e.CondExpr.Range().Ptr(),
							Context:  &e.SrcRange,
						})
					}
					known = false
					continue
				}
				include, err := convert.Convert(includeRaw, cty.Bool)
				if err != nil {
					if known {
						diags = append(diags, &hcl.Diagnostic{
							Severity: hcl.DiagError,
							Summary:  "Invalid 'for' condition",
							Detail:   fmt.Sprintf("The 'if' clause value is invalid: %s.", err.Error()),
							Subject:  e.CondExpr.Range().Ptr(),
							Context:  &e.SrcRange,
						})
					}
					known = false
					continue
				}
				if !include.IsKnown() {
					known = false
					continue
				}

				if include.False() {
					// Skip this element
					continue
				}
			}

			keyRaw, keyDiags := e.KeyExpr.Value(childCtx)
			diags = append(diags, keyDiags...)
			if keyRaw.IsNull() {
				if known {
					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  "Invalid object key",
						Detail:   "Key expression in 'for' expression must not produce a null value.",
						Subject:  e.KeyExpr.Range().Ptr(),
						Context:  &e.SrcRange,
					})
				}
				known = false
				continue
			}
			if !keyRaw.IsKnown() {
				known = false
				continue
			}

			key, err := convert.Convert(keyRaw, cty.String)
			if err != nil {
				if known {
					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  "Invalid object key",
						Detail:   fmt.Sprintf("The key expression produced an invalid result: %s.", err.Error()),
						Subject:  e.KeyExpr.Range().Ptr(),
						Context:  &e.SrcRange,
					})
				}
				known = false
				continue
			}

			val, valDiags := e.ValExpr.Value(childCtx)
			diags = append(diags, valDiags...)

			if e.Group {
				// Grouping mode: duplicate keys accumulate into a tuple.
				k := key.AsString()
				groupVals[k] = append(groupVals[k], val)
			} else {
				k := key.AsString()
				if _, exists := vals[k]; exists {
					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  "Duplicate object key",
						Detail: fmt.Sprintf(
							"Two different items produced the key %q in this for expression. If duplicates are expected, use the ellipsis (...) after the value expression to enable grouping by key.",
							k,
						),
						Subject: e.KeyExpr.Range().Ptr(),
						Context: &e.SrcRange,
					})
				} else {
					vals[key.AsString()] = val
				}
			}
		}

		if !known {
			return cty.DynamicVal, diags
		}

		if e.Group {
			// Flatten each group's accumulated values into a tuple.
			vals = map[string]cty.Value{}
			for k, gvs := range groupVals {
				vals[k] = cty.TupleVal(gvs)
			}
		}

		return cty.ObjectVal(vals), diags

	} else {
		// Producing a tuple
		vals := []cty.Value{}

		it := collVal.ElementIterator()

		known := true
		for it.Next() {
			k, v := it.Element()
			if e.KeyVar != "" {
				childCtx.Variables[e.KeyVar] = k
			}
			childCtx.Variables[e.ValVar] = v

			if e.CondExpr != nil {
				includeRaw, condDiags := e.CondExpr.Value(childCtx)
				diags = append(diags, condDiags...)
				if includeRaw.IsNull() {
					if known {
						diags = append(diags, &hcl.Diagnostic{
							Severity: hcl.DiagError,
							Summary:  "Condition is null",
							Detail:   "The value of the 'if' clause must not be null.",
							Subject:  e.CondExpr.Range().Ptr(),
							Context:  &e.SrcRange,
						})
					}
					known = false
					continue
				}
				if !includeRaw.IsKnown() {
					// We will eventually return DynamicVal, but we'll continue
					// iterating in case there are other diagnostics to gather
					// for later elements.
					known = false
					continue
				}

				include, err := convert.Convert(includeRaw, cty.Bool)
				if err != nil {
					if known {
						diags = append(diags, &hcl.Diagnostic{
							Severity: hcl.DiagError,
							Summary:  "Invalid 'for' condition",
							Detail:   fmt.Sprintf("The 'if' clause value is invalid: %s.", err.Error()),
							Subject:  e.CondExpr.Range().Ptr(),
							Context:  &e.SrcRange,
						})
					}
					known = false
					continue
				}

				if include.False() {
					// Skip this element
					continue
				}
			}

			val, valDiags := e.ValExpr.Value(childCtx)
			diags = append(diags, valDiags...)
			vals = append(vals, val)
		}

		if !known {
			return cty.DynamicVal, diags
		}

		return cty.TupleVal(vals), diags
	}
}
1095 | |||
1096 | func (e *ForExpr) walkChildNodes(w internalWalkFunc) { | ||
1097 | e.CollExpr = w(e.CollExpr).(Expression) | ||
1098 | |||
1099 | scopeNames := map[string]struct{}{} | ||
1100 | if e.KeyVar != "" { | ||
1101 | scopeNames[e.KeyVar] = struct{}{} | ||
1102 | } | ||
1103 | if e.ValVar != "" { | ||
1104 | scopeNames[e.ValVar] = struct{}{} | ||
1105 | } | ||
1106 | |||
1107 | if e.KeyExpr != nil { | ||
1108 | w(ChildScope{ | ||
1109 | LocalNames: scopeNames, | ||
1110 | Expr: &e.KeyExpr, | ||
1111 | }) | ||
1112 | } | ||
1113 | w(ChildScope{ | ||
1114 | LocalNames: scopeNames, | ||
1115 | Expr: &e.ValExpr, | ||
1116 | }) | ||
1117 | if e.CondExpr != nil { | ||
1118 | w(ChildScope{ | ||
1119 | LocalNames: scopeNames, | ||
1120 | Expr: &e.CondExpr, | ||
1121 | }) | ||
1122 | } | ||
1123 | } | ||
1124 | |||
// Range returns the full source range of the "for" expression.
func (e *ForExpr) Range() hcl.Range {
	return e.SrcRange
}

// StartRange returns the range of the opening brace/bracket of the expression.
func (e *ForExpr) StartRange() hcl.Range {
	return e.OpenRange
}
1132 | |||
// SplatExpr represents a splat expression (source.*.attr or source[*].attr),
// which projects an operation across all elements of its source collection.
type SplatExpr struct {
	Source Expression      // the collection (or single value) being splatted
	Each   Expression      // expression applied per element; refers to Item
	Item   *AnonSymbolExpr // placeholder bound to the current element during evaluation

	SrcRange    hcl.Range // full source range of the expression
	MarkerRange hcl.Range // range of the splat marker itself (the * symbol)
}
1141 | |||
// Value evaluates the splat: it evaluates Source, then evaluates Each once
// per element with Item temporarily bound to that element, returning a tuple
// of the per-element results. A non-list/tuple source is treated as an
// implicit single-element list.
func (e *SplatExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
	sourceVal, diags := e.Source.Value(ctx)
	if diags.HasErrors() {
		// We'll evaluate our "Each" expression here just to see if it
		// produces any more diagnostics we can report. Since we're not
		// assigning a value to our AnonSymbolExpr here it will return
		// DynamicVal, which should short-circuit any use of it.
		_, itemDiags := e.Item.Value(ctx)
		diags = append(diags, itemDiags...)
		return cty.DynamicVal, diags
	}

	if sourceVal.IsNull() {
		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Splat of null value",
			Detail:   "Splat expressions (with the * symbol) cannot be applied to null values.",
			Subject:  e.Source.Range().Ptr(),
			Context:  hcl.RangeBetween(e.Source.Range(), e.MarkerRange).Ptr(),
		})
		return cty.DynamicVal, diags
	}
	if !sourceVal.IsKnown() {
		return cty.DynamicVal, diags
	}

	// A "special power" of splat expressions is that they can be applied
	// both to tuples/lists and to other values, and in the latter case
	// the value will be treated as an implicit single-value list. We'll
	// deal with that here first.
	if !(sourceVal.Type().IsTupleType() || sourceVal.Type().IsListType()) {
		sourceVal = cty.ListVal([]cty.Value{sourceVal})
	}

	vals := make([]cty.Value, 0, sourceVal.LengthInt())
	it := sourceVal.ElementIterator()
	if ctx == nil {
		// we need a context to use our AnonSymbolExpr, so we'll just
		// make an empty one here to use as a placeholder.
		// NOTE(review): ctx is nil at this point, so this relies on
		// (*hcl.EvalContext).NewChild being safe to call with a nil
		// receiver — confirm against the hcl package.
		ctx = ctx.NewChild()
	}
	isKnown := true
	for it.Next() {
		_, sourceItem := it.Element()
		// Bind the current element so e.Each can see it via e.Item.
		e.Item.setValue(ctx, sourceItem)
		newItem, itemDiags := e.Each.Value(ctx)
		diags = append(diags, itemDiags...)
		if itemDiags.HasErrors() {
			isKnown = false
		}
		vals = append(vals, newItem)
	}
	e.Item.clearValue(ctx) // clean up our temporary value

	if !isKnown {
		return cty.DynamicVal, diags
	}

	return cty.TupleVal(vals), diags
}
1202 | |||
// walkChildNodes visits the source and per-element expressions. Item is not
// visited here; it is a synthetic placeholder managed during evaluation.
func (e *SplatExpr) walkChildNodes(w internalWalkFunc) {
	e.Source = w(e.Source).(Expression)
	e.Each = w(e.Each).(Expression)
}

// Range returns the full source range of the splat expression.
func (e *SplatExpr) Range() hcl.Range {
	return e.SrcRange
}

// StartRange returns the range of the splat marker (the * symbol).
func (e *SplatExpr) StartRange() hcl.Range {
	return e.MarkerRange
}
1215 | |||
// AnonSymbolExpr is used as a placeholder for a value in an expression that
// can be applied dynamically to any value at runtime.
//
// This is a rather odd, synthetic expression. It is used as part of the
// representation of splat expressions as a placeholder for the current item
// being visited in the splat evaluation.
//
// AnonSymbolExpr cannot be evaluated in isolation. If its Value is called
// directly then cty.DynamicVal will be returned. Instead, it is evaluated
// in terms of another node (i.e. a splat expression) which temporarily
// assigns it a value.
type AnonSymbolExpr struct {
	SrcRange hcl.Range
	// values holds the temporary binding per EvalContext, so concurrent
	// evaluations in distinct contexts don't clobber each other.
	values map[*hcl.EvalContext]cty.Value
}
1231 | |||
1232 | func (e *AnonSymbolExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { | ||
1233 | if ctx == nil { | ||
1234 | return cty.DynamicVal, nil | ||
1235 | } | ||
1236 | val, exists := e.values[ctx] | ||
1237 | if !exists { | ||
1238 | return cty.DynamicVal, nil | ||
1239 | } | ||
1240 | return val, nil | ||
1241 | } | ||
1242 | |||
1243 | // setValue sets a temporary local value for the expression when evaluated | ||
1244 | // in the given context, which must be non-nil. | ||
1245 | func (e *AnonSymbolExpr) setValue(ctx *hcl.EvalContext, val cty.Value) { | ||
1246 | if e.values == nil { | ||
1247 | e.values = make(map[*hcl.EvalContext]cty.Value) | ||
1248 | } | ||
1249 | if ctx == nil { | ||
1250 | panic("can't setValue for a nil EvalContext") | ||
1251 | } | ||
1252 | e.values[ctx] = val | ||
1253 | } | ||
1254 | |||
// clearValue removes the temporary binding previously installed by setValue
// for the given context. Note the ordering: a nil values map returns before
// the nil-ctx check, so clearValue on a never-bound expression never panics.
func (e *AnonSymbolExpr) clearValue(ctx *hcl.EvalContext) {
	if e.values == nil {
		return
	}
	if ctx == nil {
		panic("can't clearValue for a nil EvalContext")
	}
	delete(e.values, ctx)
}
1264 | |||
// walkChildNodes does nothing, since AnonSymbolExpr has no child expressions.
func (e *AnonSymbolExpr) walkChildNodes(w internalWalkFunc) {
	// AnonSymbolExpr is a leaf node in the tree
}

// Range returns the source range of the symbol.
func (e *AnonSymbolExpr) Range() hcl.Range {
	return e.SrcRange
}

// StartRange returns the source range of the symbol.
func (e *AnonSymbolExpr) StartRange() hcl.Range {
	return e.SrcRange
}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_ops.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_ops.go new file mode 100644 index 0000000..9a5da04 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_ops.go | |||
@@ -0,0 +1,258 @@ | |||
1 | package hclsyntax | ||
2 | |||
3 | import ( | ||
4 | "fmt" | ||
5 | |||
6 | "github.com/hashicorp/hcl2/hcl" | ||
7 | "github.com/zclconf/go-cty/cty" | ||
8 | "github.com/zclconf/go-cty/cty/convert" | ||
9 | "github.com/zclconf/go-cty/cty/function" | ||
10 | "github.com/zclconf/go-cty/cty/function/stdlib" | ||
11 | ) | ||
12 | |||
// Operation describes one unary or binary operator, pairing the cty stdlib
// function that implements it with the result type it produces.
type Operation struct {
	Impl function.Function // implementing function; takes one (unary) or two (binary) arguments
	Type cty.Type          // result type of the operation
}
17 | |||
// The full set of operations supported by the language, each delegating to
// the corresponding go-cty stdlib function. Logical and comparison operators
// produce cty.Bool; arithmetic operators produce cty.Number.
var (
	OpLogicalOr = &Operation{
		Impl: stdlib.OrFunc,
		Type: cty.Bool,
	}
	OpLogicalAnd = &Operation{
		Impl: stdlib.AndFunc,
		Type: cty.Bool,
	}
	OpLogicalNot = &Operation{
		Impl: stdlib.NotFunc,
		Type: cty.Bool,
	}

	OpEqual = &Operation{
		Impl: stdlib.EqualFunc,
		Type: cty.Bool,
	}
	OpNotEqual = &Operation{
		Impl: stdlib.NotEqualFunc,
		Type: cty.Bool,
	}

	OpGreaterThan = &Operation{
		Impl: stdlib.GreaterThanFunc,
		Type: cty.Bool,
	}
	OpGreaterThanOrEqual = &Operation{
		Impl: stdlib.GreaterThanOrEqualToFunc,
		Type: cty.Bool,
	}
	OpLessThan = &Operation{
		Impl: stdlib.LessThanFunc,
		Type: cty.Bool,
	}
	OpLessThanOrEqual = &Operation{
		Impl: stdlib.LessThanOrEqualToFunc,
		Type: cty.Bool,
	}

	OpAdd = &Operation{
		Impl: stdlib.AddFunc,
		Type: cty.Number,
	}
	OpSubtract = &Operation{
		Impl: stdlib.SubtractFunc,
		Type: cty.Number,
	}
	OpMultiply = &Operation{
		Impl: stdlib.MultiplyFunc,
		Type: cty.Number,
	}
	OpDivide = &Operation{
		Impl: stdlib.DivideFunc,
		Type: cty.Number,
	}
	OpModulo = &Operation{
		Impl: stdlib.ModuloFunc,
		Type: cty.Number,
	}
	OpNegate = &Operation{
		Impl: stdlib.NegateFunc,
		Type: cty.Number,
	}
)
83 | |||
// binaryOps is the operator precedence table used by the parser; it is
// populated by init below.
var binaryOps []map[TokenType]*Operation

func init() {
	// This operation table maps from the operator's token type
	// to the AST operation type. All expressions produced from
	// binary operators are BinaryOp nodes.
	//
	// Binary operator groups are listed in order of precedence, with
	// the *lowest* precedence first. Operators within the same group
	// have left-to-right associativity.
	binaryOps = []map[TokenType]*Operation{
		{
			TokenOr: OpLogicalOr,
		},
		{
			TokenAnd: OpLogicalAnd,
		},
		{
			TokenEqualOp:  OpEqual,
			TokenNotEqual: OpNotEqual,
		},
		{
			TokenGreaterThan:   OpGreaterThan,
			TokenGreaterThanEq: OpGreaterThanOrEqual,
			TokenLessThan:      OpLessThan,
			TokenLessThanEq:    OpLessThanOrEqual,
		},
		{
			TokenPlus:  OpAdd,
			TokenMinus: OpSubtract,
		},
		{
			TokenStar:    OpMultiply,
			TokenSlash:   OpDivide,
			TokenPercent: OpModulo,
		},
	}
}
122 | |||
// BinaryOpExpr is an Expression that applies a binary Operation to the
// results of two operand expressions.
type BinaryOpExpr struct {
	LHS Expression // left operand
	Op  *Operation // operation to apply; its Impl takes exactly two arguments
	RHS Expression // right operand

	SrcRange hcl.Range // full source range of the expression
}

// walkChildNodes visits both operands, allowing the walker to replace them.
func (e *BinaryOpExpr) walkChildNodes(w internalWalkFunc) {
	e.LHS = w(e.LHS).(Expression)
	e.RHS = w(e.RHS).(Expression)
}
135 | |||
// Value evaluates both operands, converts each to the parameter type the
// operation's implementation declares, and calls the implementation. On any
// conversion or evaluation error it returns an unknown value of the
// operation's result type along with the accumulated diagnostics.
func (e *BinaryOpExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
	impl := e.Op.Impl // assumed to be a function taking exactly two arguments
	params := impl.Params()
	lhsParam := params[0]
	rhsParam := params[1]

	var diags hcl.Diagnostics

	givenLHSVal, lhsDiags := e.LHS.Value(ctx)
	givenRHSVal, rhsDiags := e.RHS.Value(ctx)
	diags = append(diags, lhsDiags...)
	diags = append(diags, rhsDiags...)

	// Convert each operand to the type the implementation expects; both
	// conversions are attempted so both errors can be reported at once.
	lhsVal, err := convert.Convert(givenLHSVal, lhsParam.Type)
	if err != nil {
		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Invalid operand",
			Detail:   fmt.Sprintf("Unsuitable value for left operand: %s.", err),
			Subject:  e.LHS.Range().Ptr(),
			Context:  &e.SrcRange,
		})
	}
	rhsVal, err := convert.Convert(givenRHSVal, rhsParam.Type)
	if err != nil {
		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Invalid operand",
			Detail:   fmt.Sprintf("Unsuitable value for right operand: %s.", err),
			Subject:  e.RHS.Range().Ptr(),
			Context:  &e.SrcRange,
		})
	}

	if diags.HasErrors() {
		// Don't actually try the call if we have errors already, since the
		// this will probably just produce a confusing duplicative diagnostic.
		return cty.UnknownVal(e.Op.Type), diags
	}

	args := []cty.Value{lhsVal, rhsVal}
	result, err := impl.Call(args)
	if err != nil {
		diags = append(diags, &hcl.Diagnostic{
			// FIXME: This diagnostic is useless.
			Severity: hcl.DiagError,
			Summary:  "Operation failed",
			Detail:   fmt.Sprintf("Error during operation: %s.", err),
			Subject:  &e.SrcRange,
		})
		return cty.UnknownVal(e.Op.Type), diags
	}

	return result, diags
}
191 | |||
// Range returns the full source range of the binary operation.
func (e *BinaryOpExpr) Range() hcl.Range {
	return e.SrcRange
}

// StartRange returns the start of the left operand.
func (e *BinaryOpExpr) StartRange() hcl.Range {
	return e.LHS.StartRange()
}
199 | |||
// UnaryOpExpr is an Expression that applies a unary Operation (negation or
// logical not) to the result of a single operand expression.
type UnaryOpExpr struct {
	Op  *Operation // operation to apply; its Impl takes exactly one argument
	Val Expression // the operand

	SrcRange    hcl.Range // full source range of the expression
	SymbolRange hcl.Range // range of the operator symbol itself
}

// walkChildNodes visits the single operand, allowing the walker to replace it.
func (e *UnaryOpExpr) walkChildNodes(w internalWalkFunc) {
	e.Val = w(e.Val).(Expression)
}
211 | |||
// Value evaluates the operand, converts it to the parameter type the
// operation's implementation declares, and calls the implementation. On any
// conversion or evaluation error it returns an unknown value of the
// operation's result type along with the accumulated diagnostics.
func (e *UnaryOpExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
	impl := e.Op.Impl // assumed to be a function taking exactly one argument
	params := impl.Params()
	param := params[0]

	givenVal, diags := e.Val.Value(ctx)

	val, err := convert.Convert(givenVal, param.Type)
	if err != nil {
		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Invalid operand",
			Detail:   fmt.Sprintf("Unsuitable value for unary operand: %s.", err),
			Subject:  e.Val.Range().Ptr(),
			Context:  &e.SrcRange,
		})
	}

	if diags.HasErrors() {
		// Don't actually try the call if we have errors already, since the
		// this will probably just produce a confusing duplicative diagnostic.
		return cty.UnknownVal(e.Op.Type), diags
	}

	args := []cty.Value{val}
	result, err := impl.Call(args)
	if err != nil {
		diags = append(diags, &hcl.Diagnostic{
			// FIXME: This diagnostic is useless.
			Severity: hcl.DiagError,
			Summary:  "Operation failed",
			Detail:   fmt.Sprintf("Error during operation: %s.", err),
			Subject:  &e.SrcRange,
		})
		return cty.UnknownVal(e.Op.Type), diags
	}

	return result, diags
}
251 | |||
// Range returns the full source range of the unary operation.
func (e *UnaryOpExpr) Range() hcl.Range {
	return e.SrcRange
}

// StartRange returns the range of the operator symbol.
func (e *UnaryOpExpr) StartRange() hcl.Range {
	return e.SymbolRange
}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_template.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_template.go new file mode 100644 index 0000000..a1c4727 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_template.go | |||
@@ -0,0 +1,192 @@ | |||
1 | package hclsyntax | ||
2 | |||
3 | import ( | ||
4 | "bytes" | ||
5 | "fmt" | ||
6 | |||
7 | "github.com/hashicorp/hcl2/hcl" | ||
8 | "github.com/zclconf/go-cty/cty" | ||
9 | "github.com/zclconf/go-cty/cty/convert" | ||
10 | ) | ||
11 | |||
// TemplateExpr is an Expression representing a string template: a sequence
// of literal and interpolated parts whose string renderings are concatenated.
type TemplateExpr struct {
	Parts []Expression // template parts, in source order

	SrcRange hcl.Range // full source range of the template
}
17 | |||
18 | func (e *TemplateExpr) walkChildNodes(w internalWalkFunc) { | ||
19 | for i, part := range e.Parts { | ||
20 | e.Parts[i] = w(part).(Expression) | ||
21 | } | ||
22 | } | ||
23 | |||
24 | func (e *TemplateExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { | ||
25 | buf := &bytes.Buffer{} | ||
26 | var diags hcl.Diagnostics | ||
27 | isKnown := true | ||
28 | |||
29 | for _, part := range e.Parts { | ||
30 | partVal, partDiags := part.Value(ctx) | ||
31 | diags = append(diags, partDiags...) | ||
32 | |||
33 | if partVal.IsNull() { | ||
34 | diags = append(diags, &hcl.Diagnostic{ | ||
35 | Severity: hcl.DiagError, | ||
36 | Summary: "Invalid template interpolation value", | ||
37 | Detail: fmt.Sprintf( | ||
38 | "The expression result is null. Cannot include a null value in a string template.", | ||
39 | ), | ||
40 | Subject: part.Range().Ptr(), | ||
41 | Context: &e.SrcRange, | ||
42 | }) | ||
43 | continue | ||
44 | } | ||
45 | |||
46 | if !partVal.IsKnown() { | ||
47 | // If any part is unknown then the result as a whole must be | ||
48 | // unknown too. We'll keep on processing the rest of the parts | ||
49 | // anyway, because we want to still emit any diagnostics resulting | ||
50 | // from evaluating those. | ||
51 | isKnown = false | ||
52 | continue | ||
53 | } | ||
54 | |||
55 | strVal, err := convert.Convert(partVal, cty.String) | ||
56 | if err != nil { | ||
57 | diags = append(diags, &hcl.Diagnostic{ | ||
58 | Severity: hcl.DiagError, | ||
59 | Summary: "Invalid template interpolation value", | ||
60 | Detail: fmt.Sprintf( | ||
61 | "Cannot include the given value in a string template: %s.", | ||
62 | err.Error(), | ||
63 | ), | ||
64 | Subject: part.Range().Ptr(), | ||
65 | Context: &e.SrcRange, | ||
66 | }) | ||
67 | continue | ||
68 | } | ||
69 | |||
70 | buf.WriteString(strVal.AsString()) | ||
71 | } | ||
72 | |||
73 | if !isKnown { | ||
74 | return cty.UnknownVal(cty.String), diags | ||
75 | } | ||
76 | |||
77 | return cty.StringVal(buf.String()), diags | ||
78 | } | ||
79 | |||
// Range returns the full source range of the template.
func (e *TemplateExpr) Range() hcl.Range {
	return e.SrcRange
}

// StartRange returns the start of the first part.
// NOTE(review): this indexes Parts[0], so it assumes the parser never
// produces a TemplateExpr with zero parts — confirm against the parser.
func (e *TemplateExpr) StartRange() hcl.Range {
	return e.Parts[0].StartRange()
}
87 | |||
// TemplateJoinExpr is used to convert tuples of strings produced by template
// constructs (i.e. for loops) into flat strings, by converting the values
// to strings and joining them. This AST node is not used directly; it's
// produced as part of the AST of a "for" loop in a template.
type TemplateJoinExpr struct {
	Tuple Expression // expression expected to produce a tuple of stringable values
}

// walkChildNodes visits the tuple expression, allowing the walker to replace it.
func (e *TemplateJoinExpr) walkChildNodes(w internalWalkFunc) {
	e.Tuple = w(e.Tuple).(Expression)
}
99 | |||
100 | func (e *TemplateJoinExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { | ||
101 | tuple, diags := e.Tuple.Value(ctx) | ||
102 | |||
103 | if tuple.IsNull() { | ||
104 | // This indicates a bug in the code that constructed the AST. | ||
105 | panic("TemplateJoinExpr got null tuple") | ||
106 | } | ||
107 | if tuple.Type() == cty.DynamicPseudoType { | ||
108 | return cty.UnknownVal(cty.String), diags | ||
109 | } | ||
110 | if !tuple.Type().IsTupleType() { | ||
111 | // This indicates a bug in the code that constructed the AST. | ||
112 | panic("TemplateJoinExpr got non-tuple tuple") | ||
113 | } | ||
114 | if !tuple.IsKnown() { | ||
115 | return cty.UnknownVal(cty.String), diags | ||
116 | } | ||
117 | |||
118 | buf := &bytes.Buffer{} | ||
119 | it := tuple.ElementIterator() | ||
120 | for it.Next() { | ||
121 | _, val := it.Element() | ||
122 | |||
123 | if val.IsNull() { | ||
124 | diags = append(diags, &hcl.Diagnostic{ | ||
125 | Severity: hcl.DiagError, | ||
126 | Summary: "Invalid template interpolation value", | ||
127 | Detail: fmt.Sprintf( | ||
128 | "An iteration result is null. Cannot include a null value in a string template.", | ||
129 | ), | ||
130 | Subject: e.Range().Ptr(), | ||
131 | }) | ||
132 | continue | ||
133 | } | ||
134 | if val.Type() == cty.DynamicPseudoType { | ||
135 | return cty.UnknownVal(cty.String), diags | ||
136 | } | ||
137 | strVal, err := convert.Convert(val, cty.String) | ||
138 | if err != nil { | ||
139 | diags = append(diags, &hcl.Diagnostic{ | ||
140 | Severity: hcl.DiagError, | ||
141 | Summary: "Invalid template interpolation value", | ||
142 | Detail: fmt.Sprintf( | ||
143 | "Cannot include one of the interpolation results into the string template: %s.", | ||
144 | err.Error(), | ||
145 | ), | ||
146 | Subject: e.Range().Ptr(), | ||
147 | }) | ||
148 | continue | ||
149 | } | ||
150 | if !val.IsKnown() { | ||
151 | return cty.UnknownVal(cty.String), diags | ||
152 | } | ||
153 | |||
154 | buf.WriteString(strVal.AsString()) | ||
155 | } | ||
156 | |||
157 | return cty.StringVal(buf.String()), diags | ||
158 | } | ||
159 | |||
// Range returns the range of the underlying tuple expression.
func (e *TemplateJoinExpr) Range() hcl.Range {
	return e.Tuple.Range()
}

// StartRange returns the start of the underlying tuple expression.
func (e *TemplateJoinExpr) StartRange() hcl.Range {
	return e.Tuple.StartRange()
}
167 | |||
// TemplateWrapExpr is used instead of a TemplateExpr when a template
// consists _only_ of a single interpolation sequence. In that case, the
// template's result is the single interpolation's result, verbatim with
// no type conversions.
type TemplateWrapExpr struct {
	Wrapped Expression // the single interpolated expression

	SrcRange hcl.Range // full source range of the template
}

// walkChildNodes visits the wrapped expression, allowing the walker to replace it.
func (e *TemplateWrapExpr) walkChildNodes(w internalWalkFunc) {
	e.Wrapped = w(e.Wrapped).(Expression)
}

// Value returns the wrapped expression's result verbatim, with no
// string conversion.
func (e *TemplateWrapExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
	return e.Wrapped.Value(ctx)
}

// Range returns the full source range of the template.
func (e *TemplateWrapExpr) Range() hcl.Range {
	return e.SrcRange
}

// StartRange returns the full source range of the template.
func (e *TemplateWrapExpr) StartRange() hcl.Range {
	return e.SrcRange
}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_vars.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_vars.go new file mode 100644 index 0000000..9177092 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_vars.go | |||
@@ -0,0 +1,76 @@ | |||
1 | package hclsyntax | ||
2 | |||
3 | // Generated by expression_vars_get.go. DO NOT EDIT. | ||
4 | // Run 'go generate' on this package to update the set of functions here. | ||
5 | |||
6 | import ( | ||
7 | "github.com/hashicorp/hcl2/hcl" | ||
8 | ) | ||
9 | |||
// The methods below are generated delegations: every Expression
// implementation's Variables method defers to the package-level Variables
// function, which walks the expression tree collecting traversals. Do not
// hand-edit; regenerate with 'go generate' (see expression_vars_gen.go).
func (e *AnonSymbolExpr) Variables() []hcl.Traversal {
	return Variables(e)
}

func (e *BinaryOpExpr) Variables() []hcl.Traversal {
	return Variables(e)
}

func (e *ConditionalExpr) Variables() []hcl.Traversal {
	return Variables(e)
}

func (e *ForExpr) Variables() []hcl.Traversal {
	return Variables(e)
}

func (e *FunctionCallExpr) Variables() []hcl.Traversal {
	return Variables(e)
}

func (e *IndexExpr) Variables() []hcl.Traversal {
	return Variables(e)
}

func (e *LiteralValueExpr) Variables() []hcl.Traversal {
	return Variables(e)
}

func (e *ObjectConsExpr) Variables() []hcl.Traversal {
	return Variables(e)
}

func (e *ObjectConsKeyExpr) Variables() []hcl.Traversal {
	return Variables(e)
}

func (e *RelativeTraversalExpr) Variables() []hcl.Traversal {
	return Variables(e)
}

func (e *ScopeTraversalExpr) Variables() []hcl.Traversal {
	return Variables(e)
}

func (e *SplatExpr) Variables() []hcl.Traversal {
	return Variables(e)
}

func (e *TemplateExpr) Variables() []hcl.Traversal {
	return Variables(e)
}

func (e *TemplateJoinExpr) Variables() []hcl.Traversal {
	return Variables(e)
}

func (e *TemplateWrapExpr) Variables() []hcl.Traversal {
	return Variables(e)
}

func (e *TupleConsExpr) Variables() []hcl.Traversal {
	return Variables(e)
}

func (e *UnaryOpExpr) Variables() []hcl.Traversal {
	return Variables(e)
}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_vars_gen.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_vars_gen.go new file mode 100644 index 0000000..88f1980 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_vars_gen.go | |||
@@ -0,0 +1,99 @@ | |||
1 | // This is a 'go generate'-oriented program for producing the "Variables" | ||
2 | // method on every Expression implementation found within this package. | ||
3 | // All expressions share the same implementation for this method, which | ||
4 | // just wraps the package-level function "Variables" and uses an AST walk | ||
5 | // to do its work. | ||
6 | |||
7 | // +build ignore | ||
8 | |||
9 | package main | ||
10 | |||
11 | import ( | ||
12 | "fmt" | ||
13 | "go/ast" | ||
14 | "go/parser" | ||
15 | "go/token" | ||
16 | "os" | ||
17 | "sort" | ||
18 | ) | ||
19 | |||
20 | func main() { | ||
21 | fs := token.NewFileSet() | ||
22 | pkgs, err := parser.ParseDir(fs, ".", nil, 0) | ||
23 | if err != nil { | ||
24 | fmt.Fprintf(os.Stderr, "error while parsing: %s\n", err) | ||
25 | os.Exit(1) | ||
26 | } | ||
27 | pkg := pkgs["hclsyntax"] | ||
28 | |||
29 | // Walk all the files and collect the receivers of any "Value" methods | ||
30 | // that look like they are trying to implement Expression. | ||
31 | var recvs []string | ||
32 | for _, f := range pkg.Files { | ||
33 | for _, decl := range f.Decls { | ||
34 | fd, ok := decl.(*ast.FuncDecl) | ||
35 | if !ok { | ||
36 | continue | ||
37 | } | ||
38 | if fd.Name.Name != "Value" { | ||
39 | continue | ||
40 | } | ||
41 | results := fd.Type.Results.List | ||
42 | if len(results) != 2 { | ||
43 | continue | ||
44 | } | ||
45 | valResult := fd.Type.Results.List[0].Type.(*ast.SelectorExpr).X.(*ast.Ident) | ||
46 | diagsResult := fd.Type.Results.List[1].Type.(*ast.SelectorExpr).X.(*ast.Ident) | ||
47 | |||
48 | if valResult.Name != "cty" && diagsResult.Name != "hcl" { | ||
49 | continue | ||
50 | } | ||
51 | |||
52 | // If we have a method called Value and it returns something in | ||
53 | // "cty" followed by something in "hcl" then that's specific enough | ||
54 | // for now, even though this is not 100% exact as a correct | ||
55 | // implementation of Value. | ||
56 | |||
57 | recvTy := fd.Recv.List[0].Type | ||
58 | |||
59 | switch rtt := recvTy.(type) { | ||
60 | case *ast.StarExpr: | ||
61 | name := rtt.X.(*ast.Ident).Name | ||
62 | recvs = append(recvs, fmt.Sprintf("*%s", name)) | ||
63 | default: | ||
64 | fmt.Fprintf(os.Stderr, "don't know what to do with a %T receiver\n", recvTy) | ||
65 | } | ||
66 | |||
67 | } | ||
68 | } | ||
69 | |||
70 | sort.Strings(recvs) | ||
71 | |||
72 | of, err := os.OpenFile("expression_vars.go", os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.ModePerm) | ||
73 | if err != nil { | ||
74 | fmt.Fprintf(os.Stderr, "failed to open output file: %s\n", err) | ||
75 | os.Exit(1) | ||
76 | } | ||
77 | |||
78 | fmt.Fprint(of, outputPreamble) | ||
79 | for _, recv := range recvs { | ||
80 | fmt.Fprintf(of, outputMethodFmt, recv) | ||
81 | } | ||
82 | fmt.Fprint(of, "\n") | ||
83 | |||
84 | } | ||
85 | |||
86 | const outputPreamble = `package hclsyntax | ||
87 | |||
88 | // Generated by expression_vars_get.go. DO NOT EDIT. | ||
89 | // Run 'go generate' on this package to update the set of functions here. | ||
90 | |||
91 | import ( | ||
92 | "github.com/hashicorp/hcl2/hcl" | ||
93 | )` | ||
94 | |||
95 | const outputMethodFmt = ` | ||
96 | |||
97 | func (e %s) Variables() []hcl.Traversal { | ||
98 | return Variables(e) | ||
99 | }` | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/file.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/file.go new file mode 100644 index 0000000..490c025 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/file.go | |||
@@ -0,0 +1,20 @@ | |||
1 | package hclsyntax | ||
2 | |||
3 | import ( | ||
4 | "github.com/hashicorp/hcl2/hcl" | ||
5 | ) | ||
6 | |||
7 | // File is the top-level object resulting from parsing a configuration file. | ||
8 | type File struct { | ||
9 | Body *Body | ||
10 | Bytes []byte | ||
11 | } | ||
12 | |||
13 | func (f *File) AsHCLFile() *hcl.File { | ||
14 | return &hcl.File{ | ||
15 | Body: f.Body, | ||
16 | Bytes: f.Bytes, | ||
17 | |||
18 | // TODO: The Nav object, once we have an implementation of it | ||
19 | } | ||
20 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/generate.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/generate.go new file mode 100644 index 0000000..841656a --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/generate.go | |||
@@ -0,0 +1,9 @@ | |||
1 | package hclsyntax | ||
2 | |||
3 | //go:generate go run expression_vars_gen.go | ||
4 | //go:generate ruby unicode2ragel.rb --url=http://www.unicode.org/Public/9.0.0/ucd/DerivedCoreProperties.txt -m UnicodeDerived -p ID_Start,ID_Continue -o unicode_derived.rl | ||
5 | //go:generate ragel -Z scan_tokens.rl | ||
6 | //go:generate gofmt -w scan_tokens.go | ||
7 | //go:generate ragel -Z scan_string_lit.rl | ||
8 | //go:generate gofmt -w scan_string_lit.go | ||
9 | //go:generate stringer -type TokenType -output token_type_string.go | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/keywords.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/keywords.go new file mode 100644 index 0000000..eef8b96 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/keywords.go | |||
@@ -0,0 +1,21 @@ | |||
1 | package hclsyntax | ||
2 | |||
3 | import ( | ||
4 | "bytes" | ||
5 | ) | ||
6 | |||
7 | type Keyword []byte | ||
8 | |||
9 | var forKeyword = Keyword([]byte{'f', 'o', 'r'}) | ||
10 | var inKeyword = Keyword([]byte{'i', 'n'}) | ||
11 | var ifKeyword = Keyword([]byte{'i', 'f'}) | ||
12 | var elseKeyword = Keyword([]byte{'e', 'l', 's', 'e'}) | ||
13 | var endifKeyword = Keyword([]byte{'e', 'n', 'd', 'i', 'f'}) | ||
14 | var endforKeyword = Keyword([]byte{'e', 'n', 'd', 'f', 'o', 'r'}) | ||
15 | |||
16 | func (kw Keyword) TokenMatches(token Token) bool { | ||
17 | if token.Type != TokenIdent { | ||
18 | return false | ||
19 | } | ||
20 | return bytes.Equal([]byte(kw), token.Bytes) | ||
21 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/navigation.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/navigation.go new file mode 100644 index 0000000..4d41b6b --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/navigation.go | |||
@@ -0,0 +1,41 @@ | |||
1 | package hclsyntax | ||
2 | |||
3 | import ( | ||
4 | "bytes" | ||
5 | "fmt" | ||
6 | ) | ||
7 | |||
8 | type navigation struct { | ||
9 | root *Body | ||
10 | } | ||
11 | |||
12 | // Implementation of hcled.ContextString | ||
13 | func (n navigation) ContextString(offset int) string { | ||
14 | // We will walk our top-level blocks until we find one that contains | ||
15 | // the given offset, and then construct a representation of the header | ||
16 | // of the block. | ||
17 | |||
18 | var block *Block | ||
19 | for _, candidate := range n.root.Blocks { | ||
20 | if candidate.Range().ContainsOffset(offset) { | ||
21 | block = candidate | ||
22 | break | ||
23 | } | ||
24 | } | ||
25 | |||
26 | if block == nil { | ||
27 | return "" | ||
28 | } | ||
29 | |||
30 | if len(block.Labels) == 0 { | ||
31 | // Easy case! | ||
32 | return block.Type | ||
33 | } | ||
34 | |||
35 | buf := &bytes.Buffer{} | ||
36 | buf.WriteString(block.Type) | ||
37 | for _, label := range block.Labels { | ||
38 | fmt.Fprintf(buf, " %q", label) | ||
39 | } | ||
40 | return buf.String() | ||
41 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/node.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/node.go new file mode 100644 index 0000000..fd426d4 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/node.go | |||
@@ -0,0 +1,22 @@ | |||
1 | package hclsyntax | ||
2 | |||
3 | import ( | ||
4 | "github.com/hashicorp/hcl2/hcl" | ||
5 | ) | ||
6 | |||
7 | // Node is the abstract type that every AST node implements. | ||
8 | // | ||
9 | // This is a closed interface, so it cannot be implemented from outside of | ||
10 | // this package. | ||
11 | type Node interface { | ||
12 | // This is the mechanism by which the public-facing walk functions | ||
13 | // are implemented. Implementations should call the given function | ||
14 | // for each child node and then replace that node with its return value. | ||
15 | // The return value might just be the same node, for non-transforming | ||
16 | // walks. | ||
17 | walkChildNodes(w internalWalkFunc) | ||
18 | |||
19 | Range() hcl.Range | ||
20 | } | ||
21 | |||
22 | type internalWalkFunc func(Node) Node | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser.go new file mode 100644 index 0000000..002858f --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser.go | |||
@@ -0,0 +1,1836 @@ | |||
1 | package hclsyntax | ||
2 | |||
3 | import ( | ||
4 | "bytes" | ||
5 | "fmt" | ||
6 | "strconv" | ||
7 | "unicode/utf8" | ||
8 | |||
9 | "github.com/apparentlymart/go-textseg/textseg" | ||
10 | "github.com/hashicorp/hcl2/hcl" | ||
11 | "github.com/zclconf/go-cty/cty" | ||
12 | "github.com/zclconf/go-cty/cty/convert" | ||
13 | ) | ||
14 | |||
15 | type parser struct { | ||
16 | *peeker | ||
17 | |||
18 | // set to true if any recovery is attempted. The parser can use this | ||
19 | // to attempt to reduce error noise by suppressing "bad token" errors | ||
20 | // in recovery mode, assuming that the recovery heuristics have failed | ||
21 | // in this case and left the peeker in a wrong place. | ||
22 | recovery bool | ||
23 | } | ||
24 | |||
25 | func (p *parser) ParseBody(end TokenType) (*Body, hcl.Diagnostics) { | ||
26 | attrs := Attributes{} | ||
27 | blocks := Blocks{} | ||
28 | var diags hcl.Diagnostics | ||
29 | |||
30 | startRange := p.PrevRange() | ||
31 | var endRange hcl.Range | ||
32 | |||
33 | Token: | ||
34 | for { | ||
35 | next := p.Peek() | ||
36 | if next.Type == end { | ||
37 | endRange = p.NextRange() | ||
38 | p.Read() | ||
39 | break Token | ||
40 | } | ||
41 | |||
42 | switch next.Type { | ||
43 | case TokenNewline: | ||
44 | p.Read() | ||
45 | continue | ||
46 | case TokenIdent: | ||
47 | item, itemDiags := p.ParseBodyItem() | ||
48 | diags = append(diags, itemDiags...) | ||
49 | switch titem := item.(type) { | ||
50 | case *Block: | ||
51 | blocks = append(blocks, titem) | ||
52 | case *Attribute: | ||
53 | if existing, exists := attrs[titem.Name]; exists { | ||
54 | diags = append(diags, &hcl.Diagnostic{ | ||
55 | Severity: hcl.DiagError, | ||
56 | Summary: "Attribute redefined", | ||
57 | Detail: fmt.Sprintf( | ||
58 | "The attribute %q was already defined at %s. Each attribute may be defined only once.", | ||
59 | titem.Name, existing.NameRange.String(), | ||
60 | ), | ||
61 | Subject: &titem.NameRange, | ||
62 | }) | ||
63 | } else { | ||
64 | attrs[titem.Name] = titem | ||
65 | } | ||
66 | default: | ||
67 | // This should never happen for valid input, but may if a | ||
68 | // syntax error was detected in ParseBodyItem that prevented | ||
69 | // it from even producing a partially-broken item. In that | ||
70 | // case, it would've left at least one error in the diagnostics | ||
71 | // slice we already dealt with above. | ||
72 | // | ||
73 | // We'll assume ParseBodyItem attempted recovery to leave | ||
74 | // us in a reasonable position to try parsing the next item. | ||
75 | continue | ||
76 | } | ||
77 | default: | ||
78 | bad := p.Read() | ||
79 | if !p.recovery { | ||
80 | if bad.Type == TokenOQuote { | ||
81 | diags = append(diags, &hcl.Diagnostic{ | ||
82 | Severity: hcl.DiagError, | ||
83 | Summary: "Invalid attribute name", | ||
84 | Detail: "Attribute names must not be quoted.", | ||
85 | Subject: &bad.Range, | ||
86 | }) | ||
87 | } else { | ||
88 | diags = append(diags, &hcl.Diagnostic{ | ||
89 | Severity: hcl.DiagError, | ||
90 | Summary: "Attribute or block definition required", | ||
91 | Detail: "An attribute or block definition is required here.", | ||
92 | Subject: &bad.Range, | ||
93 | }) | ||
94 | } | ||
95 | } | ||
96 | endRange = p.PrevRange() // arbitrary, but somewhere inside the body means better diagnostics | ||
97 | |||
98 | p.recover(end) // attempt to recover to the token after the end of this body | ||
99 | break Token | ||
100 | } | ||
101 | } | ||
102 | |||
103 | return &Body{ | ||
104 | Attributes: attrs, | ||
105 | Blocks: blocks, | ||
106 | |||
107 | SrcRange: hcl.RangeBetween(startRange, endRange), | ||
108 | EndRange: hcl.Range{ | ||
109 | Filename: endRange.Filename, | ||
110 | Start: endRange.End, | ||
111 | End: endRange.End, | ||
112 | }, | ||
113 | }, diags | ||
114 | } | ||
115 | |||
116 | func (p *parser) ParseBodyItem() (Node, hcl.Diagnostics) { | ||
117 | ident := p.Read() | ||
118 | if ident.Type != TokenIdent { | ||
119 | p.recoverAfterBodyItem() | ||
120 | return nil, hcl.Diagnostics{ | ||
121 | { | ||
122 | Severity: hcl.DiagError, | ||
123 | Summary: "Attribute or block definition required", | ||
124 | Detail: "An attribute or block definition is required here.", | ||
125 | Subject: &ident.Range, | ||
126 | }, | ||
127 | } | ||
128 | } | ||
129 | |||
130 | next := p.Peek() | ||
131 | |||
132 | switch next.Type { | ||
133 | case TokenEqual: | ||
134 | return p.finishParsingBodyAttribute(ident) | ||
135 | case TokenOQuote, TokenOBrace, TokenIdent: | ||
136 | return p.finishParsingBodyBlock(ident) | ||
137 | default: | ||
138 | p.recoverAfterBodyItem() | ||
139 | return nil, hcl.Diagnostics{ | ||
140 | { | ||
141 | Severity: hcl.DiagError, | ||
142 | Summary: "Attribute or block definition required", | ||
143 | Detail: "An attribute or block definition is required here. To define an attribute, use the equals sign \"=\" to introduce the attribute value.", | ||
144 | Subject: &ident.Range, | ||
145 | }, | ||
146 | } | ||
147 | } | ||
148 | |||
149 | return nil, nil | ||
150 | } | ||
151 | |||
152 | func (p *parser) finishParsingBodyAttribute(ident Token) (Node, hcl.Diagnostics) { | ||
153 | eqTok := p.Read() // eat equals token | ||
154 | if eqTok.Type != TokenEqual { | ||
155 | // should never happen if caller behaves | ||
156 | panic("finishParsingBodyAttribute called with next not equals") | ||
157 | } | ||
158 | |||
159 | var endRange hcl.Range | ||
160 | |||
161 | expr, diags := p.ParseExpression() | ||
162 | if p.recovery && diags.HasErrors() { | ||
163 | // recovery within expressions tends to be tricky, so we've probably | ||
164 | // landed somewhere weird. We'll try to reset to the start of a body | ||
165 | // item so parsing can continue. | ||
166 | endRange = p.PrevRange() | ||
167 | p.recoverAfterBodyItem() | ||
168 | } else { | ||
169 | end := p.Peek() | ||
170 | if end.Type != TokenNewline && end.Type != TokenEOF { | ||
171 | if !p.recovery { | ||
172 | diags = append(diags, &hcl.Diagnostic{ | ||
173 | Severity: hcl.DiagError, | ||
174 | Summary: "Missing newline after attribute definition", | ||
175 | Detail: "An attribute definition must end with a newline.", | ||
176 | Subject: &end.Range, | ||
177 | Context: hcl.RangeBetween(ident.Range, end.Range).Ptr(), | ||
178 | }) | ||
179 | } | ||
180 | endRange = p.PrevRange() | ||
181 | p.recoverAfterBodyItem() | ||
182 | } else { | ||
183 | endRange = p.PrevRange() | ||
184 | p.Read() // eat newline | ||
185 | } | ||
186 | } | ||
187 | |||
188 | return &Attribute{ | ||
189 | Name: string(ident.Bytes), | ||
190 | Expr: expr, | ||
191 | |||
192 | SrcRange: hcl.RangeBetween(ident.Range, endRange), | ||
193 | NameRange: ident.Range, | ||
194 | EqualsRange: eqTok.Range, | ||
195 | }, diags | ||
196 | } | ||
197 | |||
198 | func (p *parser) finishParsingBodyBlock(ident Token) (Node, hcl.Diagnostics) { | ||
199 | var blockType = string(ident.Bytes) | ||
200 | var diags hcl.Diagnostics | ||
201 | var labels []string | ||
202 | var labelRanges []hcl.Range | ||
203 | |||
204 | var oBrace Token | ||
205 | |||
206 | Token: | ||
207 | for { | ||
208 | tok := p.Peek() | ||
209 | |||
210 | switch tok.Type { | ||
211 | |||
212 | case TokenOBrace: | ||
213 | oBrace = p.Read() | ||
214 | break Token | ||
215 | |||
216 | case TokenOQuote: | ||
217 | label, labelRange, labelDiags := p.parseQuotedStringLiteral() | ||
218 | diags = append(diags, labelDiags...) | ||
219 | labels = append(labels, label) | ||
220 | labelRanges = append(labelRanges, labelRange) | ||
221 | if labelDiags.HasErrors() { | ||
222 | p.recoverAfterBodyItem() | ||
223 | return &Block{ | ||
224 | Type: blockType, | ||
225 | Labels: labels, | ||
226 | Body: nil, | ||
227 | |||
228 | TypeRange: ident.Range, | ||
229 | LabelRanges: labelRanges, | ||
230 | OpenBraceRange: ident.Range, // placeholder | ||
231 | CloseBraceRange: ident.Range, // placeholder | ||
232 | }, diags | ||
233 | } | ||
234 | |||
235 | case TokenIdent: | ||
236 | tok = p.Read() // eat token | ||
237 | label, labelRange := string(tok.Bytes), tok.Range | ||
238 | labels = append(labels, label) | ||
239 | labelRanges = append(labelRanges, labelRange) | ||
240 | |||
241 | default: | ||
242 | switch tok.Type { | ||
243 | case TokenEqual: | ||
244 | diags = append(diags, &hcl.Diagnostic{ | ||
245 | Severity: hcl.DiagError, | ||
246 | Summary: "Invalid block definition", | ||
247 | Detail: "The equals sign \"=\" indicates an attribute definition, and must not be used when defining a block.", | ||
248 | Subject: &tok.Range, | ||
249 | Context: hcl.RangeBetween(ident.Range, tok.Range).Ptr(), | ||
250 | }) | ||
251 | case TokenNewline: | ||
252 | diags = append(diags, &hcl.Diagnostic{ | ||
253 | Severity: hcl.DiagError, | ||
254 | Summary: "Invalid block definition", | ||
255 | Detail: "A block definition must have block content delimited by \"{\" and \"}\", starting on the same line as the block header.", | ||
256 | Subject: &tok.Range, | ||
257 | Context: hcl.RangeBetween(ident.Range, tok.Range).Ptr(), | ||
258 | }) | ||
259 | default: | ||
260 | if !p.recovery { | ||
261 | diags = append(diags, &hcl.Diagnostic{ | ||
262 | Severity: hcl.DiagError, | ||
263 | Summary: "Invalid block definition", | ||
264 | Detail: "Either a quoted string block label or an opening brace (\"{\") is expected here.", | ||
265 | Subject: &tok.Range, | ||
266 | Context: hcl.RangeBetween(ident.Range, tok.Range).Ptr(), | ||
267 | }) | ||
268 | } | ||
269 | } | ||
270 | |||
271 | p.recoverAfterBodyItem() | ||
272 | |||
273 | return &Block{ | ||
274 | Type: blockType, | ||
275 | Labels: labels, | ||
276 | Body: nil, | ||
277 | |||
278 | TypeRange: ident.Range, | ||
279 | LabelRanges: labelRanges, | ||
280 | OpenBraceRange: ident.Range, // placeholder | ||
281 | CloseBraceRange: ident.Range, // placeholder | ||
282 | }, diags | ||
283 | } | ||
284 | } | ||
285 | |||
286 | // Once we fall out here, the peeker is pointed just after our opening | ||
287 | // brace, so we can begin our nested body parsing. | ||
288 | body, bodyDiags := p.ParseBody(TokenCBrace) | ||
289 | diags = append(diags, bodyDiags...) | ||
290 | cBraceRange := p.PrevRange() | ||
291 | |||
292 | eol := p.Peek() | ||
293 | if eol.Type == TokenNewline || eol.Type == TokenEOF { | ||
294 | p.Read() // eat newline | ||
295 | } else { | ||
296 | if !p.recovery { | ||
297 | diags = append(diags, &hcl.Diagnostic{ | ||
298 | Severity: hcl.DiagError, | ||
299 | Summary: "Missing newline after block definition", | ||
300 | Detail: "A block definition must end with a newline.", | ||
301 | Subject: &eol.Range, | ||
302 | Context: hcl.RangeBetween(ident.Range, eol.Range).Ptr(), | ||
303 | }) | ||
304 | } | ||
305 | p.recoverAfterBodyItem() | ||
306 | } | ||
307 | |||
308 | return &Block{ | ||
309 | Type: blockType, | ||
310 | Labels: labels, | ||
311 | Body: body, | ||
312 | |||
313 | TypeRange: ident.Range, | ||
314 | LabelRanges: labelRanges, | ||
315 | OpenBraceRange: oBrace.Range, | ||
316 | CloseBraceRange: cBraceRange, | ||
317 | }, diags | ||
318 | } | ||
319 | |||
320 | func (p *parser) ParseExpression() (Expression, hcl.Diagnostics) { | ||
321 | return p.parseTernaryConditional() | ||
322 | } | ||
323 | |||
324 | func (p *parser) parseTernaryConditional() (Expression, hcl.Diagnostics) { | ||
325 | // The ternary conditional operator (.. ? .. : ..) behaves somewhat | ||
326 | // like a binary operator except that the "symbol" is itself | ||
327 | // an expression enclosed in two punctuation characters. | ||
328 | // The middle expression is parsed as if the ? and : symbols | ||
329 | // were parentheses. The "rhs" (the "false expression") is then | ||
330 | // treated right-associatively so it behaves similarly to the | ||
331 | // middle in terms of precedence. | ||
332 | |||
333 | startRange := p.NextRange() | ||
334 | var condExpr, trueExpr, falseExpr Expression | ||
335 | var diags hcl.Diagnostics | ||
336 | |||
337 | condExpr, condDiags := p.parseBinaryOps(binaryOps) | ||
338 | diags = append(diags, condDiags...) | ||
339 | if p.recovery && condDiags.HasErrors() { | ||
340 | return condExpr, diags | ||
341 | } | ||
342 | |||
343 | questionMark := p.Peek() | ||
344 | if questionMark.Type != TokenQuestion { | ||
345 | return condExpr, diags | ||
346 | } | ||
347 | |||
348 | p.Read() // eat question mark | ||
349 | |||
350 | trueExpr, trueDiags := p.ParseExpression() | ||
351 | diags = append(diags, trueDiags...) | ||
352 | if p.recovery && trueDiags.HasErrors() { | ||
353 | return condExpr, diags | ||
354 | } | ||
355 | |||
356 | colon := p.Peek() | ||
357 | if colon.Type != TokenColon { | ||
358 | diags = append(diags, &hcl.Diagnostic{ | ||
359 | Severity: hcl.DiagError, | ||
360 | Summary: "Missing false expression in conditional", | ||
361 | Detail: "The conditional operator (...?...:...) requires a false expression, delimited by a colon.", | ||
362 | Subject: &colon.Range, | ||
363 | Context: hcl.RangeBetween(startRange, colon.Range).Ptr(), | ||
364 | }) | ||
365 | return condExpr, diags | ||
366 | } | ||
367 | |||
368 | p.Read() // eat colon | ||
369 | |||
370 | falseExpr, falseDiags := p.ParseExpression() | ||
371 | diags = append(diags, falseDiags...) | ||
372 | if p.recovery && falseDiags.HasErrors() { | ||
373 | return condExpr, diags | ||
374 | } | ||
375 | |||
376 | return &ConditionalExpr{ | ||
377 | Condition: condExpr, | ||
378 | TrueResult: trueExpr, | ||
379 | FalseResult: falseExpr, | ||
380 | |||
381 | SrcRange: hcl.RangeBetween(startRange, falseExpr.Range()), | ||
382 | }, diags | ||
383 | } | ||
384 | |||
385 | // parseBinaryOps calls itself recursively to work through all of the | ||
386 | // operator precedence groups, and then eventually calls parseExpressionTerm | ||
387 | // for each operand. | ||
388 | func (p *parser) parseBinaryOps(ops []map[TokenType]*Operation) (Expression, hcl.Diagnostics) { | ||
389 | if len(ops) == 0 { | ||
390 | // We've run out of operators, so now we'll just try to parse a term. | ||
391 | return p.parseExpressionWithTraversals() | ||
392 | } | ||
393 | |||
394 | thisLevel := ops[0] | ||
395 | remaining := ops[1:] | ||
396 | |||
397 | var lhs, rhs Expression | ||
398 | var operation *Operation | ||
399 | var diags hcl.Diagnostics | ||
400 | |||
401 | // Parse a term that might be the first operand of a binary | ||
402 | // operation or it might just be a standalone term. | ||
403 | // We won't know until we've parsed it and can look ahead | ||
404 | // to see if there's an operator token for this level. | ||
405 | lhs, lhsDiags := p.parseBinaryOps(remaining) | ||
406 | diags = append(diags, lhsDiags...) | ||
407 | if p.recovery && lhsDiags.HasErrors() { | ||
408 | return lhs, diags | ||
409 | } | ||
410 | |||
411 | // We'll keep eating up operators until we run out, so that operators | ||
412 | // with the same precedence will combine in a left-associative manner: | ||
413 | // a+b+c => (a+b)+c, not a+(b+c) | ||
414 | // | ||
415 | // Should we later want to have right-associative operators, a way | ||
416 | // to achieve that would be to call back up to ParseExpression here | ||
417 | // instead of iteratively parsing only the remaining operators. | ||
418 | for { | ||
419 | next := p.Peek() | ||
420 | var newOp *Operation | ||
421 | var ok bool | ||
422 | if newOp, ok = thisLevel[next.Type]; !ok { | ||
423 | break | ||
424 | } | ||
425 | |||
426 | // Are we extending an expression started on the previous iteration? | ||
427 | if operation != nil { | ||
428 | lhs = &BinaryOpExpr{ | ||
429 | LHS: lhs, | ||
430 | Op: operation, | ||
431 | RHS: rhs, | ||
432 | |||
433 | SrcRange: hcl.RangeBetween(lhs.Range(), rhs.Range()), | ||
434 | } | ||
435 | } | ||
436 | |||
437 | operation = newOp | ||
438 | p.Read() // eat operator token | ||
439 | var rhsDiags hcl.Diagnostics | ||
440 | rhs, rhsDiags = p.parseBinaryOps(remaining) | ||
441 | diags = append(diags, rhsDiags...) | ||
442 | if p.recovery && rhsDiags.HasErrors() { | ||
443 | return lhs, diags | ||
444 | } | ||
445 | } | ||
446 | |||
447 | if operation == nil { | ||
448 | return lhs, diags | ||
449 | } | ||
450 | |||
451 | return &BinaryOpExpr{ | ||
452 | LHS: lhs, | ||
453 | Op: operation, | ||
454 | RHS: rhs, | ||
455 | |||
456 | SrcRange: hcl.RangeBetween(lhs.Range(), rhs.Range()), | ||
457 | }, diags | ||
458 | } | ||
459 | |||
460 | func (p *parser) parseExpressionWithTraversals() (Expression, hcl.Diagnostics) { | ||
461 | term, diags := p.parseExpressionTerm() | ||
462 | ret := term | ||
463 | |||
464 | Traversal: | ||
465 | for { | ||
466 | next := p.Peek() | ||
467 | |||
468 | switch next.Type { | ||
469 | case TokenDot: | ||
470 | // Attribute access or splat | ||
471 | dot := p.Read() | ||
472 | attrTok := p.Peek() | ||
473 | |||
474 | switch attrTok.Type { | ||
475 | case TokenIdent: | ||
476 | attrTok = p.Read() // eat token | ||
477 | name := string(attrTok.Bytes) | ||
478 | rng := hcl.RangeBetween(dot.Range, attrTok.Range) | ||
479 | step := hcl.TraverseAttr{ | ||
480 | Name: name, | ||
481 | SrcRange: rng, | ||
482 | } | ||
483 | |||
484 | ret = makeRelativeTraversal(ret, step, rng) | ||
485 | |||
486 | case TokenNumberLit: | ||
487 | // This is a weird form we inherited from HIL, allowing numbers | ||
488 | // to be used as attributes as a weird way of writing [n]. | ||
489 | // This was never actually a first-class thing in HIL, but | ||
490 | // HIL tolerated sequences like .0. in its variable names and | ||
491 | // calling applications like Terraform exploited that to | ||
492 | // introduce indexing syntax where none existed. | ||
493 | numTok := p.Read() // eat token | ||
494 | attrTok = numTok | ||
495 | |||
496 | // This syntax is ambiguous if multiple indices are used in | ||
497 | // succession, like foo.0.1.baz: that actually parses as | ||
498 | // a fractional number 0.1. Since we're only supporting this | ||
499 | // syntax for compatibility with legacy Terraform | ||
500 | // configurations, and Terraform does not tend to have lists | ||
501 | // of lists, we'll choose to reject that here with a helpful | ||
502 | // error message, rather than failing later because the index | ||
503 | // isn't a whole number. | ||
504 | if dotIdx := bytes.IndexByte(numTok.Bytes, '.'); dotIdx >= 0 { | ||
505 | first := numTok.Bytes[:dotIdx] | ||
506 | second := numTok.Bytes[dotIdx+1:] | ||
507 | diags = append(diags, &hcl.Diagnostic{ | ||
508 | Severity: hcl.DiagError, | ||
509 | Summary: "Invalid legacy index syntax", | ||
510 | Detail: fmt.Sprintf("When using the legacy index syntax, chaining two indexes together is not permitted. Use the proper index syntax instead, like [%s][%s].", first, second), | ||
511 | Subject: &attrTok.Range, | ||
512 | }) | ||
513 | rng := hcl.RangeBetween(dot.Range, numTok.Range) | ||
514 | step := hcl.TraverseIndex{ | ||
515 | Key: cty.DynamicVal, | ||
516 | SrcRange: rng, | ||
517 | } | ||
518 | ret = makeRelativeTraversal(ret, step, rng) | ||
519 | break | ||
520 | } | ||
521 | |||
522 | numVal, numDiags := p.numberLitValue(numTok) | ||
523 | diags = append(diags, numDiags...) | ||
524 | |||
525 | rng := hcl.RangeBetween(dot.Range, numTok.Range) | ||
526 | step := hcl.TraverseIndex{ | ||
527 | Key: numVal, | ||
528 | SrcRange: rng, | ||
529 | } | ||
530 | |||
531 | ret = makeRelativeTraversal(ret, step, rng) | ||
532 | |||
533 | case TokenStar: | ||
534 | // "Attribute-only" splat expression. | ||
535 | // (This is a kinda weird construct inherited from HIL, which | ||
536 | // behaves a bit like a [*] splat except that it is only able | ||
537 | // to do attribute traversals into each of its elements, | ||
538 | // whereas foo[*] can support _any_ traversal. | ||
539 | marker := p.Read() // eat star | ||
540 | trav := make(hcl.Traversal, 0, 1) | ||
541 | var firstRange, lastRange hcl.Range | ||
542 | firstRange = p.NextRange() | ||
543 | for p.Peek().Type == TokenDot { | ||
544 | dot := p.Read() | ||
545 | |||
546 | if p.Peek().Type == TokenNumberLit { | ||
547 | // Continuing the "weird stuff inherited from HIL" | ||
548 | // theme, we also allow numbers as attribute names | ||
549 | // inside splats and interpret them as indexing | ||
550 | // into a list, for expressions like: | ||
551 | // foo.bar.*.baz.0.foo | ||
552 | numTok := p.Read() | ||
553 | |||
554 | // Weird special case if the user writes something | ||
555 | // like foo.bar.*.baz.0.0.foo, where 0.0 parses | ||
556 | // as a number. | ||
557 | if dotIdx := bytes.IndexByte(numTok.Bytes, '.'); dotIdx >= 0 { | ||
558 | first := numTok.Bytes[:dotIdx] | ||
559 | second := numTok.Bytes[dotIdx+1:] | ||
560 | diags = append(diags, &hcl.Diagnostic{ | ||
561 | Severity: hcl.DiagError, | ||
562 | Summary: "Invalid legacy index syntax", | ||
563 | Detail: fmt.Sprintf("When using the legacy index syntax, chaining two indexes together is not permitted. Use the proper index syntax with a full splat expression [*] instead, like [%s][%s].", first, second), | ||
564 | Subject: &attrTok.Range, | ||
565 | }) | ||
566 | trav = append(trav, hcl.TraverseIndex{ | ||
567 | Key: cty.DynamicVal, | ||
568 | SrcRange: hcl.RangeBetween(dot.Range, numTok.Range), | ||
569 | }) | ||
570 | lastRange = numTok.Range | ||
571 | continue | ||
572 | } | ||
573 | |||
574 | numVal, numDiags := p.numberLitValue(numTok) | ||
575 | diags = append(diags, numDiags...) | ||
576 | trav = append(trav, hcl.TraverseIndex{ | ||
577 | Key: numVal, | ||
578 | SrcRange: hcl.RangeBetween(dot.Range, numTok.Range), | ||
579 | }) | ||
580 | lastRange = numTok.Range | ||
581 | continue | ||
582 | } | ||
583 | |||
584 | if p.Peek().Type != TokenIdent { | ||
585 | if !p.recovery { | ||
586 | if p.Peek().Type == TokenStar { | ||
587 | diags = append(diags, &hcl.Diagnostic{ | ||
588 | Severity: hcl.DiagError, | ||
589 | Summary: "Nested splat expression not allowed", | ||
590 | Detail: "A splat expression (*) cannot be used inside another attribute-only splat expression.", | ||
591 | Subject: p.Peek().Range.Ptr(), | ||
592 | }) | ||
593 | } else { | ||
594 | diags = append(diags, &hcl.Diagnostic{ | ||
595 | Severity: hcl.DiagError, | ||
596 | Summary: "Invalid attribute name", | ||
597 | Detail: "An attribute name is required after a dot.", | ||
598 | Subject: &attrTok.Range, | ||
599 | }) | ||
600 | } | ||
601 | } | ||
602 | p.setRecovery() | ||
603 | continue Traversal | ||
604 | } | ||
605 | |||
606 | attrTok := p.Read() | ||
607 | trav = append(trav, hcl.TraverseAttr{ | ||
608 | Name: string(attrTok.Bytes), | ||
609 | SrcRange: hcl.RangeBetween(dot.Range, attrTok.Range), | ||
610 | }) | ||
611 | lastRange = attrTok.Range | ||
612 | } | ||
613 | |||
614 | itemExpr := &AnonSymbolExpr{ | ||
615 | SrcRange: hcl.RangeBetween(dot.Range, marker.Range), | ||
616 | } | ||
617 | var travExpr Expression | ||
618 | if len(trav) == 0 { | ||
619 | travExpr = itemExpr | ||
620 | } else { | ||
621 | travExpr = &RelativeTraversalExpr{ | ||
622 | Source: itemExpr, | ||
623 | Traversal: trav, | ||
624 | SrcRange: hcl.RangeBetween(firstRange, lastRange), | ||
625 | } | ||
626 | } | ||
627 | |||
628 | ret = &SplatExpr{ | ||
629 | Source: ret, | ||
630 | Each: travExpr, | ||
631 | Item: itemExpr, | ||
632 | |||
633 | SrcRange: hcl.RangeBetween(dot.Range, lastRange), | ||
634 | MarkerRange: hcl.RangeBetween(dot.Range, marker.Range), | ||
635 | } | ||
636 | |||
637 | default: | ||
638 | diags = append(diags, &hcl.Diagnostic{ | ||
639 | Severity: hcl.DiagError, | ||
640 | Summary: "Invalid attribute name", | ||
641 | Detail: "An attribute name is required after a dot.", | ||
642 | Subject: &attrTok.Range, | ||
643 | }) | ||
644 | // This leaves the peeker in a bad place, so following items | ||
645 | // will probably be misparsed until we hit something that | ||
646 | // allows us to re-sync. | ||
647 | // | ||
648 | // We will probably need to do something better here eventually | ||
649 | // in order to support autocomplete triggered by typing a | ||
650 | // period. | ||
651 | p.setRecovery() | ||
652 | } | ||
653 | |||
654 | case TokenOBrack: | ||
655 | // Indexing of a collection. | ||
656 | // This may or may not be a hcl.Traverser, depending on whether | ||
657 | // the key value is something constant. | ||
658 | |||
659 | open := p.Read() | ||
660 | // TODO: If we have a TokenStar inside our brackets, parse as | ||
661 | // a Splat expression: foo[*].baz[0]. | ||
662 | var close Token | ||
663 | p.PushIncludeNewlines(false) // arbitrary newlines allowed in brackets | ||
664 | keyExpr, keyDiags := p.ParseExpression() | ||
665 | diags = append(diags, keyDiags...) | ||
666 | if p.recovery && keyDiags.HasErrors() { | ||
667 | close = p.recover(TokenCBrack) | ||
668 | } else { | ||
669 | close = p.Read() | ||
670 | if close.Type != TokenCBrack && !p.recovery { | ||
671 | diags = append(diags, &hcl.Diagnostic{ | ||
672 | Severity: hcl.DiagError, | ||
673 | Summary: "Missing close bracket on index", | ||
674 | Detail: "The index operator must end with a closing bracket (\"]\").", | ||
675 | Subject: &close.Range, | ||
676 | }) | ||
677 | close = p.recover(TokenCBrack) | ||
678 | } | ||
679 | } | ||
680 | p.PopIncludeNewlines() | ||
681 | |||
682 | if lit, isLit := keyExpr.(*LiteralValueExpr); isLit { | ||
683 | litKey, _ := lit.Value(nil) | ||
684 | rng := hcl.RangeBetween(open.Range, close.Range) | ||
685 | step := hcl.TraverseIndex{ | ||
686 | Key: litKey, | ||
687 | SrcRange: rng, | ||
688 | } | ||
689 | ret = makeRelativeTraversal(ret, step, rng) | ||
690 | } else { | ||
691 | rng := hcl.RangeBetween(open.Range, close.Range) | ||
692 | ret = &IndexExpr{ | ||
693 | Collection: ret, | ||
694 | Key: keyExpr, | ||
695 | |||
696 | SrcRange: rng, | ||
697 | OpenRange: open.Range, | ||
698 | } | ||
699 | } | ||
700 | |||
701 | default: | ||
702 | break Traversal | ||
703 | } | ||
704 | } | ||
705 | |||
706 | return ret, diags | ||
707 | } | ||
708 | |||
709 | // makeRelativeTraversal takes an expression and a traverser and returns | ||
710 | // a traversal expression that combines the two. If the given expression | ||
711 | // is already a traversal, it is extended in place (mutating it) and | ||
712 | // returned. If it isn't, a new RelativeTraversalExpr is created and returned. | ||
713 | func makeRelativeTraversal(expr Expression, next hcl.Traverser, rng hcl.Range) Expression { | ||
714 | switch texpr := expr.(type) { | ||
715 | case *ScopeTraversalExpr: | ||
716 | texpr.Traversal = append(texpr.Traversal, next) | ||
717 | texpr.SrcRange = hcl.RangeBetween(texpr.SrcRange, rng) | ||
718 | return texpr | ||
719 | case *RelativeTraversalExpr: | ||
720 | texpr.Traversal = append(texpr.Traversal, next) | ||
721 | texpr.SrcRange = hcl.RangeBetween(texpr.SrcRange, rng) | ||
722 | return texpr | ||
723 | default: | ||
724 | return &RelativeTraversalExpr{ | ||
725 | Source: expr, | ||
726 | Traversal: hcl.Traversal{next}, | ||
727 | SrcRange: rng, | ||
728 | } | ||
729 | } | ||
730 | } | ||
731 | |||
// parseExpressionTerm parses a single expression "term": the smallest
// self-contained unit of expression syntax, dispatched on the type of the
// next token. Terms handled here are parenthesized expressions, number
// literals, identifiers (keywords true/false/null, variable references,
// and function calls), quoted/heredoc templates, unary minus and not,
// tuple constructors and object constructors.
//
// On an unrecognized token an error diagnostic is produced (unless we are
// already recovering) and a placeholder LiteralValueExpr of cty.DynamicVal
// is returned so the resulting AST stays structurally sound.
func (p *parser) parseExpressionTerm() (Expression, hcl.Diagnostics) {
	start := p.Peek()

	switch start.Type {
	case TokenOParen:
		p.Read() // eat open paren

		// Arbitrary newlines are allowed inside the parentheses.
		p.PushIncludeNewlines(false)

		expr, diags := p.ParseExpression()
		if diags.HasErrors() {
			// attempt to place the peeker after our closing paren
			// before we return, so that the next parser has some
			// chance of finding a valid expression.
			p.recover(TokenCParen)
			p.PopIncludeNewlines()
			return expr, diags
		}

		close := p.Peek()
		if close.Type != TokenCParen {
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Unbalanced parentheses",
				Detail:   "Expected a closing parenthesis to terminate the expression.",
				Subject:  &close.Range,
				Context:  hcl.RangeBetween(start.Range, close.Range).Ptr(),
			})
			p.setRecovery()
		}

		p.Read() // eat closing paren
		p.PopIncludeNewlines()

		return expr, diags

	case TokenNumberLit:
		tok := p.Read() // eat number token

		numVal, diags := p.numberLitValue(tok)
		return &LiteralValueExpr{
			Val:      numVal,
			SrcRange: tok.Range,
		}, diags

	case TokenIdent:
		tok := p.Read() // eat identifier token

		// An identifier followed directly by an open paren is a function
		// call rather than a variable reference.
		if p.Peek().Type == TokenOParen {
			return p.finishParsingFunctionCall(tok)
		}

		name := string(tok.Bytes)
		switch name {
		case "true":
			return &LiteralValueExpr{
				Val:      cty.True,
				SrcRange: tok.Range,
			}, nil
		case "false":
			return &LiteralValueExpr{
				Val:      cty.False,
				SrcRange: tok.Range,
			}, nil
		case "null":
			return &LiteralValueExpr{
				Val:      cty.NullVal(cty.DynamicPseudoType),
				SrcRange: tok.Range,
			}, nil
		default:
			// Any other identifier begins a traversal rooted at that name.
			return &ScopeTraversalExpr{
				Traversal: hcl.Traversal{
					hcl.TraverseRoot{
						Name:     name,
						SrcRange: tok.Range,
					},
				},
				SrcRange: tok.Range,
			}, nil
		}

	case TokenOQuote, TokenOHeredoc:
		open := p.Read() // eat opening marker
		closer := p.oppositeBracket(open.Type)
		exprs, passthru, _, diags := p.parseTemplateInner(closer)

		closeRange := p.PrevRange()

		// "passthru" signals that the template reduces to exactly one
		// expression (the panic below enforces that invariant), which is
		// wrapped rather than concatenated into a string.
		if passthru {
			if len(exprs) != 1 {
				panic("passthru set with len(exprs) != 1")
			}
			return &TemplateWrapExpr{
				Wrapped:  exprs[0],
				SrcRange: hcl.RangeBetween(open.Range, closeRange),
			}, diags
		}

		return &TemplateExpr{
			Parts:    exprs,
			SrcRange: hcl.RangeBetween(open.Range, closeRange),
		}, diags

	case TokenMinus:
		tok := p.Read() // eat minus token

		// Important to use parseExpressionWithTraversals rather than parseExpression
		// here, otherwise we can capture a following binary expression into
		// our negation.
		// e.g. -46+5 should parse as (-46)+5, not -(46+5)
		operand, diags := p.parseExpressionWithTraversals()
		return &UnaryOpExpr{
			Op:  OpNegate,
			Val: operand,

			SrcRange:    hcl.RangeBetween(tok.Range, operand.Range()),
			SymbolRange: tok.Range,
		}, diags

	case TokenBang:
		tok := p.Read() // eat bang token

		// Important to use parseExpressionWithTraversals rather than parseExpression
		// here, otherwise we can capture a following binary expression into
		// our negation.
		operand, diags := p.parseExpressionWithTraversals()
		return &UnaryOpExpr{
			Op:  OpLogicalNot,
			Val: operand,

			SrcRange:    hcl.RangeBetween(tok.Range, operand.Range()),
			SymbolRange: tok.Range,
		}, diags

	case TokenOBrack:
		return p.parseTupleCons()

	case TokenOBrace:
		return p.parseObjectCons()

	default:
		var diags hcl.Diagnostics
		if !p.recovery {
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Invalid expression",
				Detail:   "Expected the start of an expression, but found an invalid expression token.",
				Subject:  &start.Range,
			})
		}
		p.setRecovery()

		// Return a placeholder so that the AST is still structurally sound
		// even in the presence of parse errors.
		return &LiteralValueExpr{
			Val:      cty.DynamicVal,
			SrcRange: start.Range,
		}, diags
	}
}
892 | |||
893 | func (p *parser) numberLitValue(tok Token) (cty.Value, hcl.Diagnostics) { | ||
894 | // We'll lean on the cty converter to do the conversion, to ensure that | ||
895 | // the behavior is the same as what would happen if converting a | ||
896 | // non-literal string to a number. | ||
897 | numStrVal := cty.StringVal(string(tok.Bytes)) | ||
898 | numVal, err := convert.Convert(numStrVal, cty.Number) | ||
899 | if err != nil { | ||
900 | ret := cty.UnknownVal(cty.Number) | ||
901 | return ret, hcl.Diagnostics{ | ||
902 | { | ||
903 | Severity: hcl.DiagError, | ||
904 | Summary: "Invalid number literal", | ||
905 | // FIXME: not a very good error message, but convert only | ||
906 | // gives us "a number is required", so not much help either. | ||
907 | Detail: "Failed to recognize the value of this number literal.", | ||
908 | Subject: &tok.Range, | ||
909 | }, | ||
910 | } | ||
911 | } | ||
912 | return numVal, nil | ||
913 | } | ||
914 | |||
// finishParsingFunctionCall parses a function call assuming that the function
// name was already read, and so the peeker should be pointing at the opening
// parenthesis after the name.
//
// It consumes tokens through the closing parenthesis (or a recovery point)
// and returns a FunctionCallExpr; on argument-level parse errors it returns
// a partial argument list along with the accumulated diagnostics.
func (p *parser) finishParsingFunctionCall(name Token) (Expression, hcl.Diagnostics) {
	openTok := p.Read()
	if openTok.Type != TokenOParen {
		// should never happen if callers behave
		panic("finishParsingFunctionCall called with non-parenthesis as next token")
	}

	var args []Expression
	var diags hcl.Diagnostics
	var expandFinal bool
	var closeTok Token

	// Arbitrary newlines are allowed inside the function call parentheses.
	p.PushIncludeNewlines(false)

Token:
	for {
		tok := p.Peek()

		// Handles both an empty argument list and a closer that directly
		// follows a separator handled further below.
		if tok.Type == TokenCParen {
			closeTok = p.Read() // eat closing paren
			break Token
		}

		arg, argDiags := p.ParseExpression()
		args = append(args, arg)
		diags = append(diags, argDiags...)
		if p.recovery && argDiags.HasErrors() {
			// if there was a parse error in the argument then we've
			// probably been left in a weird place in the token stream,
			// so we'll bail out with a partial argument list.
			p.recover(TokenCParen)
			break Token
		}

		sep := p.Read()
		if sep.Type == TokenCParen {
			closeTok = sep
			break Token
		}

		if sep.Type == TokenEllipsis {
			// "..." marks the final argument for expansion; it is only
			// valid immediately before the closing parenthesis.
			expandFinal = true

			if p.Peek().Type != TokenCParen {
				if !p.recovery {
					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  "Missing closing parenthesis",
						Detail:   "An expanded function argument (with ...) must be immediately followed by closing parentheses.",
						Subject:  &sep.Range,
						Context:  hcl.RangeBetween(name.Range, sep.Range).Ptr(),
					})
				}
				closeTok = p.recover(TokenCParen)
			} else {
				closeTok = p.Read() // eat closing paren
			}
			break Token
		}

		if sep.Type != TokenComma {
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Missing argument separator",
				Detail:   "A comma is required to separate each function argument from the next.",
				Subject:  &sep.Range,
				Context:  hcl.RangeBetween(name.Range, sep.Range).Ptr(),
			})
			closeTok = p.recover(TokenCParen)
			break Token
		}

		if p.Peek().Type == TokenCParen {
			// A trailing comma after the last argument gets us in here.
			closeTok = p.Read() // eat closing paren
			break Token
		}

	}

	p.PopIncludeNewlines()

	return &FunctionCallExpr{
		Name: string(name.Bytes),
		Args: args,

		ExpandFinal: expandFinal,

		NameRange:       name.Range,
		OpenParenRange:  openTok.Range,
		CloseParenRange: closeTok.Range,
	}, diags
}
1012 | |||
// parseTupleCons parses a tuple constructor expression ("[ ... ]"),
// assuming the peeker is pointing at the opening bracket. If the first
// token inside the brackets matches the "for" keyword, parsing is handed
// off to finishParsingForExpr to build a tuple-producing 'for' expression
// instead.
func (p *parser) parseTupleCons() (Expression, hcl.Diagnostics) {
	open := p.Read()
	if open.Type != TokenOBrack {
		// Should never happen if callers are behaving
		panic("parseTupleCons called without peeker pointing to open bracket")
	}

	// Arbitrary newlines are allowed inside the brackets.
	p.PushIncludeNewlines(false)
	defer p.PopIncludeNewlines()

	if forKeyword.TokenMatches(p.Peek()) {
		return p.finishParsingForExpr(open)
	}

	var close Token

	var diags hcl.Diagnostics
	var exprs []Expression

	for {
		next := p.Peek()
		// Handles both an empty tuple and a closer after a trailing comma.
		if next.Type == TokenCBrack {
			close = p.Read() // eat closer
			break
		}

		expr, exprDiags := p.ParseExpression()
		exprs = append(exprs, expr)
		diags = append(diags, exprDiags...)

		if p.recovery && exprDiags.HasErrors() {
			// If expression parsing failed then we are probably in a strange
			// place in the token stream, so we'll bail out and try to reset
			// to after our closing bracket to allow parsing to continue.
			close = p.recover(TokenCBrack)
			break
		}

		next = p.Peek()
		if next.Type == TokenCBrack {
			close = p.Read() // eat closer
			break
		}

		if next.Type != TokenComma {
			if !p.recovery {
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Missing item separator",
					Detail:   "Expected a comma to mark the beginning of the next item.",
					Subject:  &next.Range,
					Context:  hcl.RangeBetween(open.Range, next.Range).Ptr(),
				})
			}
			close = p.recover(TokenCBrack)
			break
		}

		p.Read() // eat comma

	}

	return &TupleConsExpr{
		Exprs: exprs,

		SrcRange:  hcl.RangeBetween(open.Range, close.Range),
		OpenRange: open.Range,
	}, diags
}
1082 | |||
// parseObjectCons parses an object constructor expression ("{ ... }"),
// assuming the peeker is pointing at the opening brace. If the first
// token inside the braces matches the "for" keyword, parsing is handed
// off to finishParsingForExpr to build an object-producing 'for'
// expression instead.
//
// Each item is a key expression, an "=" or ":" separator, and a value
// expression; items are separated by commas or newlines.
func (p *parser) parseObjectCons() (Expression, hcl.Diagnostics) {
	open := p.Read()
	if open.Type != TokenOBrace {
		// Should never happen if callers are behaving
		panic("parseObjectCons called without peeker pointing to open brace")
	}

	// Newlines are significant inside braces: alongside commas, they act
	// as item separators (see the check near the bottom of the loop).
	p.PushIncludeNewlines(true)
	defer p.PopIncludeNewlines()

	if forKeyword.TokenMatches(p.Peek()) {
		return p.finishParsingForExpr(open)
	}

	var close Token

	var diags hcl.Diagnostics
	var items []ObjectConsItem

	for {
		next := p.Peek()
		// Blank lines between items are tolerated.
		if next.Type == TokenNewline {
			p.Read() // eat newline
			continue
		}

		if next.Type == TokenCBrace {
			close = p.Read() // eat closer
			break
		}

		var key Expression
		var keyDiags hcl.Diagnostics
		key, keyDiags = p.ParseExpression()
		diags = append(diags, keyDiags...)

		if p.recovery && keyDiags.HasErrors() {
			// If expression parsing failed then we are probably in a strange
			// place in the token stream, so we'll bail out and try to reset
			// to after our closing brace to allow parsing to continue.
			close = p.recover(TokenCBrace)
			break
		}

		// We wrap up the key expression in a special wrapper that deals
		// with our special case that naked identifiers as object keys
		// are interpreted as literal strings.
		key = &ObjectConsKeyExpr{Wrapped: key}

		next = p.Peek()
		if next.Type != TokenEqual && next.Type != TokenColon {
			if !p.recovery {
				// A newline/comma here means the value is missing entirely;
				// anything else means the separator itself is missing.
				if next.Type == TokenNewline || next.Type == TokenComma {
					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  "Missing item value",
						Detail:   "Expected an item value, introduced by an equals sign (\"=\").",
						Subject:  &next.Range,
						Context:  hcl.RangeBetween(open.Range, next.Range).Ptr(),
					})
				} else {
					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  "Missing key/value separator",
						Detail:   "Expected an equals sign (\"=\") to mark the beginning of the item value.",
						Subject:  &next.Range,
						Context:  hcl.RangeBetween(open.Range, next.Range).Ptr(),
					})
				}
			}
			close = p.recover(TokenCBrace)
			break
		}

		p.Read() // eat equals sign or colon

		value, valueDiags := p.ParseExpression()
		diags = append(diags, valueDiags...)

		if p.recovery && valueDiags.HasErrors() {
			// If expression parsing failed then we are probably in a strange
			// place in the token stream, so we'll bail out and try to reset
			// to after our closing brace to allow parsing to continue.
			close = p.recover(TokenCBrace)
			break
		}

		items = append(items, ObjectConsItem{
			KeyExpr:   key,
			ValueExpr: value,
		})

		next = p.Peek()
		if next.Type == TokenCBrace {
			close = p.Read() // eat closer
			break
		}

		if next.Type != TokenComma && next.Type != TokenNewline {
			if !p.recovery {
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Missing item separator",
					Detail:   "Expected a newline or comma to mark the beginning of the next item.",
					Subject:  &next.Range,
					Context:  hcl.RangeBetween(open.Range, next.Range).Ptr(),
				})
			}
			close = p.recover(TokenCBrace)
			break
		}

		p.Read() // eat comma or newline

	}

	return &ObjectConsExpr{
		Items: items,

		SrcRange:  hcl.RangeBetween(open.Range, close.Range),
		OpenRange: open.Range,
	}, diags
}
1206 | |||
1207 | func (p *parser) finishParsingForExpr(open Token) (Expression, hcl.Diagnostics) { | ||
1208 | introducer := p.Read() | ||
1209 | if !forKeyword.TokenMatches(introducer) { | ||
1210 | // Should never happen if callers are behaving | ||
1211 | panic("finishParsingForExpr called without peeker pointing to 'for' identifier") | ||
1212 | } | ||
1213 | |||
1214 | var makeObj bool | ||
1215 | var closeType TokenType | ||
1216 | switch open.Type { | ||
1217 | case TokenOBrace: | ||
1218 | makeObj = true | ||
1219 | closeType = TokenCBrace | ||
1220 | case TokenOBrack: | ||
1221 | makeObj = false // making a tuple | ||
1222 | closeType = TokenCBrack | ||
1223 | default: | ||
1224 | // Should never happen if callers are behaving | ||
1225 | panic("finishParsingForExpr called with invalid open token") | ||
1226 | } | ||
1227 | |||
1228 | var diags hcl.Diagnostics | ||
1229 | var keyName, valName string | ||
1230 | |||
1231 | if p.Peek().Type != TokenIdent { | ||
1232 | if !p.recovery { | ||
1233 | diags = append(diags, &hcl.Diagnostic{ | ||
1234 | Severity: hcl.DiagError, | ||
1235 | Summary: "Invalid 'for' expression", | ||
1236 | Detail: "For expression requires variable name after 'for'.", | ||
1237 | Subject: p.Peek().Range.Ptr(), | ||
1238 | Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(), | ||
1239 | }) | ||
1240 | } | ||
1241 | close := p.recover(closeType) | ||
1242 | return &LiteralValueExpr{ | ||
1243 | Val: cty.DynamicVal, | ||
1244 | SrcRange: hcl.RangeBetween(open.Range, close.Range), | ||
1245 | }, diags | ||
1246 | } | ||
1247 | |||
1248 | valName = string(p.Read().Bytes) | ||
1249 | |||
1250 | if p.Peek().Type == TokenComma { | ||
1251 | // What we just read was actually the key, then. | ||
1252 | keyName = valName | ||
1253 | p.Read() // eat comma | ||
1254 | |||
1255 | if p.Peek().Type != TokenIdent { | ||
1256 | if !p.recovery { | ||
1257 | diags = append(diags, &hcl.Diagnostic{ | ||
1258 | Severity: hcl.DiagError, | ||
1259 | Summary: "Invalid 'for' expression", | ||
1260 | Detail: "For expression requires value variable name after comma.", | ||
1261 | Subject: p.Peek().Range.Ptr(), | ||
1262 | Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(), | ||
1263 | }) | ||
1264 | } | ||
1265 | close := p.recover(closeType) | ||
1266 | return &LiteralValueExpr{ | ||
1267 | Val: cty.DynamicVal, | ||
1268 | SrcRange: hcl.RangeBetween(open.Range, close.Range), | ||
1269 | }, diags | ||
1270 | } | ||
1271 | |||
1272 | valName = string(p.Read().Bytes) | ||
1273 | } | ||
1274 | |||
1275 | if !inKeyword.TokenMatches(p.Peek()) { | ||
1276 | if !p.recovery { | ||
1277 | diags = append(diags, &hcl.Diagnostic{ | ||
1278 | Severity: hcl.DiagError, | ||
1279 | Summary: "Invalid 'for' expression", | ||
1280 | Detail: "For expression requires 'in' keyword after names.", | ||
1281 | Subject: p.Peek().Range.Ptr(), | ||
1282 | Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(), | ||
1283 | }) | ||
1284 | } | ||
1285 | close := p.recover(closeType) | ||
1286 | return &LiteralValueExpr{ | ||
1287 | Val: cty.DynamicVal, | ||
1288 | SrcRange: hcl.RangeBetween(open.Range, close.Range), | ||
1289 | }, diags | ||
1290 | } | ||
1291 | p.Read() // eat 'in' keyword | ||
1292 | |||
1293 | collExpr, collDiags := p.ParseExpression() | ||
1294 | diags = append(diags, collDiags...) | ||
1295 | if p.recovery && collDiags.HasErrors() { | ||
1296 | close := p.recover(closeType) | ||
1297 | return &LiteralValueExpr{ | ||
1298 | Val: cty.DynamicVal, | ||
1299 | SrcRange: hcl.RangeBetween(open.Range, close.Range), | ||
1300 | }, diags | ||
1301 | } | ||
1302 | |||
1303 | if p.Peek().Type != TokenColon { | ||
1304 | if !p.recovery { | ||
1305 | diags = append(diags, &hcl.Diagnostic{ | ||
1306 | Severity: hcl.DiagError, | ||
1307 | Summary: "Invalid 'for' expression", | ||
1308 | Detail: "For expression requires colon after collection expression.", | ||
1309 | Subject: p.Peek().Range.Ptr(), | ||
1310 | Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(), | ||
1311 | }) | ||
1312 | } | ||
1313 | close := p.recover(closeType) | ||
1314 | return &LiteralValueExpr{ | ||
1315 | Val: cty.DynamicVal, | ||
1316 | SrcRange: hcl.RangeBetween(open.Range, close.Range), | ||
1317 | }, diags | ||
1318 | } | ||
1319 | p.Read() // eat colon | ||
1320 | |||
1321 | var keyExpr, valExpr Expression | ||
1322 | var keyDiags, valDiags hcl.Diagnostics | ||
1323 | valExpr, valDiags = p.ParseExpression() | ||
1324 | if p.Peek().Type == TokenFatArrow { | ||
1325 | // What we just parsed was actually keyExpr | ||
1326 | p.Read() // eat the fat arrow | ||
1327 | keyExpr, keyDiags = valExpr, valDiags | ||
1328 | |||
1329 | valExpr, valDiags = p.ParseExpression() | ||
1330 | } | ||
1331 | diags = append(diags, keyDiags...) | ||
1332 | diags = append(diags, valDiags...) | ||
1333 | if p.recovery && (keyDiags.HasErrors() || valDiags.HasErrors()) { | ||
1334 | close := p.recover(closeType) | ||
1335 | return &LiteralValueExpr{ | ||
1336 | Val: cty.DynamicVal, | ||
1337 | SrcRange: hcl.RangeBetween(open.Range, close.Range), | ||
1338 | }, diags | ||
1339 | } | ||
1340 | |||
1341 | group := false | ||
1342 | var ellipsis Token | ||
1343 | if p.Peek().Type == TokenEllipsis { | ||
1344 | ellipsis = p.Read() | ||
1345 | group = true | ||
1346 | } | ||
1347 | |||
1348 | var condExpr Expression | ||
1349 | var condDiags hcl.Diagnostics | ||
1350 | if ifKeyword.TokenMatches(p.Peek()) { | ||
1351 | p.Read() // eat "if" | ||
1352 | condExpr, condDiags = p.ParseExpression() | ||
1353 | diags = append(diags, condDiags...) | ||
1354 | if p.recovery && condDiags.HasErrors() { | ||
1355 | close := p.recover(p.oppositeBracket(open.Type)) | ||
1356 | return &LiteralValueExpr{ | ||
1357 | Val: cty.DynamicVal, | ||
1358 | SrcRange: hcl.RangeBetween(open.Range, close.Range), | ||
1359 | }, diags | ||
1360 | } | ||
1361 | } | ||
1362 | |||
1363 | var close Token | ||
1364 | if p.Peek().Type == closeType { | ||
1365 | close = p.Read() | ||
1366 | } else { | ||
1367 | if !p.recovery { | ||
1368 | diags = append(diags, &hcl.Diagnostic{ | ||
1369 | Severity: hcl.DiagError, | ||
1370 | Summary: "Invalid 'for' expression", | ||
1371 | Detail: "Extra characters after the end of the 'for' expression.", | ||
1372 | Subject: p.Peek().Range.Ptr(), | ||
1373 | Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(), | ||
1374 | }) | ||
1375 | } | ||
1376 | close = p.recover(closeType) | ||
1377 | } | ||
1378 | |||
1379 | if !makeObj { | ||
1380 | if keyExpr != nil { | ||
1381 | diags = append(diags, &hcl.Diagnostic{ | ||
1382 | Severity: hcl.DiagError, | ||
1383 | Summary: "Invalid 'for' expression", | ||
1384 | Detail: "Key expression is not valid when building a tuple.", | ||
1385 | Subject: keyExpr.Range().Ptr(), | ||
1386 | Context: hcl.RangeBetween(open.Range, close.Range).Ptr(), | ||
1387 | }) | ||
1388 | } | ||
1389 | |||
1390 | if group { | ||
1391 | diags = append(diags, &hcl.Diagnostic{ | ||
1392 | Severity: hcl.DiagError, | ||
1393 | Summary: "Invalid 'for' expression", | ||
1394 | Detail: "Grouping ellipsis (...) cannot be used when building a tuple.", | ||
1395 | Subject: &ellipsis.Range, | ||
1396 | Context: hcl.RangeBetween(open.Range, close.Range).Ptr(), | ||
1397 | }) | ||
1398 | } | ||
1399 | } else { | ||
1400 | if keyExpr == nil { | ||
1401 | diags = append(diags, &hcl.Diagnostic{ | ||
1402 | Severity: hcl.DiagError, | ||
1403 | Summary: "Invalid 'for' expression", | ||
1404 | Detail: "Key expression is required when building an object.", | ||
1405 | Subject: valExpr.Range().Ptr(), | ||
1406 | Context: hcl.RangeBetween(open.Range, close.Range).Ptr(), | ||
1407 | }) | ||
1408 | } | ||
1409 | } | ||
1410 | |||
1411 | return &ForExpr{ | ||
1412 | KeyVar: keyName, | ||
1413 | ValVar: valName, | ||
1414 | CollExpr: collExpr, | ||
1415 | KeyExpr: keyExpr, | ||
1416 | ValExpr: valExpr, | ||
1417 | CondExpr: condExpr, | ||
1418 | Group: group, | ||
1419 | |||
1420 | SrcRange: hcl.RangeBetween(open.Range, close.Range), | ||
1421 | OpenRange: open.Range, | ||
1422 | CloseRange: close.Range, | ||
1423 | }, diags | ||
1424 | } | ||
1425 | |||
1426 | // parseQuotedStringLiteral is a helper for parsing quoted strings that | ||
1427 | // aren't allowed to contain any interpolations, such as block labels. | ||
1428 | func (p *parser) parseQuotedStringLiteral() (string, hcl.Range, hcl.Diagnostics) { | ||
1429 | oQuote := p.Read() | ||
1430 | if oQuote.Type != TokenOQuote { | ||
1431 | return "", oQuote.Range, hcl.Diagnostics{ | ||
1432 | { | ||
1433 | Severity: hcl.DiagError, | ||
1434 | Summary: "Invalid string literal", | ||
1435 | Detail: "A quoted string is required here.", | ||
1436 | Subject: &oQuote.Range, | ||
1437 | }, | ||
1438 | } | ||
1439 | } | ||
1440 | |||
1441 | var diags hcl.Diagnostics | ||
1442 | ret := &bytes.Buffer{} | ||
1443 | var cQuote Token | ||
1444 | |||
1445 | Token: | ||
1446 | for { | ||
1447 | tok := p.Read() | ||
1448 | switch tok.Type { | ||
1449 | |||
1450 | case TokenCQuote: | ||
1451 | cQuote = tok | ||
1452 | break Token | ||
1453 | |||
1454 | case TokenQuotedLit: | ||
1455 | s, sDiags := p.decodeStringLit(tok) | ||
1456 | diags = append(diags, sDiags...) | ||
1457 | ret.WriteString(s) | ||
1458 | |||
1459 | case TokenTemplateControl, TokenTemplateInterp: | ||
1460 | which := "$" | ||
1461 | if tok.Type == TokenTemplateControl { | ||
1462 | which = "!" | ||
1463 | } | ||
1464 | |||
1465 | diags = append(diags, &hcl.Diagnostic{ | ||
1466 | Severity: hcl.DiagError, | ||
1467 | Summary: "Invalid string literal", | ||
1468 | Detail: fmt.Sprintf( | ||
1469 | "Template sequences are not allowed in this string. To include a literal %q, double it (as \"%s%s\") to escape it.", | ||
1470 | which, which, which, | ||
1471 | ), | ||
1472 | Subject: &tok.Range, | ||
1473 | Context: hcl.RangeBetween(oQuote.Range, tok.Range).Ptr(), | ||
1474 | }) | ||
1475 | p.recover(TokenTemplateSeqEnd) | ||
1476 | |||
1477 | case TokenEOF: | ||
1478 | diags = append(diags, &hcl.Diagnostic{ | ||
1479 | Severity: hcl.DiagError, | ||
1480 | Summary: "Unterminated string literal", | ||
1481 | Detail: "Unable to find the closing quote mark before the end of the file.", | ||
1482 | Subject: &tok.Range, | ||
1483 | Context: hcl.RangeBetween(oQuote.Range, tok.Range).Ptr(), | ||
1484 | }) | ||
1485 | break Token | ||
1486 | |||
1487 | default: | ||
1488 | // Should never happen, as long as the scanner is behaving itself | ||
1489 | diags = append(diags, &hcl.Diagnostic{ | ||
1490 | Severity: hcl.DiagError, | ||
1491 | Summary: "Invalid string literal", | ||
1492 | Detail: "This item is not valid in a string literal.", | ||
1493 | Subject: &tok.Range, | ||
1494 | Context: hcl.RangeBetween(oQuote.Range, tok.Range).Ptr(), | ||
1495 | }) | ||
1496 | p.recover(TokenOQuote) | ||
1497 | break Token | ||
1498 | |||
1499 | } | ||
1500 | |||
1501 | } | ||
1502 | |||
1503 | return ret.String(), hcl.RangeBetween(oQuote.Range, cQuote.Range), diags | ||
1504 | } | ||
1505 | |||
1506 | // decodeStringLit processes the given token, which must be either a | ||
1507 | // TokenQuotedLit or a TokenStringLit, returning the string resulting from | ||
1508 | // resolving any escape sequences. | ||
1509 | // | ||
1510 | // If any error diagnostics are returned, the returned string may be incomplete | ||
1511 | // or otherwise invalid. | ||
1512 | func (p *parser) decodeStringLit(tok Token) (string, hcl.Diagnostics) { | ||
1513 | var quoted bool | ||
1514 | switch tok.Type { | ||
1515 | case TokenQuotedLit: | ||
1516 | quoted = true | ||
1517 | case TokenStringLit: | ||
1518 | quoted = false | ||
1519 | default: | ||
1520 | panic("decodeQuotedLit can only be used with TokenStringLit and TokenQuotedLit tokens") | ||
1521 | } | ||
1522 | var diags hcl.Diagnostics | ||
1523 | |||
1524 | ret := make([]byte, 0, len(tok.Bytes)) | ||
1525 | slices := scanStringLit(tok.Bytes, quoted) | ||
1526 | |||
1527 | // We will mutate rng constantly as we walk through our token slices below. | ||
1528 | // Any diagnostics must take a copy of this rng rather than simply pointing | ||
1529 | // to it, e.g. by using rng.Ptr() rather than &rng. | ||
1530 | rng := tok.Range | ||
1531 | rng.End = rng.Start | ||
1532 | |||
1533 | Slices: | ||
1534 | for _, slice := range slices { | ||
1535 | if len(slice) == 0 { | ||
1536 | continue | ||
1537 | } | ||
1538 | |||
1539 | // Advance the start of our range to where the previous token ended | ||
1540 | rng.Start = rng.End | ||
1541 | |||
1542 | // Advance the end of our range to after our token. | ||
1543 | b := slice | ||
1544 | for len(b) > 0 { | ||
1545 | adv, ch, _ := textseg.ScanGraphemeClusters(b, true) | ||
1546 | rng.End.Byte += adv | ||
1547 | switch ch[0] { | ||
1548 | case '\r', '\n': | ||
1549 | rng.End.Line++ | ||
1550 | rng.End.Column = 1 | ||
1551 | default: | ||
1552 | rng.End.Column++ | ||
1553 | } | ||
1554 | b = b[adv:] | ||
1555 | } | ||
1556 | |||
1557 | TokenType: | ||
1558 | switch slice[0] { | ||
1559 | case '\\': | ||
1560 | if !quoted { | ||
1561 | // If we're not in quoted mode then just treat this token as | ||
1562 | // normal. (Slices can still start with backslash even if we're | ||
1563 | // not specifically looking for backslash sequences.) | ||
1564 | break TokenType | ||
1565 | } | ||
1566 | if len(slice) < 2 { | ||
1567 | diags = append(diags, &hcl.Diagnostic{ | ||
1568 | Severity: hcl.DiagError, | ||
1569 | Summary: "Invalid escape sequence", | ||
1570 | Detail: "Backslash must be followed by an escape sequence selector character.", | ||
1571 | Subject: rng.Ptr(), | ||
1572 | }) | ||
1573 | break TokenType | ||
1574 | } | ||
1575 | |||
1576 | switch slice[1] { | ||
1577 | |||
1578 | case 'n': | ||
1579 | ret = append(ret, '\n') | ||
1580 | continue Slices | ||
1581 | case 'r': | ||
1582 | ret = append(ret, '\r') | ||
1583 | continue Slices | ||
1584 | case 't': | ||
1585 | ret = append(ret, '\t') | ||
1586 | continue Slices | ||
1587 | case '"': | ||
1588 | ret = append(ret, '"') | ||
1589 | continue Slices | ||
1590 | case '\\': | ||
1591 | ret = append(ret, '\\') | ||
1592 | continue Slices | ||
1593 | case 'u', 'U': | ||
1594 | if slice[1] == 'u' && len(slice) != 6 { | ||
1595 | diags = append(diags, &hcl.Diagnostic{ | ||
1596 | Severity: hcl.DiagError, | ||
1597 | Summary: "Invalid escape sequence", | ||
1598 | Detail: "The \\u escape sequence must be followed by four hexadecimal digits.", | ||
1599 | Subject: rng.Ptr(), | ||
1600 | }) | ||
1601 | break TokenType | ||
1602 | } else if slice[1] == 'U' && len(slice) != 10 { | ||
1603 | diags = append(diags, &hcl.Diagnostic{ | ||
1604 | Severity: hcl.DiagError, | ||
1605 | Summary: "Invalid escape sequence", | ||
1606 | Detail: "The \\U escape sequence must be followed by eight hexadecimal digits.", | ||
1607 | Subject: rng.Ptr(), | ||
1608 | }) | ||
1609 | break TokenType | ||
1610 | } | ||
1611 | |||
1612 | numHex := string(slice[2:]) | ||
1613 | num, err := strconv.ParseUint(numHex, 16, 32) | ||
1614 | if err != nil { | ||
1615 | // Should never happen because the scanner won't match | ||
1616 | // a sequence of digits that isn't valid. | ||
1617 | panic(err) | ||
1618 | } | ||
1619 | |||
1620 | r := rune(num) | ||
1621 | l := utf8.RuneLen(r) | ||
1622 | if l == -1 { | ||
1623 | diags = append(diags, &hcl.Diagnostic{ | ||
1624 | Severity: hcl.DiagError, | ||
1625 | Summary: "Invalid escape sequence", | ||
1626 | Detail: fmt.Sprintf("Cannot encode character U+%04x in UTF-8.", num), | ||
1627 | Subject: rng.Ptr(), | ||
1628 | }) | ||
1629 | break TokenType | ||
1630 | } | ||
1631 | for i := 0; i < l; i++ { | ||
1632 | ret = append(ret, 0) | ||
1633 | } | ||
1634 | rb := ret[len(ret)-l:] | ||
1635 | utf8.EncodeRune(rb, r) | ||
1636 | |||
1637 | continue Slices | ||
1638 | |||
1639 | default: | ||
1640 | diags = append(diags, &hcl.Diagnostic{ | ||
1641 | Severity: hcl.DiagError, | ||
1642 | Summary: "Invalid escape sequence", | ||
1643 | Detail: fmt.Sprintf("The symbol %q is not a valid escape sequence selector.", slice[1:]), | ||
1644 | Subject: rng.Ptr(), | ||
1645 | }) | ||
1646 | ret = append(ret, slice[1:]...) | ||
1647 | continue Slices | ||
1648 | } | ||
1649 | |||
1650 | case '$', '%': | ||
1651 | if len(slice) != 3 { | ||
1652 | // Not long enough to be our escape sequence, so it's literal. | ||
1653 | break TokenType | ||
1654 | } | ||
1655 | |||
1656 | if slice[1] == slice[0] && slice[2] == '{' { | ||
1657 | ret = append(ret, slice[0]) | ||
1658 | ret = append(ret, '{') | ||
1659 | continue Slices | ||
1660 | } | ||
1661 | |||
1662 | break TokenType | ||
1663 | } | ||
1664 | |||
1665 | // If we fall out here or break out of here from the switch above | ||
1666 | // then this slice is just a literal. | ||
1667 | ret = append(ret, slice...) | ||
1668 | } | ||
1669 | |||
1670 | return string(ret), diags | ||
1671 | } | ||
1672 | |||
// setRecovery turns on recovery mode without actually doing any recovery.
// This can be used when a parser knowingly leaves the peeker in a useless
// place and wants to suppress errors that might result from that decision.
func (p *parser) setRecovery() {
	// While p.recovery is true, diagnostic emission elsewhere in the parser
	// is guarded by "if !p.recovery" checks and thus suppressed.
	p.recovery = true
}
1679 | |||
1680 | // recover seeks forward in the token stream until it finds TokenType "end", | ||
1681 | // then returns with the peeker pointed at the following token. | ||
1682 | // | ||
1683 | // If the given token type is a bracketer, this function will additionally | ||
1684 | // count nested instances of the brackets to try to leave the peeker at | ||
1685 | // the end of the _current_ instance of that bracketer, skipping over any | ||
1686 | // nested instances. This is a best-effort operation and may have | ||
1687 | // unpredictable results on input with bad bracketer nesting. | ||
1688 | func (p *parser) recover(end TokenType) Token { | ||
1689 | start := p.oppositeBracket(end) | ||
1690 | p.recovery = true | ||
1691 | |||
1692 | nest := 0 | ||
1693 | for { | ||
1694 | tok := p.Read() | ||
1695 | ty := tok.Type | ||
1696 | if end == TokenTemplateSeqEnd && ty == TokenTemplateControl { | ||
1697 | // normalize so that our matching behavior can work, since | ||
1698 | // TokenTemplateControl/TokenTemplateInterp are asymmetrical | ||
1699 | // with TokenTemplateSeqEnd and thus we need to count both | ||
1700 | // openers if that's the closer we're looking for. | ||
1701 | ty = TokenTemplateInterp | ||
1702 | } | ||
1703 | |||
1704 | switch ty { | ||
1705 | case start: | ||
1706 | nest++ | ||
1707 | case end: | ||
1708 | if nest < 1 { | ||
1709 | return tok | ||
1710 | } | ||
1711 | |||
1712 | nest-- | ||
1713 | case TokenEOF: | ||
1714 | return tok | ||
1715 | } | ||
1716 | } | ||
1717 | } | ||
1718 | |||
1719 | // recoverOver seeks forward in the token stream until it finds a block | ||
1720 | // starting with TokenType "start", then finds the corresponding end token, | ||
1721 | // leaving the peeker pointed at the token after that end token. | ||
1722 | // | ||
1723 | // The given token type _must_ be a bracketer. For example, if the given | ||
1724 | // start token is TokenOBrace then the parser will be left at the _end_ of | ||
1725 | // the next brace-delimited block encountered, or at EOF if no such block | ||
1726 | // is found or it is unclosed. | ||
1727 | func (p *parser) recoverOver(start TokenType) { | ||
1728 | end := p.oppositeBracket(start) | ||
1729 | |||
1730 | // find the opening bracket first | ||
1731 | Token: | ||
1732 | for { | ||
1733 | tok := p.Read() | ||
1734 | switch tok.Type { | ||
1735 | case start, TokenEOF: | ||
1736 | break Token | ||
1737 | } | ||
1738 | } | ||
1739 | |||
1740 | // Now use our existing recover function to locate the _end_ of the | ||
1741 | // container we've found. | ||
1742 | p.recover(end) | ||
1743 | } | ||
1744 | |||
1745 | func (p *parser) recoverAfterBodyItem() { | ||
1746 | p.recovery = true | ||
1747 | var open []TokenType | ||
1748 | |||
1749 | Token: | ||
1750 | for { | ||
1751 | tok := p.Read() | ||
1752 | |||
1753 | switch tok.Type { | ||
1754 | |||
1755 | case TokenNewline: | ||
1756 | if len(open) == 0 { | ||
1757 | break Token | ||
1758 | } | ||
1759 | |||
1760 | case TokenEOF: | ||
1761 | break Token | ||
1762 | |||
1763 | case TokenOBrace, TokenOBrack, TokenOParen, TokenOQuote, TokenOHeredoc, TokenTemplateInterp, TokenTemplateControl: | ||
1764 | open = append(open, tok.Type) | ||
1765 | |||
1766 | case TokenCBrace, TokenCBrack, TokenCParen, TokenCQuote, TokenCHeredoc: | ||
1767 | opener := p.oppositeBracket(tok.Type) | ||
1768 | for len(open) > 0 && open[len(open)-1] != opener { | ||
1769 | open = open[:len(open)-1] | ||
1770 | } | ||
1771 | if len(open) > 0 { | ||
1772 | open = open[:len(open)-1] | ||
1773 | } | ||
1774 | |||
1775 | case TokenTemplateSeqEnd: | ||
1776 | for len(open) > 0 && open[len(open)-1] != TokenTemplateInterp && open[len(open)-1] != TokenTemplateControl { | ||
1777 | open = open[:len(open)-1] | ||
1778 | } | ||
1779 | if len(open) > 0 { | ||
1780 | open = open[:len(open)-1] | ||
1781 | } | ||
1782 | |||
1783 | } | ||
1784 | } | ||
1785 | } | ||
1786 | |||
1787 | // oppositeBracket finds the bracket that opposes the given bracketer, or | ||
1788 | // NilToken if the given token isn't a bracketer. | ||
1789 | // | ||
1790 | // "Bracketer", for the sake of this function, is one end of a matching | ||
1791 | // open/close set of tokens that establish a bracketing context. | ||
1792 | func (p *parser) oppositeBracket(ty TokenType) TokenType { | ||
1793 | switch ty { | ||
1794 | |||
1795 | case TokenOBrace: | ||
1796 | return TokenCBrace | ||
1797 | case TokenOBrack: | ||
1798 | return TokenCBrack | ||
1799 | case TokenOParen: | ||
1800 | return TokenCParen | ||
1801 | case TokenOQuote: | ||
1802 | return TokenCQuote | ||
1803 | case TokenOHeredoc: | ||
1804 | return TokenCHeredoc | ||
1805 | |||
1806 | case TokenCBrace: | ||
1807 | return TokenOBrace | ||
1808 | case TokenCBrack: | ||
1809 | return TokenOBrack | ||
1810 | case TokenCParen: | ||
1811 | return TokenOParen | ||
1812 | case TokenCQuote: | ||
1813 | return TokenOQuote | ||
1814 | case TokenCHeredoc: | ||
1815 | return TokenOHeredoc | ||
1816 | |||
1817 | case TokenTemplateControl: | ||
1818 | return TokenTemplateSeqEnd | ||
1819 | case TokenTemplateInterp: | ||
1820 | return TokenTemplateSeqEnd | ||
1821 | case TokenTemplateSeqEnd: | ||
1822 | // This is ambigous, but we return Interp here because that's | ||
1823 | // what's assumed by the "recover" method. | ||
1824 | return TokenTemplateInterp | ||
1825 | |||
1826 | default: | ||
1827 | return TokenNil | ||
1828 | } | ||
1829 | } | ||
1830 | |||
1831 | func errPlaceholderExpr(rng hcl.Range) Expression { | ||
1832 | return &LiteralValueExpr{ | ||
1833 | Val: cty.DynamicVal, | ||
1834 | SrcRange: rng, | ||
1835 | } | ||
1836 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser_template.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser_template.go new file mode 100644 index 0000000..3711067 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser_template.go | |||
@@ -0,0 +1,728 @@ | |||
1 | package hclsyntax | ||
2 | |||
3 | import ( | ||
4 | "fmt" | ||
5 | "strings" | ||
6 | "unicode" | ||
7 | |||
8 | "github.com/hashicorp/hcl2/hcl" | ||
9 | "github.com/zclconf/go-cty/cty" | ||
10 | ) | ||
11 | |||
// ParseTemplate parses the parser's full token stream as a template,
// consuming tokens until TokenEOF.
func (p *parser) ParseTemplate() (Expression, hcl.Diagnostics) {
	return p.parseTemplate(TokenEOF)
}
15 | |||
16 | func (p *parser) parseTemplate(end TokenType) (Expression, hcl.Diagnostics) { | ||
17 | exprs, passthru, rng, diags := p.parseTemplateInner(end) | ||
18 | |||
19 | if passthru { | ||
20 | if len(exprs) != 1 { | ||
21 | panic("passthru set with len(exprs) != 1") | ||
22 | } | ||
23 | return &TemplateWrapExpr{ | ||
24 | Wrapped: exprs[0], | ||
25 | SrcRange: rng, | ||
26 | }, diags | ||
27 | } | ||
28 | |||
29 | return &TemplateExpr{ | ||
30 | Parts: exprs, | ||
31 | SrcRange: rng, | ||
32 | }, diags | ||
33 | } | ||
34 | |||
35 | func (p *parser) parseTemplateInner(end TokenType) ([]Expression, bool, hcl.Range, hcl.Diagnostics) { | ||
36 | parts, diags := p.parseTemplateParts(end) | ||
37 | tp := templateParser{ | ||
38 | Tokens: parts.Tokens, | ||
39 | SrcRange: parts.SrcRange, | ||
40 | } | ||
41 | exprs, exprsDiags := tp.parseRoot() | ||
42 | diags = append(diags, exprsDiags...) | ||
43 | |||
44 | passthru := false | ||
45 | if len(parts.Tokens) == 2 { // one real token and one synthetic "end" token | ||
46 | if _, isInterp := parts.Tokens[0].(*templateInterpToken); isInterp { | ||
47 | passthru = true | ||
48 | } | ||
49 | } | ||
50 | |||
51 | return exprs, passthru, parts.SrcRange, diags | ||
52 | } | ||
53 | |||
// templateParser consumes the flat sequence of template tokens produced by
// parseTemplateParts and assembles it into an expression tree.
type templateParser struct {
	Tokens   []templateToken
	SrcRange hcl.Range

	// pos is the index of the next token returned by Peek/Read.
	pos int
}
60 | |||
61 | func (p *templateParser) parseRoot() ([]Expression, hcl.Diagnostics) { | ||
62 | var exprs []Expression | ||
63 | var diags hcl.Diagnostics | ||
64 | |||
65 | for { | ||
66 | next := p.Peek() | ||
67 | if _, isEnd := next.(*templateEndToken); isEnd { | ||
68 | break | ||
69 | } | ||
70 | |||
71 | expr, exprDiags := p.parseExpr() | ||
72 | diags = append(diags, exprDiags...) | ||
73 | exprs = append(exprs, expr) | ||
74 | } | ||
75 | |||
76 | return exprs, diags | ||
77 | } | ||
78 | |||
79 | func (p *templateParser) parseExpr() (Expression, hcl.Diagnostics) { | ||
80 | next := p.Peek() | ||
81 | switch tok := next.(type) { | ||
82 | |||
83 | case *templateLiteralToken: | ||
84 | p.Read() // eat literal | ||
85 | return &LiteralValueExpr{ | ||
86 | Val: cty.StringVal(tok.Val), | ||
87 | SrcRange: tok.SrcRange, | ||
88 | }, nil | ||
89 | |||
90 | case *templateInterpToken: | ||
91 | p.Read() // eat interp | ||
92 | return tok.Expr, nil | ||
93 | |||
94 | case *templateIfToken: | ||
95 | return p.parseIf() | ||
96 | |||
97 | case *templateForToken: | ||
98 | return p.parseFor() | ||
99 | |||
100 | case *templateEndToken: | ||
101 | p.Read() // eat erroneous token | ||
102 | return errPlaceholderExpr(tok.SrcRange), hcl.Diagnostics{ | ||
103 | { | ||
104 | // This is a particularly unhelpful diagnostic, so callers | ||
105 | // should attempt to pre-empt it and produce a more helpful | ||
106 | // diagnostic that is context-aware. | ||
107 | Severity: hcl.DiagError, | ||
108 | Summary: "Unexpected end of template", | ||
109 | Detail: "The control directives within this template are unbalanced.", | ||
110 | Subject: &tok.SrcRange, | ||
111 | }, | ||
112 | } | ||
113 | |||
114 | case *templateEndCtrlToken: | ||
115 | p.Read() // eat erroneous token | ||
116 | return errPlaceholderExpr(tok.SrcRange), hcl.Diagnostics{ | ||
117 | { | ||
118 | Severity: hcl.DiagError, | ||
119 | Summary: fmt.Sprintf("Unexpected %s directive", tok.Name()), | ||
120 | Detail: "The control directives within this template are unbalanced.", | ||
121 | Subject: &tok.SrcRange, | ||
122 | }, | ||
123 | } | ||
124 | |||
125 | default: | ||
126 | // should never happen, because above should be exhaustive | ||
127 | panic(fmt.Sprintf("unhandled template token type %T", next)) | ||
128 | } | ||
129 | } | ||
130 | |||
// parseIf consumes an if/else/endif directive sequence, returning a
// ConditionalExpr whose true and false results are sub-templates built
// from the content between the directives.
//
// The peeker must be pointing at a *templateIfToken when this is called;
// anything else is a programmer error. If the sequence is unterminated or
// contains a mismatched end directive, error diagnostics are returned
// along with a placeholder expression.
func (p *templateParser) parseIf() (Expression, hcl.Diagnostics) {
	open := p.Read()
	openIf, isIf := open.(*templateIfToken)
	if !isIf {
		// should never happen if caller is behaving
		panic("parseIf called with peeker not pointing at if token")
	}

	var ifExprs, elseExprs []Expression
	var diags hcl.Diagnostics
	var endifRange hcl.Range

	// currentExprs points at whichever branch we're collecting into; it is
	// redirected to elseExprs once an else directive is seen.
	currentExprs := &ifExprs
Token:
	for {
		next := p.Peek()
		if end, isEnd := next.(*templateEndToken); isEnd {
			// Ran out of template before finding the endif.
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Unexpected end of template",
				Detail: fmt.Sprintf(
					"The if directive at %s is missing its corresponding endif directive.",
					openIf.SrcRange,
				),
				Subject: &end.SrcRange,
			})
			return errPlaceholderExpr(end.SrcRange), diags
		}
		if end, isCtrlEnd := next.(*templateEndCtrlToken); isCtrlEnd {
			p.Read() // eat end directive

			switch end.Type {

			case templateElse:
				if currentExprs == &ifExprs {
					currentExprs = &elseExprs
					continue Token
				}

				// A second else for the same if is an error.
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Unexpected else directive",
					Detail: fmt.Sprintf(
						"Already in the else clause for the if started at %s.",
						openIf.SrcRange,
					),
					Subject: &end.SrcRange,
				})

			case templateEndIf:
				endifRange = end.SrcRange
				break Token

			default:
				// Some other end directive (e.g. endfor) closes this if
				// prematurely.
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  fmt.Sprintf("Unexpected %s directive", end.Name()),
					Detail: fmt.Sprintf(
						"Expecting an endif directive for the if started at %s.",
						openIf.SrcRange,
					),
					Subject: &end.SrcRange,
				})
			}

			// Reached only for the error cases above; templateEndIf breaks
			// out of the loop instead.
			return errPlaceholderExpr(end.SrcRange), diags
		}

		expr, exprDiags := p.parseExpr()
		diags = append(diags, exprDiags...)
		*currentExprs = append(*currentExprs, expr)
	}

	// An empty branch renders as an empty string, anchored at a zero-length
	// range so diagnostics still have a sensible location.
	if len(ifExprs) == 0 {
		ifExprs = append(ifExprs, &LiteralValueExpr{
			Val: cty.StringVal(""),
			SrcRange: hcl.Range{
				Filename: openIf.SrcRange.Filename,
				Start:    openIf.SrcRange.End,
				End:      openIf.SrcRange.End,
			},
		})
	}
	if len(elseExprs) == 0 {
		elseExprs = append(elseExprs, &LiteralValueExpr{
			Val: cty.StringVal(""),
			SrcRange: hcl.Range{
				Filename: endifRange.Filename,
				Start:    endifRange.Start,
				End:      endifRange.Start,
			},
		})
	}

	trueExpr := &TemplateExpr{
		Parts:    ifExprs,
		SrcRange: hcl.RangeBetween(ifExprs[0].Range(), ifExprs[len(ifExprs)-1].Range()),
	}
	falseExpr := &TemplateExpr{
		Parts:    elseExprs,
		SrcRange: hcl.RangeBetween(elseExprs[0].Range(), elseExprs[len(elseExprs)-1].Range()),
	}

	return &ConditionalExpr{
		Condition:   openIf.CondExpr,
		TrueResult:  trueExpr,
		FalseResult: falseExpr,

		SrcRange: hcl.RangeBetween(openIf.SrcRange, endifRange),
	}, diags
}
242 | |||
// parseFor consumes a for/endfor directive sequence, returning a
// TemplateJoinExpr wrapping a ForExpr whose value expression is a
// sub-template built from the content between the directives.
//
// The peeker must be pointing at a *templateForToken when this is called;
// anything else is a programmer error. If the sequence is unterminated or
// contains a mismatched end directive, error diagnostics are returned
// along with a placeholder expression.
func (p *templateParser) parseFor() (Expression, hcl.Diagnostics) {
	open := p.Read()
	openFor, isFor := open.(*templateForToken)
	if !isFor {
		// should never happen if caller is behaving
		panic("parseFor called with peeker not pointing at for token")
	}

	var contentExprs []Expression
	var diags hcl.Diagnostics
	var endforRange hcl.Range

Token:
	for {
		next := p.Peek()
		if end, isEnd := next.(*templateEndToken); isEnd {
			// Ran out of template before finding the endfor.
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Unexpected end of template",
				Detail: fmt.Sprintf(
					"The for directive at %s is missing its corresponding endfor directive.",
					openFor.SrcRange,
				),
				Subject: &end.SrcRange,
			})
			return errPlaceholderExpr(end.SrcRange), diags
		}
		if end, isCtrlEnd := next.(*templateEndCtrlToken); isCtrlEnd {
			p.Read() // eat end directive

			switch end.Type {

			case templateElse:
				// for has no else clause, unlike if.
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Unexpected else directive",
					Detail:   "An else clause is not expected for a for directive.",
					Subject:  &end.SrcRange,
				})

			case templateEndFor:
				endforRange = end.SrcRange
				break Token

			default:
				// Some other end directive (e.g. endif) closes this for
				// prematurely.
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  fmt.Sprintf("Unexpected %s directive", end.Name()),
					Detail: fmt.Sprintf(
						"Expecting an endfor directive corresponding to the for directive at %s.",
						openFor.SrcRange,
					),
					Subject: &end.SrcRange,
				})
			}

			// Reached only for the error cases above; templateEndFor breaks
			// out of the loop instead.
			return errPlaceholderExpr(end.SrcRange), diags
		}

		expr, exprDiags := p.parseExpr()
		diags = append(diags, exprDiags...)
		contentExprs = append(contentExprs, expr)
	}

	// An empty body renders as an empty string, anchored at a zero-length
	// range so diagnostics still have a sensible location.
	if len(contentExprs) == 0 {
		contentExprs = append(contentExprs, &LiteralValueExpr{
			Val: cty.StringVal(""),
			SrcRange: hcl.Range{
				Filename: openFor.SrcRange.Filename,
				Start:    openFor.SrcRange.End,
				End:      openFor.SrcRange.End,
			},
		})
	}

	contentExpr := &TemplateExpr{
		Parts:    contentExprs,
		SrcRange: hcl.RangeBetween(contentExprs[0].Range(), contentExprs[len(contentExprs)-1].Range()),
	}

	forExpr := &ForExpr{
		KeyVar: openFor.KeyVar,
		ValVar: openFor.ValVar,

		CollExpr: openFor.CollExpr,
		ValExpr:  contentExpr,

		SrcRange:   hcl.RangeBetween(openFor.SrcRange, endforRange),
		OpenRange:  openFor.SrcRange,
		CloseRange: endforRange,
	}

	return &TemplateJoinExpr{
		Tuple: forExpr,
	}, diags
}
339 | |||
// Peek returns the token at the current position without consuming it.
func (p *templateParser) Peek() templateToken {
	return p.Tokens[p.pos]
}
343 | |||
344 | func (p *templateParser) Read() templateToken { | ||
345 | ret := p.Peek() | ||
346 | if _, end := ret.(*templateEndToken); !end { | ||
347 | p.pos++ | ||
348 | } | ||
349 | return ret | ||
350 | } | ||
351 | |||
// parseTemplateParts produces a flat sequence of "template tokens", which are
// either literal values (with any "trimming" already applied), interpolation
// sequences, or control flow markers.
//
// A further pass is required on the result to turn it into an AST.
func (p *parser) parseTemplateParts(end TokenType) (*templateParts, hcl.Diagnostics) {
	var parts []templateToken
	var diags hcl.Diagnostics

	startRange := p.NextRange()
	// ltrimNext is set when a "~}" closer requests that leading whitespace
	// be trimmed from the following literal; nextCanTrimPrev marks that the
	// token just emitted was a literal that a following "${~"/"%{~" opener
	// may right-trim.
	ltrimNext := false
	nextCanTrimPrev := false
	var endRange hcl.Range

Token:
	for {
		next := p.Read()
		if next.Type == end {
			// all done!
			endRange = next.Range
			break
		}

		// Shift the trim flags: they apply only to the immediately
		// adjacent token, so consume them here and reset for the next.
		ltrim := ltrimNext
		ltrimNext = false
		canTrimPrev := nextCanTrimPrev
		nextCanTrimPrev = false

		switch next.Type {
		case TokenStringLit, TokenQuotedLit:
			str, strDiags := p.decodeStringLit(next)
			diags = append(diags, strDiags...)

			if ltrim {
				str = strings.TrimLeftFunc(str, unicode.IsSpace)
			}

			parts = append(parts, &templateLiteralToken{
				Val:      str,
				SrcRange: next.Range,
			})
			nextCanTrimPrev = true

		case TokenTemplateInterp:
			// if the opener is ${~ then we want to eat any trailing whitespace
			// in the preceding literal token, assuming it is indeed a literal
			// token.
			if canTrimPrev && len(next.Bytes) == 3 && next.Bytes[2] == '~' && len(parts) > 0 {
				prevExpr := parts[len(parts)-1]
				if lexpr, ok := prevExpr.(*templateLiteralToken); ok {
					lexpr.Val = strings.TrimRightFunc(lexpr.Val, unicode.IsSpace)
				}
			}

			p.PushIncludeNewlines(false)
			expr, exprDiags := p.ParseExpression()
			diags = append(diags, exprDiags...)
			close := p.Peek()
			if close.Type != TokenTemplateSeqEnd {
				if !p.recovery {
					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  "Extra characters after interpolation expression",
						Detail:   "Expected a closing brace to end the interpolation expression, but found extra characters.",
						Subject:  &close.Range,
						Context:  hcl.RangeBetween(startRange, close.Range).Ptr(),
					})
				}
				p.recover(TokenTemplateSeqEnd)
			} else {
				p.Read() // eat closing brace

				// If the closer is ~} then we want to eat any leading
				// whitespace on the next token, if it turns out to be a
				// literal token.
				if len(close.Bytes) == 2 && close.Bytes[0] == '~' {
					ltrimNext = true
				}
			}
			p.PopIncludeNewlines()
			parts = append(parts, &templateInterpToken{
				Expr:     expr,
				SrcRange: hcl.RangeBetween(next.Range, close.Range),
			})

		case TokenTemplateControl:
			// if the opener is %{~ then we want to eat any trailing whitespace
			// in the preceding literal token, assuming it is indeed a literal
			// token.
			if canTrimPrev && len(next.Bytes) == 3 && next.Bytes[2] == '~' && len(parts) > 0 {
				prevExpr := parts[len(parts)-1]
				if lexpr, ok := prevExpr.(*templateLiteralToken); ok {
					lexpr.Val = strings.TrimRightFunc(lexpr.Val, unicode.IsSpace)
				}
			}
			p.PushIncludeNewlines(false)

			kw := p.Peek()
			if kw.Type != TokenIdent {
				if !p.recovery {
					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  "Invalid template directive",
						Detail:   "A template directive keyword (\"if\", \"for\", etc) is expected at the beginning of a %{ sequence.",
						Subject:  &kw.Range,
						Context:  hcl.RangeBetween(next.Range, kw.Range).Ptr(),
					})
				}
				p.recover(TokenTemplateSeqEnd)
				p.PopIncludeNewlines()
				continue Token
			}
			p.Read() // eat keyword token

			switch {

			case ifKeyword.TokenMatches(kw):
				condExpr, exprDiags := p.ParseExpression()
				diags = append(diags, exprDiags...)
				parts = append(parts, &templateIfToken{
					CondExpr: condExpr,
					SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
				})

			case elseKeyword.TokenMatches(kw):
				parts = append(parts, &templateEndCtrlToken{
					Type:     templateElse,
					SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
				})

			case endifKeyword.TokenMatches(kw):
				parts = append(parts, &templateEndCtrlToken{
					Type:     templateEndIf,
					SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
				})

			case forKeyword.TokenMatches(kw):
				// The directive is either "for v in coll" or
				// "for k, v in coll"; a lone name is the value variable
				// unless a comma follows it.
				var keyName, valName string
				if p.Peek().Type != TokenIdent {
					if !p.recovery {
						diags = append(diags, &hcl.Diagnostic{
							Severity: hcl.DiagError,
							Summary:  "Invalid 'for' directive",
							Detail:   "For directive requires variable name after 'for'.",
							Subject:  p.Peek().Range.Ptr(),
						})
					}
					p.recover(TokenTemplateSeqEnd)
					p.PopIncludeNewlines()
					continue Token
				}

				valName = string(p.Read().Bytes)

				if p.Peek().Type == TokenComma {
					// What we just read was actually the key, then.
					keyName = valName
					p.Read() // eat comma

					if p.Peek().Type != TokenIdent {
						if !p.recovery {
							diags = append(diags, &hcl.Diagnostic{
								Severity: hcl.DiagError,
								Summary:  "Invalid 'for' directive",
								Detail:   "For directive requires value variable name after comma.",
								Subject:  p.Peek().Range.Ptr(),
							})
						}
						p.recover(TokenTemplateSeqEnd)
						p.PopIncludeNewlines()
						continue Token
					}

					valName = string(p.Read().Bytes)
				}

				if !inKeyword.TokenMatches(p.Peek()) {
					if !p.recovery {
						diags = append(diags, &hcl.Diagnostic{
							Severity: hcl.DiagError,
							Summary:  "Invalid 'for' directive",
							Detail:   "For directive requires 'in' keyword after names.",
							Subject:  p.Peek().Range.Ptr(),
						})
					}
					p.recover(TokenTemplateSeqEnd)
					p.PopIncludeNewlines()
					continue Token
				}
				p.Read() // eat 'in' keyword

				collExpr, collDiags := p.ParseExpression()
				diags = append(diags, collDiags...)
				parts = append(parts, &templateForToken{
					KeyVar:   keyName,
					ValVar:   valName,
					CollExpr: collExpr,

					SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
				})

			case endforKeyword.TokenMatches(kw):
				parts = append(parts, &templateEndCtrlToken{
					Type:     templateEndFor,
					SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
				})

			default:
				if !p.recovery {
					suggestions := []string{"if", "for", "else", "endif", "endfor"}
					given := string(kw.Bytes)
					suggestion := nameSuggestion(given, suggestions)
					if suggestion != "" {
						suggestion = fmt.Sprintf(" Did you mean %q?", suggestion)
					}

					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  "Invalid template control keyword",
						Detail:   fmt.Sprintf("%q is not a valid template control keyword.%s", given, suggestion),
						Subject:  &kw.Range,
						Context:  hcl.RangeBetween(next.Range, kw.Range).Ptr(),
					})
				}
				p.recover(TokenTemplateSeqEnd)
				p.PopIncludeNewlines()
				continue Token

			}

			close := p.Peek()
			if close.Type != TokenTemplateSeqEnd {
				if !p.recovery {
					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  fmt.Sprintf("Extra characters in %s marker", kw.Bytes),
						Detail:   "Expected a closing brace to end the sequence, but found extra characters.",
						Subject:  &close.Range,
						Context:  hcl.RangeBetween(startRange, close.Range).Ptr(),
					})
				}
				p.recover(TokenTemplateSeqEnd)
			} else {
				p.Read() // eat closing brace

				// If the closer is ~} then we want to eat any leading
				// whitespace on the next token, if it turns out to be a
				// literal token.
				if len(close.Bytes) == 2 && close.Bytes[0] == '~' {
					ltrimNext = true
				}
			}
			p.PopIncludeNewlines()

		default:
			if !p.recovery {
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Unterminated template string",
					Detail:   "No closing marker was found for the string.",
					Subject:  &next.Range,
					Context:  hcl.RangeBetween(startRange, next.Range).Ptr(),
				})
			}
			final := p.recover(end)
			endRange = final.Range
			break Token
		}
	}

	if len(parts) == 0 {
		// If a sequence has no content, we'll treat it as if it had an
		// empty string in it because that's what the user probably means
		// if they write "" in configuration.
		parts = append(parts, &templateLiteralToken{
			Val: "",
			SrcRange: hcl.Range{
				// Range is the zero-character span immediately after the
				// opening quote.
				Filename: startRange.Filename,
				Start:    startRange.End,
				End:      startRange.End,
			},
		})
	}

	// Always end with an end token, so the parser can produce diagnostics
	// about unclosed items with proper position information.
	parts = append(parts, &templateEndToken{
		SrcRange: endRange,
	})

	ret := &templateParts{
		Tokens:   parts,
		SrcRange: hcl.RangeBetween(startRange, endRange),
	}

	return ret, diags
}
651 | |||
// templateParts is the result of raising a raw token stream to the
// higher-level template token sequence: the tokens themselves plus the
// source range of the whole template they were parsed from.
type templateParts struct {
	Tokens   []templateToken
	SrcRange hcl.Range
}
656 | |||
// templateToken is a higher-level token that represents a single atom within
// the template language. Our template parsing first raises the raw token
// stream to a sequence of templateToken, and then transforms the result into
// an expression tree.
//
// Concrete implementations embed isTemplateToken to satisfy this interface.
type templateToken interface {
	templateToken() templateToken
}
664 | |||
// templateLiteralToken is a run of literal text within a template.
type templateLiteralToken struct {
	Val      string // the literal text itself
	SrcRange hcl.Range
	isTemplateToken
}
670 | |||
// templateInterpToken holds the expression of an interpolation sequence
// within a template.
type templateInterpToken struct {
	Expr     Expression
	SrcRange hcl.Range
	isTemplateToken
}
676 | |||
// templateIfToken carries the condition expression for an "if" control
// sequence within a template.
type templateIfToken struct {
	CondExpr Expression
	SrcRange hcl.Range
	isTemplateToken
}
682 | |||
// templateForToken carries the variable names and collection expression
// for a "for" control sequence within a template.
type templateForToken struct {
	KeyVar   string // empty if ignoring key
	ValVar   string
	CollExpr Expression
	SrcRange hcl.Range
	isTemplateToken
}
690 | |||
// templateEndCtrlType distinguishes the keywords that end or continue a
// template control sequence; see templateEndCtrlToken.Name for the
// corresponding source keywords.
type templateEndCtrlType int

const (
	templateEndIf templateEndCtrlType = iota // "endif"
	templateElse                             // "else"
	templateEndFor                           // "endfor"
)
698 | |||
// templateEndCtrlToken is a control keyword that ends or continues a
// control sequence; Type selects which keyword it represents.
type templateEndCtrlToken struct {
	Type     templateEndCtrlType
	SrcRange hcl.Range
	isTemplateToken
}
704 | |||
705 | func (t *templateEndCtrlToken) Name() string { | ||
706 | switch t.Type { | ||
707 | case templateEndIf: | ||
708 | return "endif" | ||
709 | case templateElse: | ||
710 | return "else" | ||
711 | case templateEndFor: | ||
712 | return "endfor" | ||
713 | default: | ||
714 | // should never happen | ||
715 | panic("invalid templateEndCtrlType") | ||
716 | } | ||
717 | } | ||
718 | |||
// templateEndToken marks the end of the template token sequence, so the
// template parser can produce diagnostics about unclosed items with proper
// position information.
type templateEndToken struct {
	SrcRange hcl.Range
	isTemplateToken
}
723 | |||
// isTemplateToken is a zero-size marker type that each concrete token
// struct embeds in order to satisfy the templateToken interface.
type isTemplateToken [0]int

// templateToken implements the templateToken interface.
func (t isTemplateToken) templateToken() templateToken {
	return t
}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser_traversal.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser_traversal.go new file mode 100644 index 0000000..2ff3ed6 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser_traversal.go | |||
@@ -0,0 +1,159 @@ | |||
1 | package hclsyntax | ||
2 | |||
3 | import ( | ||
4 | "github.com/hashicorp/hcl2/hcl" | ||
5 | "github.com/zclconf/go-cty/cty" | ||
6 | ) | ||
7 | |||
// ParseTraversalAbs parses an absolute traversal that is assumed to consume
// all of the remaining tokens in the peeker. The usual parser recovery
// behavior is not supported here because traversals are not expected to
// be parsed as part of a larger program.
//
// The returned traversal may be incomplete if diagnostics contain errors:
// every error branch below returns whatever steps were parsed so far.
func (p *parser) ParseTraversalAbs() (hcl.Traversal, hcl.Diagnostics) {
	var ret hcl.Traversal
	var diags hcl.Diagnostics

	// Absolute traversal must always begin with a variable name
	varTok := p.Read()
	if varTok.Type != TokenIdent {
		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Variable name required",
			Detail:   "Must begin with a variable name.",
			Subject:  &varTok.Range,
		})
		return ret, diags
	}

	varName := string(varTok.Bytes)
	ret = append(ret, hcl.TraverseRoot{
		Name:     varName,
		SrcRange: varTok.Range,
	})

	// After the root name, any number of attribute accesses (.name) and
	// index operations ([key]) may follow, until we reach EOF.
	for {
		next := p.Peek()

		if next.Type == TokenEOF {
			return ret, diags
		}

		switch next.Type {
		case TokenDot:
			// Attribute access
			dot := p.Read() // eat dot
			nameTok := p.Read()
			if nameTok.Type != TokenIdent {
				if nameTok.Type == TokenStar {
					// Special message for splats, which are valid in full
					// expressions and so are a likely mistake here.
					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  "Attribute name required",
						Detail:   "Splat expressions (.*) may not be used here.",
						Subject:  &nameTok.Range,
						Context:  hcl.RangeBetween(varTok.Range, nameTok.Range).Ptr(),
					})
				} else {
					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  "Attribute name required",
						Detail:   "Dot must be followed by attribute name.",
						Subject:  &nameTok.Range,
						Context:  hcl.RangeBetween(varTok.Range, nameTok.Range).Ptr(),
					})
				}
				return ret, diags
			}

			attrName := string(nameTok.Bytes)
			ret = append(ret, hcl.TraverseAttr{
				Name:     attrName,
				SrcRange: hcl.RangeBetween(dot.Range, nameTok.Range),
			})
		case TokenOBrack:
			// Index
			open := p.Read() // eat open bracket
			next := p.Peek()

			switch next.Type {
			case TokenNumberLit:
				// Numeric index, e.g. foo[0]
				tok := p.Read() // eat number
				numVal, numDiags := p.numberLitValue(tok)
				diags = append(diags, numDiags...)

				// The next token is consumed even if it isn't the closing
				// bracket; we diagnose rather than attempting recovery.
				close := p.Read()
				if close.Type != TokenCBrack {
					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  "Unclosed index brackets",
						Detail:   "Index key must be followed by a closing bracket.",
						Subject:  &close.Range,
						Context:  hcl.RangeBetween(open.Range, close.Range).Ptr(),
					})
				}

				ret = append(ret, hcl.TraverseIndex{
					Key:      numVal,
					SrcRange: hcl.RangeBetween(open.Range, close.Range),
				})

				if diags.HasErrors() {
					return ret, diags
				}

			case TokenOQuote:
				// String index, e.g. foo["bar"]
				str, _, strDiags := p.parseQuotedStringLiteral()
				diags = append(diags, strDiags...)

				close := p.Read()
				if close.Type != TokenCBrack {
					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  "Unclosed index brackets",
						Detail:   "Index key must be followed by a closing bracket.",
						Subject:  &close.Range,
						Context:  hcl.RangeBetween(open.Range, close.Range).Ptr(),
					})
				}

				ret = append(ret, hcl.TraverseIndex{
					Key:      cty.StringVal(str),
					SrcRange: hcl.RangeBetween(open.Range, close.Range),
				})

				if diags.HasErrors() {
					return ret, diags
				}

			default:
				if next.Type == TokenStar {
					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  "Attribute name required",
						Detail:   "Splat expressions ([*]) may not be used here.",
						Subject:  &next.Range,
						Context:  hcl.RangeBetween(varTok.Range, next.Range).Ptr(),
					})
				} else {
					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  "Index value required",
						Detail:   "Index brackets must contain either a literal number or a literal string.",
						Subject:  &next.Range,
						Context:  hcl.RangeBetween(varTok.Range, next.Range).Ptr(),
					})
				}
				return ret, diags
			}

		default:
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Invalid character",
				Detail:   "Expected an attribute access or an index operator.",
				Subject:  &next.Range,
				Context:  hcl.RangeBetween(varTok.Range, next.Range).Ptr(),
			})
			return ret, diags
		}
	}
}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/peeker.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/peeker.go new file mode 100644 index 0000000..5a4b50e --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/peeker.go | |||
@@ -0,0 +1,212 @@ | |||
1 | package hclsyntax | ||
2 | |||
3 | import ( | ||
4 | "bytes" | ||
5 | "fmt" | ||
6 | "path/filepath" | ||
7 | "runtime" | ||
8 | "strings" | ||
9 | |||
10 | "github.com/hashicorp/hcl2/hcl" | ||
11 | ) | ||
12 | |||
// This is set to true at init() time in tests, to enable more useful output
// if a stack discipline error is detected. It should not be enabled in
// normal mode since there is a performance penalty from accessing the
// runtime stack to produce the traces, but could be temporarily set to
// true for debugging if desired.
//
// When enabled, every Push/PopIncludeNewlines call is recorded in
// newlineStackChanges so that AssertEmptyIncludeNewlinesStack can print a
// trace of the mismatched calls.
var tracePeekerNewlinesStack = false
19 | |||
// peeker is a cursor over a token sequence that can filter out comment
// tokens and, depending on the current mode stack, newline tokens as it
// reads.
type peeker struct {
	Tokens    Tokens
	NextIndex int // index of the next candidate token; 0 means nothing read yet

	IncludeComments      bool
	IncludeNewlinesStack []bool // top entry decides whether newline tokens are visible

	// used only when tracePeekerNewlinesStack is set
	newlineStackChanges []peekerNewlineStackChange
}
30 | |||
// for use in debugging the stack usage only: one record per
// Push/PopIncludeNewlines call, captured when tracePeekerNewlinesStack is set
type peekerNewlineStackChange struct {
	Pushing bool // if false, then popping
	Frame   runtime.Frame
	Include bool
}
37 | |||
38 | func newPeeker(tokens Tokens, includeComments bool) *peeker { | ||
39 | return &peeker{ | ||
40 | Tokens: tokens, | ||
41 | IncludeComments: includeComments, | ||
42 | |||
43 | IncludeNewlinesStack: []bool{true}, | ||
44 | } | ||
45 | } | ||
46 | |||
47 | func (p *peeker) Peek() Token { | ||
48 | ret, _ := p.nextToken() | ||
49 | return ret | ||
50 | } | ||
51 | |||
52 | func (p *peeker) Read() Token { | ||
53 | ret, nextIdx := p.nextToken() | ||
54 | p.NextIndex = nextIdx | ||
55 | return ret | ||
56 | } | ||
57 | |||
58 | func (p *peeker) NextRange() hcl.Range { | ||
59 | return p.Peek().Range | ||
60 | } | ||
61 | |||
62 | func (p *peeker) PrevRange() hcl.Range { | ||
63 | if p.NextIndex == 0 { | ||
64 | return p.NextRange() | ||
65 | } | ||
66 | |||
67 | return p.Tokens[p.NextIndex-1].Range | ||
68 | } | ||
69 | |||
// nextToken scans forward from NextIndex for the next token that is
// significant under the current filtering modes, returning it along with
// the index that NextIndex should become if that token is consumed.
func (p *peeker) nextToken() (Token, int) {
	for i := p.NextIndex; i < len(p.Tokens); i++ {
		tok := p.Tokens[i]
		switch tok.Type {
		case TokenComment:
			if !p.IncludeComments {
				// Single-line comment tokens, starting with # or //, absorb
				// the trailing newline that terminates them as part of their
				// bytes. When we're filtering out comments, we must as a
				// special case transform these to newline tokens in order
				// to properly parse newline-terminated block items.

				if p.includingNewlines() {
					if len(tok.Bytes) > 0 && tok.Bytes[len(tok.Bytes)-1] == '\n' {
						fakeNewline := Token{
							Type:  TokenNewline,
							Bytes: tok.Bytes[len(tok.Bytes)-1 : len(tok.Bytes)],

							// We use the whole token range as the newline
							// range, even though that's a little... weird,
							// because otherwise we'd need to go count
							// characters again in order to figure out the
							// column of the newline, and that complexity
							// isn't justified when ranges of newlines are
							// so rarely printed anyway.
							Range: tok.Range,
						}
						return fakeNewline, i + 1
					}
				}

				continue
			}
		case TokenNewline:
			// Skip newline tokens while the current mode excludes them.
			if !p.includingNewlines() {
				continue
			}
		}

		return tok, i + 1
	}

	// if we fall out here then we'll return the EOF token, and leave
	// our index pointed off the end of the array so we'll keep
	// returning EOF in future too.
	return p.Tokens[len(p.Tokens)-1], len(p.Tokens)
}
117 | |||
118 | func (p *peeker) includingNewlines() bool { | ||
119 | return p.IncludeNewlinesStack[len(p.IncludeNewlinesStack)-1] | ||
120 | } | ||
121 | |||
// PushIncludeNewlines pushes a new newline-handling mode onto the stack:
// while include is false, newline tokens are skipped by Peek/Read, until a
// matching PopIncludeNewlines restores the previous mode.
func (p *peeker) PushIncludeNewlines(include bool) {
	if tracePeekerNewlinesStack {
		// Record who called us so that we can more easily track down any
		// mismanagement of the stack in the parser.
		callers := []uintptr{0}
		runtime.Callers(2, callers)
		frames := runtime.CallersFrames(callers)
		frame, _ := frames.Next()
		p.newlineStackChanges = append(p.newlineStackChanges, peekerNewlineStackChange{
			true, frame, include,
		})
	}

	p.IncludeNewlinesStack = append(p.IncludeNewlinesStack, include)
}
137 | |||
// PopIncludeNewlines removes the top entry from the newline-handling mode
// stack, restoring the previous mode, and returns the removed value.
func (p *peeker) PopIncludeNewlines() bool {
	stack := p.IncludeNewlinesStack
	remain, ret := stack[:len(stack)-1], stack[len(stack)-1]
	p.IncludeNewlinesStack = remain

	if tracePeekerNewlinesStack {
		// Record who called us so that we can more easily track down any
		// mismanagement of the stack in the parser.
		callers := []uintptr{0}
		runtime.Callers(2, callers)
		frames := runtime.CallersFrames(callers)
		frame, _ := frames.Next()
		p.newlineStackChanges = append(p.newlineStackChanges, peekerNewlineStackChange{
			false, frame, ret,
		})
	}

	return ret
}
157 | |||
// AssertEmptyIncludeNewlinesStack checks that the IncludeNewlinesStack has
// returned to its initial single-element state, panicking if it has not.
// This can be used to catch stack mismanagement that might otherwise just
// cause confusing downstream errors.
//
// This function is a no-op if the stack is balanced when called.
//
// If newlines stack tracing is enabled by setting the global variable
// tracePeekerNewlinesStack at init time, a full log of all of the push/pop
// calls will be produced to help identify which caller in the parser is
// misbehaving.
func (p *peeker) AssertEmptyIncludeNewlinesStack() {
	if len(p.IncludeNewlinesStack) != 1 {
		// Should never happen; indicates mismanagement of the stack inside
		// the parser.
		if p.newlineStackChanges != nil { // only if traceNewlinesStack is enabled above
			panic(fmt.Errorf(
				"non-empty IncludeNewlinesStack after parse with %d calls unaccounted for:\n%s",
				len(p.IncludeNewlinesStack)-1,
				formatPeekerNewlineStackChanges(p.newlineStackChanges),
			))
		} else {
			panic(fmt.Errorf("non-empty IncludeNewlinesStack after parse: %#v", p.IncludeNewlinesStack))
		}
	}
}
183 | |||
184 | func formatPeekerNewlineStackChanges(changes []peekerNewlineStackChange) string { | ||
185 | indent := 0 | ||
186 | var buf bytes.Buffer | ||
187 | for _, change := range changes { | ||
188 | funcName := change.Frame.Function | ||
189 | if idx := strings.LastIndexByte(funcName, '.'); idx != -1 { | ||
190 | funcName = funcName[idx+1:] | ||
191 | } | ||
192 | filename := change.Frame.File | ||
193 | if idx := strings.LastIndexByte(filename, filepath.Separator); idx != -1 { | ||
194 | filename = filename[idx+1:] | ||
195 | } | ||
196 | |||
197 | switch change.Pushing { | ||
198 | |||
199 | case true: | ||
200 | buf.WriteString(strings.Repeat(" ", indent)) | ||
201 | fmt.Fprintf(&buf, "PUSH %#v (%s at %s:%d)\n", change.Include, funcName, filename, change.Frame.Line) | ||
202 | indent++ | ||
203 | |||
204 | case false: | ||
205 | indent-- | ||
206 | buf.WriteString(strings.Repeat(" ", indent)) | ||
207 | fmt.Fprintf(&buf, "POP %#v (%s at %s:%d)\n", change.Include, funcName, filename, change.Frame.Line) | ||
208 | |||
209 | } | ||
210 | } | ||
211 | return buf.String() | ||
212 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/public.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/public.go new file mode 100644 index 0000000..cf0ee29 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/public.go | |||
@@ -0,0 +1,171 @@ | |||
1 | package hclsyntax | ||
2 | |||
3 | import ( | ||
4 | "github.com/hashicorp/hcl2/hcl" | ||
5 | ) | ||
6 | |||
7 | // ParseConfig parses the given buffer as a whole HCL config file, returning | ||
8 | // a *hcl.File representing its contents. If HasErrors called on the returned | ||
9 | // diagnostics returns true, the returned body is likely to be incomplete | ||
10 | // and should therefore be used with care. | ||
11 | // | ||
12 | // The body in the returned file has dynamic type *hclsyntax.Body, so callers | ||
13 | // may freely type-assert this to get access to the full hclsyntax API in | ||
14 | // situations where detailed access is required. However, most common use-cases | ||
15 | // should be served using the hcl.Body interface to ensure compatibility with | ||
16 | // other configurationg syntaxes, such as JSON. | ||
17 | func ParseConfig(src []byte, filename string, start hcl.Pos) (*hcl.File, hcl.Diagnostics) { | ||
18 | tokens, diags := LexConfig(src, filename, start) | ||
19 | peeker := newPeeker(tokens, false) | ||
20 | parser := &parser{peeker: peeker} | ||
21 | body, parseDiags := parser.ParseBody(TokenEOF) | ||
22 | diags = append(diags, parseDiags...) | ||
23 | |||
24 | // Panic if the parser uses incorrect stack discipline with the peeker's | ||
25 | // newlines stack, since otherwise it will produce confusing downstream | ||
26 | // errors. | ||
27 | peeker.AssertEmptyIncludeNewlinesStack() | ||
28 | |||
29 | return &hcl.File{ | ||
30 | Body: body, | ||
31 | Bytes: src, | ||
32 | |||
33 | Nav: navigation{ | ||
34 | root: body, | ||
35 | }, | ||
36 | }, diags | ||
37 | } | ||
38 | |||
39 | // ParseExpression parses the given buffer as a standalone HCL expression, | ||
40 | // returning it as an instance of Expression. | ||
41 | func ParseExpression(src []byte, filename string, start hcl.Pos) (Expression, hcl.Diagnostics) { | ||
42 | tokens, diags := LexExpression(src, filename, start) | ||
43 | peeker := newPeeker(tokens, false) | ||
44 | parser := &parser{peeker: peeker} | ||
45 | |||
46 | // Bare expressions are always parsed in "ignore newlines" mode, as if | ||
47 | // they were wrapped in parentheses. | ||
48 | parser.PushIncludeNewlines(false) | ||
49 | |||
50 | expr, parseDiags := parser.ParseExpression() | ||
51 | diags = append(diags, parseDiags...) | ||
52 | |||
53 | next := parser.Peek() | ||
54 | if next.Type != TokenEOF && !parser.recovery { | ||
55 | diags = append(diags, &hcl.Diagnostic{ | ||
56 | Severity: hcl.DiagError, | ||
57 | Summary: "Extra characters after expression", | ||
58 | Detail: "An expression was successfully parsed, but extra characters were found after it.", | ||
59 | Subject: &next.Range, | ||
60 | }) | ||
61 | } | ||
62 | |||
63 | parser.PopIncludeNewlines() | ||
64 | |||
65 | // Panic if the parser uses incorrect stack discipline with the peeker's | ||
66 | // newlines stack, since otherwise it will produce confusing downstream | ||
67 | // errors. | ||
68 | peeker.AssertEmptyIncludeNewlinesStack() | ||
69 | |||
70 | return expr, diags | ||
71 | } | ||
72 | |||
73 | // ParseTemplate parses the given buffer as a standalone HCL template, | ||
74 | // returning it as an instance of Expression. | ||
75 | func ParseTemplate(src []byte, filename string, start hcl.Pos) (Expression, hcl.Diagnostics) { | ||
76 | tokens, diags := LexTemplate(src, filename, start) | ||
77 | peeker := newPeeker(tokens, false) | ||
78 | parser := &parser{peeker: peeker} | ||
79 | expr, parseDiags := parser.ParseTemplate() | ||
80 | diags = append(diags, parseDiags...) | ||
81 | |||
82 | // Panic if the parser uses incorrect stack discipline with the peeker's | ||
83 | // newlines stack, since otherwise it will produce confusing downstream | ||
84 | // errors. | ||
85 | peeker.AssertEmptyIncludeNewlinesStack() | ||
86 | |||
87 | return expr, diags | ||
88 | } | ||
89 | |||
90 | // ParseTraversalAbs parses the given buffer as a standalone absolute traversal. | ||
91 | // | ||
92 | // Parsing as a traversal is more limited than parsing as an expession since | ||
93 | // it allows only attribute and indexing operations on variables. Traverals | ||
94 | // are useful as a syntax for referring to objects without necessarily | ||
95 | // evaluating them. | ||
96 | func ParseTraversalAbs(src []byte, filename string, start hcl.Pos) (hcl.Traversal, hcl.Diagnostics) { | ||
97 | tokens, diags := LexExpression(src, filename, start) | ||
98 | peeker := newPeeker(tokens, false) | ||
99 | parser := &parser{peeker: peeker} | ||
100 | |||
101 | // Bare traverals are always parsed in "ignore newlines" mode, as if | ||
102 | // they were wrapped in parentheses. | ||
103 | parser.PushIncludeNewlines(false) | ||
104 | |||
105 | expr, parseDiags := parser.ParseTraversalAbs() | ||
106 | diags = append(diags, parseDiags...) | ||
107 | |||
108 | parser.PopIncludeNewlines() | ||
109 | |||
110 | // Panic if the parser uses incorrect stack discipline with the peeker's | ||
111 | // newlines stack, since otherwise it will produce confusing downstream | ||
112 | // errors. | ||
113 | peeker.AssertEmptyIncludeNewlinesStack() | ||
114 | |||
115 | return expr, diags | ||
116 | } | ||
117 | |||
118 | // LexConfig performs lexical analysis on the given buffer, treating it as a | ||
119 | // whole HCL config file, and returns the resulting tokens. | ||
120 | // | ||
121 | // Only minimal validation is done during lexical analysis, so the returned | ||
122 | // diagnostics may include errors about lexical issues such as bad character | ||
123 | // encodings or unrecognized characters, but full parsing is required to | ||
124 | // detect _all_ syntax errors. | ||
125 | func LexConfig(src []byte, filename string, start hcl.Pos) (Tokens, hcl.Diagnostics) { | ||
126 | tokens := scanTokens(src, filename, start, scanNormal) | ||
127 | diags := checkInvalidTokens(tokens) | ||
128 | return tokens, diags | ||
129 | } | ||
130 | |||
131 | // LexExpression performs lexical analysis on the given buffer, treating it as | ||
132 | // a standalone HCL expression, and returns the resulting tokens. | ||
133 | // | ||
134 | // Only minimal validation is done during lexical analysis, so the returned | ||
135 | // diagnostics may include errors about lexical issues such as bad character | ||
136 | // encodings or unrecognized characters, but full parsing is required to | ||
137 | // detect _all_ syntax errors. | ||
138 | func LexExpression(src []byte, filename string, start hcl.Pos) (Tokens, hcl.Diagnostics) { | ||
139 | // This is actually just the same thing as LexConfig, since configs | ||
140 | // and expressions lex in the same way. | ||
141 | tokens := scanTokens(src, filename, start, scanNormal) | ||
142 | diags := checkInvalidTokens(tokens) | ||
143 | return tokens, diags | ||
144 | } | ||
145 | |||
146 | // LexTemplate performs lexical analysis on the given buffer, treating it as a | ||
147 | // standalone HCL template, and returns the resulting tokens. | ||
148 | // | ||
149 | // Only minimal validation is done during lexical analysis, so the returned | ||
150 | // diagnostics may include errors about lexical issues such as bad character | ||
151 | // encodings or unrecognized characters, but full parsing is required to | ||
152 | // detect _all_ syntax errors. | ||
153 | func LexTemplate(src []byte, filename string, start hcl.Pos) (Tokens, hcl.Diagnostics) { | ||
154 | tokens := scanTokens(src, filename, start, scanTemplate) | ||
155 | diags := checkInvalidTokens(tokens) | ||
156 | return tokens, diags | ||
157 | } | ||
158 | |||
159 | // ValidIdentifier tests if the given string could be a valid identifier in | ||
160 | // a native syntax expression. | ||
161 | // | ||
162 | // This is useful when accepting names from the user that will be used as | ||
163 | // variable or attribute names in the scope, to ensure that any name chosen | ||
164 | // will be traversable using the variable or attribute traversal syntax. | ||
165 | func ValidIdentifier(s string) bool { | ||
166 | // This is a kinda-expensive way to do something pretty simple, but it | ||
167 | // is easiest to do with our existing scanner-related infrastructure here | ||
168 | // and nobody should be validating identifiers in a tight loop. | ||
169 | tokens := scanTokens([]byte(s), "", hcl.Pos{}, scanIdentOnly) | ||
170 | return len(tokens) == 2 && tokens[0].Type == TokenIdent && tokens[1].Type == TokenEOF | ||
171 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_string_lit.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_string_lit.go new file mode 100644 index 0000000..de1f524 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_string_lit.go | |||
@@ -0,0 +1,301 @@ | |||
1 | // line 1 "scan_string_lit.rl" | ||
2 | |||
3 | package hclsyntax | ||
4 | |||
5 | // This file is generated from scan_string_lit.rl. DO NOT EDIT. | ||
6 | |||
7 | // line 9 "scan_string_lit.go" | ||
// The tables and constants below are the Ragel-generated state machine data
// for the hclstrtok scanner. They are emitted from scan_string_lit.rl;
// regenerate from that file rather than editing these values directly.
var _hclstrtok_actions []byte = []byte{
	0, 1, 0, 1, 1, 2, 1, 0,
}

var _hclstrtok_key_offsets []byte = []byte{
	0, 0, 2, 4, 6, 10, 14, 18,
	22, 27, 31, 36, 41, 46, 51, 57,
	62, 74, 85, 96, 107, 118, 129, 140,
	151,
}

var _hclstrtok_trans_keys []byte = []byte{
	128, 191, 128, 191, 128, 191, 10, 13,
	36, 37, 10, 13, 36, 37, 10, 13,
	36, 37, 10, 13, 36, 37, 10, 13,
	36, 37, 123, 10, 13, 36, 37, 10,
	13, 36, 37, 92, 10, 13, 36, 37,
	92, 10, 13, 36, 37, 92, 10, 13,
	36, 37, 92, 10, 13, 36, 37, 92,
	123, 10, 13, 36, 37, 92, 85, 117,
	128, 191, 192, 223, 224, 239, 240, 247,
	248, 255, 10, 13, 36, 37, 92, 48,
	57, 65, 70, 97, 102, 10, 13, 36,
	37, 92, 48, 57, 65, 70, 97, 102,
	10, 13, 36, 37, 92, 48, 57, 65,
	70, 97, 102, 10, 13, 36, 37, 92,
	48, 57, 65, 70, 97, 102, 10, 13,
	36, 37, 92, 48, 57, 65, 70, 97,
	102, 10, 13, 36, 37, 92, 48, 57,
	65, 70, 97, 102, 10, 13, 36, 37,
	92, 48, 57, 65, 70, 97, 102, 10,
	13, 36, 37, 92, 48, 57, 65, 70,
	97, 102,
}

var _hclstrtok_single_lengths []byte = []byte{
	0, 0, 0, 0, 4, 4, 4, 4,
	5, 4, 5, 5, 5, 5, 6, 5,
	2, 5, 5, 5, 5, 5, 5, 5,
	5,
}

var _hclstrtok_range_lengths []byte = []byte{
	0, 1, 1, 1, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	5, 3, 3, 3, 3, 3, 3, 3,
	3,
}

var _hclstrtok_index_offsets []byte = []byte{
	0, 0, 2, 4, 6, 11, 16, 21,
	26, 32, 37, 43, 49, 55, 61, 68,
	74, 82, 91, 100, 109, 118, 127, 136,
	145,
}

var _hclstrtok_indicies []byte = []byte{
	0, 1, 2, 1, 3, 1, 5, 6,
	7, 8, 4, 10, 11, 12, 13, 9,
	14, 11, 12, 13, 9, 10, 11, 15,
	13, 9, 10, 11, 12, 13, 14, 9,
	10, 11, 12, 15, 9, 17, 18, 19,
	20, 21, 16, 23, 24, 25, 26, 27,
	22, 0, 24, 25, 26, 27, 22, 23,
	24, 28, 26, 27, 22, 23, 24, 25,
	26, 27, 0, 22, 23, 24, 25, 28,
	27, 22, 29, 30, 22, 2, 3, 31,
	22, 0, 23, 24, 25, 26, 27, 32,
	32, 32, 22, 23, 24, 25, 26, 27,
	33, 33, 33, 22, 23, 24, 25, 26,
	27, 34, 34, 34, 22, 23, 24, 25,
	26, 27, 30, 30, 30, 22, 23, 24,
	25, 26, 27, 35, 35, 35, 22, 23,
	24, 25, 26, 27, 36, 36, 36, 22,
	23, 24, 25, 26, 27, 37, 37, 37,
	22, 23, 24, 25, 26, 27, 0, 0,
	0, 22,
}

var _hclstrtok_trans_targs []byte = []byte{
	11, 0, 1, 2, 4, 5, 6, 7,
	9, 4, 5, 6, 7, 9, 5, 8,
	10, 11, 12, 13, 15, 16, 10, 11,
	12, 13, 15, 16, 14, 17, 21, 3,
	18, 19, 20, 22, 23, 24,
}

var _hclstrtok_trans_actions []byte = []byte{
	0, 0, 0, 0, 0, 1, 1, 1,
	1, 3, 5, 5, 5, 5, 0, 0,
	0, 1, 1, 1, 1, 1, 3, 5,
	5, 5, 5, 5, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0,
}

var _hclstrtok_eof_actions []byte = []byte{
	0, 0, 0, 0, 0, 3, 3, 3,
	3, 3, 0, 3, 3, 3, 3, 3,
	3, 3, 3, 3, 3, 3, 3, 3,
	3,
}

const hclstrtok_start int = 4
const hclstrtok_first_final int = 4
const hclstrtok_error int = 0

const hclstrtok_en_quoted int = 10
const hclstrtok_en_unquoted int = 4
116 | |||
117 | // line 10 "scan_string_lit.rl" | ||
118 | |||
// scanStringLit partitions data into a sequence of byte slices, each of
// which is either a run of ordinary literal characters or a single token
// of interest to the string-literal parser: per scan_string_lit.rl, a
// template escape sequence ($/${, %/%{ and their doubled forms), a newline,
// or — when quoted is true — also a backslash escape sequence. quoted
// selects between the machine's "quoted" and "unquoted" entry states.
//
// This is generated Ragel driver code; edit scan_string_lit.rl instead.
func scanStringLit(data []byte, quoted bool) [][]byte {
	var ret [][]byte

	// line 61 "scan_string_lit.rl"

	// Ragel state
	p := 0          // "Pointer" into data
	pe := len(data) // End-of-data "pointer"
	ts := 0
	te := 0
	eof := pe

	var cs int // current state
	switch {
	case quoted:
		cs = hclstrtok_en_quoted
	default:
		cs = hclstrtok_en_unquoted
	}

	// Make Go compiler happy
	_ = ts
	_ = eof

	/*token := func () {
		ret = append(ret, data[ts:te])
	}*/

	// line 154 "scan_string_lit.go"
	{
	}

	// line 158 "scan_string_lit.go"
	{
		var _klen int
		var _trans int
		var _acts int
		var _nacts uint
		var _keys int
		if p == pe {
			goto _test_eof
		}
		if cs == 0 {
			goto _out
		}
	_resume:
		_keys = int(_hclstrtok_key_offsets[cs])
		_trans = int(_hclstrtok_index_offsets[cs])

		// Binary search the single-key transitions for the current state.
		_klen = int(_hclstrtok_single_lengths[cs])
		if _klen > 0 {
			_lower := int(_keys)
			var _mid int
			_upper := int(_keys + _klen - 1)
			for {
				if _upper < _lower {
					break
				}

				_mid = _lower + ((_upper - _lower) >> 1)
				switch {
				case data[p] < _hclstrtok_trans_keys[_mid]:
					_upper = _mid - 1
				case data[p] > _hclstrtok_trans_keys[_mid]:
					_lower = _mid + 1
				default:
					_trans += int(_mid - int(_keys))
					goto _match
				}
			}
			_keys += _klen
			_trans += _klen
		}

		// Binary search the key-range transitions (pairs of lo/hi bytes).
		_klen = int(_hclstrtok_range_lengths[cs])
		if _klen > 0 {
			_lower := int(_keys)
			var _mid int
			_upper := int(_keys + (_klen << 1) - 2)
			for {
				if _upper < _lower {
					break
				}

				_mid = _lower + (((_upper - _lower) >> 1) & ^1)
				switch {
				case data[p] < _hclstrtok_trans_keys[_mid]:
					_upper = _mid - 2
				case data[p] > _hclstrtok_trans_keys[_mid+1]:
					_lower = _mid + 2
				default:
					_trans += int((_mid - int(_keys)) >> 1)
					goto _match
				}
			}
			_trans += _klen
		}

	_match:
		_trans = int(_hclstrtok_indicies[_trans])
		cs = int(_hclstrtok_trans_targs[_trans])

		if _hclstrtok_trans_actions[_trans] == 0 {
			goto _again
		}

		// Run the actions attached to this transition.
		_acts = int(_hclstrtok_trans_actions[_trans])
		_nacts = uint(_hclstrtok_actions[_acts])
		_acts++
		for ; _nacts > 0; _nacts-- {
			_acts++
			switch _hclstrtok_actions[_acts-1] {
			case 0:
				// line 40 "scan_string_lit.rl"

				// If te is behind p then we've skipped over some literal
				// characters which we must now return.
				if te < p {
					ret = append(ret, data[te:p])
				}
				ts = p

			case 1:
				// line 48 "scan_string_lit.rl"

				te = p
				ret = append(ret, data[ts:te])

				// line 255 "scan_string_lit.go"
			}
		}

	_again:
		if cs == 0 {
			goto _out
		}
		p++
		if p != pe {
			goto _resume
		}
	_test_eof:
		{
		}
		if p == eof {
			// Run any end-of-input actions for the final state.
			__acts := _hclstrtok_eof_actions[cs]
			__nacts := uint(_hclstrtok_actions[__acts])
			__acts++
			for ; __nacts > 0; __nacts-- {
				__acts++
				switch _hclstrtok_actions[__acts-1] {
				case 1:
					// line 48 "scan_string_lit.rl"

					te = p
					ret = append(ret, data[ts:te])

					// line 281 "scan_string_lit.go"
				}
			}
		}

	_out:
		{
		}
	}

	// line 89 "scan_string_lit.rl"

	if te < p {
		// Collect any leftover literal characters at the end of the input
		ret = append(ret, data[te:p])
	}

	// If we fall out here without being in a final state then we've
	// encountered something that the scanner can't match, which should
	// be impossible (the scanner matches all bytes _somehow_) but we'll
	// tolerate it and let the caller deal with it.
	if cs < hclstrtok_first_final {
		ret = append(ret, data[p:len(data)])
	}

	return ret
}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_string_lit.rl b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_string_lit.rl new file mode 100644 index 0000000..f8ac117 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_string_lit.rl | |||
@@ -0,0 +1,105 @@ | |||
1 | |||
2 | package hclsyntax | ||
3 | |||
4 | // This file is generated from scan_string_lit.rl. DO NOT EDIT. | ||
5 | %%{ | ||
6 | # (except you are actually in scan_string_lit.rl here, so edit away!) | ||
7 | |||
8 | machine hclstrtok; | ||
9 | write data; | ||
10 | }%% | ||
11 | |||
12 | func scanStringLit(data []byte, quoted bool) [][]byte { | ||
13 | var ret [][]byte | ||
14 | |||
15 | %%{ | ||
16 | include UnicodeDerived "unicode_derived.rl"; | ||
17 | |||
18 | UTF8Cont = 0x80 .. 0xBF; | ||
19 | AnyUTF8 = ( | ||
20 | 0x00..0x7F | | ||
21 | 0xC0..0xDF . UTF8Cont | | ||
22 | 0xE0..0xEF . UTF8Cont . UTF8Cont | | ||
23 | 0xF0..0xF7 . UTF8Cont . UTF8Cont . UTF8Cont | ||
24 | ); | ||
25 | BadUTF8 = any - AnyUTF8; | ||
26 | |||
27 | Hex = ('0'..'9' | 'a'..'f' | 'A'..'F'); | ||
28 | |||
29 | # Our goal with this patterns is to capture user intent as best as | ||
30 | # possible, even if the input is invalid. The caller will then verify | ||
31 | # whether each token is valid and generate suitable error messages | ||
32 | # if not. | ||
33 | UnicodeEscapeShort = "\\u" . Hex{0,4}; | ||
34 | UnicodeEscapeLong = "\\U" . Hex{0,8}; | ||
35 | UnicodeEscape = (UnicodeEscapeShort | UnicodeEscapeLong); | ||
36 | SimpleEscape = "\\" . (AnyUTF8 - ('U'|'u'))?; | ||
37 | TemplateEscape = ("$" . ("$" . ("{"?))?) | ("%" . ("%" . ("{"?))?); | ||
38 | Newline = ("\r\n" | "\r" | "\n"); | ||
39 | |||
40 | action Begin { | ||
41 | // If te is behind p then we've skipped over some literal | ||
42 | // characters which we must now return. | ||
43 | if te < p { | ||
44 | ret = append(ret, data[te:p]) | ||
45 | } | ||
46 | ts = p; | ||
47 | } | ||
48 | action End { | ||
49 | te = p; | ||
50 | ret = append(ret, data[ts:te]); | ||
51 | } | ||
52 | |||
53 | QuotedToken = (UnicodeEscape | SimpleEscape | TemplateEscape | Newline) >Begin %End; | ||
54 | UnquotedToken = (TemplateEscape | Newline) >Begin %End; | ||
55 | QuotedLiteral = (any - ("\\" | "$" | "%" | "\r" | "\n")); | ||
56 | UnquotedLiteral = (any - ("$" | "%" | "\r" | "\n")); | ||
57 | |||
58 | quoted := (QuotedToken | QuotedLiteral)**; | ||
59 | unquoted := (UnquotedToken | UnquotedLiteral)**; | ||
60 | |||
61 | }%% | ||
62 | |||
63 | // Ragel state | ||
64 | p := 0 // "Pointer" into data | ||
65 | pe := len(data) // End-of-data "pointer" | ||
66 | ts := 0 | ||
67 | te := 0 | ||
68 | eof := pe | ||
69 | |||
70 | var cs int // current state | ||
71 | switch { | ||
72 | case quoted: | ||
73 | cs = hclstrtok_en_quoted | ||
74 | default: | ||
75 | cs = hclstrtok_en_unquoted | ||
76 | } | ||
77 | |||
78 | // Make Go compiler happy | ||
79 | _ = ts | ||
80 | _ = eof | ||
81 | |||
82 | /*token := func () { | ||
83 | ret = append(ret, data[ts:te]) | ||
84 | }*/ | ||
85 | |||
86 | %%{ | ||
87 | write init nocs; | ||
88 | write exec; | ||
89 | }%% | ||
90 | |||
91 | if te < p { | ||
92 | // Collect any leftover literal characters at the end of the input | ||
93 | ret = append(ret, data[te:p]) | ||
94 | } | ||
95 | |||
96 | // If we fall out here without being in a final state then we've | ||
97 | // encountered something that the scanner can't match, which should | ||
98 | // be impossible (the scanner matches all bytes _somehow_) but we'll | ||
99 | // tolerate it and let the caller deal with it. | ||
100 | if cs < hclstrtok_first_final { | ||
101 | ret = append(ret, data[p:len(data)]) | ||
102 | } | ||
103 | |||
104 | return ret | ||
105 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_tokens.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_tokens.go new file mode 100644 index 0000000..395e9c1 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_tokens.go | |||
@@ -0,0 +1,5443 @@ | |||
1 | // line 1 "scan_tokens.rl" | ||
2 | |||
3 | package hclsyntax | ||
4 | |||
5 | import ( | ||
6 | "bytes" | ||
7 | |||
8 | "github.com/hashicorp/hcl2/hcl" | ||
9 | ) | ||
10 | |||
11 | // This file is generated from scan_tokens.rl. DO NOT EDIT. | ||
12 | |||
13 | // line 15 "scan_tokens.go" | ||
14 | var _hcltok_actions []byte = []byte{ | ||
15 | 0, 1, 0, 1, 1, 1, 2, 1, 3, | ||
16 | 1, 4, 1, 6, 1, 7, 1, 8, | ||
17 | 1, 9, 1, 10, 1, 11, 1, 12, | ||
18 | 1, 13, 1, 14, 1, 15, 1, 16, | ||
19 | 1, 17, 1, 18, 1, 19, 1, 22, | ||
20 | 1, 23, 1, 24, 1, 25, 1, 26, | ||
21 | 1, 27, 1, 28, 1, 29, 1, 30, | ||
22 | 1, 31, 1, 34, 1, 35, 1, 36, | ||
23 | 1, 37, 1, 38, 1, 39, 1, 40, | ||
24 | 1, 41, 1, 42, 1, 43, 1, 46, | ||
25 | 1, 47, 1, 48, 1, 49, 1, 50, | ||
26 | 1, 51, 1, 52, 1, 58, 1, 59, | ||
27 | 1, 60, 1, 61, 1, 62, 1, 63, | ||
28 | 1, 64, 1, 65, 1, 66, 1, 67, | ||
29 | 1, 68, 1, 69, 1, 70, 1, 71, | ||
30 | 1, 72, 1, 73, 1, 74, 1, 75, | ||
31 | 1, 76, 1, 77, 1, 78, 1, 79, | ||
32 | 1, 80, 1, 81, 1, 82, 1, 83, | ||
33 | 1, 84, 1, 85, 1, 86, 1, 87, | ||
34 | 2, 0, 15, 2, 1, 15, 2, 2, | ||
35 | 24, 2, 2, 28, 2, 3, 24, 2, | ||
36 | 3, 28, 2, 4, 5, 2, 7, 0, | ||
37 | 2, 7, 1, 2, 7, 20, 2, 7, | ||
38 | 21, 2, 7, 32, 2, 7, 33, 2, | ||
39 | 7, 44, 2, 7, 45, 2, 7, 53, | ||
40 | 2, 7, 54, 2, 7, 55, 2, 7, | ||
41 | 56, 2, 7, 57, 3, 7, 2, 20, | ||
42 | 3, 7, 3, 20, | ||
43 | } | ||
44 | |||
45 | var _hcltok_key_offsets []int16 = []int16{ | ||
46 | 0, 0, 1, 2, 3, 5, 10, 14, | ||
47 | 16, 58, 99, 145, 146, 150, 156, 156, | ||
48 | 158, 160, 169, 175, 182, 183, 186, 187, | ||
49 | 191, 196, 205, 209, 213, 221, 223, 225, | ||
50 | 227, 230, 262, 264, 266, 270, 274, 277, | ||
51 | 288, 301, 320, 333, 349, 361, 377, 392, | ||
52 | 413, 423, 435, 446, 460, 475, 485, 497, | ||
53 | 506, 518, 520, 524, 545, 554, 564, 570, | ||
54 | 576, 577, 626, 628, 632, 634, 640, 647, | ||
55 | 655, 662, 665, 671, 675, 679, 681, 685, | ||
56 | 689, 693, 699, 707, 715, 721, 723, 727, | ||
57 | 729, 735, 739, 743, 747, 751, 756, 763, | ||
58 | 769, 771, 773, 777, 779, 785, 789, 793, | ||
59 | 803, 808, 822, 837, 839, 847, 849, 854, | ||
60 | 868, 873, 875, 879, 880, 884, 890, 896, | ||
61 | 906, 916, 927, 935, 938, 941, 945, 949, | ||
62 | 951, 954, 954, 957, 959, 989, 991, 993, | ||
63 | 997, 1002, 1006, 1011, 1013, 1015, 1017, 1026, | ||
64 | 1030, 1034, 1040, 1042, 1050, 1058, 1070, 1073, | ||
65 | 1079, 1083, 1085, 1089, 1109, 1111, 1113, 1124, | ||
66 | 1130, 1132, 1134, 1136, 1140, 1146, 1152, 1154, | ||
67 | 1159, 1163, 1165, 1173, 1191, 1231, 1241, 1245, | ||
68 | 1247, 1249, 1250, 1254, 1258, 1262, 1266, 1270, | ||
69 | 1275, 1279, 1283, 1287, 1289, 1291, 1295, 1305, | ||
70 | 1309, 1311, 1315, 1319, 1323, 1336, 1338, 1340, | ||
71 | 1344, 1346, 1350, 1352, 1354, 1384, 1388, 1392, | ||
72 | 1396, 1399, 1406, 1411, 1422, 1426, 1442, 1456, | ||
73 | 1460, 1465, 1469, 1473, 1479, 1481, 1487, 1489, | ||
74 | 1493, 1495, 1501, 1506, 1511, 1521, 1523, 1525, | ||
75 | 1529, 1533, 1535, 1548, 1550, 1554, 1558, 1566, | ||
76 | 1568, 1572, 1574, 1575, 1578, 1583, 1585, 1587, | ||
77 | 1591, 1593, 1597, 1603, 1623, 1629, 1635, 1637, | ||
78 | 1638, 1648, 1649, 1657, 1664, 1666, 1669, 1671, | ||
79 | 1673, 1675, 1680, 1684, 1688, 1693, 1703, 1713, | ||
80 | 1717, 1721, 1735, 1761, 1771, 1773, 1775, 1778, | ||
81 | 1780, 1783, 1785, 1789, 1791, 1792, 1796, 1798, | ||
82 | 1801, 1808, 1816, 1818, 1820, 1824, 1826, 1832, | ||
83 | 1843, 1846, 1848, 1852, 1857, 1887, 1892, 1894, | ||
84 | 1897, 1902, 1916, 1923, 1937, 1942, 1955, 1959, | ||
85 | 1972, 1977, 1995, 1996, 2005, 2009, 2021, 2026, | ||
86 | 2033, 2040, 2047, 2049, 2053, 2075, 2080, 2081, | ||
87 | 2085, 2087, 2137, 2140, 2151, 2155, 2157, 2163, | ||
88 | 2169, 2171, 2176, 2178, 2182, 2184, 2185, 2187, | ||
89 | 2189, 2195, 2197, 2199, 2203, 2209, 2222, 2224, | ||
90 | 2230, 2234, 2242, 2253, 2261, 2264, 2294, 2300, | ||
91 | 2303, 2308, 2310, 2314, 2318, 2322, 2324, 2331, | ||
92 | 2333, 2342, 2349, 2357, 2359, 2379, 2391, 2395, | ||
93 | 2397, 2415, 2454, 2456, 2460, 2462, 2469, 2473, | ||
94 | 2501, 2503, 2505, 2507, 2509, 2512, 2514, 2518, | ||
95 | 2522, 2524, 2527, 2529, 2531, 2534, 2536, 2538, | ||
96 | 2539, 2541, 2543, 2547, 2551, 2554, 2567, 2569, | ||
97 | 2575, 2579, 2581, 2585, 2589, 2603, 2606, 2615, | ||
98 | 2617, 2621, 2627, 2627, 2629, 2631, 2640, 2646, | ||
99 | 2653, 2654, 2657, 2658, 2662, 2667, 2676, 2680, | ||
100 | 2684, 2692, 2694, 2696, 2698, 2701, 2733, 2735, | ||
101 | 2737, 2741, 2745, 2748, 2759, 2772, 2791, 2804, | ||
102 | 2820, 2832, 2848, 2863, 2884, 2894, 2906, 2917, | ||
103 | 2931, 2946, 2956, 2968, 2977, 2989, 2991, 2995, | ||
104 | 3016, 3025, 3035, 3041, 3047, 3048, 3097, 3099, | ||
105 | 3103, 3105, 3111, 3118, 3126, 3133, 3136, 3142, | ||
106 | 3146, 3150, 3152, 3156, 3160, 3164, 3170, 3178, | ||
107 | 3186, 3192, 3194, 3198, 3200, 3206, 3210, 3214, | ||
108 | 3218, 3222, 3227, 3234, 3240, 3242, 3244, 3248, | ||
109 | 3250, 3256, 3260, 3264, 3274, 3279, 3293, 3308, | ||
110 | 3310, 3318, 3320, 3325, 3339, 3344, 3346, 3350, | ||
111 | 3351, 3355, 3361, 3367, 3377, 3387, 3398, 3406, | ||
112 | 3409, 3412, 3416, 3420, 3422, 3425, 3425, 3428, | ||
113 | 3430, 3460, 3462, 3464, 3468, 3473, 3477, 3482, | ||
114 | 3484, 3486, 3488, 3497, 3501, 3505, 3511, 3513, | ||
115 | 3521, 3529, 3541, 3544, 3550, 3554, 3556, 3560, | ||
116 | 3580, 3582, 3584, 3595, 3601, 3603, 3605, 3607, | ||
117 | 3611, 3617, 3623, 3625, 3630, 3634, 3636, 3644, | ||
118 | 3662, 3702, 3712, 3716, 3718, 3720, 3721, 3725, | ||
119 | 3729, 3733, 3737, 3741, 3746, 3750, 3754, 3758, | ||
120 | 3760, 3762, 3766, 3776, 3780, 3782, 3786, 3790, | ||
121 | 3794, 3807, 3809, 3811, 3815, 3817, 3821, 3823, | ||
122 | 3825, 3855, 3859, 3863, 3867, 3870, 3877, 3882, | ||
123 | 3893, 3897, 3913, 3927, 3931, 3936, 3940, 3944, | ||
124 | 3950, 3952, 3958, 3960, 3964, 3966, 3972, 3977, | ||
125 | 3982, 3992, 3994, 3996, 4000, 4004, 4006, 4019, | ||
126 | 4021, 4025, 4029, 4037, 4039, 4043, 4045, 4046, | ||
127 | 4049, 4054, 4056, 4058, 4062, 4064, 4068, 4074, | ||
128 | 4094, 4100, 4106, 4108, 4109, 4119, 4120, 4128, | ||
129 | 4135, 4137, 4140, 4142, 4144, 4146, 4151, 4155, | ||
130 | 4159, 4164, 4174, 4184, 4188, 4192, 4206, 4232, | ||
131 | 4242, 4244, 4246, 4249, 4251, 4254, 4256, 4260, | ||
132 | 4262, 4263, 4267, 4269, 4271, 4278, 4282, 4289, | ||
133 | 4296, 4305, 4321, 4333, 4351, 4362, 4374, 4382, | ||
134 | 4400, 4408, 4438, 4441, 4451, 4461, 4473, 4484, | ||
135 | 4493, 4506, 4518, 4522, 4528, 4555, 4564, 4567, | ||
136 | 4572, 4578, 4583, 4604, 4608, 4614, 4614, 4621, | ||
137 | 4630, 4638, 4641, 4645, 4651, 4657, 4660, 4664, | ||
138 | 4671, 4677, 4686, 4695, 4699, 4703, 4707, 4711, | ||
139 | 4718, 4722, 4726, 4736, 4742, 4746, 4752, 4756, | ||
140 | 4759, 4765, 4771, 4783, 4787, 4791, 4801, 4805, | ||
141 | 4816, 4818, 4820, 4824, 4836, 4841, 4865, 4869, | ||
142 | 4875, 4897, 4906, 4910, 4913, 4914, 4922, 4930, | ||
143 | 4936, 4946, 4953, 4971, 4974, 4977, 4985, 4991, | ||
144 | 4995, 4999, 5003, 5009, 5017, 5022, 5028, 5032, | ||
145 | 5040, 5047, 5051, 5058, 5064, 5072, 5080, 5086, | ||
146 | 5092, 5103, 5107, 5119, 5128, 5145, 5162, 5165, | ||
147 | 5169, 5171, 5177, 5179, 5183, 5198, 5202, 5206, | ||
148 | 5210, 5214, 5218, 5220, 5226, 5231, 5235, 5241, | ||
149 | 5248, 5251, 5269, 5271, 5316, 5322, 5328, 5332, | ||
150 | 5336, 5342, 5346, 5352, 5358, 5365, 5367, 5373, | ||
151 | 5379, 5383, 5387, 5395, 5408, 5414, 5421, 5429, | ||
152 | 5435, 5444, 5450, 5454, 5459, 5463, 5471, 5475, | ||
153 | 5479, 5509, 5515, 5521, 5527, 5533, 5540, 5546, | ||
154 | 5553, 5558, 5568, 5572, 5579, 5585, 5589, 5596, | ||
155 | 5600, 5606, 5609, 5613, 5617, 5621, 5625, 5630, | ||
156 | 5635, 5639, 5650, 5654, 5658, 5664, 5672, 5676, | ||
157 | 5693, 5697, 5703, 5713, 5719, 5725, 5728, 5733, | ||
158 | 5742, 5746, 5750, 5756, 5760, 5766, 5774, 5792, | ||
159 | 5793, 5803, 5804, 5813, 5821, 5823, 5826, 5828, | ||
160 | 5830, 5832, 5837, 5850, 5854, 5869, 5898, 5909, | ||
161 | 5911, 5915, 5919, 5924, 5928, 5930, 5937, 5941, | ||
162 | 5949, 5953, 5954, 5955, 5957, 5959, 5961, 5963, | ||
163 | 5965, 5966, 5967, 5968, 5970, 5972, 5974, 5975, | ||
164 | 5976, 5977, 5978, 5980, 5982, 5984, 5985, 5986, | ||
165 | 5990, 5996, 5996, 5998, 6000, 6009, 6015, 6022, | ||
166 | 6023, 6026, 6027, 6031, 6036, 6045, 6049, 6053, | ||
167 | 6061, 6063, 6065, 6067, 6070, 6102, 6104, 6106, | ||
168 | 6110, 6114, 6117, 6128, 6141, 6160, 6173, 6189, | ||
169 | 6201, 6217, 6232, 6253, 6263, 6275, 6286, 6300, | ||
170 | 6315, 6325, 6337, 6346, 6358, 6360, 6364, 6385, | ||
171 | 6394, 6404, 6410, 6416, 6417, 6466, 6468, 6472, | ||
172 | 6474, 6480, 6487, 6495, 6502, 6505, 6511, 6515, | ||
173 | 6519, 6521, 6525, 6529, 6533, 6539, 6547, 6555, | ||
174 | 6561, 6563, 6567, 6569, 6575, 6579, 6583, 6587, | ||
175 | 6591, 6596, 6603, 6609, 6611, 6613, 6617, 6619, | ||
176 | 6625, 6629, 6633, 6643, 6648, 6662, 6677, 6679, | ||
177 | 6687, 6689, 6694, 6708, 6713, 6715, 6719, 6720, | ||
178 | 6724, 6730, 6736, 6746, 6756, 6767, 6775, 6778, | ||
179 | 6781, 6785, 6789, 6791, 6794, 6794, 6797, 6799, | ||
180 | 6829, 6831, 6833, 6837, 6842, 6846, 6851, 6853, | ||
181 | 6855, 6857, 6866, 6870, 6874, 6880, 6882, 6890, | ||
182 | 6898, 6910, 6913, 6919, 6923, 6925, 6929, 6949, | ||
183 | 6951, 6953, 6964, 6970, 6972, 6974, 6976, 6980, | ||
184 | 6986, 6992, 6994, 6999, 7003, 7005, 7013, 7031, | ||
185 | 7071, 7081, 7085, 7087, 7089, 7090, 7094, 7098, | ||
186 | 7102, 7106, 7110, 7115, 7119, 7123, 7127, 7129, | ||
187 | 7131, 7135, 7145, 7149, 7151, 7155, 7159, 7163, | ||
188 | 7176, 7178, 7180, 7184, 7186, 7190, 7192, 7194, | ||
189 | 7224, 7228, 7232, 7236, 7239, 7246, 7251, 7262, | ||
190 | 7266, 7282, 7296, 7300, 7305, 7309, 7313, 7319, | ||
191 | 7321, 7327, 7329, 7333, 7335, 7341, 7346, 7351, | ||
192 | 7361, 7363, 7365, 7369, 7373, 7375, 7388, 7390, | ||
193 | 7394, 7398, 7406, 7408, 7412, 7414, 7415, 7418, | ||
194 | 7423, 7425, 7427, 7431, 7433, 7437, 7443, 7463, | ||
195 | 7469, 7475, 7477, 7478, 7488, 7489, 7497, 7504, | ||
196 | 7506, 7509, 7511, 7513, 7515, 7520, 7524, 7528, | ||
197 | 7533, 7543, 7553, 7557, 7561, 7575, 7601, 7611, | ||
198 | 7613, 7615, 7618, 7620, 7623, 7625, 7629, 7631, | ||
199 | 7632, 7636, 7638, 7640, 7647, 7651, 7658, 7665, | ||
200 | 7674, 7690, 7702, 7720, 7731, 7743, 7751, 7769, | ||
201 | 7777, 7807, 7810, 7820, 7830, 7842, 7853, 7862, | ||
202 | 7875, 7887, 7891, 7897, 7924, 7933, 7936, 7941, | ||
203 | 7947, 7952, 7973, 7977, 7983, 7983, 7990, 7999, | ||
204 | 8007, 8010, 8014, 8020, 8026, 8029, 8033, 8040, | ||
205 | 8046, 8055, 8064, 8068, 8072, 8076, 8080, 8087, | ||
206 | 8091, 8095, 8105, 8111, 8115, 8121, 8125, 8128, | ||
207 | 8134, 8140, 8152, 8156, 8160, 8170, 8174, 8185, | ||
208 | 8187, 8189, 8193, 8205, 8210, 8234, 8238, 8244, | ||
209 | 8266, 8275, 8279, 8282, 8283, 8291, 8299, 8305, | ||
210 | 8315, 8322, 8340, 8343, 8346, 8354, 8360, 8364, | ||
211 | 8368, 8372, 8378, 8386, 8391, 8397, 8401, 8409, | ||
212 | 8416, 8420, 8427, 8433, 8441, 8449, 8455, 8461, | ||
213 | 8472, 8476, 8488, 8497, 8514, 8531, 8534, 8538, | ||
214 | 8540, 8546, 8548, 8552, 8567, 8571, 8575, 8579, | ||
215 | 8583, 8587, 8589, 8595, 8600, 8604, 8610, 8617, | ||
216 | 8620, 8638, 8640, 8685, 8691, 8697, 8701, 8705, | ||
217 | 8711, 8715, 8721, 8727, 8734, 8736, 8742, 8748, | ||
218 | 8752, 8756, 8764, 8777, 8783, 8790, 8798, 8804, | ||
219 | 8813, 8819, 8823, 8828, 8832, 8840, 8844, 8848, | ||
220 | 8878, 8884, 8890, 8896, 8902, 8909, 8915, 8922, | ||
221 | 8927, 8937, 8941, 8948, 8954, 8958, 8965, 8969, | ||
222 | 8975, 8978, 8982, 8986, 8990, 8994, 8999, 9004, | ||
223 | 9008, 9019, 9023, 9027, 9033, 9041, 9045, 9062, | ||
224 | 9066, 9072, 9082, 9088, 9094, 9097, 9102, 9111, | ||
225 | 9115, 9119, 9125, 9129, 9135, 9143, 9161, 9162, | ||
226 | 9172, 9173, 9182, 9190, 9192, 9195, 9197, 9199, | ||
227 | 9201, 9206, 9219, 9223, 9238, 9267, 9278, 9280, | ||
228 | 9284, 9288, 9293, 9297, 9299, 9306, 9310, 9318, | ||
229 | 9322, 9398, 9400, 9401, 9402, 9403, 9404, 9405, | ||
230 | 9407, 9408, 9413, 9415, 9417, 9418, 9462, 9463, | ||
231 | 9464, 9466, 9471, 9475, 9475, 9477, 9479, 9490, | ||
232 | 9500, 9508, 9509, 9511, 9512, 9516, 9520, 9530, | ||
233 | 9534, 9541, 9552, 9559, 9563, 9569, 9580, 9612, | ||
234 | 9661, 9676, 9691, 9696, 9698, 9703, 9735, 9743, | ||
235 | 9745, 9767, 9789, 9791, 9807, 9823, 9839, 9855, | ||
236 | 9870, 9880, 9897, 9914, 9931, 9947, 9957, 9974, | ||
237 | 9990, 10006, 10022, 10038, 10054, 10070, 10086, 10087, | ||
238 | 10088, 10089, 10090, 10092, 10094, 10096, 10110, 10124, | ||
239 | 10138, 10152, 10153, 10154, 10156, 10158, 10160, 10174, | ||
240 | 10188, 10189, 10190, 10192, 10194, 10196, 10245, 10289, | ||
241 | 10291, 10296, 10300, 10300, 10302, 10304, 10315, 10325, | ||
242 | 10333, 10334, 10336, 10337, 10341, 10345, 10355, 10359, | ||
243 | 10366, 10377, 10384, 10388, 10394, 10405, 10437, 10486, | ||
244 | 10501, 10516, 10521, 10523, 10528, 10560, 10568, 10570, | ||
245 | 10592, 10614, | ||
246 | } | ||
247 | |||
248 | var _hcltok_trans_keys []byte = []byte{ | ||
249 | 10, 46, 42, 42, 47, 46, 69, 101, | ||
250 | 48, 57, 43, 45, 48, 57, 48, 57, | ||
251 | 45, 95, 194, 195, 198, 199, 203, 205, | ||
252 | 206, 207, 210, 212, 213, 214, 215, 216, | ||
253 | 217, 219, 220, 221, 222, 223, 224, 225, | ||
254 | 226, 227, 228, 233, 234, 237, 239, 240, | ||
255 | 65, 90, 97, 122, 196, 202, 208, 218, | ||
256 | 229, 236, 95, 194, 195, 198, 199, 203, | ||
257 | 205, 206, 207, 210, 212, 213, 214, 215, | ||
258 | 216, 217, 219, 220, 221, 222, 223, 224, | ||
259 | 225, 226, 227, 228, 233, 234, 237, 239, | ||
260 | 240, 65, 90, 97, 122, 196, 202, 208, | ||
261 | 218, 229, 236, 10, 13, 45, 95, 194, | ||
262 | 195, 198, 199, 203, 204, 205, 206, 207, | ||
263 | 210, 212, 213, 214, 215, 216, 217, 219, | ||
264 | 220, 221, 222, 223, 224, 225, 226, 227, | ||
265 | 228, 233, 234, 237, 239, 240, 243, 48, | ||
266 | 57, 65, 90, 97, 122, 196, 218, 229, | ||
267 | 236, 10, 170, 181, 183, 186, 128, 150, | ||
268 | 152, 182, 184, 255, 192, 255, 0, 127, | ||
269 | 173, 130, 133, 146, 159, 165, 171, 175, | ||
270 | 255, 181, 190, 184, 185, 192, 255, 140, | ||
271 | 134, 138, 142, 161, 163, 255, 182, 130, | ||
272 | 136, 137, 176, 151, 152, 154, 160, 190, | ||
273 | 136, 144, 192, 255, 135, 129, 130, 132, | ||
274 | 133, 144, 170, 176, 178, 144, 154, 160, | ||
275 | 191, 128, 169, 174, 255, 148, 169, 157, | ||
276 | 158, 189, 190, 192, 255, 144, 255, 139, | ||
277 | 140, 178, 255, 186, 128, 181, 160, 161, | ||
278 | 162, 163, 164, 165, 166, 167, 168, 169, | ||
279 | 170, 171, 172, 173, 174, 175, 176, 177, | ||
280 | 178, 179, 180, 181, 182, 183, 184, 185, | ||
281 | 186, 187, 188, 189, 190, 191, 128, 173, | ||
282 | 128, 155, 160, 180, 182, 189, 148, 161, | ||
283 | 163, 255, 176, 164, 165, 132, 169, 177, | ||
284 | 141, 142, 145, 146, 179, 181, 186, 187, | ||
285 | 158, 133, 134, 137, 138, 143, 150, 152, | ||
286 | 155, 164, 165, 178, 255, 188, 129, 131, | ||
287 | 133, 138, 143, 144, 147, 168, 170, 176, | ||
288 | 178, 179, 181, 182, 184, 185, 190, 255, | ||
289 | 157, 131, 134, 137, 138, 142, 144, 146, | ||
290 | 152, 159, 165, 182, 255, 129, 131, 133, | ||
291 | 141, 143, 145, 147, 168, 170, 176, 178, | ||
292 | 179, 181, 185, 188, 255, 134, 138, 142, | ||
293 | 143, 145, 159, 164, 165, 176, 184, 186, | ||
294 | 255, 129, 131, 133, 140, 143, 144, 147, | ||
295 | 168, 170, 176, 178, 179, 181, 185, 188, | ||
296 | 191, 177, 128, 132, 135, 136, 139, 141, | ||
297 | 150, 151, 156, 157, 159, 163, 166, 175, | ||
298 | 156, 130, 131, 133, 138, 142, 144, 146, | ||
299 | 149, 153, 154, 158, 159, 163, 164, 168, | ||
300 | 170, 174, 185, 190, 191, 144, 151, 128, | ||
301 | 130, 134, 136, 138, 141, 166, 175, 128, | ||
302 | 131, 133, 140, 142, 144, 146, 168, 170, | ||
303 | 185, 189, 255, 133, 137, 151, 142, 148, | ||
304 | 155, 159, 164, 165, 176, 255, 128, 131, | ||
305 | 133, 140, 142, 144, 146, 168, 170, 179, | ||
306 | 181, 185, 188, 191, 158, 128, 132, 134, | ||
307 | 136, 138, 141, 149, 150, 160, 163, 166, | ||
308 | 175, 177, 178, 129, 131, 133, 140, 142, | ||
309 | 144, 146, 186, 189, 255, 133, 137, 143, | ||
310 | 147, 152, 158, 164, 165, 176, 185, 192, | ||
311 | 255, 189, 130, 131, 133, 150, 154, 177, | ||
312 | 179, 187, 138, 150, 128, 134, 143, 148, | ||
313 | 152, 159, 166, 175, 178, 179, 129, 186, | ||
314 | 128, 142, 144, 153, 132, 138, 141, 165, | ||
315 | 167, 129, 130, 135, 136, 148, 151, 153, | ||
316 | 159, 161, 163, 170, 171, 173, 185, 187, | ||
317 | 189, 134, 128, 132, 136, 141, 144, 153, | ||
318 | 156, 159, 128, 181, 183, 185, 152, 153, | ||
319 | 160, 169, 190, 191, 128, 135, 137, 172, | ||
320 | 177, 191, 128, 132, 134, 151, 153, 188, | ||
321 | 134, 128, 129, 130, 131, 137, 138, 139, | ||
322 | 140, 141, 142, 143, 144, 153, 154, 155, | ||
323 | 156, 157, 158, 159, 160, 161, 162, 163, | ||
324 | 164, 165, 166, 167, 168, 169, 170, 173, | ||
325 | 175, 176, 177, 178, 179, 181, 182, 183, | ||
326 | 188, 189, 190, 191, 132, 152, 172, 184, | ||
327 | 185, 187, 128, 191, 128, 137, 144, 255, | ||
328 | 158, 159, 134, 187, 136, 140, 142, 143, | ||
329 | 137, 151, 153, 142, 143, 158, 159, 137, | ||
330 | 177, 142, 143, 182, 183, 191, 255, 128, | ||
331 | 130, 133, 136, 150, 152, 255, 145, 150, | ||
332 | 151, 155, 156, 160, 168, 178, 255, 128, | ||
333 | 143, 160, 255, 182, 183, 190, 255, 129, | ||
334 | 255, 173, 174, 192, 255, 129, 154, 160, | ||
335 | 255, 171, 173, 185, 255, 128, 140, 142, | ||
336 | 148, 160, 180, 128, 147, 160, 172, 174, | ||
337 | 176, 178, 179, 148, 150, 152, 155, 158, | ||
338 | 159, 170, 255, 139, 141, 144, 153, 160, | ||
339 | 255, 184, 255, 128, 170, 176, 255, 182, | ||
340 | 255, 128, 158, 160, 171, 176, 187, 134, | ||
341 | 173, 176, 180, 128, 171, 176, 255, 138, | ||
342 | 143, 155, 255, 128, 155, 160, 255, 159, | ||
343 | 189, 190, 192, 255, 167, 128, 137, 144, | ||
344 | 153, 176, 189, 140, 143, 154, 170, 180, | ||
345 | 255, 180, 255, 128, 183, 128, 137, 141, | ||
346 | 189, 128, 136, 144, 146, 148, 182, 184, | ||
347 | 185, 128, 181, 187, 191, 150, 151, 158, | ||
348 | 159, 152, 154, 156, 158, 134, 135, 142, | ||
349 | 143, 190, 255, 190, 128, 180, 182, 188, | ||
350 | 130, 132, 134, 140, 144, 147, 150, 155, | ||
351 | 160, 172, 178, 180, 182, 188, 128, 129, | ||
352 | 130, 131, 132, 133, 134, 176, 177, 178, | ||
353 | 179, 180, 181, 182, 183, 191, 255, 129, | ||
354 | 147, 149, 176, 178, 190, 192, 255, 144, | ||
355 | 156, 161, 144, 156, 165, 176, 130, 135, | ||
356 | 149, 164, 166, 168, 138, 147, 152, 157, | ||
357 | 170, 185, 188, 191, 142, 133, 137, 160, | ||
358 | 255, 137, 255, 128, 174, 176, 255, 159, | ||
359 | 165, 170, 180, 255, 167, 173, 128, 165, | ||
360 | 176, 255, 168, 174, 176, 190, 192, 255, | ||
361 | 128, 150, 160, 166, 168, 174, 176, 182, | ||
362 | 184, 190, 128, 134, 136, 142, 144, 150, | ||
363 | 152, 158, 160, 191, 128, 129, 130, 131, | ||
364 | 132, 133, 134, 135, 144, 145, 255, 133, | ||
365 | 135, 161, 175, 177, 181, 184, 188, 160, | ||
366 | 151, 152, 187, 192, 255, 133, 173, 177, | ||
367 | 255, 143, 159, 187, 255, 176, 191, 182, | ||
368 | 183, 184, 191, 192, 255, 150, 255, 128, | ||
369 | 146, 147, 148, 152, 153, 154, 155, 156, | ||
370 | 158, 159, 160, 161, 162, 163, 164, 165, | ||
371 | 166, 167, 168, 169, 170, 171, 172, 173, | ||
372 | 174, 175, 176, 129, 255, 141, 255, 144, | ||
373 | 189, 141, 143, 172, 255, 191, 128, 175, | ||
374 | 180, 189, 151, 159, 162, 255, 175, 137, | ||
375 | 138, 184, 255, 183, 255, 168, 255, 128, | ||
376 | 179, 188, 134, 143, 154, 159, 184, 186, | ||
377 | 190, 255, 128, 173, 176, 255, 148, 159, | ||
378 | 189, 255, 129, 142, 154, 159, 191, 255, | ||
379 | 128, 182, 128, 141, 144, 153, 160, 182, | ||
380 | 186, 255, 128, 130, 155, 157, 160, 175, | ||
381 | 178, 182, 129, 134, 137, 142, 145, 150, | ||
382 | 160, 166, 168, 174, 176, 255, 155, 166, | ||
383 | 175, 128, 170, 172, 173, 176, 185, 158, | ||
384 | 159, 160, 255, 164, 175, 135, 138, 188, | ||
385 | 255, 164, 169, 171, 172, 173, 174, 175, | ||
386 | 180, 181, 182, 183, 184, 185, 187, 188, | ||
387 | 189, 190, 191, 165, 186, 174, 175, 154, | ||
388 | 255, 190, 128, 134, 147, 151, 157, 168, | ||
389 | 170, 182, 184, 188, 128, 129, 131, 132, | ||
390 | 134, 255, 147, 255, 190, 255, 144, 145, | ||
391 | 136, 175, 188, 255, 128, 143, 160, 175, | ||
392 | 179, 180, 141, 143, 176, 180, 182, 255, | ||
393 | 189, 255, 191, 144, 153, 161, 186, 129, | ||
394 | 154, 166, 255, 191, 255, 130, 135, 138, | ||
395 | 143, 146, 151, 154, 156, 144, 145, 146, | ||
396 | 147, 148, 150, 151, 152, 155, 157, 158, | ||
397 | 160, 170, 171, 172, 175, 161, 169, 128, | ||
398 | 129, 130, 131, 133, 135, 138, 139, 140, | ||
399 | 141, 142, 143, 144, 145, 146, 147, 148, | ||
400 | 149, 152, 156, 157, 160, 161, 162, 163, | ||
401 | 164, 166, 168, 169, 170, 171, 172, 173, | ||
402 | 174, 176, 177, 153, 155, 178, 179, 128, | ||
403 | 139, 141, 166, 168, 186, 188, 189, 191, | ||
404 | 255, 142, 143, 158, 255, 187, 255, 128, | ||
405 | 180, 189, 128, 156, 160, 255, 145, 159, | ||
406 | 161, 255, 128, 159, 176, 255, 139, 143, | ||
407 | 187, 255, 128, 157, 160, 255, 144, 132, | ||
408 | 135, 150, 255, 158, 159, 170, 175, 148, | ||
409 | 151, 188, 255, 128, 167, 176, 255, 164, | ||
410 | 255, 183, 255, 128, 149, 160, 167, 136, | ||
411 | 188, 128, 133, 138, 181, 183, 184, 191, | ||
412 | 255, 150, 159, 183, 255, 128, 158, 160, | ||
413 | 178, 180, 181, 128, 149, 160, 185, 128, | ||
414 | 183, 190, 191, 191, 128, 131, 133, 134, | ||
415 | 140, 147, 149, 151, 153, 179, 184, 186, | ||
416 | 160, 188, 128, 156, 128, 135, 137, 166, | ||
417 | 128, 181, 128, 149, 160, 178, 128, 145, | ||
418 | 128, 178, 129, 130, 131, 132, 133, 135, | ||
419 | 136, 138, 139, 140, 141, 144, 145, 146, | ||
420 | 147, 150, 151, 152, 153, 154, 155, 156, | ||
421 | 162, 163, 171, 176, 177, 178, 128, 134, | ||
422 | 135, 165, 176, 190, 144, 168, 176, 185, | ||
423 | 128, 180, 182, 191, 182, 144, 179, 155, | ||
424 | 133, 137, 141, 143, 157, 255, 190, 128, | ||
425 | 145, 147, 183, 136, 128, 134, 138, 141, | ||
426 | 143, 157, 159, 168, 176, 255, 171, 175, | ||
427 | 186, 255, 128, 131, 133, 140, 143, 144, | ||
428 | 147, 168, 170, 176, 178, 179, 181, 185, | ||
429 | 188, 191, 144, 151, 128, 132, 135, 136, | ||
430 | 139, 141, 157, 163, 166, 172, 176, 180, | ||
431 | 128, 138, 144, 153, 134, 136, 143, 154, | ||
432 | 255, 128, 181, 184, 255, 129, 151, 158, | ||
433 | 255, 129, 131, 133, 143, 154, 255, 128, | ||
434 | 137, 128, 153, 157, 171, 176, 185, 160, | ||
435 | 255, 170, 190, 192, 255, 128, 184, 128, | ||
436 | 136, 138, 182, 184, 191, 128, 144, 153, | ||
437 | 178, 255, 168, 144, 145, 183, 255, 128, | ||
438 | 142, 145, 149, 129, 141, 144, 146, 147, | ||
439 | 148, 175, 255, 132, 255, 128, 144, 129, | ||
440 | 143, 144, 153, 145, 152, 135, 255, 160, | ||
441 | 168, 169, 171, 172, 173, 174, 188, 189, | ||
442 | 190, 191, 161, 167, 185, 255, 128, 158, | ||
443 | 160, 169, 144, 173, 176, 180, 128, 131, | ||
444 | 144, 153, 163, 183, 189, 255, 144, 255, | ||
445 | 133, 143, 191, 255, 143, 159, 160, 128, | ||
446 | 129, 255, 159, 160, 171, 172, 255, 173, | ||
447 | 255, 179, 255, 128, 176, 177, 178, 128, | ||
448 | 129, 171, 175, 189, 255, 128, 136, 144, | ||
449 | 153, 157, 158, 133, 134, 137, 144, 145, | ||
450 | 146, 147, 148, 149, 154, 155, 156, 157, | ||
451 | 158, 159, 168, 169, 170, 150, 153, 165, | ||
452 | 169, 173, 178, 187, 255, 131, 132, 140, | ||
453 | 169, 174, 255, 130, 132, 149, 157, 173, | ||
454 | 186, 188, 160, 161, 163, 164, 167, 168, | ||
455 | 132, 134, 149, 157, 186, 139, 140, 191, | ||
456 | 255, 134, 128, 132, 138, 144, 146, 255, | ||
457 | 166, 167, 129, 155, 187, 149, 181, 143, | ||
458 | 175, 137, 169, 131, 140, 141, 192, 255, | ||
459 | 128, 182, 187, 255, 173, 180, 182, 255, | ||
460 | 132, 155, 159, 161, 175, 128, 160, 163, | ||
461 | 164, 165, 184, 185, 186, 161, 162, 128, | ||
462 | 134, 136, 152, 155, 161, 163, 164, 166, | ||
463 | 170, 133, 143, 151, 255, 139, 143, 154, | ||
464 | 255, 164, 167, 185, 187, 128, 131, 133, | ||
465 | 159, 161, 162, 169, 178, 180, 183, 130, | ||
466 | 135, 137, 139, 148, 151, 153, 155, 157, | ||
467 | 159, 164, 190, 141, 143, 145, 146, 161, | ||
468 | 162, 167, 170, 172, 178, 180, 183, 185, | ||
469 | 188, 128, 137, 139, 155, 161, 163, 165, | ||
470 | 169, 171, 187, 155, 156, 151, 255, 156, | ||
471 | 157, 160, 181, 255, 186, 187, 255, 162, | ||
472 | 255, 160, 168, 161, 167, 158, 255, 160, | ||
473 | 132, 135, 133, 134, 176, 255, 170, 181, | ||
474 | 186, 191, 176, 180, 182, 183, 186, 189, | ||
475 | 134, 140, 136, 138, 142, 161, 163, 255, | ||
476 | 130, 137, 136, 255, 144, 170, 176, 178, | ||
477 | 160, 191, 128, 138, 174, 175, 177, 255, | ||
478 | 148, 150, 164, 167, 173, 176, 185, 189, | ||
479 | 190, 192, 255, 144, 146, 175, 141, 255, | ||
480 | 166, 176, 178, 255, 186, 138, 170, 180, | ||
481 | 181, 160, 161, 162, 164, 165, 166, 167, | ||
482 | 168, 169, 170, 171, 172, 173, 174, 175, | ||
483 | 176, 177, 178, 179, 180, 181, 182, 184, | ||
484 | 186, 187, 188, 189, 190, 183, 185, 154, | ||
485 | 164, 168, 128, 149, 128, 152, 189, 132, | ||
486 | 185, 144, 152, 161, 177, 255, 169, 177, | ||
487 | 129, 132, 141, 142, 145, 146, 179, 181, | ||
488 | 186, 188, 190, 255, 142, 156, 157, 159, | ||
489 | 161, 176, 177, 133, 138, 143, 144, 147, | ||
490 | 168, 170, 176, 178, 179, 181, 182, 184, | ||
491 | 185, 158, 153, 156, 178, 180, 189, 133, | ||
492 | 141, 143, 145, 147, 168, 170, 176, 178, | ||
493 | 179, 181, 185, 144, 185, 160, 161, 189, | ||
494 | 133, 140, 143, 144, 147, 168, 170, 176, | ||
495 | 178, 179, 181, 185, 177, 156, 157, 159, | ||
496 | 161, 131, 156, 133, 138, 142, 144, 146, | ||
497 | 149, 153, 154, 158, 159, 163, 164, 168, | ||
498 | 170, 174, 185, 144, 189, 133, 140, 142, | ||
499 | 144, 146, 168, 170, 185, 152, 154, 160, | ||
500 | 161, 128, 189, 133, 140, 142, 144, 146, | ||
501 | 168, 170, 179, 181, 185, 158, 160, 161, | ||
502 | 177, 178, 189, 133, 140, 142, 144, 146, | ||
503 | 186, 142, 148, 150, 159, 161, 186, 191, | ||
504 | 189, 133, 150, 154, 177, 179, 187, 128, | ||
505 | 134, 129, 176, 178, 179, 132, 138, 141, | ||
506 | 165, 167, 189, 129, 130, 135, 136, 148, | ||
507 | 151, 153, 159, 161, 163, 170, 171, 173, | ||
508 | 176, 178, 179, 134, 128, 132, 156, 159, | ||
509 | 128, 128, 135, 137, 172, 136, 140, 128, | ||
510 | 129, 130, 131, 137, 138, 139, 140, 141, | ||
511 | 142, 143, 144, 153, 154, 155, 156, 157, | ||
512 | 158, 159, 160, 161, 162, 163, 164, 165, | ||
513 | 166, 167, 168, 169, 170, 172, 173, 174, | ||
514 | 175, 176, 177, 178, 179, 180, 181, 182, | ||
515 | 184, 188, 189, 190, 191, 132, 152, 185, | ||
516 | 187, 191, 128, 170, 161, 144, 149, 154, | ||
517 | 157, 165, 166, 174, 176, 181, 255, 130, | ||
518 | 141, 143, 159, 155, 255, 128, 140, 142, | ||
519 | 145, 160, 177, 128, 145, 160, 172, 174, | ||
520 | 176, 151, 156, 170, 128, 168, 176, 255, | ||
521 | 138, 255, 128, 150, 160, 255, 149, 255, | ||
522 | 167, 133, 179, 133, 139, 131, 160, 174, | ||
523 | 175, 186, 255, 166, 255, 128, 163, 141, | ||
524 | 143, 154, 189, 169, 172, 174, 177, 181, | ||
525 | 182, 129, 130, 132, 133, 134, 176, 177, | ||
526 | 178, 179, 180, 181, 182, 183, 177, 191, | ||
527 | 165, 170, 175, 177, 180, 255, 168, 174, | ||
528 | 176, 255, 128, 134, 136, 142, 144, 150, | ||
529 | 152, 158, 128, 129, 130, 131, 132, 133, | ||
530 | 134, 135, 144, 145, 255, 133, 135, 161, | ||
531 | 169, 177, 181, 184, 188, 160, 151, 154, | ||
532 | 128, 146, 147, 148, 152, 153, 154, 155, | ||
533 | 156, 158, 159, 160, 161, 162, 163, 164, | ||
534 | 165, 166, 167, 168, 169, 170, 171, 172, | ||
535 | 173, 174, 175, 176, 129, 255, 141, 143, | ||
536 | 160, 169, 172, 255, 191, 128, 174, 130, | ||
537 | 134, 139, 163, 255, 130, 179, 187, 189, | ||
538 | 178, 183, 138, 165, 176, 255, 135, 159, | ||
539 | 189, 255, 132, 178, 143, 160, 164, 166, | ||
540 | 175, 186, 190, 128, 168, 186, 128, 130, | ||
541 | 132, 139, 160, 182, 190, 255, 176, 178, | ||
542 | 180, 183, 184, 190, 255, 128, 130, 155, | ||
543 | 157, 160, 170, 178, 180, 128, 162, 164, | ||
544 | 169, 171, 172, 173, 174, 175, 180, 181, | ||
545 | 182, 183, 185, 186, 187, 188, 189, 190, | ||
546 | 191, 165, 179, 157, 190, 128, 134, 147, | ||
547 | 151, 159, 168, 170, 182, 184, 188, 176, | ||
548 | 180, 182, 255, 161, 186, 144, 145, 146, | ||
549 | 147, 148, 150, 151, 152, 155, 157, 158, | ||
550 | 160, 170, 171, 172, 175, 161, 169, 128, | ||
551 | 129, 130, 131, 133, 138, 139, 140, 141, | ||
552 | 142, 143, 144, 145, 146, 147, 148, 149, | ||
553 | 152, 156, 157, 160, 161, 162, 163, 164, | ||
554 | 166, 168, 169, 170, 171, 172, 173, 174, | ||
555 | 176, 177, 153, 155, 178, 179, 145, 255, | ||
556 | 139, 143, 182, 255, 158, 175, 128, 144, | ||
557 | 147, 149, 151, 153, 179, 128, 135, 137, | ||
558 | 164, 128, 130, 131, 132, 133, 134, 135, | ||
559 | 136, 138, 139, 140, 141, 144, 145, 146, | ||
560 | 147, 150, 151, 152, 153, 154, 156, 162, | ||
561 | 163, 171, 176, 177, 178, 131, 183, 131, | ||
562 | 175, 144, 168, 131, 166, 182, 144, 178, | ||
563 | 131, 178, 154, 156, 129, 132, 128, 145, | ||
564 | 147, 171, 159, 255, 144, 157, 161, 135, | ||
565 | 138, 128, 175, 135, 132, 133, 128, 174, | ||
566 | 152, 155, 132, 128, 170, 128, 153, 160, | ||
567 | 190, 192, 255, 128, 136, 138, 174, 128, | ||
568 | 178, 255, 160, 168, 169, 171, 172, 173, | ||
569 | 174, 188, 189, 190, 191, 161, 167, 144, | ||
570 | 173, 128, 131, 163, 183, 189, 255, 133, | ||
571 | 143, 145, 255, 147, 159, 128, 176, 177, | ||
572 | 178, 128, 136, 144, 153, 144, 145, 146, | ||
573 | 147, 148, 149, 154, 155, 156, 157, 158, | ||
574 | 159, 150, 153, 131, 140, 255, 160, 163, | ||
575 | 164, 165, 184, 185, 186, 161, 162, 133, | ||
576 | 255, 170, 181, 183, 186, 128, 150, 152, | ||
577 | 182, 184, 255, 192, 255, 128, 255, 173, | ||
578 | 130, 133, 146, 159, 165, 171, 175, 255, | ||
579 | 181, 190, 184, 185, 192, 255, 140, 134, | ||
580 | 138, 142, 161, 163, 255, 182, 130, 136, | ||
581 | 137, 176, 151, 152, 154, 160, 190, 136, | ||
582 | 144, 192, 255, 135, 129, 130, 132, 133, | ||
583 | 144, 170, 176, 178, 144, 154, 160, 191, | ||
584 | 128, 169, 174, 255, 148, 169, 157, 158, | ||
585 | 189, 190, 192, 255, 144, 255, 139, 140, | ||
586 | 178, 255, 186, 128, 181, 160, 161, 162, | ||
587 | 163, 164, 165, 166, 167, 168, 169, 170, | ||
588 | 171, 172, 173, 174, 175, 176, 177, 178, | ||
589 | 179, 180, 181, 182, 183, 184, 185, 186, | ||
590 | 187, 188, 189, 190, 191, 128, 173, 128, | ||
591 | 155, 160, 180, 182, 189, 148, 161, 163, | ||
592 | 255, 176, 164, 165, 132, 169, 177, 141, | ||
593 | 142, 145, 146, 179, 181, 186, 187, 158, | ||
594 | 133, 134, 137, 138, 143, 150, 152, 155, | ||
595 | 164, 165, 178, 255, 188, 129, 131, 133, | ||
596 | 138, 143, 144, 147, 168, 170, 176, 178, | ||
597 | 179, 181, 182, 184, 185, 190, 255, 157, | ||
598 | 131, 134, 137, 138, 142, 144, 146, 152, | ||
599 | 159, 165, 182, 255, 129, 131, 133, 141, | ||
600 | 143, 145, 147, 168, 170, 176, 178, 179, | ||
601 | 181, 185, 188, 255, 134, 138, 142, 143, | ||
602 | 145, 159, 164, 165, 176, 184, 186, 255, | ||
603 | 129, 131, 133, 140, 143, 144, 147, 168, | ||
604 | 170, 176, 178, 179, 181, 185, 188, 191, | ||
605 | 177, 128, 132, 135, 136, 139, 141, 150, | ||
606 | 151, 156, 157, 159, 163, 166, 175, 156, | ||
607 | 130, 131, 133, 138, 142, 144, 146, 149, | ||
608 | 153, 154, 158, 159, 163, 164, 168, 170, | ||
609 | 174, 185, 190, 191, 144, 151, 128, 130, | ||
610 | 134, 136, 138, 141, 166, 175, 128, 131, | ||
611 | 133, 140, 142, 144, 146, 168, 170, 185, | ||
612 | 189, 255, 133, 137, 151, 142, 148, 155, | ||
613 | 159, 164, 165, 176, 255, 128, 131, 133, | ||
614 | 140, 142, 144, 146, 168, 170, 179, 181, | ||
615 | 185, 188, 191, 158, 128, 132, 134, 136, | ||
616 | 138, 141, 149, 150, 160, 163, 166, 175, | ||
617 | 177, 178, 129, 131, 133, 140, 142, 144, | ||
618 | 146, 186, 189, 255, 133, 137, 143, 147, | ||
619 | 152, 158, 164, 165, 176, 185, 192, 255, | ||
620 | 189, 130, 131, 133, 150, 154, 177, 179, | ||
621 | 187, 138, 150, 128, 134, 143, 148, 152, | ||
622 | 159, 166, 175, 178, 179, 129, 186, 128, | ||
623 | 142, 144, 153, 132, 138, 141, 165, 167, | ||
624 | 129, 130, 135, 136, 148, 151, 153, 159, | ||
625 | 161, 163, 170, 171, 173, 185, 187, 189, | ||
626 | 134, 128, 132, 136, 141, 144, 153, 156, | ||
627 | 159, 128, 181, 183, 185, 152, 153, 160, | ||
628 | 169, 190, 191, 128, 135, 137, 172, 177, | ||
629 | 191, 128, 132, 134, 151, 153, 188, 134, | ||
630 | 128, 129, 130, 131, 137, 138, 139, 140, | ||
631 | 141, 142, 143, 144, 153, 154, 155, 156, | ||
632 | 157, 158, 159, 160, 161, 162, 163, 164, | ||
633 | 165, 166, 167, 168, 169, 170, 173, 175, | ||
634 | 176, 177, 178, 179, 181, 182, 183, 188, | ||
635 | 189, 190, 191, 132, 152, 172, 184, 185, | ||
636 | 187, 128, 191, 128, 137, 144, 255, 158, | ||
637 | 159, 134, 187, 136, 140, 142, 143, 137, | ||
638 | 151, 153, 142, 143, 158, 159, 137, 177, | ||
639 | 142, 143, 182, 183, 191, 255, 128, 130, | ||
640 | 133, 136, 150, 152, 255, 145, 150, 151, | ||
641 | 155, 156, 160, 168, 178, 255, 128, 143, | ||
642 | 160, 255, 182, 183, 190, 255, 129, 255, | ||
643 | 173, 174, 192, 255, 129, 154, 160, 255, | ||
644 | 171, 173, 185, 255, 128, 140, 142, 148, | ||
645 | 160, 180, 128, 147, 160, 172, 174, 176, | ||
646 | 178, 179, 148, 150, 152, 155, 158, 159, | ||
647 | 170, 255, 139, 141, 144, 153, 160, 255, | ||
648 | 184, 255, 128, 170, 176, 255, 182, 255, | ||
649 | 128, 158, 160, 171, 176, 187, 134, 173, | ||
650 | 176, 180, 128, 171, 176, 255, 138, 143, | ||
651 | 155, 255, 128, 155, 160, 255, 159, 189, | ||
652 | 190, 192, 255, 167, 128, 137, 144, 153, | ||
653 | 176, 189, 140, 143, 154, 170, 180, 255, | ||
654 | 180, 255, 128, 183, 128, 137, 141, 189, | ||
655 | 128, 136, 144, 146, 148, 182, 184, 185, | ||
656 | 128, 181, 187, 191, 150, 151, 158, 159, | ||
657 | 152, 154, 156, 158, 134, 135, 142, 143, | ||
658 | 190, 255, 190, 128, 180, 182, 188, 130, | ||
659 | 132, 134, 140, 144, 147, 150, 155, 160, | ||
660 | 172, 178, 180, 182, 188, 128, 129, 130, | ||
661 | 131, 132, 133, 134, 176, 177, 178, 179, | ||
662 | 180, 181, 182, 183, 191, 255, 129, 147, | ||
663 | 149, 176, 178, 190, 192, 255, 144, 156, | ||
664 | 161, 144, 156, 165, 176, 130, 135, 149, | ||
665 | 164, 166, 168, 138, 147, 152, 157, 170, | ||
666 | 185, 188, 191, 142, 133, 137, 160, 255, | ||
667 | 137, 255, 128, 174, 176, 255, 159, 165, | ||
668 | 170, 180, 255, 167, 173, 128, 165, 176, | ||
669 | 255, 168, 174, 176, 190, 192, 255, 128, | ||
670 | 150, 160, 166, 168, 174, 176, 182, 184, | ||
671 | 190, 128, 134, 136, 142, 144, 150, 152, | ||
672 | 158, 160, 191, 128, 129, 130, 131, 132, | ||
673 | 133, 134, 135, 144, 145, 255, 133, 135, | ||
674 | 161, 175, 177, 181, 184, 188, 160, 151, | ||
675 | 152, 187, 192, 255, 133, 173, 177, 255, | ||
676 | 143, 159, 187, 255, 176, 191, 182, 183, | ||
677 | 184, 191, 192, 255, 150, 255, 128, 146, | ||
678 | 147, 148, 152, 153, 154, 155, 156, 158, | ||
679 | 159, 160, 161, 162, 163, 164, 165, 166, | ||
680 | 167, 168, 169, 170, 171, 172, 173, 174, | ||
681 | 175, 176, 129, 255, 141, 255, 144, 189, | ||
682 | 141, 143, 172, 255, 191, 128, 175, 180, | ||
683 | 189, 151, 159, 162, 255, 175, 137, 138, | ||
684 | 184, 255, 183, 255, 168, 255, 128, 179, | ||
685 | 188, 134, 143, 154, 159, 184, 186, 190, | ||
686 | 255, 128, 173, 176, 255, 148, 159, 189, | ||
687 | 255, 129, 142, 154, 159, 191, 255, 128, | ||
688 | 182, 128, 141, 144, 153, 160, 182, 186, | ||
689 | 255, 128, 130, 155, 157, 160, 175, 178, | ||
690 | 182, 129, 134, 137, 142, 145, 150, 160, | ||
691 | 166, 168, 174, 176, 255, 155, 166, 175, | ||
692 | 128, 170, 172, 173, 176, 185, 158, 159, | ||
693 | 160, 255, 164, 175, 135, 138, 188, 255, | ||
694 | 164, 169, 171, 172, 173, 174, 175, 180, | ||
695 | 181, 182, 183, 184, 185, 187, 188, 189, | ||
696 | 190, 191, 165, 186, 174, 175, 154, 255, | ||
697 | 190, 128, 134, 147, 151, 157, 168, 170, | ||
698 | 182, 184, 188, 128, 129, 131, 132, 134, | ||
699 | 255, 147, 255, 190, 255, 144, 145, 136, | ||
700 | 175, 188, 255, 128, 143, 160, 175, 179, | ||
701 | 180, 141, 143, 176, 180, 182, 255, 189, | ||
702 | 255, 191, 144, 153, 161, 186, 129, 154, | ||
703 | 166, 255, 191, 255, 130, 135, 138, 143, | ||
704 | 146, 151, 154, 156, 144, 145, 146, 147, | ||
705 | 148, 150, 151, 152, 155, 157, 158, 160, | ||
706 | 170, 171, 172, 175, 161, 169, 128, 129, | ||
707 | 130, 131, 133, 135, 138, 139, 140, 141, | ||
708 | 142, 143, 144, 145, 146, 147, 148, 149, | ||
709 | 152, 156, 157, 160, 161, 162, 163, 164, | ||
710 | 166, 168, 169, 170, 171, 172, 173, 174, | ||
711 | 176, 177, 153, 155, 178, 179, 128, 139, | ||
712 | 141, 166, 168, 186, 188, 189, 191, 255, | ||
713 | 142, 143, 158, 255, 187, 255, 128, 180, | ||
714 | 189, 128, 156, 160, 255, 145, 159, 161, | ||
715 | 255, 128, 159, 176, 255, 139, 143, 187, | ||
716 | 255, 128, 157, 160, 255, 144, 132, 135, | ||
717 | 150, 255, 158, 159, 170, 175, 148, 151, | ||
718 | 188, 255, 128, 167, 176, 255, 164, 255, | ||
719 | 183, 255, 128, 149, 160, 167, 136, 188, | ||
720 | 128, 133, 138, 181, 183, 184, 191, 255, | ||
721 | 150, 159, 183, 255, 128, 158, 160, 178, | ||
722 | 180, 181, 128, 149, 160, 185, 128, 183, | ||
723 | 190, 191, 191, 128, 131, 133, 134, 140, | ||
724 | 147, 149, 151, 153, 179, 184, 186, 160, | ||
725 | 188, 128, 156, 128, 135, 137, 166, 128, | ||
726 | 181, 128, 149, 160, 178, 128, 145, 128, | ||
727 | 178, 129, 130, 131, 132, 133, 135, 136, | ||
728 | 138, 139, 140, 141, 144, 145, 146, 147, | ||
729 | 150, 151, 152, 153, 154, 155, 156, 162, | ||
730 | 163, 171, 176, 177, 178, 128, 134, 135, | ||
731 | 165, 176, 190, 144, 168, 176, 185, 128, | ||
732 | 180, 182, 191, 182, 144, 179, 155, 133, | ||
733 | 137, 141, 143, 157, 255, 190, 128, 145, | ||
734 | 147, 183, 136, 128, 134, 138, 141, 143, | ||
735 | 157, 159, 168, 176, 255, 171, 175, 186, | ||
736 | 255, 128, 131, 133, 140, 143, 144, 147, | ||
737 | 168, 170, 176, 178, 179, 181, 185, 188, | ||
738 | 191, 144, 151, 128, 132, 135, 136, 139, | ||
739 | 141, 157, 163, 166, 172, 176, 180, 128, | ||
740 | 138, 144, 153, 134, 136, 143, 154, 255, | ||
741 | 128, 181, 184, 255, 129, 151, 158, 255, | ||
742 | 129, 131, 133, 143, 154, 255, 128, 137, | ||
743 | 128, 153, 157, 171, 176, 185, 160, 255, | ||
744 | 170, 190, 192, 255, 128, 184, 128, 136, | ||
745 | 138, 182, 184, 191, 128, 144, 153, 178, | ||
746 | 255, 168, 144, 145, 183, 255, 128, 142, | ||
747 | 145, 149, 129, 141, 144, 146, 147, 148, | ||
748 | 175, 255, 132, 255, 128, 144, 129, 143, | ||
749 | 144, 153, 145, 152, 135, 255, 160, 168, | ||
750 | 169, 171, 172, 173, 174, 188, 189, 190, | ||
751 | 191, 161, 167, 185, 255, 128, 158, 160, | ||
752 | 169, 144, 173, 176, 180, 128, 131, 144, | ||
753 | 153, 163, 183, 189, 255, 144, 255, 133, | ||
754 | 143, 191, 255, 143, 159, 160, 128, 129, | ||
755 | 255, 159, 160, 171, 172, 255, 173, 255, | ||
756 | 179, 255, 128, 176, 177, 178, 128, 129, | ||
757 | 171, 175, 189, 255, 128, 136, 144, 153, | ||
758 | 157, 158, 133, 134, 137, 144, 145, 146, | ||
759 | 147, 148, 149, 154, 155, 156, 157, 158, | ||
760 | 159, 168, 169, 170, 150, 153, 165, 169, | ||
761 | 173, 178, 187, 255, 131, 132, 140, 169, | ||
762 | 174, 255, 130, 132, 149, 157, 173, 186, | ||
763 | 188, 160, 161, 163, 164, 167, 168, 132, | ||
764 | 134, 149, 157, 186, 139, 140, 191, 255, | ||
765 | 134, 128, 132, 138, 144, 146, 255, 166, | ||
766 | 167, 129, 155, 187, 149, 181, 143, 175, | ||
767 | 137, 169, 131, 140, 141, 192, 255, 128, | ||
768 | 182, 187, 255, 173, 180, 182, 255, 132, | ||
769 | 155, 159, 161, 175, 128, 160, 163, 164, | ||
770 | 165, 184, 185, 186, 161, 162, 128, 134, | ||
771 | 136, 152, 155, 161, 163, 164, 166, 170, | ||
772 | 133, 143, 151, 255, 139, 143, 154, 255, | ||
773 | 164, 167, 185, 187, 128, 131, 133, 159, | ||
774 | 161, 162, 169, 178, 180, 183, 130, 135, | ||
775 | 137, 139, 148, 151, 153, 155, 157, 159, | ||
776 | 164, 190, 141, 143, 145, 146, 161, 162, | ||
777 | 167, 170, 172, 178, 180, 183, 185, 188, | ||
778 | 128, 137, 139, 155, 161, 163, 165, 169, | ||
779 | 171, 187, 155, 156, 151, 255, 156, 157, | ||
780 | 160, 181, 255, 186, 187, 255, 162, 255, | ||
781 | 160, 168, 161, 167, 158, 255, 160, 132, | ||
782 | 135, 133, 134, 176, 255, 128, 191, 154, | ||
783 | 164, 168, 128, 149, 150, 191, 128, 152, | ||
784 | 153, 191, 181, 128, 159, 160, 189, 190, | ||
785 | 191, 189, 128, 131, 132, 185, 186, 191, | ||
786 | 144, 128, 151, 152, 161, 162, 176, 177, | ||
787 | 255, 169, 177, 129, 132, 141, 142, 145, | ||
788 | 146, 179, 181, 186, 188, 190, 191, 192, | ||
789 | 255, 142, 158, 128, 155, 156, 161, 162, | ||
790 | 175, 176, 177, 178, 191, 169, 177, 180, | ||
791 | 183, 128, 132, 133, 138, 139, 142, 143, | ||
792 | 144, 145, 146, 147, 185, 186, 191, 157, | ||
793 | 128, 152, 153, 158, 159, 177, 178, 180, | ||
794 | 181, 191, 142, 146, 169, 177, 180, 189, | ||
795 | 128, 132, 133, 185, 186, 191, 144, 185, | ||
796 | 128, 159, 160, 161, 162, 191, 169, 177, | ||
797 | 180, 189, 128, 132, 133, 140, 141, 142, | ||
798 | 143, 144, 145, 146, 147, 185, 186, 191, | ||
799 | 158, 177, 128, 155, 156, 161, 162, 191, | ||
800 | 131, 145, 155, 157, 128, 132, 133, 138, | ||
801 | 139, 141, 142, 149, 150, 152, 153, 159, | ||
802 | 160, 162, 163, 164, 165, 167, 168, 170, | ||
803 | 171, 173, 174, 185, 186, 191, 144, 128, | ||
804 | 191, 141, 145, 169, 189, 128, 132, 133, | ||
805 | 185, 186, 191, 128, 151, 152, 154, 155, | ||
806 | 159, 160, 161, 162, 191, 128, 141, 145, | ||
807 | 169, 180, 189, 129, 132, 133, 185, 186, | ||
808 | 191, 158, 128, 159, 160, 161, 162, 176, | ||
809 | 177, 178, 179, 191, 141, 145, 189, 128, | ||
810 | 132, 133, 186, 187, 191, 142, 128, 147, | ||
811 | 148, 150, 151, 158, 159, 161, 162, 185, | ||
812 | 186, 191, 178, 188, 128, 132, 133, 150, | ||
813 | 151, 153, 154, 189, 190, 191, 128, 134, | ||
814 | 135, 191, 128, 177, 129, 179, 180, 191, | ||
815 | 128, 131, 137, 141, 152, 160, 164, 166, | ||
816 | 172, 177, 189, 129, 132, 133, 134, 135, | ||
817 | 138, 139, 147, 148, 167, 168, 169, 170, | ||
818 | 179, 180, 191, 133, 128, 134, 135, 155, | ||
819 | 156, 159, 160, 191, 128, 129, 191, 136, | ||
820 | 128, 172, 173, 191, 128, 135, 136, 140, | ||
821 | 141, 191, 191, 128, 170, 171, 190, 161, | ||
822 | 128, 143, 144, 149, 150, 153, 154, 157, | ||
823 | 158, 164, 165, 166, 167, 173, 174, 176, | ||
824 | 177, 180, 181, 255, 130, 141, 143, 159, | ||
825 | 134, 187, 136, 140, 142, 143, 137, 151, | ||
826 | 153, 142, 143, 158, 159, 137, 177, 191, | ||
827 | 142, 143, 182, 183, 192, 255, 129, 151, | ||
828 | 128, 133, 134, 135, 136, 255, 145, 150, | ||
829 | 151, 155, 191, 192, 255, 128, 143, 144, | ||
830 | 159, 160, 255, 182, 183, 190, 191, 192, | ||
831 | 255, 128, 129, 255, 173, 174, 192, 255, | ||
832 | 128, 129, 154, 155, 159, 160, 255, 171, | ||
833 | 173, 185, 191, 192, 255, 141, 128, 145, | ||
834 | 146, 159, 160, 177, 178, 191, 173, 128, | ||
835 | 145, 146, 159, 160, 176, 177, 191, 128, | ||
836 | 179, 180, 191, 151, 156, 128, 191, 128, | ||
837 | 159, 160, 255, 184, 191, 192, 255, 169, | ||
838 | 128, 170, 171, 175, 176, 255, 182, 191, | ||
839 | 192, 255, 128, 158, 159, 191, 128, 143, | ||
840 | 144, 173, 174, 175, 176, 180, 181, 191, | ||
841 | 128, 171, 172, 175, 176, 255, 138, 191, | ||
842 | 192, 255, 128, 150, 151, 159, 160, 255, | ||
843 | 149, 191, 192, 255, 167, 128, 191, 128, | ||
844 | 132, 133, 179, 180, 191, 128, 132, 133, | ||
845 | 139, 140, 191, 128, 130, 131, 160, 161, | ||
846 | 173, 174, 175, 176, 185, 186, 255, 166, | ||
847 | 191, 192, 255, 128, 163, 164, 191, 128, | ||
848 | 140, 141, 143, 144, 153, 154, 189, 190, | ||
849 | 191, 128, 136, 137, 191, 173, 128, 168, | ||
850 | 169, 177, 178, 180, 181, 182, 183, 191, | ||
851 | 0, 127, 192, 255, 150, 151, 158, 159, | ||
852 | 152, 154, 156, 158, 134, 135, 142, 143, | ||
853 | 190, 191, 192, 255, 181, 189, 191, 128, | ||
854 | 190, 133, 181, 128, 129, 130, 140, 141, | ||
855 | 143, 144, 147, 148, 149, 150, 155, 156, | ||
856 | 159, 160, 172, 173, 177, 178, 188, 189, | ||
857 | 191, 177, 191, 128, 190, 128, 143, 144, | ||
858 | 156, 157, 191, 130, 135, 148, 164, 166, | ||
859 | 168, 128, 137, 138, 149, 150, 151, 152, | ||
860 | 157, 158, 169, 170, 185, 186, 187, 188, | ||
861 | 191, 142, 128, 132, 133, 137, 138, 159, | ||
862 | 160, 255, 137, 191, 192, 255, 175, 128, | ||
863 | 255, 159, 165, 170, 175, 177, 180, 191, | ||
864 | 192, 255, 166, 173, 128, 167, 168, 175, | ||
865 | 176, 255, 168, 174, 176, 191, 192, 255, | ||
866 | 167, 175, 183, 191, 128, 150, 151, 159, | ||
867 | 160, 190, 135, 143, 151, 128, 158, 159, | ||
868 | 191, 128, 132, 133, 135, 136, 160, 161, | ||
869 | 169, 170, 176, 177, 181, 182, 183, 184, | ||
870 | 188, 189, 191, 160, 151, 154, 187, 192, | ||
871 | 255, 128, 132, 133, 173, 174, 176, 177, | ||
872 | 255, 143, 159, 187, 191, 192, 255, 128, | ||
873 | 175, 176, 191, 150, 191, 192, 255, 141, | ||
874 | 191, 192, 255, 128, 143, 144, 189, 190, | ||
875 | 191, 141, 143, 160, 169, 172, 191, 192, | ||
876 | 255, 191, 128, 174, 175, 190, 128, 157, | ||
877 | 158, 159, 160, 255, 176, 191, 192, 255, | ||
878 | 128, 150, 151, 159, 160, 161, 162, 255, | ||
879 | 175, 137, 138, 184, 191, 192, 255, 128, | ||
880 | 182, 183, 255, 130, 134, 139, 163, 191, | ||
881 | 192, 255, 128, 129, 130, 179, 180, 191, | ||
882 | 187, 189, 128, 177, 178, 183, 184, 191, | ||
883 | 128, 137, 138, 165, 166, 175, 176, 255, | ||
884 | 135, 159, 189, 191, 192, 255, 128, 131, | ||
885 | 132, 178, 179, 191, 143, 165, 191, 128, | ||
886 | 159, 160, 175, 176, 185, 186, 190, 128, | ||
887 | 168, 169, 191, 131, 186, 128, 139, 140, | ||
888 | 159, 160, 182, 183, 189, 190, 255, 176, | ||
889 | 178, 180, 183, 184, 190, 191, 192, 255, | ||
890 | 129, 128, 130, 131, 154, 155, 157, 158, | ||
891 | 159, 160, 170, 171, 177, 178, 180, 181, | ||
892 | 191, 128, 167, 175, 129, 134, 135, 136, | ||
893 | 137, 142, 143, 144, 145, 150, 151, 159, | ||
894 | 160, 255, 155, 166, 175, 128, 162, 163, | ||
895 | 191, 164, 175, 135, 138, 188, 191, 192, | ||
896 | 255, 174, 175, 154, 191, 192, 255, 157, | ||
897 | 169, 183, 189, 191, 128, 134, 135, 146, | ||
898 | 147, 151, 152, 158, 159, 190, 130, 133, | ||
899 | 128, 255, 178, 191, 192, 255, 128, 146, | ||
900 | 147, 255, 190, 191, 192, 255, 128, 143, | ||
901 | 144, 255, 144, 145, 136, 175, 188, 191, | ||
902 | 192, 255, 181, 128, 175, 176, 255, 189, | ||
903 | 191, 192, 255, 128, 160, 161, 186, 187, | ||
904 | 191, 128, 129, 154, 155, 165, 166, 255, | ||
905 | 191, 192, 255, 128, 129, 130, 135, 136, | ||
906 | 137, 138, 143, 144, 145, 146, 151, 152, | ||
907 | 153, 154, 156, 157, 191, 128, 191, 128, | ||
908 | 129, 130, 131, 133, 138, 139, 140, 141, | ||
909 | 142, 143, 144, 145, 146, 147, 148, 149, | ||
910 | 152, 156, 157, 160, 161, 162, 163, 164, | ||
911 | 166, 168, 169, 170, 171, 172, 173, 174, | ||
912 | 176, 177, 132, 151, 153, 155, 158, 175, | ||
913 | 178, 179, 180, 191, 140, 167, 187, 190, | ||
914 | 128, 255, 142, 143, 158, 191, 192, 255, | ||
915 | 187, 191, 192, 255, 128, 180, 181, 191, | ||
916 | 128, 156, 157, 159, 160, 255, 145, 191, | ||
917 | 192, 255, 128, 159, 160, 175, 176, 255, | ||
918 | 139, 143, 182, 191, 192, 255, 144, 132, | ||
919 | 135, 150, 191, 192, 255, 158, 175, 148, | ||
920 | 151, 188, 191, 192, 255, 128, 167, 168, | ||
921 | 175, 176, 255, 164, 191, 192, 255, 183, | ||
922 | 191, 192, 255, 128, 149, 150, 159, 160, | ||
923 | 167, 168, 191, 136, 182, 188, 128, 133, | ||
924 | 134, 137, 138, 184, 185, 190, 191, 255, | ||
925 | 150, 159, 183, 191, 192, 255, 179, 128, | ||
926 | 159, 160, 181, 182, 191, 128, 149, 150, | ||
927 | 159, 160, 185, 186, 191, 128, 183, 184, | ||
928 | 189, 190, 191, 128, 148, 152, 129, 143, | ||
929 | 144, 179, 180, 191, 128, 159, 160, 188, | ||
930 | 189, 191, 128, 156, 157, 191, 136, 128, | ||
931 | 164, 165, 191, 128, 181, 182, 191, 128, | ||
932 | 149, 150, 159, 160, 178, 179, 191, 128, | ||
933 | 145, 146, 191, 128, 178, 179, 191, 128, | ||
934 | 130, 131, 132, 133, 134, 135, 136, 138, | ||
935 | 139, 140, 141, 144, 145, 146, 147, 150, | ||
936 | 151, 152, 153, 154, 156, 162, 163, 171, | ||
937 | 176, 177, 178, 129, 191, 128, 130, 131, | ||
938 | 183, 184, 191, 128, 130, 131, 175, 176, | ||
939 | 191, 128, 143, 144, 168, 169, 191, 128, | ||
940 | 130, 131, 166, 167, 191, 182, 128, 143, | ||
941 | 144, 178, 179, 191, 128, 130, 131, 178, | ||
942 | 179, 191, 128, 154, 156, 129, 132, 133, | ||
943 | 191, 146, 128, 171, 172, 191, 135, 137, | ||
944 | 142, 158, 128, 168, 169, 175, 176, 255, | ||
945 | 159, 191, 192, 255, 144, 128, 156, 157, | ||
946 | 161, 162, 191, 128, 134, 135, 138, 139, | ||
947 | 191, 128, 175, 176, 191, 134, 128, 131, | ||
948 | 132, 135, 136, 191, 128, 174, 175, 191, | ||
949 | 128, 151, 152, 155, 156, 191, 132, 128, | ||
950 | 191, 128, 170, 171, 191, 128, 153, 154, | ||
951 | 191, 160, 190, 192, 255, 128, 184, 185, | ||
952 | 191, 137, 128, 174, 175, 191, 128, 129, | ||
953 | 177, 178, 255, 144, 191, 192, 255, 128, | ||
954 | 142, 143, 144, 145, 146, 149, 129, 148, | ||
955 | 150, 191, 175, 191, 192, 255, 132, 191, | ||
956 | 192, 255, 128, 144, 129, 143, 145, 191, | ||
957 | 144, 153, 128, 143, 145, 152, 154, 191, | ||
958 | 135, 191, 192, 255, 160, 168, 169, 171, | ||
959 | 172, 173, 174, 188, 189, 190, 191, 128, | ||
960 | 159, 161, 167, 170, 187, 185, 191, 192, | ||
961 | 255, 128, 143, 144, 173, 174, 191, 128, | ||
962 | 131, 132, 162, 163, 183, 184, 188, 189, | ||
963 | 255, 133, 143, 145, 191, 192, 255, 128, | ||
964 | 146, 147, 159, 160, 191, 160, 128, 191, | ||
965 | 128, 129, 191, 192, 255, 159, 160, 171, | ||
966 | 128, 170, 172, 191, 192, 255, 173, 191, | ||
967 | 192, 255, 179, 191, 192, 255, 128, 176, | ||
968 | 177, 178, 129, 191, 128, 129, 130, 191, | ||
969 | 171, 175, 189, 191, 192, 255, 128, 136, | ||
970 | 137, 143, 144, 153, 154, 191, 144, 145, | ||
971 | 146, 147, 148, 149, 154, 155, 156, 157, | ||
972 | 158, 159, 128, 143, 150, 153, 160, 191, | ||
973 | 149, 157, 173, 186, 188, 160, 161, 163, | ||
974 | 164, 167, 168, 132, 134, 149, 157, 186, | ||
975 | 191, 139, 140, 192, 255, 133, 145, 128, | ||
976 | 134, 135, 137, 138, 255, 166, 167, 129, | ||
977 | 155, 187, 149, 181, 143, 175, 137, 169, | ||
978 | 131, 140, 191, 192, 255, 160, 163, 164, | ||
979 | 165, 184, 185, 186, 128, 159, 161, 162, | ||
980 | 166, 191, 133, 191, 192, 255, 132, 160, | ||
981 | 163, 167, 179, 184, 186, 128, 164, 165, | ||
982 | 168, 169, 187, 188, 191, 130, 135, 137, | ||
983 | 139, 144, 147, 151, 153, 155, 157, 159, | ||
984 | 163, 171, 179, 184, 189, 191, 128, 140, | ||
985 | 141, 148, 149, 160, 161, 164, 165, 166, | ||
986 | 167, 190, 138, 164, 170, 128, 155, 156, | ||
987 | 160, 161, 187, 188, 191, 128, 191, 155, | ||
988 | 156, 128, 191, 151, 191, 192, 255, 156, | ||
989 | 157, 160, 128, 191, 181, 191, 192, 255, | ||
990 | 158, 159, 186, 128, 185, 187, 191, 192, | ||
991 | 255, 162, 191, 192, 255, 160, 168, 128, | ||
992 | 159, 161, 167, 169, 191, 158, 191, 192, | ||
993 | 255, 123, 123, 128, 191, 128, 191, 128, | ||
994 | 191, 128, 191, 128, 191, 10, 123, 123, | ||
995 | 128, 191, 128, 191, 128, 191, 123, 123, | ||
996 | 10, 123, 128, 191, 128, 191, 128, 191, | ||
997 | 123, 123, 170, 181, 183, 186, 128, 150, | ||
998 | 152, 182, 184, 255, 192, 255, 128, 255, | ||
999 | 173, 130, 133, 146, 159, 165, 171, 175, | ||
1000 | 255, 181, 190, 184, 185, 192, 255, 140, | ||
1001 | 134, 138, 142, 161, 163, 255, 182, 130, | ||
1002 | 136, 137, 176, 151, 152, 154, 160, 190, | ||
1003 | 136, 144, 192, 255, 135, 129, 130, 132, | ||
1004 | 133, 144, 170, 176, 178, 144, 154, 160, | ||
1005 | 191, 128, 169, 174, 255, 148, 169, 157, | ||
1006 | 158, 189, 190, 192, 255, 144, 255, 139, | ||
1007 | 140, 178, 255, 186, 128, 181, 160, 161, | ||
1008 | 162, 163, 164, 165, 166, 167, 168, 169, | ||
1009 | 170, 171, 172, 173, 174, 175, 176, 177, | ||
1010 | 178, 179, 180, 181, 182, 183, 184, 185, | ||
1011 | 186, 187, 188, 189, 190, 191, 128, 173, | ||
1012 | 128, 155, 160, 180, 182, 189, 148, 161, | ||
1013 | 163, 255, 176, 164, 165, 132, 169, 177, | ||
1014 | 141, 142, 145, 146, 179, 181, 186, 187, | ||
1015 | 158, 133, 134, 137, 138, 143, 150, 152, | ||
1016 | 155, 164, 165, 178, 255, 188, 129, 131, | ||
1017 | 133, 138, 143, 144, 147, 168, 170, 176, | ||
1018 | 178, 179, 181, 182, 184, 185, 190, 255, | ||
1019 | 157, 131, 134, 137, 138, 142, 144, 146, | ||
1020 | 152, 159, 165, 182, 255, 129, 131, 133, | ||
1021 | 141, 143, 145, 147, 168, 170, 176, 178, | ||
1022 | 179, 181, 185, 188, 255, 134, 138, 142, | ||
1023 | 143, 145, 159, 164, 165, 176, 184, 186, | ||
1024 | 255, 129, 131, 133, 140, 143, 144, 147, | ||
1025 | 168, 170, 176, 178, 179, 181, 185, 188, | ||
1026 | 191, 177, 128, 132, 135, 136, 139, 141, | ||
1027 | 150, 151, 156, 157, 159, 163, 166, 175, | ||
1028 | 156, 130, 131, 133, 138, 142, 144, 146, | ||
1029 | 149, 153, 154, 158, 159, 163, 164, 168, | ||
1030 | 170, 174, 185, 190, 191, 144, 151, 128, | ||
1031 | 130, 134, 136, 138, 141, 166, 175, 128, | ||
1032 | 131, 133, 140, 142, 144, 146, 168, 170, | ||
1033 | 185, 189, 255, 133, 137, 151, 142, 148, | ||
1034 | 155, 159, 164, 165, 176, 255, 128, 131, | ||
1035 | 133, 140, 142, 144, 146, 168, 170, 179, | ||
1036 | 181, 185, 188, 191, 158, 128, 132, 134, | ||
1037 | 136, 138, 141, 149, 150, 160, 163, 166, | ||
1038 | 175, 177, 178, 129, 131, 133, 140, 142, | ||
1039 | 144, 146, 186, 189, 255, 133, 137, 143, | ||
1040 | 147, 152, 158, 164, 165, 176, 185, 192, | ||
1041 | 255, 189, 130, 131, 133, 150, 154, 177, | ||
1042 | 179, 187, 138, 150, 128, 134, 143, 148, | ||
1043 | 152, 159, 166, 175, 178, 179, 129, 186, | ||
1044 | 128, 142, 144, 153, 132, 138, 141, 165, | ||
1045 | 167, 129, 130, 135, 136, 148, 151, 153, | ||
1046 | 159, 161, 163, 170, 171, 173, 185, 187, | ||
1047 | 189, 134, 128, 132, 136, 141, 144, 153, | ||
1048 | 156, 159, 128, 181, 183, 185, 152, 153, | ||
1049 | 160, 169, 190, 191, 128, 135, 137, 172, | ||
1050 | 177, 191, 128, 132, 134, 151, 153, 188, | ||
1051 | 134, 128, 129, 130, 131, 137, 138, 139, | ||
1052 | 140, 141, 142, 143, 144, 153, 154, 155, | ||
1053 | 156, 157, 158, 159, 160, 161, 162, 163, | ||
1054 | 164, 165, 166, 167, 168, 169, 170, 173, | ||
1055 | 175, 176, 177, 178, 179, 181, 182, 183, | ||
1056 | 188, 189, 190, 191, 132, 152, 172, 184, | ||
1057 | 185, 187, 128, 191, 128, 137, 144, 255, | ||
1058 | 158, 159, 134, 187, 136, 140, 142, 143, | ||
1059 | 137, 151, 153, 142, 143, 158, 159, 137, | ||
1060 | 177, 142, 143, 182, 183, 191, 255, 128, | ||
1061 | 130, 133, 136, 150, 152, 255, 145, 150, | ||
1062 | 151, 155, 156, 160, 168, 178, 255, 128, | ||
1063 | 143, 160, 255, 182, 183, 190, 255, 129, | ||
1064 | 255, 173, 174, 192, 255, 129, 154, 160, | ||
1065 | 255, 171, 173, 185, 255, 128, 140, 142, | ||
1066 | 148, 160, 180, 128, 147, 160, 172, 174, | ||
1067 | 176, 178, 179, 148, 150, 152, 155, 158, | ||
1068 | 159, 170, 255, 139, 141, 144, 153, 160, | ||
1069 | 255, 184, 255, 128, 170, 176, 255, 182, | ||
1070 | 255, 128, 158, 160, 171, 176, 187, 134, | ||
1071 | 173, 176, 180, 128, 171, 176, 255, 138, | ||
1072 | 143, 155, 255, 128, 155, 160, 255, 159, | ||
1073 | 189, 190, 192, 255, 167, 128, 137, 144, | ||
1074 | 153, 176, 189, 140, 143, 154, 170, 180, | ||
1075 | 255, 180, 255, 128, 183, 128, 137, 141, | ||
1076 | 189, 128, 136, 144, 146, 148, 182, 184, | ||
1077 | 185, 128, 181, 187, 191, 150, 151, 158, | ||
1078 | 159, 152, 154, 156, 158, 134, 135, 142, | ||
1079 | 143, 190, 255, 190, 128, 180, 182, 188, | ||
1080 | 130, 132, 134, 140, 144, 147, 150, 155, | ||
1081 | 160, 172, 178, 180, 182, 188, 128, 129, | ||
1082 | 130, 131, 132, 133, 134, 176, 177, 178, | ||
1083 | 179, 180, 181, 182, 183, 191, 255, 129, | ||
1084 | 147, 149, 176, 178, 190, 192, 255, 144, | ||
1085 | 156, 161, 144, 156, 165, 176, 130, 135, | ||
1086 | 149, 164, 166, 168, 138, 147, 152, 157, | ||
1087 | 170, 185, 188, 191, 142, 133, 137, 160, | ||
1088 | 255, 137, 255, 128, 174, 176, 255, 159, | ||
1089 | 165, 170, 180, 255, 167, 173, 128, 165, | ||
1090 | 176, 255, 168, 174, 176, 190, 192, 255, | ||
1091 | 128, 150, 160, 166, 168, 174, 176, 182, | ||
1092 | 184, 190, 128, 134, 136, 142, 144, 150, | ||
1093 | 152, 158, 160, 191, 128, 129, 130, 131, | ||
1094 | 132, 133, 134, 135, 144, 145, 255, 133, | ||
1095 | 135, 161, 175, 177, 181, 184, 188, 160, | ||
1096 | 151, 152, 187, 192, 255, 133, 173, 177, | ||
1097 | 255, 143, 159, 187, 255, 176, 191, 182, | ||
1098 | 183, 184, 191, 192, 255, 150, 255, 128, | ||
1099 | 146, 147, 148, 152, 153, 154, 155, 156, | ||
1100 | 158, 159, 160, 161, 162, 163, 164, 165, | ||
1101 | 166, 167, 168, 169, 170, 171, 172, 173, | ||
1102 | 174, 175, 176, 129, 255, 141, 255, 144, | ||
1103 | 189, 141, 143, 172, 255, 191, 128, 175, | ||
1104 | 180, 189, 151, 159, 162, 255, 175, 137, | ||
1105 | 138, 184, 255, 183, 255, 168, 255, 128, | ||
1106 | 179, 188, 134, 143, 154, 159, 184, 186, | ||
1107 | 190, 255, 128, 173, 176, 255, 148, 159, | ||
1108 | 189, 255, 129, 142, 154, 159, 191, 255, | ||
1109 | 128, 182, 128, 141, 144, 153, 160, 182, | ||
1110 | 186, 255, 128, 130, 155, 157, 160, 175, | ||
1111 | 178, 182, 129, 134, 137, 142, 145, 150, | ||
1112 | 160, 166, 168, 174, 176, 255, 155, 166, | ||
1113 | 175, 128, 170, 172, 173, 176, 185, 158, | ||
1114 | 159, 160, 255, 164, 175, 135, 138, 188, | ||
1115 | 255, 164, 169, 171, 172, 173, 174, 175, | ||
1116 | 180, 181, 182, 183, 184, 185, 187, 188, | ||
1117 | 189, 190, 191, 165, 186, 174, 175, 154, | ||
1118 | 255, 190, 128, 134, 147, 151, 157, 168, | ||
1119 | 170, 182, 184, 188, 128, 129, 131, 132, | ||
1120 | 134, 255, 147, 255, 190, 255, 144, 145, | ||
1121 | 136, 175, 188, 255, 128, 143, 160, 175, | ||
1122 | 179, 180, 141, 143, 176, 180, 182, 255, | ||
1123 | 189, 255, 191, 144, 153, 161, 186, 129, | ||
1124 | 154, 166, 255, 191, 255, 130, 135, 138, | ||
1125 | 143, 146, 151, 154, 156, 144, 145, 146, | ||
1126 | 147, 148, 150, 151, 152, 155, 157, 158, | ||
1127 | 160, 170, 171, 172, 175, 161, 169, 128, | ||
1128 | 129, 130, 131, 133, 135, 138, 139, 140, | ||
1129 | 141, 142, 143, 144, 145, 146, 147, 148, | ||
1130 | 149, 152, 156, 157, 160, 161, 162, 163, | ||
1131 | 164, 166, 168, 169, 170, 171, 172, 173, | ||
1132 | 174, 176, 177, 153, 155, 178, 179, 128, | ||
1133 | 139, 141, 166, 168, 186, 188, 189, 191, | ||
1134 | 255, 142, 143, 158, 255, 187, 255, 128, | ||
1135 | 180, 189, 128, 156, 160, 255, 145, 159, | ||
1136 | 161, 255, 128, 159, 176, 255, 139, 143, | ||
1137 | 187, 255, 128, 157, 160, 255, 144, 132, | ||
1138 | 135, 150, 255, 158, 159, 170, 175, 148, | ||
1139 | 151, 188, 255, 128, 167, 176, 255, 164, | ||
1140 | 255, 183, 255, 128, 149, 160, 167, 136, | ||
1141 | 188, 128, 133, 138, 181, 183, 184, 191, | ||
1142 | 255, 150, 159, 183, 255, 128, 158, 160, | ||
1143 | 178, 180, 181, 128, 149, 160, 185, 128, | ||
1144 | 183, 190, 191, 191, 128, 131, 133, 134, | ||
1145 | 140, 147, 149, 151, 153, 179, 184, 186, | ||
1146 | 160, 188, 128, 156, 128, 135, 137, 166, | ||
1147 | 128, 181, 128, 149, 160, 178, 128, 145, | ||
1148 | 128, 178, 129, 130, 131, 132, 133, 135, | ||
1149 | 136, 138, 139, 140, 141, 144, 145, 146, | ||
1150 | 147, 150, 151, 152, 153, 154, 155, 156, | ||
1151 | 162, 163, 171, 176, 177, 178, 128, 134, | ||
1152 | 135, 165, 176, 190, 144, 168, 176, 185, | ||
1153 | 128, 180, 182, 191, 182, 144, 179, 155, | ||
1154 | 133, 137, 141, 143, 157, 255, 190, 128, | ||
1155 | 145, 147, 183, 136, 128, 134, 138, 141, | ||
1156 | 143, 157, 159, 168, 176, 255, 171, 175, | ||
1157 | 186, 255, 128, 131, 133, 140, 143, 144, | ||
1158 | 147, 168, 170, 176, 178, 179, 181, 185, | ||
1159 | 188, 191, 144, 151, 128, 132, 135, 136, | ||
1160 | 139, 141, 157, 163, 166, 172, 176, 180, | ||
1161 | 128, 138, 144, 153, 134, 136, 143, 154, | ||
1162 | 255, 128, 181, 184, 255, 129, 151, 158, | ||
1163 | 255, 129, 131, 133, 143, 154, 255, 128, | ||
1164 | 137, 128, 153, 157, 171, 176, 185, 160, | ||
1165 | 255, 170, 190, 192, 255, 128, 184, 128, | ||
1166 | 136, 138, 182, 184, 191, 128, 144, 153, | ||
1167 | 178, 255, 168, 144, 145, 183, 255, 128, | ||
1168 | 142, 145, 149, 129, 141, 144, 146, 147, | ||
1169 | 148, 175, 255, 132, 255, 128, 144, 129, | ||
1170 | 143, 144, 153, 145, 152, 135, 255, 160, | ||
1171 | 168, 169, 171, 172, 173, 174, 188, 189, | ||
1172 | 190, 191, 161, 167, 185, 255, 128, 158, | ||
1173 | 160, 169, 144, 173, 176, 180, 128, 131, | ||
1174 | 144, 153, 163, 183, 189, 255, 144, 255, | ||
1175 | 133, 143, 191, 255, 143, 159, 160, 128, | ||
1176 | 129, 255, 159, 160, 171, 172, 255, 173, | ||
1177 | 255, 179, 255, 128, 176, 177, 178, 128, | ||
1178 | 129, 171, 175, 189, 255, 128, 136, 144, | ||
1179 | 153, 157, 158, 133, 134, 137, 144, 145, | ||
1180 | 146, 147, 148, 149, 154, 155, 156, 157, | ||
1181 | 158, 159, 168, 169, 170, 150, 153, 165, | ||
1182 | 169, 173, 178, 187, 255, 131, 132, 140, | ||
1183 | 169, 174, 255, 130, 132, 149, 157, 173, | ||
1184 | 186, 188, 160, 161, 163, 164, 167, 168, | ||
1185 | 132, 134, 149, 157, 186, 139, 140, 191, | ||
1186 | 255, 134, 128, 132, 138, 144, 146, 255, | ||
1187 | 166, 167, 129, 155, 187, 149, 181, 143, | ||
1188 | 175, 137, 169, 131, 140, 141, 192, 255, | ||
1189 | 128, 182, 187, 255, 173, 180, 182, 255, | ||
1190 | 132, 155, 159, 161, 175, 128, 160, 163, | ||
1191 | 164, 165, 184, 185, 186, 161, 162, 128, | ||
1192 | 134, 136, 152, 155, 161, 163, 164, 166, | ||
1193 | 170, 133, 143, 151, 255, 139, 143, 154, | ||
1194 | 255, 164, 167, 185, 187, 128, 131, 133, | ||
1195 | 159, 161, 162, 169, 178, 180, 183, 130, | ||
1196 | 135, 137, 139, 148, 151, 153, 155, 157, | ||
1197 | 159, 164, 190, 141, 143, 145, 146, 161, | ||
1198 | 162, 167, 170, 172, 178, 180, 183, 185, | ||
1199 | 188, 128, 137, 139, 155, 161, 163, 165, | ||
1200 | 169, 171, 187, 155, 156, 151, 255, 156, | ||
1201 | 157, 160, 181, 255, 186, 187, 255, 162, | ||
1202 | 255, 160, 168, 161, 167, 158, 255, 160, | ||
1203 | 132, 135, 133, 134, 176, 255, 128, 191, | ||
1204 | 154, 164, 168, 128, 149, 150, 191, 128, | ||
1205 | 152, 153, 191, 181, 128, 159, 160, 189, | ||
1206 | 190, 191, 189, 128, 131, 132, 185, 186, | ||
1207 | 191, 144, 128, 151, 152, 161, 162, 176, | ||
1208 | 177, 255, 169, 177, 129, 132, 141, 142, | ||
1209 | 145, 146, 179, 181, 186, 188, 190, 191, | ||
1210 | 192, 255, 142, 158, 128, 155, 156, 161, | ||
1211 | 162, 175, 176, 177, 178, 191, 169, 177, | ||
1212 | 180, 183, 128, 132, 133, 138, 139, 142, | ||
1213 | 143, 144, 145, 146, 147, 185, 186, 191, | ||
1214 | 157, 128, 152, 153, 158, 159, 177, 178, | ||
1215 | 180, 181, 191, 142, 146, 169, 177, 180, | ||
1216 | 189, 128, 132, 133, 185, 186, 191, 144, | ||
1217 | 185, 128, 159, 160, 161, 162, 191, 169, | ||
1218 | 177, 180, 189, 128, 132, 133, 140, 141, | ||
1219 | 142, 143, 144, 145, 146, 147, 185, 186, | ||
1220 | 191, 158, 177, 128, 155, 156, 161, 162, | ||
1221 | 191, 131, 145, 155, 157, 128, 132, 133, | ||
1222 | 138, 139, 141, 142, 149, 150, 152, 153, | ||
1223 | 159, 160, 162, 163, 164, 165, 167, 168, | ||
1224 | 170, 171, 173, 174, 185, 186, 191, 144, | ||
1225 | 128, 191, 141, 145, 169, 189, 128, 132, | ||
1226 | 133, 185, 186, 191, 128, 151, 152, 154, | ||
1227 | 155, 159, 160, 161, 162, 191, 128, 141, | ||
1228 | 145, 169, 180, 189, 129, 132, 133, 185, | ||
1229 | 186, 191, 158, 128, 159, 160, 161, 162, | ||
1230 | 176, 177, 178, 179, 191, 141, 145, 189, | ||
1231 | 128, 132, 133, 186, 187, 191, 142, 128, | ||
1232 | 147, 148, 150, 151, 158, 159, 161, 162, | ||
1233 | 185, 186, 191, 178, 188, 128, 132, 133, | ||
1234 | 150, 151, 153, 154, 189, 190, 191, 128, | ||
1235 | 134, 135, 191, 128, 177, 129, 179, 180, | ||
1236 | 191, 128, 131, 137, 141, 152, 160, 164, | ||
1237 | 166, 172, 177, 189, 129, 132, 133, 134, | ||
1238 | 135, 138, 139, 147, 148, 167, 168, 169, | ||
1239 | 170, 179, 180, 191, 133, 128, 134, 135, | ||
1240 | 155, 156, 159, 160, 191, 128, 129, 191, | ||
1241 | 136, 128, 172, 173, 191, 128, 135, 136, | ||
1242 | 140, 141, 191, 191, 128, 170, 171, 190, | ||
1243 | 161, 128, 143, 144, 149, 150, 153, 154, | ||
1244 | 157, 158, 164, 165, 166, 167, 173, 174, | ||
1245 | 176, 177, 180, 181, 255, 130, 141, 143, | ||
1246 | 159, 134, 187, 136, 140, 142, 143, 137, | ||
1247 | 151, 153, 142, 143, 158, 159, 137, 177, | ||
1248 | 191, 142, 143, 182, 183, 192, 255, 129, | ||
1249 | 151, 128, 133, 134, 135, 136, 255, 145, | ||
1250 | 150, 151, 155, 191, 192, 255, 128, 143, | ||
1251 | 144, 159, 160, 255, 182, 183, 190, 191, | ||
1252 | 192, 255, 128, 129, 255, 173, 174, 192, | ||
1253 | 255, 128, 129, 154, 155, 159, 160, 255, | ||
1254 | 171, 173, 185, 191, 192, 255, 141, 128, | ||
1255 | 145, 146, 159, 160, 177, 178, 191, 173, | ||
1256 | 128, 145, 146, 159, 160, 176, 177, 191, | ||
1257 | 128, 179, 180, 191, 151, 156, 128, 191, | ||
1258 | 128, 159, 160, 255, 184, 191, 192, 255, | ||
1259 | 169, 128, 170, 171, 175, 176, 255, 182, | ||
1260 | 191, 192, 255, 128, 158, 159, 191, 128, | ||
1261 | 143, 144, 173, 174, 175, 176, 180, 181, | ||
1262 | 191, 128, 171, 172, 175, 176, 255, 138, | ||
1263 | 191, 192, 255, 128, 150, 151, 159, 160, | ||
1264 | 255, 149, 191, 192, 255, 167, 128, 191, | ||
1265 | 128, 132, 133, 179, 180, 191, 128, 132, | ||
1266 | 133, 139, 140, 191, 128, 130, 131, 160, | ||
1267 | 161, 173, 174, 175, 176, 185, 186, 255, | ||
1268 | 166, 191, 192, 255, 128, 163, 164, 191, | ||
1269 | 128, 140, 141, 143, 144, 153, 154, 189, | ||
1270 | 190, 191, 128, 136, 137, 191, 173, 128, | ||
1271 | 168, 169, 177, 178, 180, 181, 182, 183, | ||
1272 | 191, 0, 127, 192, 255, 150, 151, 158, | ||
1273 | 159, 152, 154, 156, 158, 134, 135, 142, | ||
1274 | 143, 190, 191, 192, 255, 181, 189, 191, | ||
1275 | 128, 190, 133, 181, 128, 129, 130, 140, | ||
1276 | 141, 143, 144, 147, 148, 149, 150, 155, | ||
1277 | 156, 159, 160, 172, 173, 177, 178, 188, | ||
1278 | 189, 191, 177, 191, 128, 190, 128, 143, | ||
1279 | 144, 156, 157, 191, 130, 135, 148, 164, | ||
1280 | 166, 168, 128, 137, 138, 149, 150, 151, | ||
1281 | 152, 157, 158, 169, 170, 185, 186, 187, | ||
1282 | 188, 191, 142, 128, 132, 133, 137, 138, | ||
1283 | 159, 160, 255, 137, 191, 192, 255, 175, | ||
1284 | 128, 255, 159, 165, 170, 175, 177, 180, | ||
1285 | 191, 192, 255, 166, 173, 128, 167, 168, | ||
1286 | 175, 176, 255, 168, 174, 176, 191, 192, | ||
1287 | 255, 167, 175, 183, 191, 128, 150, 151, | ||
1288 | 159, 160, 190, 135, 143, 151, 128, 158, | ||
1289 | 159, 191, 128, 132, 133, 135, 136, 160, | ||
1290 | 161, 169, 170, 176, 177, 181, 182, 183, | ||
1291 | 184, 188, 189, 191, 160, 151, 154, 187, | ||
1292 | 192, 255, 128, 132, 133, 173, 174, 176, | ||
1293 | 177, 255, 143, 159, 187, 191, 192, 255, | ||
1294 | 128, 175, 176, 191, 150, 191, 192, 255, | ||
1295 | 141, 191, 192, 255, 128, 143, 144, 189, | ||
1296 | 190, 191, 141, 143, 160, 169, 172, 191, | ||
1297 | 192, 255, 191, 128, 174, 175, 190, 128, | ||
1298 | 157, 158, 159, 160, 255, 176, 191, 192, | ||
1299 | 255, 128, 150, 151, 159, 160, 161, 162, | ||
1300 | 255, 175, 137, 138, 184, 191, 192, 255, | ||
1301 | 128, 182, 183, 255, 130, 134, 139, 163, | ||
1302 | 191, 192, 255, 128, 129, 130, 179, 180, | ||
1303 | 191, 187, 189, 128, 177, 178, 183, 184, | ||
1304 | 191, 128, 137, 138, 165, 166, 175, 176, | ||
1305 | 255, 135, 159, 189, 191, 192, 255, 128, | ||
1306 | 131, 132, 178, 179, 191, 143, 165, 191, | ||
1307 | 128, 159, 160, 175, 176, 185, 186, 190, | ||
1308 | 128, 168, 169, 191, 131, 186, 128, 139, | ||
1309 | 140, 159, 160, 182, 183, 189, 190, 255, | ||
1310 | 176, 178, 180, 183, 184, 190, 191, 192, | ||
1311 | 255, 129, 128, 130, 131, 154, 155, 157, | ||
1312 | 158, 159, 160, 170, 171, 177, 178, 180, | ||
1313 | 181, 191, 128, 167, 175, 129, 134, 135, | ||
1314 | 136, 137, 142, 143, 144, 145, 150, 151, | ||
1315 | 159, 160, 255, 155, 166, 175, 128, 162, | ||
1316 | 163, 191, 164, 175, 135, 138, 188, 191, | ||
1317 | 192, 255, 174, 175, 154, 191, 192, 255, | ||
1318 | 157, 169, 183, 189, 191, 128, 134, 135, | ||
1319 | 146, 147, 151, 152, 158, 159, 190, 130, | ||
1320 | 133, 128, 255, 178, 191, 192, 255, 128, | ||
1321 | 146, 147, 255, 190, 191, 192, 255, 128, | ||
1322 | 143, 144, 255, 144, 145, 136, 175, 188, | ||
1323 | 191, 192, 255, 181, 128, 175, 176, 255, | ||
1324 | 189, 191, 192, 255, 128, 160, 161, 186, | ||
1325 | 187, 191, 128, 129, 154, 155, 165, 166, | ||
1326 | 255, 191, 192, 255, 128, 129, 130, 135, | ||
1327 | 136, 137, 138, 143, 144, 145, 146, 151, | ||
1328 | 152, 153, 154, 156, 157, 191, 128, 191, | ||
1329 | 128, 129, 130, 131, 133, 138, 139, 140, | ||
1330 | 141, 142, 143, 144, 145, 146, 147, 148, | ||
1331 | 149, 152, 156, 157, 160, 161, 162, 163, | ||
1332 | 164, 166, 168, 169, 170, 171, 172, 173, | ||
1333 | 174, 176, 177, 132, 151, 153, 155, 158, | ||
1334 | 175, 178, 179, 180, 191, 140, 167, 187, | ||
1335 | 190, 128, 255, 142, 143, 158, 191, 192, | ||
1336 | 255, 187, 191, 192, 255, 128, 180, 181, | ||
1337 | 191, 128, 156, 157, 159, 160, 255, 145, | ||
1338 | 191, 192, 255, 128, 159, 160, 175, 176, | ||
1339 | 255, 139, 143, 182, 191, 192, 255, 144, | ||
1340 | 132, 135, 150, 191, 192, 255, 158, 175, | ||
1341 | 148, 151, 188, 191, 192, 255, 128, 167, | ||
1342 | 168, 175, 176, 255, 164, 191, 192, 255, | ||
1343 | 183, 191, 192, 255, 128, 149, 150, 159, | ||
1344 | 160, 167, 168, 191, 136, 182, 188, 128, | ||
1345 | 133, 134, 137, 138, 184, 185, 190, 191, | ||
1346 | 255, 150, 159, 183, 191, 192, 255, 179, | ||
1347 | 128, 159, 160, 181, 182, 191, 128, 149, | ||
1348 | 150, 159, 160, 185, 186, 191, 128, 183, | ||
1349 | 184, 189, 190, 191, 128, 148, 152, 129, | ||
1350 | 143, 144, 179, 180, 191, 128, 159, 160, | ||
1351 | 188, 189, 191, 128, 156, 157, 191, 136, | ||
1352 | 128, 164, 165, 191, 128, 181, 182, 191, | ||
1353 | 128, 149, 150, 159, 160, 178, 179, 191, | ||
1354 | 128, 145, 146, 191, 128, 178, 179, 191, | ||
1355 | 128, 130, 131, 132, 133, 134, 135, 136, | ||
1356 | 138, 139, 140, 141, 144, 145, 146, 147, | ||
1357 | 150, 151, 152, 153, 154, 156, 162, 163, | ||
1358 | 171, 176, 177, 178, 129, 191, 128, 130, | ||
1359 | 131, 183, 184, 191, 128, 130, 131, 175, | ||
1360 | 176, 191, 128, 143, 144, 168, 169, 191, | ||
1361 | 128, 130, 131, 166, 167, 191, 182, 128, | ||
1362 | 143, 144, 178, 179, 191, 128, 130, 131, | ||
1363 | 178, 179, 191, 128, 154, 156, 129, 132, | ||
1364 | 133, 191, 146, 128, 171, 172, 191, 135, | ||
1365 | 137, 142, 158, 128, 168, 169, 175, 176, | ||
1366 | 255, 159, 191, 192, 255, 144, 128, 156, | ||
1367 | 157, 161, 162, 191, 128, 134, 135, 138, | ||
1368 | 139, 191, 128, 175, 176, 191, 134, 128, | ||
1369 | 131, 132, 135, 136, 191, 128, 174, 175, | ||
1370 | 191, 128, 151, 152, 155, 156, 191, 132, | ||
1371 | 128, 191, 128, 170, 171, 191, 128, 153, | ||
1372 | 154, 191, 160, 190, 192, 255, 128, 184, | ||
1373 | 185, 191, 137, 128, 174, 175, 191, 128, | ||
1374 | 129, 177, 178, 255, 144, 191, 192, 255, | ||
1375 | 128, 142, 143, 144, 145, 146, 149, 129, | ||
1376 | 148, 150, 191, 175, 191, 192, 255, 132, | ||
1377 | 191, 192, 255, 128, 144, 129, 143, 145, | ||
1378 | 191, 144, 153, 128, 143, 145, 152, 154, | ||
1379 | 191, 135, 191, 192, 255, 160, 168, 169, | ||
1380 | 171, 172, 173, 174, 188, 189, 190, 191, | ||
1381 | 128, 159, 161, 167, 170, 187, 185, 191, | ||
1382 | 192, 255, 128, 143, 144, 173, 174, 191, | ||
1383 | 128, 131, 132, 162, 163, 183, 184, 188, | ||
1384 | 189, 255, 133, 143, 145, 191, 192, 255, | ||
1385 | 128, 146, 147, 159, 160, 191, 160, 128, | ||
1386 | 191, 128, 129, 191, 192, 255, 159, 160, | ||
1387 | 171, 128, 170, 172, 191, 192, 255, 173, | ||
1388 | 191, 192, 255, 179, 191, 192, 255, 128, | ||
1389 | 176, 177, 178, 129, 191, 128, 129, 130, | ||
1390 | 191, 171, 175, 189, 191, 192, 255, 128, | ||
1391 | 136, 137, 143, 144, 153, 154, 191, 144, | ||
1392 | 145, 146, 147, 148, 149, 154, 155, 156, | ||
1393 | 157, 158, 159, 128, 143, 150, 153, 160, | ||
1394 | 191, 149, 157, 173, 186, 188, 160, 161, | ||
1395 | 163, 164, 167, 168, 132, 134, 149, 157, | ||
1396 | 186, 191, 139, 140, 192, 255, 133, 145, | ||
1397 | 128, 134, 135, 137, 138, 255, 166, 167, | ||
1398 | 129, 155, 187, 149, 181, 143, 175, 137, | ||
1399 | 169, 131, 140, 191, 192, 255, 160, 163, | ||
1400 | 164, 165, 184, 185, 186, 128, 159, 161, | ||
1401 | 162, 166, 191, 133, 191, 192, 255, 132, | ||
1402 | 160, 163, 167, 179, 184, 186, 128, 164, | ||
1403 | 165, 168, 169, 187, 188, 191, 130, 135, | ||
1404 | 137, 139, 144, 147, 151, 153, 155, 157, | ||
1405 | 159, 163, 171, 179, 184, 189, 191, 128, | ||
1406 | 140, 141, 148, 149, 160, 161, 164, 165, | ||
1407 | 166, 167, 190, 138, 164, 170, 128, 155, | ||
1408 | 156, 160, 161, 187, 188, 191, 128, 191, | ||
1409 | 155, 156, 128, 191, 151, 191, 192, 255, | ||
1410 | 156, 157, 160, 128, 191, 181, 191, 192, | ||
1411 | 255, 158, 159, 186, 128, 185, 187, 191, | ||
1412 | 192, 255, 162, 191, 192, 255, 160, 168, | ||
1413 | 128, 159, 161, 167, 169, 191, 158, 191, | ||
1414 | 192, 255, 9, 10, 13, 32, 33, 34, | ||
1415 | 35, 37, 38, 46, 47, 60, 61, 62, | ||
1416 | 64, 92, 95, 123, 124, 125, 126, 127, | ||
1417 | 194, 195, 198, 199, 203, 204, 205, 206, | ||
1418 | 207, 210, 212, 213, 214, 215, 216, 217, | ||
1419 | 219, 220, 221, 222, 223, 224, 225, 226, | ||
1420 | 227, 228, 233, 234, 237, 238, 239, 240, | ||
1421 | 0, 39, 40, 45, 48, 57, 58, 63, | ||
1422 | 65, 90, 91, 96, 97, 122, 192, 193, | ||
1423 | 196, 218, 229, 236, 241, 247, 9, 32, | ||
1424 | 10, 61, 10, 38, 46, 42, 47, 42, | ||
1425 | 46, 69, 101, 48, 57, 60, 61, 61, | ||
1426 | 62, 61, 45, 95, 194, 195, 198, 199, | ||
1427 | 203, 204, 205, 206, 207, 210, 212, 213, | ||
1428 | 214, 215, 216, 217, 219, 220, 221, 222, | ||
1429 | 223, 224, 225, 226, 227, 228, 233, 234, | ||
1430 | 237, 239, 240, 243, 48, 57, 65, 90, | ||
1431 | 97, 122, 196, 218, 229, 236, 124, 125, | ||
1432 | 128, 191, 170, 181, 186, 128, 191, 151, | ||
1433 | 183, 128, 255, 192, 255, 0, 127, 173, | ||
1434 | 130, 133, 146, 159, 165, 171, 175, 191, | ||
1435 | 192, 255, 181, 190, 128, 175, 176, 183, | ||
1436 | 184, 185, 186, 191, 134, 139, 141, 162, | ||
1437 | 128, 135, 136, 255, 182, 130, 137, 176, | ||
1438 | 151, 152, 154, 160, 136, 191, 192, 255, | ||
1439 | 128, 143, 144, 170, 171, 175, 176, 178, | ||
1440 | 179, 191, 128, 159, 160, 191, 176, 128, | ||
1441 | 138, 139, 173, 174, 255, 148, 150, 164, | ||
1442 | 167, 173, 176, 185, 189, 190, 192, 255, | ||
1443 | 144, 128, 145, 146, 175, 176, 191, 128, | ||
1444 | 140, 141, 255, 166, 176, 178, 191, 192, | ||
1445 | 255, 186, 128, 137, 138, 170, 171, 179, | ||
1446 | 180, 181, 182, 191, 160, 161, 162, 164, | ||
1447 | 165, 166, 167, 168, 169, 170, 171, 172, | ||
1448 | 173, 174, 175, 176, 177, 178, 179, 180, | ||
1449 | 181, 182, 183, 184, 185, 186, 187, 188, | ||
1450 | 189, 190, 128, 191, 128, 129, 130, 131, | ||
1451 | 137, 138, 139, 140, 141, 142, 143, 144, | ||
1452 | 153, 154, 155, 156, 157, 158, 159, 160, | ||
1453 | 161, 162, 163, 164, 165, 166, 167, 168, | ||
1454 | 169, 170, 171, 172, 173, 174, 175, 176, | ||
1455 | 177, 178, 179, 180, 182, 183, 184, 188, | ||
1456 | 189, 190, 191, 132, 187, 129, 130, 132, | ||
1457 | 133, 134, 176, 177, 178, 179, 180, 181, | ||
1458 | 182, 183, 128, 191, 128, 129, 130, 131, | ||
1459 | 132, 133, 134, 135, 144, 136, 143, 145, | ||
1460 | 191, 192, 255, 182, 183, 184, 128, 191, | ||
1461 | 128, 191, 191, 128, 190, 192, 255, 128, | ||
1462 | 146, 147, 148, 152, 153, 154, 155, 156, | ||
1463 | 158, 159, 160, 161, 162, 163, 164, 165, | ||
1464 | 166, 167, 168, 169, 170, 171, 172, 173, | ||
1465 | 174, 175, 176, 129, 191, 192, 255, 158, | ||
1466 | 159, 128, 157, 160, 191, 192, 255, 128, | ||
1467 | 191, 164, 169, 171, 172, 173, 174, 175, | ||
1468 | 180, 181, 182, 183, 184, 185, 187, 188, | ||
1469 | 189, 190, 191, 128, 163, 165, 186, 144, | ||
1470 | 145, 146, 147, 148, 150, 151, 152, 155, | ||
1471 | 157, 158, 160, 170, 171, 172, 175, 128, | ||
1472 | 159, 161, 169, 173, 191, 128, 191, 10, | ||
1473 | 13, 34, 36, 37, 92, 128, 191, 192, | ||
1474 | 223, 224, 239, 240, 247, 248, 255, 10, | ||
1475 | 13, 34, 36, 37, 92, 128, 191, 192, | ||
1476 | 223, 224, 239, 240, 247, 248, 255, 10, | ||
1477 | 13, 34, 36, 37, 92, 128, 191, 192, | ||
1478 | 223, 224, 239, 240, 247, 248, 255, 10, | ||
1479 | 13, 34, 36, 37, 92, 128, 191, 192, | ||
1480 | 223, 224, 239, 240, 247, 248, 255, 10, | ||
1481 | 13, 36, 37, 92, 128, 191, 192, 223, | ||
1482 | 224, 239, 240, 247, 248, 255, 36, 37, | ||
1483 | 92, 123, 192, 223, 224, 239, 240, 247, | ||
1484 | 10, 13, 34, 36, 37, 92, 123, 128, | ||
1485 | 191, 192, 223, 224, 239, 240, 247, 248, | ||
1486 | 255, 10, 13, 34, 36, 37, 92, 123, | ||
1487 | 128, 191, 192, 223, 224, 239, 240, 247, | ||
1488 | 248, 255, 10, 13, 34, 36, 37, 92, | ||
1489 | 123, 128, 191, 192, 223, 224, 239, 240, | ||
1490 | 247, 248, 255, 10, 13, 34, 36, 37, | ||
1491 | 92, 128, 191, 192, 223, 224, 239, 240, | ||
1492 | 247, 248, 255, 36, 37, 92, 123, 192, | ||
1493 | 223, 224, 239, 240, 247, 10, 13, 34, | ||
1494 | 36, 37, 92, 123, 128, 191, 192, 223, | ||
1495 | 224, 239, 240, 247, 248, 255, 10, 13, | ||
1496 | 34, 36, 37, 92, 128, 191, 192, 223, | ||
1497 | 224, 239, 240, 247, 248, 255, 10, 13, | ||
1498 | 34, 36, 37, 92, 128, 191, 192, 223, | ||
1499 | 224, 239, 240, 247, 248, 255, 10, 13, | ||
1500 | 34, 36, 37, 92, 128, 191, 192, 223, | ||
1501 | 224, 239, 240, 247, 248, 255, 10, 13, | ||
1502 | 34, 36, 37, 92, 128, 191, 192, 223, | ||
1503 | 224, 239, 240, 247, 248, 255, 10, 13, | ||
1504 | 34, 36, 37, 92, 128, 191, 192, 223, | ||
1505 | 224, 239, 240, 247, 248, 255, 10, 13, | ||
1506 | 34, 36, 37, 92, 128, 191, 192, 223, | ||
1507 | 224, 239, 240, 247, 248, 255, 10, 13, | ||
1508 | 34, 36, 37, 92, 128, 191, 192, 223, | ||
1509 | 224, 239, 240, 247, 248, 255, 123, 126, | ||
1510 | 123, 126, 128, 191, 128, 191, 128, 191, | ||
1511 | 10, 13, 36, 37, 128, 191, 192, 223, | ||
1512 | 224, 239, 240, 247, 248, 255, 10, 13, | ||
1513 | 36, 37, 128, 191, 192, 223, 224, 239, | ||
1514 | 240, 247, 248, 255, 10, 13, 36, 37, | ||
1515 | 128, 191, 192, 223, 224, 239, 240, 247, | ||
1516 | 248, 255, 10, 13, 36, 37, 128, 191, | ||
1517 | 192, 223, 224, 239, 240, 247, 248, 255, | ||
1518 | 126, 126, 128, 191, 128, 191, 128, 191, | ||
1519 | 10, 13, 36, 37, 128, 191, 192, 223, | ||
1520 | 224, 239, 240, 247, 248, 255, 10, 13, | ||
1521 | 36, 37, 128, 191, 192, 223, 224, 239, | ||
1522 | 240, 247, 248, 255, 126, 126, 128, 191, | ||
1523 | 128, 191, 128, 191, 95, 194, 195, 198, | ||
1524 | 199, 203, 204, 205, 206, 207, 210, 212, | ||
1525 | 213, 214, 215, 216, 217, 219, 220, 221, | ||
1526 | 222, 223, 224, 225, 226, 227, 228, 233, | ||
1527 | 234, 237, 238, 239, 240, 65, 90, 97, | ||
1528 | 122, 128, 191, 192, 193, 196, 218, 229, | ||
1529 | 236, 241, 247, 248, 255, 45, 95, 194, | ||
1530 | 195, 198, 199, 203, 204, 205, 206, 207, | ||
1531 | 210, 212, 213, 214, 215, 216, 217, 219, | ||
1532 | 220, 221, 222, 223, 224, 225, 226, 227, | ||
1533 | 228, 233, 234, 237, 239, 240, 243, 48, | ||
1534 | 57, 65, 90, 97, 122, 196, 218, 229, | ||
1535 | 236, 128, 191, 170, 181, 186, 128, 191, | ||
1536 | 151, 183, 128, 255, 192, 255, 0, 127, | ||
1537 | 173, 130, 133, 146, 159, 165, 171, 175, | ||
1538 | 191, 192, 255, 181, 190, 128, 175, 176, | ||
1539 | 183, 184, 185, 186, 191, 134, 139, 141, | ||
1540 | 162, 128, 135, 136, 255, 182, 130, 137, | ||
1541 | 176, 151, 152, 154, 160, 136, 191, 192, | ||
1542 | 255, 128, 143, 144, 170, 171, 175, 176, | ||
1543 | 178, 179, 191, 128, 159, 160, 191, 176, | ||
1544 | 128, 138, 139, 173, 174, 255, 148, 150, | ||
1545 | 164, 167, 173, 176, 185, 189, 190, 192, | ||
1546 | 255, 144, 128, 145, 146, 175, 176, 191, | ||
1547 | 128, 140, 141, 255, 166, 176, 178, 191, | ||
1548 | 192, 255, 186, 128, 137, 138, 170, 171, | ||
1549 | 179, 180, 181, 182, 191, 160, 161, 162, | ||
1550 | 164, 165, 166, 167, 168, 169, 170, 171, | ||
1551 | 172, 173, 174, 175, 176, 177, 178, 179, | ||
1552 | 180, 181, 182, 183, 184, 185, 186, 187, | ||
1553 | 188, 189, 190, 128, 191, 128, 129, 130, | ||
1554 | 131, 137, 138, 139, 140, 141, 142, 143, | ||
1555 | 144, 153, 154, 155, 156, 157, 158, 159, | ||
1556 | 160, 161, 162, 163, 164, 165, 166, 167, | ||
1557 | 168, 169, 170, 171, 172, 173, 174, 175, | ||
1558 | 176, 177, 178, 179, 180, 182, 183, 184, | ||
1559 | 188, 189, 190, 191, 132, 187, 129, 130, | ||
1560 | 132, 133, 134, 176, 177, 178, 179, 180, | ||
1561 | 181, 182, 183, 128, 191, 128, 129, 130, | ||
1562 | 131, 132, 133, 134, 135, 144, 136, 143, | ||
1563 | 145, 191, 192, 255, 182, 183, 184, 128, | ||
1564 | 191, 128, 191, 191, 128, 190, 192, 255, | ||
1565 | 128, 146, 147, 148, 152, 153, 154, 155, | ||
1566 | 156, 158, 159, 160, 161, 162, 163, 164, | ||
1567 | 165, 166, 167, 168, 169, 170, 171, 172, | ||
1568 | 173, 174, 175, 176, 129, 191, 192, 255, | ||
1569 | 158, 159, 128, 157, 160, 191, 192, 255, | ||
1570 | 128, 191, 164, 169, 171, 172, 173, 174, | ||
1571 | 175, 180, 181, 182, 183, 184, 185, 187, | ||
1572 | 188, 189, 190, 191, 128, 163, 165, 186, | ||
1573 | 144, 145, 146, 147, 148, 150, 151, 152, | ||
1574 | 155, 157, 158, 160, 170, 171, 172, 175, | ||
1575 | 128, 159, 161, 169, 173, 191, 128, 191, | ||
1576 | } | ||
1577 | |||
1578 | var _hcltok_single_lengths []byte = []byte{ | ||
1579 | 0, 1, 1, 1, 2, 3, 2, 0, | ||
1580 | 32, 31, 36, 1, 4, 0, 0, 0, | ||
1581 | 0, 1, 2, 1, 1, 1, 1, 0, | ||
1582 | 1, 1, 0, 0, 2, 0, 0, 0, | ||
1583 | 1, 32, 0, 0, 0, 0, 1, 3, | ||
1584 | 1, 1, 1, 0, 2, 0, 1, 1, | ||
1585 | 2, 0, 3, 0, 1, 0, 2, 1, | ||
1586 | 2, 0, 0, 5, 1, 4, 0, 0, | ||
1587 | 1, 43, 0, 0, 0, 2, 3, 2, | ||
1588 | 1, 1, 0, 0, 0, 0, 0, 0, | ||
1589 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
1590 | 0, 0, 0, 0, 0, 1, 1, 0, | ||
1591 | 0, 0, 0, 0, 0, 0, 0, 4, | ||
1592 | 1, 0, 15, 0, 0, 0, 1, 6, | ||
1593 | 1, 0, 0, 1, 0, 2, 0, 0, | ||
1594 | 0, 9, 0, 1, 1, 0, 0, 0, | ||
1595 | 3, 0, 1, 0, 28, 0, 0, 0, | ||
1596 | 1, 0, 1, 0, 0, 0, 1, 0, | ||
1597 | 0, 0, 0, 0, 0, 0, 1, 0, | ||
1598 | 2, 0, 0, 18, 0, 0, 1, 0, | ||
1599 | 0, 0, 0, 0, 0, 0, 0, 1, | ||
1600 | 0, 0, 0, 16, 36, 0, 0, 0, | ||
1601 | 0, 1, 0, 0, 0, 0, 0, 1, | ||
1602 | 0, 0, 0, 0, 0, 0, 2, 0, | ||
1603 | 0, 0, 0, 0, 1, 0, 0, 0, | ||
1604 | 0, 0, 0, 0, 28, 0, 0, 0, | ||
1605 | 1, 1, 1, 1, 0, 0, 2, 0, | ||
1606 | 1, 0, 0, 0, 0, 0, 0, 0, | ||
1607 | 0, 0, 1, 1, 4, 0, 0, 2, | ||
1608 | 2, 0, 11, 0, 0, 0, 0, 0, | ||
1609 | 0, 0, 1, 1, 3, 0, 0, 4, | ||
1610 | 0, 0, 0, 18, 0, 0, 0, 1, | ||
1611 | 4, 1, 4, 1, 0, 3, 2, 2, | ||
1612 | 2, 1, 0, 0, 1, 8, 0, 0, | ||
1613 | 0, 4, 12, 0, 2, 0, 3, 0, | ||
1614 | 1, 0, 2, 0, 1, 2, 0, 3, | ||
1615 | 1, 2, 0, 0, 0, 0, 0, 1, | ||
1616 | 1, 0, 0, 1, 28, 3, 0, 1, | ||
1617 | 1, 2, 1, 0, 1, 1, 2, 1, | ||
1618 | 1, 2, 1, 1, 0, 2, 1, 1, | ||
1619 | 1, 1, 0, 0, 6, 1, 1, 0, | ||
1620 | 0, 46, 1, 1, 0, 0, 0, 0, | ||
1621 | 2, 1, 0, 0, 0, 1, 0, 0, | ||
1622 | 0, 0, 0, 0, 0, 13, 2, 0, | ||
1623 | 0, 0, 9, 0, 1, 28, 0, 1, | ||
1624 | 3, 0, 2, 0, 0, 0, 1, 0, | ||
1625 | 1, 1, 2, 0, 18, 2, 0, 0, | ||
1626 | 16, 35, 0, 0, 0, 1, 0, 28, | ||
1627 | 0, 0, 0, 0, 1, 0, 2, 0, | ||
1628 | 0, 1, 0, 0, 1, 0, 0, 1, | ||
1629 | 0, 0, 0, 0, 1, 11, 0, 0, | ||
1630 | 0, 0, 4, 0, 12, 1, 7, 0, | ||
1631 | 4, 0, 0, 0, 0, 1, 2, 1, | ||
1632 | 1, 1, 1, 0, 1, 1, 0, 0, | ||
1633 | 2, 0, 0, 0, 1, 32, 0, 0, | ||
1634 | 0, 0, 1, 3, 1, 1, 1, 0, | ||
1635 | 2, 0, 1, 1, 2, 0, 3, 0, | ||
1636 | 1, 0, 2, 1, 2, 0, 0, 5, | ||
1637 | 1, 4, 0, 0, 1, 43, 0, 0, | ||
1638 | 0, 2, 3, 2, 1, 1, 0, 0, | ||
1639 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
1640 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
1641 | 0, 1, 1, 0, 0, 0, 0, 0, | ||
1642 | 0, 0, 0, 4, 1, 0, 15, 0, | ||
1643 | 0, 0, 1, 6, 1, 0, 0, 1, | ||
1644 | 0, 2, 0, 0, 0, 9, 0, 1, | ||
1645 | 1, 0, 0, 0, 3, 0, 1, 0, | ||
1646 | 28, 0, 0, 0, 1, 0, 1, 0, | ||
1647 | 0, 0, 1, 0, 0, 0, 0, 0, | ||
1648 | 0, 0, 1, 0, 2, 0, 0, 18, | ||
1649 | 0, 0, 1, 0, 0, 0, 0, 0, | ||
1650 | 0, 0, 0, 1, 0, 0, 0, 16, | ||
1651 | 36, 0, 0, 0, 0, 1, 0, 0, | ||
1652 | 0, 0, 0, 1, 0, 0, 0, 0, | ||
1653 | 0, 0, 2, 0, 0, 0, 0, 0, | ||
1654 | 1, 0, 0, 0, 0, 0, 0, 0, | ||
1655 | 28, 0, 0, 0, 1, 1, 1, 1, | ||
1656 | 0, 0, 2, 0, 1, 0, 0, 0, | ||
1657 | 0, 0, 0, 0, 0, 0, 1, 1, | ||
1658 | 4, 0, 0, 2, 2, 0, 11, 0, | ||
1659 | 0, 0, 0, 0, 0, 0, 1, 1, | ||
1660 | 3, 0, 0, 4, 0, 0, 0, 18, | ||
1661 | 0, 0, 0, 1, 4, 1, 4, 1, | ||
1662 | 0, 3, 2, 2, 2, 1, 0, 0, | ||
1663 | 1, 8, 0, 0, 0, 4, 12, 0, | ||
1664 | 2, 0, 3, 0, 1, 0, 2, 0, | ||
1665 | 1, 2, 0, 0, 3, 0, 1, 1, | ||
1666 | 1, 2, 2, 4, 1, 6, 2, 4, | ||
1667 | 2, 4, 1, 4, 0, 6, 1, 3, | ||
1668 | 1, 2, 0, 2, 11, 1, 1, 1, | ||
1669 | 0, 1, 1, 0, 2, 0, 3, 3, | ||
1670 | 2, 1, 0, 0, 0, 1, 0, 1, | ||
1671 | 0, 1, 1, 0, 2, 0, 0, 1, | ||
1672 | 0, 0, 0, 0, 0, 0, 0, 1, | ||
1673 | 0, 0, 0, 0, 0, 0, 0, 1, | ||
1674 | 0, 0, 0, 4, 3, 2, 2, 0, | ||
1675 | 6, 1, 0, 1, 1, 0, 2, 0, | ||
1676 | 4, 3, 0, 1, 1, 0, 0, 0, | ||
1677 | 0, 0, 0, 0, 1, 0, 0, 0, | ||
1678 | 1, 0, 3, 0, 2, 0, 0, 0, | ||
1679 | 3, 0, 2, 1, 1, 3, 1, 0, | ||
1680 | 0, 0, 0, 0, 5, 2, 0, 0, | ||
1681 | 0, 0, 0, 0, 1, 0, 0, 1, | ||
1682 | 1, 0, 0, 35, 4, 0, 0, 0, | ||
1683 | 0, 0, 0, 0, 1, 0, 0, 0, | ||
1684 | 0, 0, 0, 3, 0, 1, 0, 0, | ||
1685 | 3, 0, 0, 1, 0, 0, 0, 0, | ||
1686 | 28, 0, 0, 0, 0, 1, 0, 3, | ||
1687 | 1, 4, 0, 1, 0, 0, 1, 0, | ||
1688 | 0, 1, 0, 0, 0, 0, 1, 1, | ||
1689 | 0, 7, 0, 0, 2, 2, 0, 11, | ||
1690 | 0, 0, 0, 0, 0, 1, 1, 3, | ||
1691 | 0, 0, 4, 0, 0, 0, 12, 1, | ||
1692 | 4, 1, 5, 2, 0, 3, 2, 2, | ||
1693 | 2, 1, 7, 0, 7, 17, 3, 0, | ||
1694 | 2, 0, 3, 0, 0, 1, 0, 2, | ||
1695 | 0, 1, 1, 0, 0, 0, 0, 0, | ||
1696 | 1, 1, 1, 0, 0, 0, 1, 1, | ||
1697 | 1, 1, 0, 0, 0, 1, 1, 4, | ||
1698 | 0, 0, 0, 0, 1, 2, 1, 1, | ||
1699 | 1, 1, 0, 1, 1, 0, 0, 2, | ||
1700 | 0, 0, 0, 1, 32, 0, 0, 0, | ||
1701 | 0, 1, 3, 1, 1, 1, 0, 2, | ||
1702 | 0, 1, 1, 2, 0, 3, 0, 1, | ||
1703 | 0, 2, 1, 2, 0, 0, 5, 1, | ||
1704 | 4, 0, 0, 1, 43, 0, 0, 0, | ||
1705 | 2, 3, 2, 1, 1, 0, 0, 0, | ||
1706 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
1707 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
1708 | 1, 1, 0, 0, 0, 0, 0, 0, | ||
1709 | 0, 0, 4, 1, 0, 15, 0, 0, | ||
1710 | 0, 1, 6, 1, 0, 0, 1, 0, | ||
1711 | 2, 0, 0, 0, 9, 0, 1, 1, | ||
1712 | 0, 0, 0, 3, 0, 1, 0, 28, | ||
1713 | 0, 0, 0, 1, 0, 1, 0, 0, | ||
1714 | 0, 1, 0, 0, 0, 0, 0, 0, | ||
1715 | 0, 1, 0, 2, 0, 0, 18, 0, | ||
1716 | 0, 1, 0, 0, 0, 0, 0, 0, | ||
1717 | 0, 0, 1, 0, 0, 0, 16, 36, | ||
1718 | 0, 0, 0, 0, 1, 0, 0, 0, | ||
1719 | 0, 0, 1, 0, 0, 0, 0, 0, | ||
1720 | 0, 2, 0, 0, 0, 0, 0, 1, | ||
1721 | 0, 0, 0, 0, 0, 0, 0, 28, | ||
1722 | 0, 0, 0, 1, 1, 1, 1, 0, | ||
1723 | 0, 2, 0, 1, 0, 0, 0, 0, | ||
1724 | 0, 0, 0, 0, 0, 1, 1, 4, | ||
1725 | 0, 0, 2, 2, 0, 11, 0, 0, | ||
1726 | 0, 0, 0, 0, 0, 1, 1, 3, | ||
1727 | 0, 0, 4, 0, 0, 0, 18, 0, | ||
1728 | 0, 0, 1, 4, 1, 4, 1, 0, | ||
1729 | 3, 2, 2, 2, 1, 0, 0, 1, | ||
1730 | 8, 0, 0, 0, 4, 12, 0, 2, | ||
1731 | 0, 3, 0, 1, 0, 2, 0, 1, | ||
1732 | 2, 0, 0, 3, 0, 1, 1, 1, | ||
1733 | 2, 2, 4, 1, 6, 2, 4, 2, | ||
1734 | 4, 1, 4, 0, 6, 1, 3, 1, | ||
1735 | 2, 0, 2, 11, 1, 1, 1, 0, | ||
1736 | 1, 1, 0, 2, 0, 3, 3, 2, | ||
1737 | 1, 0, 0, 0, 1, 0, 1, 0, | ||
1738 | 1, 1, 0, 2, 0, 0, 1, 0, | ||
1739 | 0, 0, 0, 0, 0, 0, 1, 0, | ||
1740 | 0, 0, 0, 0, 0, 0, 1, 0, | ||
1741 | 0, 0, 4, 3, 2, 2, 0, 6, | ||
1742 | 1, 0, 1, 1, 0, 2, 0, 4, | ||
1743 | 3, 0, 1, 1, 0, 0, 0, 0, | ||
1744 | 0, 0, 0, 1, 0, 0, 0, 1, | ||
1745 | 0, 3, 0, 2, 0, 0, 0, 3, | ||
1746 | 0, 2, 1, 1, 3, 1, 0, 0, | ||
1747 | 0, 0, 0, 5, 2, 0, 0, 0, | ||
1748 | 0, 0, 0, 1, 0, 0, 1, 1, | ||
1749 | 0, 0, 35, 4, 0, 0, 0, 0, | ||
1750 | 0, 0, 0, 1, 0, 0, 0, 0, | ||
1751 | 0, 0, 3, 0, 1, 0, 0, 3, | ||
1752 | 0, 0, 1, 0, 0, 0, 0, 28, | ||
1753 | 0, 0, 0, 0, 1, 0, 3, 1, | ||
1754 | 4, 0, 1, 0, 0, 1, 0, 0, | ||
1755 | 1, 0, 0, 0, 0, 1, 1, 0, | ||
1756 | 7, 0, 0, 2, 2, 0, 11, 0, | ||
1757 | 0, 0, 0, 0, 1, 1, 3, 0, | ||
1758 | 0, 4, 0, 0, 0, 12, 1, 4, | ||
1759 | 1, 5, 2, 0, 3, 2, 2, 2, | ||
1760 | 1, 7, 0, 7, 17, 3, 0, 2, | ||
1761 | 0, 3, 0, 0, 1, 0, 2, 0, | ||
1762 | 54, 2, 1, 1, 1, 1, 1, 2, | ||
1763 | 1, 3, 2, 2, 1, 34, 1, 1, | ||
1764 | 0, 3, 2, 0, 0, 0, 1, 2, | ||
1765 | 4, 1, 0, 1, 0, 0, 0, 0, | ||
1766 | 1, 1, 1, 0, 0, 1, 30, 47, | ||
1767 | 13, 9, 3, 0, 1, 28, 2, 0, | ||
1768 | 18, 16, 0, 6, 6, 6, 6, 5, | ||
1769 | 4, 7, 7, 7, 6, 4, 7, 6, | ||
1770 | 6, 6, 6, 6, 6, 6, 1, 1, | ||
1771 | 1, 1, 0, 0, 0, 4, 4, 4, | ||
1772 | 4, 1, 1, 0, 0, 0, 4, 2, | ||
1773 | 1, 1, 0, 0, 0, 33, 34, 0, | ||
1774 | 3, 2, 0, 0, 0, 1, 2, 4, | ||
1775 | 1, 0, 1, 0, 0, 0, 0, 1, | ||
1776 | 1, 1, 0, 0, 1, 30, 47, 13, | ||
1777 | 9, 3, 0, 1, 28, 2, 0, 18, | ||
1778 | 16, 0, | ||
1779 | } | ||
1780 | |||
1781 | var _hcltok_range_lengths []byte = []byte{ | ||
1782 | 0, 0, 0, 0, 0, 1, 1, 1, | ||
1783 | 5, 5, 5, 0, 0, 3, 0, 1, | ||
1784 | 1, 4, 2, 3, 0, 1, 0, 2, | ||
1785 | 2, 4, 2, 2, 3, 1, 1, 1, | ||
1786 | 1, 0, 1, 1, 2, 2, 1, 4, | ||
1787 | 6, 9, 6, 8, 5, 8, 7, 10, | ||
1788 | 4, 6, 4, 7, 7, 5, 5, 4, | ||
1789 | 5, 1, 2, 8, 4, 3, 3, 3, | ||
1790 | 0, 3, 1, 2, 1, 2, 2, 3, | ||
1791 | 3, 1, 3, 2, 2, 1, 2, 2, | ||
1792 | 2, 3, 4, 4, 3, 1, 2, 1, | ||
1793 | 3, 2, 2, 2, 2, 2, 3, 3, | ||
1794 | 1, 1, 2, 1, 3, 2, 2, 3, | ||
1795 | 2, 7, 0, 1, 4, 1, 2, 4, | ||
1796 | 2, 1, 2, 0, 2, 2, 3, 5, | ||
1797 | 5, 1, 4, 1, 1, 2, 2, 1, | ||
1798 | 0, 0, 1, 1, 1, 1, 1, 2, | ||
1799 | 2, 2, 2, 1, 1, 1, 4, 2, | ||
1800 | 2, 3, 1, 4, 4, 6, 1, 3, | ||
1801 | 1, 1, 2, 1, 1, 1, 5, 3, | ||
1802 | 1, 1, 1, 2, 3, 3, 1, 2, | ||
1803 | 2, 1, 4, 1, 2, 5, 2, 1, | ||
1804 | 1, 0, 2, 2, 2, 2, 2, 2, | ||
1805 | 2, 2, 2, 1, 1, 2, 4, 2, | ||
1806 | 1, 2, 2, 2, 6, 1, 1, 2, | ||
1807 | 1, 2, 1, 1, 1, 2, 2, 2, | ||
1808 | 1, 3, 2, 5, 2, 8, 6, 2, | ||
1809 | 2, 2, 2, 3, 1, 3, 1, 2, | ||
1810 | 1, 3, 2, 2, 3, 1, 1, 1, | ||
1811 | 1, 1, 1, 1, 2, 2, 4, 1, | ||
1812 | 2, 1, 0, 1, 1, 1, 1, 0, | ||
1813 | 1, 2, 3, 1, 3, 3, 1, 0, | ||
1814 | 3, 0, 2, 3, 1, 0, 0, 0, | ||
1815 | 0, 2, 2, 2, 2, 1, 5, 2, | ||
1816 | 2, 5, 7, 5, 0, 1, 0, 1, | ||
1817 | 1, 1, 1, 1, 0, 1, 1, 0, | ||
1818 | 3, 3, 1, 1, 2, 1, 3, 5, | ||
1819 | 1, 1, 2, 2, 1, 1, 1, 1, | ||
1820 | 2, 6, 3, 7, 2, 6, 1, 6, | ||
1821 | 2, 8, 0, 4, 2, 5, 2, 3, | ||
1822 | 3, 3, 1, 2, 8, 2, 0, 2, | ||
1823 | 1, 2, 1, 5, 2, 1, 3, 3, | ||
1824 | 0, 2, 1, 2, 1, 0, 1, 1, | ||
1825 | 3, 1, 1, 2, 3, 0, 0, 3, | ||
1826 | 2, 4, 1, 4, 1, 1, 3, 1, | ||
1827 | 1, 1, 1, 2, 2, 1, 3, 1, | ||
1828 | 4, 3, 3, 1, 1, 5, 2, 1, | ||
1829 | 1, 2, 1, 2, 1, 3, 2, 0, | ||
1830 | 1, 1, 1, 1, 1, 1, 1, 2, | ||
1831 | 1, 1, 1, 1, 1, 1, 1, 0, | ||
1832 | 1, 1, 2, 2, 1, 1, 1, 3, | ||
1833 | 2, 1, 0, 2, 1, 1, 1, 1, | ||
1834 | 0, 3, 0, 1, 1, 4, 2, 3, | ||
1835 | 0, 1, 0, 2, 2, 4, 2, 2, | ||
1836 | 3, 1, 1, 1, 1, 0, 1, 1, | ||
1837 | 2, 2, 1, 4, 6, 9, 6, 8, | ||
1838 | 5, 8, 7, 10, 4, 6, 4, 7, | ||
1839 | 7, 5, 5, 4, 5, 1, 2, 8, | ||
1840 | 4, 3, 3, 3, 0, 3, 1, 2, | ||
1841 | 1, 2, 2, 3, 3, 1, 3, 2, | ||
1842 | 2, 1, 2, 2, 2, 3, 4, 4, | ||
1843 | 3, 1, 2, 1, 3, 2, 2, 2, | ||
1844 | 2, 2, 3, 3, 1, 1, 2, 1, | ||
1845 | 3, 2, 2, 3, 2, 7, 0, 1, | ||
1846 | 4, 1, 2, 4, 2, 1, 2, 0, | ||
1847 | 2, 2, 3, 5, 5, 1, 4, 1, | ||
1848 | 1, 2, 2, 1, 0, 0, 1, 1, | ||
1849 | 1, 1, 1, 2, 2, 2, 2, 1, | ||
1850 | 1, 1, 4, 2, 2, 3, 1, 4, | ||
1851 | 4, 6, 1, 3, 1, 1, 2, 1, | ||
1852 | 1, 1, 5, 3, 1, 1, 1, 2, | ||
1853 | 3, 3, 1, 2, 2, 1, 4, 1, | ||
1854 | 2, 5, 2, 1, 1, 0, 2, 2, | ||
1855 | 2, 2, 2, 2, 2, 2, 2, 1, | ||
1856 | 1, 2, 4, 2, 1, 2, 2, 2, | ||
1857 | 6, 1, 1, 2, 1, 2, 1, 1, | ||
1858 | 1, 2, 2, 2, 1, 3, 2, 5, | ||
1859 | 2, 8, 6, 2, 2, 2, 2, 3, | ||
1860 | 1, 3, 1, 2, 1, 3, 2, 2, | ||
1861 | 3, 1, 1, 1, 1, 1, 1, 1, | ||
1862 | 2, 2, 4, 1, 2, 1, 0, 1, | ||
1863 | 1, 1, 1, 0, 1, 2, 3, 1, | ||
1864 | 3, 3, 1, 0, 3, 0, 2, 3, | ||
1865 | 1, 0, 0, 0, 0, 2, 2, 2, | ||
1866 | 2, 1, 5, 2, 2, 5, 7, 5, | ||
1867 | 0, 1, 0, 1, 1, 1, 1, 1, | ||
1868 | 0, 1, 1, 1, 2, 2, 3, 3, | ||
1869 | 4, 7, 5, 7, 5, 3, 3, 7, | ||
1870 | 3, 13, 1, 3, 5, 3, 5, 3, | ||
1871 | 6, 5, 2, 2, 8, 4, 1, 2, | ||
1872 | 3, 2, 10, 2, 2, 0, 2, 3, | ||
1873 | 3, 1, 2, 3, 3, 1, 2, 3, | ||
1874 | 3, 4, 4, 2, 1, 2, 2, 3, | ||
1875 | 2, 2, 5, 3, 2, 3, 2, 1, | ||
1876 | 3, 3, 6, 2, 2, 5, 2, 5, | ||
1877 | 1, 1, 2, 4, 1, 11, 1, 3, | ||
1878 | 8, 4, 2, 1, 0, 4, 3, 3, | ||
1879 | 3, 2, 9, 1, 1, 4, 3, 2, | ||
1880 | 2, 2, 3, 4, 2, 3, 2, 4, | ||
1881 | 3, 2, 2, 3, 3, 4, 3, 3, | ||
1882 | 4, 2, 5, 4, 8, 7, 1, 2, | ||
1883 | 1, 3, 1, 2, 5, 1, 2, 2, | ||
1884 | 2, 2, 1, 3, 2, 2, 3, 3, | ||
1885 | 1, 9, 1, 5, 1, 3, 2, 2, | ||
1886 | 3, 2, 3, 3, 3, 1, 3, 3, | ||
1887 | 2, 2, 4, 5, 3, 3, 4, 3, | ||
1888 | 3, 3, 2, 2, 2, 4, 2, 2, | ||
1889 | 1, 3, 3, 3, 3, 3, 3, 2, | ||
1890 | 2, 3, 2, 3, 3, 2, 3, 2, | ||
1891 | 3, 1, 2, 2, 2, 2, 2, 2, | ||
1892 | 2, 2, 2, 2, 2, 3, 2, 3, | ||
1893 | 2, 3, 5, 3, 3, 1, 2, 3, | ||
1894 | 2, 2, 1, 2, 3, 4, 3, 0, | ||
1895 | 3, 0, 2, 3, 1, 0, 0, 0, | ||
1896 | 0, 2, 3, 2, 4, 6, 4, 1, | ||
1897 | 1, 2, 1, 2, 1, 3, 2, 3, | ||
1898 | 2, 0, 0, 1, 1, 1, 1, 1, | ||
1899 | 0, 0, 0, 1, 1, 1, 0, 0, | ||
1900 | 0, 0, 1, 1, 1, 0, 0, 0, | ||
1901 | 3, 0, 1, 1, 4, 2, 3, 0, | ||
1902 | 1, 0, 2, 2, 4, 2, 2, 3, | ||
1903 | 1, 1, 1, 1, 0, 1, 1, 2, | ||
1904 | 2, 1, 4, 6, 9, 6, 8, 5, | ||
1905 | 8, 7, 10, 4, 6, 4, 7, 7, | ||
1906 | 5, 5, 4, 5, 1, 2, 8, 4, | ||
1907 | 3, 3, 3, 0, 3, 1, 2, 1, | ||
1908 | 2, 2, 3, 3, 1, 3, 2, 2, | ||
1909 | 1, 2, 2, 2, 3, 4, 4, 3, | ||
1910 | 1, 2, 1, 3, 2, 2, 2, 2, | ||
1911 | 2, 3, 3, 1, 1, 2, 1, 3, | ||
1912 | 2, 2, 3, 2, 7, 0, 1, 4, | ||
1913 | 1, 2, 4, 2, 1, 2, 0, 2, | ||
1914 | 2, 3, 5, 5, 1, 4, 1, 1, | ||
1915 | 2, 2, 1, 0, 0, 1, 1, 1, | ||
1916 | 1, 1, 2, 2, 2, 2, 1, 1, | ||
1917 | 1, 4, 2, 2, 3, 1, 4, 4, | ||
1918 | 6, 1, 3, 1, 1, 2, 1, 1, | ||
1919 | 1, 5, 3, 1, 1, 1, 2, 3, | ||
1920 | 3, 1, 2, 2, 1, 4, 1, 2, | ||
1921 | 5, 2, 1, 1, 0, 2, 2, 2, | ||
1922 | 2, 2, 2, 2, 2, 2, 1, 1, | ||
1923 | 2, 4, 2, 1, 2, 2, 2, 6, | ||
1924 | 1, 1, 2, 1, 2, 1, 1, 1, | ||
1925 | 2, 2, 2, 1, 3, 2, 5, 2, | ||
1926 | 8, 6, 2, 2, 2, 2, 3, 1, | ||
1927 | 3, 1, 2, 1, 3, 2, 2, 3, | ||
1928 | 1, 1, 1, 1, 1, 1, 1, 2, | ||
1929 | 2, 4, 1, 2, 1, 0, 1, 1, | ||
1930 | 1, 1, 0, 1, 2, 3, 1, 3, | ||
1931 | 3, 1, 0, 3, 0, 2, 3, 1, | ||
1932 | 0, 0, 0, 0, 2, 2, 2, 2, | ||
1933 | 1, 5, 2, 2, 5, 7, 5, 0, | ||
1934 | 1, 0, 1, 1, 1, 1, 1, 0, | ||
1935 | 1, 1, 1, 2, 2, 3, 3, 4, | ||
1936 | 7, 5, 7, 5, 3, 3, 7, 3, | ||
1937 | 13, 1, 3, 5, 3, 5, 3, 6, | ||
1938 | 5, 2, 2, 8, 4, 1, 2, 3, | ||
1939 | 2, 10, 2, 2, 0, 2, 3, 3, | ||
1940 | 1, 2, 3, 3, 1, 2, 3, 3, | ||
1941 | 4, 4, 2, 1, 2, 2, 3, 2, | ||
1942 | 2, 5, 3, 2, 3, 2, 1, 3, | ||
1943 | 3, 6, 2, 2, 5, 2, 5, 1, | ||
1944 | 1, 2, 4, 1, 11, 1, 3, 8, | ||
1945 | 4, 2, 1, 0, 4, 3, 3, 3, | ||
1946 | 2, 9, 1, 1, 4, 3, 2, 2, | ||
1947 | 2, 3, 4, 2, 3, 2, 4, 3, | ||
1948 | 2, 2, 3, 3, 4, 3, 3, 4, | ||
1949 | 2, 5, 4, 8, 7, 1, 2, 1, | ||
1950 | 3, 1, 2, 5, 1, 2, 2, 2, | ||
1951 | 2, 1, 3, 2, 2, 3, 3, 1, | ||
1952 | 9, 1, 5, 1, 3, 2, 2, 3, | ||
1953 | 2, 3, 3, 3, 1, 3, 3, 2, | ||
1954 | 2, 4, 5, 3, 3, 4, 3, 3, | ||
1955 | 3, 2, 2, 2, 4, 2, 2, 1, | ||
1956 | 3, 3, 3, 3, 3, 3, 2, 2, | ||
1957 | 3, 2, 3, 3, 2, 3, 2, 3, | ||
1958 | 1, 2, 2, 2, 2, 2, 2, 2, | ||
1959 | 2, 2, 2, 2, 3, 2, 3, 2, | ||
1960 | 3, 5, 3, 3, 1, 2, 3, 2, | ||
1961 | 2, 1, 2, 3, 4, 3, 0, 3, | ||
1962 | 0, 2, 3, 1, 0, 0, 0, 0, | ||
1963 | 2, 3, 2, 4, 6, 4, 1, 1, | ||
1964 | 2, 1, 2, 1, 3, 2, 3, 2, | ||
1965 | 11, 0, 0, 0, 0, 0, 0, 0, | ||
1966 | 0, 1, 0, 0, 0, 5, 0, 0, | ||
1967 | 1, 1, 1, 0, 1, 1, 5, 4, | ||
1968 | 2, 0, 1, 0, 2, 2, 5, 2, | ||
1969 | 3, 5, 3, 2, 3, 5, 1, 1, | ||
1970 | 1, 3, 1, 1, 2, 2, 3, 1, | ||
1971 | 2, 3, 1, 5, 5, 5, 5, 5, | ||
1972 | 3, 5, 5, 5, 5, 3, 5, 5, | ||
1973 | 5, 5, 5, 5, 5, 5, 0, 0, | ||
1974 | 0, 0, 1, 1, 1, 5, 5, 5, | ||
1975 | 5, 0, 0, 1, 1, 1, 5, 6, | ||
1976 | 0, 0, 1, 1, 1, 8, 5, 1, | ||
1977 | 1, 1, 0, 1, 1, 5, 4, 2, | ||
1978 | 0, 1, 0, 2, 2, 5, 2, 3, | ||
1979 | 5, 3, 2, 3, 5, 1, 1, 1, | ||
1980 | 3, 1, 1, 2, 2, 3, 1, 2, | ||
1981 | 3, 1, | ||
1982 | } | ||
1983 | |||
1984 | var _hcltok_index_offsets []int16 = []int16{ | ||
1985 | 0, 0, 2, 4, 6, 9, 14, 18, | ||
1986 | 20, 58, 95, 137, 139, 144, 148, 149, | ||
1987 | 151, 153, 159, 164, 169, 171, 174, 176, | ||
1988 | 179, 183, 189, 192, 195, 201, 203, 205, | ||
1989 | 207, 210, 243, 245, 247, 250, 253, 256, | ||
1990 | 264, 272, 283, 291, 300, 308, 317, 326, | ||
1991 | 338, 345, 352, 360, 368, 377, 383, 391, | ||
1992 | 397, 405, 407, 410, 424, 430, 438, 442, | ||
1993 | 446, 448, 495, 497, 500, 502, 507, 513, | ||
1994 | 519, 524, 527, 531, 534, 537, 539, 542, | ||
1995 | 545, 548, 552, 557, 562, 566, 568, 571, | ||
1996 | 573, 577, 580, 583, 586, 589, 593, 598, | ||
1997 | 602, 604, 606, 609, 611, 615, 618, 621, | ||
1998 | 629, 633, 641, 657, 659, 664, 666, 670, | ||
1999 | 681, 685, 687, 690, 692, 695, 700, 704, | ||
2000 | 710, 716, 727, 732, 735, 738, 741, 744, | ||
2001 | 746, 750, 751, 754, 756, 786, 788, 790, | ||
2002 | 793, 797, 800, 804, 806, 808, 810, 816, | ||
2003 | 819, 822, 826, 828, 833, 838, 845, 848, | ||
2004 | 852, 856, 858, 861, 881, 883, 885, 892, | ||
2005 | 896, 898, 900, 902, 905, 909, 913, 915, | ||
2006 | 919, 922, 924, 929, 947, 986, 992, 995, | ||
2007 | 997, 999, 1001, 1004, 1007, 1010, 1013, 1016, | ||
2008 | 1020, 1023, 1026, 1029, 1031, 1033, 1036, 1043, | ||
2009 | 1046, 1048, 1051, 1054, 1057, 1065, 1067, 1069, | ||
2010 | 1072, 1074, 1077, 1079, 1081, 1111, 1114, 1117, | ||
2011 | 1120, 1123, 1128, 1132, 1139, 1142, 1151, 1160, | ||
2012 | 1163, 1167, 1170, 1173, 1177, 1179, 1183, 1185, | ||
2013 | 1188, 1190, 1194, 1198, 1202, 1210, 1212, 1214, | ||
2014 | 1218, 1222, 1224, 1237, 1239, 1242, 1245, 1250, | ||
2015 | 1252, 1255, 1257, 1259, 1262, 1267, 1269, 1271, | ||
2016 | 1276, 1278, 1281, 1285, 1305, 1309, 1313, 1315, | ||
2017 | 1317, 1325, 1327, 1334, 1339, 1341, 1345, 1348, | ||
2018 | 1351, 1354, 1358, 1361, 1364, 1368, 1378, 1384, | ||
2019 | 1387, 1390, 1400, 1420, 1426, 1429, 1431, 1435, | ||
2020 | 1437, 1440, 1442, 1446, 1448, 1450, 1454, 1456, | ||
2021 | 1460, 1465, 1471, 1473, 1475, 1478, 1480, 1484, | ||
2022 | 1491, 1494, 1496, 1499, 1503, 1533, 1538, 1540, | ||
2023 | 1543, 1547, 1556, 1561, 1569, 1573, 1581, 1585, | ||
2024 | 1593, 1597, 1608, 1610, 1616, 1619, 1627, 1631, | ||
2025 | 1636, 1641, 1646, 1648, 1651, 1666, 1670, 1672, | ||
2026 | 1675, 1677, 1726, 1729, 1736, 1739, 1741, 1745, | ||
2027 | 1749, 1752, 1756, 1758, 1761, 1763, 1765, 1767, | ||
2028 | 1769, 1773, 1775, 1777, 1780, 1784, 1798, 1801, | ||
2029 | 1805, 1808, 1813, 1824, 1829, 1832, 1862, 1866, | ||
2030 | 1869, 1874, 1876, 1880, 1883, 1886, 1888, 1893, | ||
2031 | 1895, 1901, 1906, 1912, 1914, 1934, 1942, 1945, | ||
2032 | 1947, 1965, 2003, 2005, 2008, 2010, 2015, 2018, | ||
2033 | 2047, 2049, 2051, 2053, 2055, 2058, 2060, 2064, | ||
2034 | 2067, 2069, 2072, 2074, 2076, 2079, 2081, 2083, | ||
2035 | 2085, 2087, 2089, 2092, 2095, 2098, 2111, 2113, | ||
2036 | 2117, 2120, 2122, 2127, 2130, 2144, 2147, 2156, | ||
2037 | 2158, 2163, 2167, 2168, 2170, 2172, 2178, 2183, | ||
2038 | 2188, 2190, 2193, 2195, 2198, 2202, 2208, 2211, | ||
2039 | 2214, 2220, 2222, 2224, 2226, 2229, 2262, 2264, | ||
2040 | 2266, 2269, 2272, 2275, 2283, 2291, 2302, 2310, | ||
2041 | 2319, 2327, 2336, 2345, 2357, 2364, 2371, 2379, | ||
2042 | 2387, 2396, 2402, 2410, 2416, 2424, 2426, 2429, | ||
2043 | 2443, 2449, 2457, 2461, 2465, 2467, 2514, 2516, | ||
2044 | 2519, 2521, 2526, 2532, 2538, 2543, 2546, 2550, | ||
2045 | 2553, 2556, 2558, 2561, 2564, 2567, 2571, 2576, | ||
2046 | 2581, 2585, 2587, 2590, 2592, 2596, 2599, 2602, | ||
2047 | 2605, 2608, 2612, 2617, 2621, 2623, 2625, 2628, | ||
2048 | 2630, 2634, 2637, 2640, 2648, 2652, 2660, 2676, | ||
2049 | 2678, 2683, 2685, 2689, 2700, 2704, 2706, 2709, | ||
2050 | 2711, 2714, 2719, 2723, 2729, 2735, 2746, 2751, | ||
2051 | 2754, 2757, 2760, 2763, 2765, 2769, 2770, 2773, | ||
2052 | 2775, 2805, 2807, 2809, 2812, 2816, 2819, 2823, | ||
2053 | 2825, 2827, 2829, 2835, 2838, 2841, 2845, 2847, | ||
2054 | 2852, 2857, 2864, 2867, 2871, 2875, 2877, 2880, | ||
2055 | 2900, 2902, 2904, 2911, 2915, 2917, 2919, 2921, | ||
2056 | 2924, 2928, 2932, 2934, 2938, 2941, 2943, 2948, | ||
2057 | 2966, 3005, 3011, 3014, 3016, 3018, 3020, 3023, | ||
2058 | 3026, 3029, 3032, 3035, 3039, 3042, 3045, 3048, | ||
2059 | 3050, 3052, 3055, 3062, 3065, 3067, 3070, 3073, | ||
2060 | 3076, 3084, 3086, 3088, 3091, 3093, 3096, 3098, | ||
2061 | 3100, 3130, 3133, 3136, 3139, 3142, 3147, 3151, | ||
2062 | 3158, 3161, 3170, 3179, 3182, 3186, 3189, 3192, | ||
2063 | 3196, 3198, 3202, 3204, 3207, 3209, 3213, 3217, | ||
2064 | 3221, 3229, 3231, 3233, 3237, 3241, 3243, 3256, | ||
2065 | 3258, 3261, 3264, 3269, 3271, 3274, 3276, 3278, | ||
2066 | 3281, 3286, 3288, 3290, 3295, 3297, 3300, 3304, | ||
2067 | 3324, 3328, 3332, 3334, 3336, 3344, 3346, 3353, | ||
2068 | 3358, 3360, 3364, 3367, 3370, 3373, 3377, 3380, | ||
2069 | 3383, 3387, 3397, 3403, 3406, 3409, 3419, 3439, | ||
2070 | 3445, 3448, 3450, 3454, 3456, 3459, 3461, 3465, | ||
2071 | 3467, 3469, 3473, 3475, 3477, 3483, 3486, 3491, | ||
2072 | 3496, 3502, 3512, 3520, 3532, 3539, 3549, 3555, | ||
2073 | 3567, 3573, 3591, 3594, 3602, 3608, 3618, 3625, | ||
2074 | 3632, 3640, 3648, 3651, 3656, 3676, 3682, 3685, | ||
2075 | 3689, 3693, 3697, 3709, 3712, 3717, 3718, 3724, | ||
2076 | 3731, 3737, 3740, 3743, 3747, 3751, 3754, 3757, | ||
2077 | 3762, 3766, 3772, 3778, 3781, 3785, 3788, 3791, | ||
2078 | 3796, 3799, 3802, 3808, 3812, 3815, 3819, 3822, | ||
2079 | 3825, 3829, 3833, 3840, 3843, 3846, 3852, 3855, | ||
2080 | 3862, 3864, 3866, 3869, 3878, 3883, 3897, 3901, | ||
2081 | 3905, 3920, 3926, 3929, 3932, 3934, 3939, 3945, | ||
2082 | 3949, 3957, 3963, 3973, 3976, 3979, 3984, 3988, | ||
2083 | 3991, 3994, 3997, 4001, 4006, 4010, 4014, 4017, | ||
2084 | 4022, 4027, 4030, 4036, 4040, 4046, 4051, 4055, | ||
2085 | 4059, 4067, 4070, 4078, 4084, 4094, 4105, 4108, | ||
2086 | 4111, 4113, 4117, 4119, 4122, 4133, 4137, 4140, | ||
2087 | 4143, 4146, 4149, 4151, 4155, 4159, 4162, 4166, | ||
2088 | 4171, 4174, 4184, 4186, 4227, 4233, 4237, 4240, | ||
2089 | 4243, 4247, 4250, 4254, 4258, 4263, 4265, 4269, | ||
2090 | 4273, 4276, 4279, 4284, 4293, 4297, 4302, 4307, | ||
2091 | 4311, 4318, 4322, 4325, 4329, 4332, 4337, 4340, | ||
2092 | 4343, 4373, 4377, 4381, 4385, 4389, 4394, 4398, | ||
2093 | 4404, 4408, 4416, 4419, 4424, 4428, 4431, 4436, | ||
2094 | 4439, 4443, 4446, 4449, 4452, 4455, 4458, 4462, | ||
2095 | 4466, 4469, 4479, 4482, 4485, 4490, 4496, 4499, | ||
2096 | 4514, 4517, 4521, 4527, 4531, 4535, 4538, 4542, | ||
2097 | 4549, 4552, 4555, 4561, 4564, 4568, 4573, 4589, | ||
2098 | 4591, 4599, 4601, 4609, 4615, 4617, 4621, 4624, | ||
2099 | 4627, 4630, 4634, 4645, 4648, 4660, 4684, 4692, | ||
2100 | 4694, 4698, 4701, 4706, 4709, 4711, 4716, 4719, | ||
2101 | 4725, 4728, 4730, 4732, 4734, 4736, 4738, 4740, | ||
2102 | 4742, 4744, 4746, 4748, 4750, 4752, 4754, 4756, | ||
2103 | 4758, 4760, 4762, 4764, 4766, 4768, 4770, 4772, | ||
2104 | 4777, 4781, 4782, 4784, 4786, 4792, 4797, 4802, | ||
2105 | 4804, 4807, 4809, 4812, 4816, 4822, 4825, 4828, | ||
2106 | 4834, 4836, 4838, 4840, 4843, 4876, 4878, 4880, | ||
2107 | 4883, 4886, 4889, 4897, 4905, 4916, 4924, 4933, | ||
2108 | 4941, 4950, 4959, 4971, 4978, 4985, 4993, 5001, | ||
2109 | 5010, 5016, 5024, 5030, 5038, 5040, 5043, 5057, | ||
2110 | 5063, 5071, 5075, 5079, 5081, 5128, 5130, 5133, | ||
2111 | 5135, 5140, 5146, 5152, 5157, 5160, 5164, 5167, | ||
2112 | 5170, 5172, 5175, 5178, 5181, 5185, 5190, 5195, | ||
2113 | 5199, 5201, 5204, 5206, 5210, 5213, 5216, 5219, | ||
2114 | 5222, 5226, 5231, 5235, 5237, 5239, 5242, 5244, | ||
2115 | 5248, 5251, 5254, 5262, 5266, 5274, 5290, 5292, | ||
2116 | 5297, 5299, 5303, 5314, 5318, 5320, 5323, 5325, | ||
2117 | 5328, 5333, 5337, 5343, 5349, 5360, 5365, 5368, | ||
2118 | 5371, 5374, 5377, 5379, 5383, 5384, 5387, 5389, | ||
2119 | 5419, 5421, 5423, 5426, 5430, 5433, 5437, 5439, | ||
2120 | 5441, 5443, 5449, 5452, 5455, 5459, 5461, 5466, | ||
2121 | 5471, 5478, 5481, 5485, 5489, 5491, 5494, 5514, | ||
2122 | 5516, 5518, 5525, 5529, 5531, 5533, 5535, 5538, | ||
2123 | 5542, 5546, 5548, 5552, 5555, 5557, 5562, 5580, | ||
2124 | 5619, 5625, 5628, 5630, 5632, 5634, 5637, 5640, | ||
2125 | 5643, 5646, 5649, 5653, 5656, 5659, 5662, 5664, | ||
2126 | 5666, 5669, 5676, 5679, 5681, 5684, 5687, 5690, | ||
2127 | 5698, 5700, 5702, 5705, 5707, 5710, 5712, 5714, | ||
2128 | 5744, 5747, 5750, 5753, 5756, 5761, 5765, 5772, | ||
2129 | 5775, 5784, 5793, 5796, 5800, 5803, 5806, 5810, | ||
2130 | 5812, 5816, 5818, 5821, 5823, 5827, 5831, 5835, | ||
2131 | 5843, 5845, 5847, 5851, 5855, 5857, 5870, 5872, | ||
2132 | 5875, 5878, 5883, 5885, 5888, 5890, 5892, 5895, | ||
2133 | 5900, 5902, 5904, 5909, 5911, 5914, 5918, 5938, | ||
2134 | 5942, 5946, 5948, 5950, 5958, 5960, 5967, 5972, | ||
2135 | 5974, 5978, 5981, 5984, 5987, 5991, 5994, 5997, | ||
2136 | 6001, 6011, 6017, 6020, 6023, 6033, 6053, 6059, | ||
2137 | 6062, 6064, 6068, 6070, 6073, 6075, 6079, 6081, | ||
2138 | 6083, 6087, 6089, 6091, 6097, 6100, 6105, 6110, | ||
2139 | 6116, 6126, 6134, 6146, 6153, 6163, 6169, 6181, | ||
2140 | 6187, 6205, 6208, 6216, 6222, 6232, 6239, 6246, | ||
2141 | 6254, 6262, 6265, 6270, 6290, 6296, 6299, 6303, | ||
2142 | 6307, 6311, 6323, 6326, 6331, 6332, 6338, 6345, | ||
2143 | 6351, 6354, 6357, 6361, 6365, 6368, 6371, 6376, | ||
2144 | 6380, 6386, 6392, 6395, 6399, 6402, 6405, 6410, | ||
2145 | 6413, 6416, 6422, 6426, 6429, 6433, 6436, 6439, | ||
2146 | 6443, 6447, 6454, 6457, 6460, 6466, 6469, 6476, | ||
2147 | 6478, 6480, 6483, 6492, 6497, 6511, 6515, 6519, | ||
2148 | 6534, 6540, 6543, 6546, 6548, 6553, 6559, 6563, | ||
2149 | 6571, 6577, 6587, 6590, 6593, 6598, 6602, 6605, | ||
2150 | 6608, 6611, 6615, 6620, 6624, 6628, 6631, 6636, | ||
2151 | 6641, 6644, 6650, 6654, 6660, 6665, 6669, 6673, | ||
2152 | 6681, 6684, 6692, 6698, 6708, 6719, 6722, 6725, | ||
2153 | 6727, 6731, 6733, 6736, 6747, 6751, 6754, 6757, | ||
2154 | 6760, 6763, 6765, 6769, 6773, 6776, 6780, 6785, | ||
2155 | 6788, 6798, 6800, 6841, 6847, 6851, 6854, 6857, | ||
2156 | 6861, 6864, 6868, 6872, 6877, 6879, 6883, 6887, | ||
2157 | 6890, 6893, 6898, 6907, 6911, 6916, 6921, 6925, | ||
2158 | 6932, 6936, 6939, 6943, 6946, 6951, 6954, 6957, | ||
2159 | 6987, 6991, 6995, 6999, 7003, 7008, 7012, 7018, | ||
2160 | 7022, 7030, 7033, 7038, 7042, 7045, 7050, 7053, | ||
2161 | 7057, 7060, 7063, 7066, 7069, 7072, 7076, 7080, | ||
2162 | 7083, 7093, 7096, 7099, 7104, 7110, 7113, 7128, | ||
2163 | 7131, 7135, 7141, 7145, 7149, 7152, 7156, 7163, | ||
2164 | 7166, 7169, 7175, 7178, 7182, 7187, 7203, 7205, | ||
2165 | 7213, 7215, 7223, 7229, 7231, 7235, 7238, 7241, | ||
2166 | 7244, 7248, 7259, 7262, 7274, 7298, 7306, 7308, | ||
2167 | 7312, 7315, 7320, 7323, 7325, 7330, 7333, 7339, | ||
2168 | 7342, 7408, 7411, 7413, 7415, 7417, 7419, 7421, | ||
2169 | 7424, 7426, 7431, 7434, 7437, 7439, 7479, 7481, | ||
2170 | 7483, 7485, 7490, 7494, 7495, 7497, 7499, 7506, | ||
2171 | 7513, 7520, 7522, 7524, 7526, 7529, 7532, 7538, | ||
2172 | 7541, 7546, 7553, 7558, 7561, 7565, 7572, 7604, | ||
2173 | 7653, 7668, 7681, 7686, 7688, 7692, 7723, 7729, | ||
2174 | 7731, 7752, 7772, 7774, 7786, 7798, 7810, 7822, | ||
2175 | 7833, 7841, 7854, 7867, 7880, 7892, 7900, 7913, | ||
2176 | 7925, 7937, 7949, 7961, 7973, 7985, 7997, 7999, | ||
2177 | 8001, 8003, 8005, 8007, 8009, 8011, 8021, 8031, | ||
2178 | 8041, 8051, 8053, 8055, 8057, 8059, 8061, 8071, | ||
2179 | 8080, 8082, 8084, 8086, 8088, 8090, 8132, 8172, | ||
2180 | 8174, 8179, 8183, 8184, 8186, 8188, 8195, 8202, | ||
2181 | 8209, 8211, 8213, 8215, 8218, 8221, 8227, 8230, | ||
2182 | 8235, 8242, 8247, 8250, 8254, 8261, 8293, 8342, | ||
2183 | 8357, 8370, 8375, 8377, 8381, 8412, 8418, 8420, | ||
2184 | 8441, 8461, | ||
2185 | } | ||
2186 | |||
2187 | var _hcltok_indicies []int16 = []int16{ | ||
2188 | 2, 1, 4, 3, 6, 5, 6, 7, | ||
2189 | 5, 9, 11, 11, 10, 8, 12, 12, | ||
2190 | 10, 8, 10, 8, 13, 14, 15, 16, | ||
2191 | 18, 19, 20, 21, 22, 23, 24, 25, | ||
2192 | 26, 27, 28, 29, 30, 31, 32, 33, | ||
2193 | 34, 35, 36, 37, 38, 39, 40, 42, | ||
2194 | 43, 44, 45, 46, 14, 14, 17, 17, | ||
2195 | 41, 3, 14, 15, 16, 18, 19, 20, | ||
2196 | 21, 22, 23, 24, 25, 26, 27, 28, | ||
2197 | 29, 30, 31, 32, 33, 34, 35, 36, | ||
2198 | 37, 38, 39, 40, 42, 43, 44, 45, | ||
2199 | 46, 14, 14, 17, 17, 41, 3, 47, | ||
2200 | 48, 14, 14, 49, 16, 18, 19, 20, | ||
2201 | 19, 50, 51, 23, 52, 25, 26, 53, | ||
2202 | 54, 55, 56, 57, 58, 59, 60, 61, | ||
2203 | 62, 63, 64, 65, 40, 42, 66, 44, | ||
2204 | 67, 68, 69, 14, 14, 14, 17, 41, | ||
2205 | 3, 47, 3, 14, 14, 14, 14, 3, | ||
2206 | 14, 14, 14, 3, 14, 3, 14, 3, | ||
2207 | 14, 3, 3, 3, 3, 3, 14, 3, | ||
2208 | 3, 3, 3, 14, 14, 14, 14, 14, | ||
2209 | 3, 3, 14, 3, 3, 14, 3, 14, | ||
2210 | 3, 3, 14, 3, 3, 3, 14, 14, | ||
2211 | 14, 14, 14, 14, 3, 14, 14, 3, | ||
2212 | 14, 14, 3, 3, 3, 3, 3, 3, | ||
2213 | 14, 14, 3, 3, 14, 3, 14, 14, | ||
2214 | 14, 3, 70, 71, 72, 73, 17, 74, | ||
2215 | 75, 76, 77, 78, 79, 80, 81, 82, | ||
2216 | 83, 84, 85, 86, 87, 88, 89, 90, | ||
2217 | 91, 92, 93, 94, 95, 96, 97, 98, | ||
2218 | 99, 100, 3, 14, 3, 14, 3, 14, | ||
2219 | 14, 3, 14, 14, 3, 3, 3, 14, | ||
2220 | 3, 3, 3, 3, 3, 3, 3, 14, | ||
2221 | 3, 3, 3, 3, 3, 3, 3, 14, | ||
2222 | 14, 14, 14, 14, 14, 14, 14, 14, | ||
2223 | 14, 14, 3, 3, 3, 3, 3, 3, | ||
2224 | 3, 3, 14, 14, 14, 14, 14, 14, | ||
2225 | 14, 14, 14, 3, 3, 3, 3, 3, | ||
2226 | 3, 3, 3, 14, 14, 14, 14, 14, | ||
2227 | 14, 14, 14, 14, 3, 14, 14, 14, | ||
2228 | 14, 14, 14, 14, 14, 3, 14, 14, | ||
2229 | 14, 14, 14, 14, 14, 14, 14, 14, | ||
2230 | 14, 3, 14, 14, 14, 14, 14, 14, | ||
2231 | 3, 14, 14, 14, 14, 14, 14, 3, | ||
2232 | 3, 3, 3, 3, 3, 3, 3, 14, | ||
2233 | 14, 14, 14, 14, 14, 14, 14, 3, | ||
2234 | 14, 14, 14, 14, 14, 14, 14, 14, | ||
2235 | 3, 14, 14, 14, 14, 14, 3, 3, | ||
2236 | 3, 3, 3, 3, 3, 3, 14, 14, | ||
2237 | 14, 14, 14, 14, 3, 14, 14, 14, | ||
2238 | 14, 14, 14, 14, 3, 14, 3, 14, | ||
2239 | 14, 3, 14, 14, 14, 14, 14, 14, | ||
2240 | 14, 14, 14, 14, 14, 14, 14, 3, | ||
2241 | 14, 14, 14, 14, 14, 3, 14, 14, | ||
2242 | 14, 14, 14, 14, 14, 3, 14, 14, | ||
2243 | 14, 3, 14, 14, 14, 3, 14, 3, | ||
2244 | 101, 102, 103, 104, 105, 106, 107, 108, | ||
2245 | 109, 110, 111, 112, 113, 114, 115, 116, | ||
2246 | 117, 19, 118, 119, 120, 121, 122, 123, | ||
2247 | 124, 125, 126, 127, 128, 129, 130, 131, | ||
2248 | 132, 133, 134, 135, 17, 18, 136, 137, | ||
2249 | 138, 139, 140, 17, 19, 17, 3, 14, | ||
2250 | 3, 14, 14, 3, 3, 14, 3, 3, | ||
2251 | 3, 3, 14, 3, 3, 3, 3, 3, | ||
2252 | 14, 3, 3, 3, 3, 3, 14, 14, | ||
2253 | 14, 14, 14, 3, 3, 3, 14, 3, | ||
2254 | 3, 3, 14, 14, 14, 3, 3, 3, | ||
2255 | 14, 14, 3, 3, 3, 14, 14, 14, | ||
2256 | 3, 3, 3, 14, 14, 14, 14, 3, | ||
2257 | 14, 14, 14, 14, 3, 3, 3, 3, | ||
2258 | 3, 14, 14, 14, 14, 3, 3, 14, | ||
2259 | 14, 14, 3, 3, 14, 14, 14, 14, | ||
2260 | 3, 14, 14, 3, 14, 14, 3, 3, | ||
2261 | 3, 14, 14, 14, 3, 3, 3, 3, | ||
2262 | 14, 14, 14, 14, 14, 3, 3, 3, | ||
2263 | 3, 14, 3, 14, 14, 3, 14, 14, | ||
2264 | 3, 14, 3, 14, 14, 14, 3, 14, | ||
2265 | 14, 3, 3, 3, 14, 3, 3, 3, | ||
2266 | 3, 3, 3, 3, 14, 14, 14, 14, | ||
2267 | 3, 14, 14, 14, 14, 14, 14, 14, | ||
2268 | 3, 141, 142, 143, 144, 145, 146, 147, | ||
2269 | 148, 149, 17, 150, 151, 152, 153, 154, | ||
2270 | 3, 14, 3, 3, 3, 3, 3, 14, | ||
2271 | 14, 3, 14, 14, 14, 3, 14, 14, | ||
2272 | 14, 14, 14, 14, 14, 14, 14, 14, | ||
2273 | 3, 14, 14, 14, 3, 3, 14, 14, | ||
2274 | 14, 3, 3, 14, 3, 3, 14, 14, | ||
2275 | 14, 14, 14, 3, 3, 3, 3, 14, | ||
2276 | 14, 14, 14, 14, 14, 3, 14, 14, | ||
2277 | 14, 14, 14, 3, 155, 112, 156, 157, | ||
2278 | 158, 17, 159, 160, 19, 17, 3, 14, | ||
2279 | 14, 14, 14, 3, 3, 3, 14, 3, | ||
2280 | 3, 14, 14, 14, 3, 3, 3, 14, | ||
2281 | 14, 3, 122, 3, 19, 17, 17, 161, | ||
2282 | 3, 17, 3, 14, 19, 162, 163, 19, | ||
2283 | 164, 165, 19, 60, 166, 167, 168, 169, | ||
2284 | 170, 19, 171, 172, 173, 19, 174, 175, | ||
2285 | 176, 18, 177, 178, 179, 18, 180, 19, | ||
2286 | 17, 3, 3, 14, 14, 3, 3, 3, | ||
2287 | 14, 14, 14, 14, 3, 14, 14, 3, | ||
2288 | 3, 3, 3, 14, 14, 3, 3, 14, | ||
2289 | 14, 3, 3, 3, 3, 3, 3, 14, | ||
2290 | 14, 14, 3, 3, 3, 14, 3, 3, | ||
2291 | 3, 14, 14, 3, 14, 14, 14, 14, | ||
2292 | 3, 14, 14, 14, 14, 3, 14, 14, | ||
2293 | 14, 14, 14, 14, 3, 3, 3, 14, | ||
2294 | 14, 14, 14, 3, 181, 182, 3, 17, | ||
2295 | 3, 14, 3, 3, 14, 19, 183, 184, | ||
2296 | 185, 186, 60, 187, 188, 58, 189, 190, | ||
2297 | 191, 192, 193, 194, 195, 196, 197, 17, | ||
2298 | 3, 3, 14, 3, 14, 14, 14, 14, | ||
2299 | 14, 14, 14, 3, 14, 14, 14, 3, | ||
2300 | 14, 3, 3, 14, 3, 14, 3, 3, | ||
2301 | 14, 14, 14, 14, 3, 14, 14, 14, | ||
2302 | 3, 3, 14, 14, 14, 14, 3, 14, | ||
2303 | 14, 3, 3, 14, 14, 14, 14, 14, | ||
2304 | 3, 198, 199, 200, 201, 202, 203, 204, | ||
2305 | 205, 206, 207, 208, 204, 209, 210, 211, | ||
2306 | 212, 41, 3, 213, 214, 19, 215, 216, | ||
2307 | 217, 218, 219, 220, 221, 222, 223, 19, | ||
2308 | 17, 224, 225, 226, 227, 19, 228, 229, | ||
2309 | 230, 231, 232, 233, 234, 235, 236, 237, | ||
2310 | 238, 239, 240, 241, 242, 19, 147, 17, | ||
2311 | 243, 3, 14, 14, 14, 14, 14, 3, | ||
2312 | 3, 3, 14, 3, 14, 14, 3, 14, | ||
2313 | 3, 14, 14, 3, 3, 3, 14, 14, | ||
2314 | 14, 3, 3, 3, 14, 14, 14, 3, | ||
2315 | 3, 3, 3, 14, 3, 3, 14, 3, | ||
2316 | 3, 14, 14, 14, 3, 3, 14, 3, | ||
2317 | 14, 14, 14, 3, 14, 14, 14, 14, | ||
2318 | 14, 14, 3, 3, 3, 14, 14, 3, | ||
2319 | 14, 14, 3, 14, 14, 3, 14, 14, | ||
2320 | 3, 14, 14, 14, 14, 14, 14, 14, | ||
2321 | 3, 14, 3, 14, 3, 14, 14, 3, | ||
2322 | 14, 3, 14, 14, 3, 14, 3, 14, | ||
2323 | 3, 244, 215, 245, 246, 247, 248, 249, | ||
2324 | 250, 251, 252, 253, 101, 254, 19, 255, | ||
2325 | 256, 257, 19, 258, 132, 259, 260, 261, | ||
2326 | 262, 263, 264, 265, 266, 19, 3, 3, | ||
2327 | 3, 14, 14, 14, 3, 14, 14, 3, | ||
2328 | 14, 14, 3, 3, 3, 3, 3, 14, | ||
2329 | 14, 14, 14, 3, 14, 14, 14, 14, | ||
2330 | 14, 14, 3, 3, 3, 14, 14, 14, | ||
2331 | 14, 14, 14, 14, 14, 14, 3, 14, | ||
2332 | 14, 14, 14, 14, 14, 14, 14, 3, | ||
2333 | 14, 14, 3, 3, 3, 3, 14, 14, | ||
2334 | 14, 3, 3, 3, 14, 3, 3, 3, | ||
2335 | 14, 14, 3, 14, 14, 14, 3, 14, | ||
2336 | 3, 3, 3, 14, 14, 3, 14, 14, | ||
2337 | 14, 3, 14, 14, 14, 3, 3, 3, | ||
2338 | 3, 14, 19, 184, 267, 268, 17, 19, | ||
2339 | 17, 3, 3, 14, 3, 14, 19, 267, | ||
2340 | 17, 3, 19, 269, 17, 3, 3, 14, | ||
2341 | 19, 270, 271, 272, 175, 273, 274, 19, | ||
2342 | 275, 276, 277, 17, 3, 3, 14, 14, | ||
2343 | 14, 3, 14, 14, 3, 14, 14, 14, | ||
2344 | 14, 3, 3, 14, 3, 3, 14, 14, | ||
2345 | 3, 14, 3, 19, 17, 3, 278, 19, | ||
2346 | 279, 3, 17, 3, 14, 3, 14, 280, | ||
2347 | 19, 281, 282, 3, 14, 3, 3, 3, | ||
2348 | 14, 14, 14, 14, 3, 283, 284, 285, | ||
2349 | 19, 286, 287, 288, 289, 290, 291, 292, | ||
2350 | 293, 294, 295, 296, 297, 298, 299, 17, | ||
2351 | 3, 14, 14, 14, 3, 3, 3, 3, | ||
2352 | 14, 14, 3, 3, 14, 3, 3, 3, | ||
2353 | 3, 3, 3, 3, 14, 3, 14, 3, | ||
2354 | 3, 3, 3, 3, 3, 14, 14, 14, | ||
2355 | 14, 14, 3, 3, 14, 3, 3, 3, | ||
2356 | 14, 3, 3, 14, 3, 3, 14, 3, | ||
2357 | 3, 14, 3, 3, 3, 14, 14, 14, | ||
2358 | 3, 3, 3, 14, 14, 14, 14, 3, | ||
2359 | 300, 19, 301, 19, 302, 303, 304, 305, | ||
2360 | 17, 3, 14, 14, 14, 14, 14, 3, | ||
2361 | 3, 3, 14, 3, 3, 14, 14, 14, | ||
2362 | 14, 14, 14, 14, 14, 14, 14, 3, | ||
2363 | 14, 14, 14, 14, 14, 14, 14, 14, | ||
2364 | 14, 14, 14, 14, 14, 14, 14, 14, | ||
2365 | 14, 14, 14, 3, 14, 14, 14, 14, | ||
2366 | 14, 3, 306, 19, 17, 3, 14, 307, | ||
2367 | 19, 103, 17, 3, 14, 308, 3, 17, | ||
2368 | 3, 14, 19, 309, 17, 3, 3, 14, | ||
2369 | 310, 3, 19, 311, 17, 3, 3, 14, | ||
2370 | 14, 14, 14, 3, 14, 14, 14, 14, | ||
2371 | 3, 14, 14, 14, 14, 14, 3, 3, | ||
2372 | 14, 3, 14, 14, 14, 3, 14, 3, | ||
2373 | 14, 14, 14, 3, 3, 3, 3, 3, | ||
2374 | 3, 3, 14, 14, 14, 3, 14, 3, | ||
2375 | 3, 3, 14, 14, 14, 14, 3, 312, | ||
2376 | 313, 72, 314, 315, 316, 317, 318, 319, | ||
2377 | 320, 321, 322, 323, 324, 325, 326, 327, | ||
2378 | 328, 329, 330, 331, 332, 334, 335, 336, | ||
2379 | 337, 338, 339, 333, 3, 14, 14, 14, | ||
2380 | 14, 3, 14, 3, 14, 14, 3, 14, | ||
2381 | 14, 14, 3, 3, 3, 3, 3, 3, | ||
2382 | 3, 3, 3, 14, 14, 14, 14, 14, | ||
2383 | 3, 14, 14, 14, 14, 14, 14, 14, | ||
2384 | 3, 14, 14, 14, 3, 14, 14, 14, | ||
2385 | 14, 14, 14, 14, 3, 14, 14, 14, | ||
2386 | 3, 14, 14, 14, 14, 14, 14, 14, | ||
2387 | 3, 14, 14, 14, 3, 14, 14, 14, | ||
2388 | 14, 14, 14, 14, 14, 14, 14, 3, | ||
2389 | 14, 3, 14, 14, 14, 14, 14, 3, | ||
2390 | 14, 14, 3, 14, 14, 14, 14, 14, | ||
2391 | 14, 14, 3, 14, 14, 14, 3, 14, | ||
2392 | 14, 14, 14, 3, 14, 14, 14, 14, | ||
2393 | 3, 14, 14, 14, 14, 3, 14, 3, | ||
2394 | 14, 14, 3, 14, 14, 14, 14, 14, | ||
2395 | 14, 14, 14, 14, 14, 14, 14, 14, | ||
2396 | 14, 3, 14, 14, 14, 3, 14, 3, | ||
2397 | 14, 14, 3, 14, 3, 340, 341, 342, | ||
2398 | 104, 105, 106, 107, 108, 343, 110, 111, | ||
2399 | 112, 113, 114, 115, 344, 345, 170, 346, | ||
2400 | 261, 120, 347, 122, 232, 272, 125, 348, | ||
2401 | 349, 350, 351, 352, 353, 354, 355, 356, | ||
2402 | 357, 134, 358, 19, 17, 18, 19, 137, | ||
2403 | 138, 139, 140, 17, 17, 3, 14, 14, | ||
2404 | 3, 14, 14, 14, 14, 14, 14, 3, | ||
2405 | 3, 3, 14, 3, 14, 14, 14, 14, | ||
2406 | 3, 14, 14, 14, 3, 14, 14, 3, | ||
2407 | 14, 14, 14, 3, 3, 14, 14, 14, | ||
2408 | 3, 3, 14, 14, 3, 14, 3, 14, | ||
2409 | 3, 14, 14, 14, 3, 3, 14, 14, | ||
2410 | 3, 14, 14, 3, 14, 14, 14, 3, | ||
2411 | 359, 143, 145, 146, 147, 148, 149, 17, | ||
2412 | 360, 151, 361, 153, 362, 3, 14, 14, | ||
2413 | 3, 3, 3, 3, 14, 3, 3, 14, | ||
2414 | 14, 14, 14, 14, 3, 363, 112, 364, | ||
2415 | 157, 158, 17, 159, 160, 19, 17, 3, | ||
2416 | 14, 14, 14, 14, 3, 3, 3, 14, | ||
2417 | 19, 162, 163, 19, 365, 366, 222, 311, | ||
2418 | 166, 167, 168, 367, 170, 368, 369, 370, | ||
2419 | 371, 372, 373, 374, 375, 376, 377, 178, | ||
2420 | 179, 18, 378, 19, 17, 3, 3, 3, | ||
2421 | 3, 14, 14, 14, 3, 3, 3, 3, | ||
2422 | 3, 14, 14, 3, 14, 14, 14, 3, | ||
2423 | 14, 14, 3, 3, 3, 14, 14, 3, | ||
2424 | 14, 14, 14, 14, 3, 14, 3, 14, | ||
2425 | 14, 14, 14, 14, 3, 3, 3, 3, | ||
2426 | 3, 14, 14, 14, 14, 14, 14, 3, | ||
2427 | 14, 3, 19, 183, 184, 379, 186, 60, | ||
2428 | 187, 188, 58, 189, 190, 380, 17, 193, | ||
2429 | 381, 195, 196, 197, 17, 3, 14, 14, | ||
2430 | 14, 14, 14, 14, 14, 3, 14, 14, | ||
2431 | 3, 14, 3, 382, 383, 200, 201, 202, | ||
2432 | 384, 204, 205, 385, 386, 387, 204, 209, | ||
2433 | 210, 211, 212, 41, 3, 213, 214, 19, | ||
2434 | 215, 216, 218, 388, 220, 389, 222, 223, | ||
2435 | 19, 17, 390, 225, 226, 227, 19, 228, | ||
2436 | 229, 230, 231, 232, 233, 234, 235, 391, | ||
2437 | 237, 238, 392, 240, 241, 242, 19, 147, | ||
2438 | 17, 243, 3, 3, 14, 3, 3, 14, | ||
2439 | 3, 14, 14, 14, 14, 14, 3, 14, | ||
2440 | 14, 3, 393, 394, 395, 396, 397, 398, | ||
2441 | 399, 400, 250, 401, 322, 402, 216, 403, | ||
2442 | 404, 405, 406, 407, 404, 408, 409, 410, | ||
2443 | 261, 411, 263, 412, 413, 274, 3, 14, | ||
2444 | 3, 14, 3, 14, 3, 14, 3, 14, | ||
2445 | 14, 3, 14, 3, 14, 14, 14, 3, | ||
2446 | 14, 14, 3, 3, 14, 14, 14, 3, | ||
2447 | 14, 3, 14, 3, 14, 14, 3, 14, | ||
2448 | 3, 14, 3, 14, 3, 14, 3, 14, | ||
2449 | 3, 3, 3, 14, 14, 14, 3, 14, | ||
2450 | 14, 3, 19, 270, 232, 414, 404, 415, | ||
2451 | 274, 19, 416, 417, 277, 17, 3, 14, | ||
2452 | 3, 14, 14, 14, 3, 3, 3, 14, | ||
2453 | 14, 3, 280, 19, 281, 418, 3, 14, | ||
2454 | 14, 3, 19, 286, 287, 288, 289, 290, | ||
2455 | 291, 292, 293, 294, 295, 419, 17, 3, | ||
2456 | 3, 3, 14, 19, 420, 19, 268, 303, | ||
2457 | 304, 305, 17, 3, 3, 14, 422, 422, | ||
2458 | 422, 422, 421, 422, 422, 422, 421, 422, | ||
2459 | 421, 422, 422, 421, 421, 421, 421, 421, | ||
2460 | 421, 422, 421, 421, 421, 421, 422, 422, | ||
2461 | 422, 422, 422, 421, 421, 422, 421, 421, | ||
2462 | 422, 421, 422, 421, 421, 422, 421, 421, | ||
2463 | 421, 422, 422, 422, 422, 422, 422, 421, | ||
2464 | 422, 422, 421, 422, 422, 421, 421, 421, | ||
2465 | 421, 421, 421, 422, 422, 421, 421, 422, | ||
2466 | 421, 422, 422, 422, 421, 423, 424, 425, | ||
2467 | 426, 427, 428, 429, 430, 431, 432, 433, | ||
2468 | 434, 435, 436, 437, 438, 439, 440, 441, | ||
2469 | 442, 443, 444, 445, 446, 447, 448, 449, | ||
2470 | 450, 451, 452, 453, 454, 421, 422, 421, | ||
2471 | 422, 421, 422, 422, 421, 422, 422, 421, | ||
2472 | 421, 421, 422, 421, 421, 421, 421, 421, | ||
2473 | 421, 421, 422, 421, 421, 421, 421, 421, | ||
2474 | 421, 421, 422, 422, 422, 422, 422, 422, | ||
2475 | 422, 422, 422, 422, 422, 421, 421, 421, | ||
2476 | 421, 421, 421, 421, 421, 422, 422, 422, | ||
2477 | 422, 422, 422, 422, 422, 422, 421, 421, | ||
2478 | 421, 421, 421, 421, 421, 421, 422, 422, | ||
2479 | 422, 422, 422, 422, 422, 422, 422, 421, | ||
2480 | 422, 422, 422, 422, 422, 422, 422, 422, | ||
2481 | 421, 422, 422, 422, 422, 422, 422, 422, | ||
2482 | 422, 422, 422, 422, 421, 422, 422, 422, | ||
2483 | 422, 422, 422, 421, 422, 422, 422, 422, | ||
2484 | 422, 422, 421, 421, 421, 421, 421, 421, | ||
2485 | 421, 421, 422, 422, 422, 422, 422, 422, | ||
2486 | 422, 422, 421, 422, 422, 422, 422, 422, | ||
2487 | 422, 422, 422, 421, 422, 422, 422, 422, | ||
2488 | 422, 421, 421, 421, 421, 421, 421, 421, | ||
2489 | 421, 422, 422, 422, 422, 422, 422, 421, | ||
2490 | 422, 422, 422, 422, 422, 422, 422, 421, | ||
2491 | 422, 421, 422, 422, 421, 422, 422, 422, | ||
2492 | 422, 422, 422, 422, 422, 422, 422, 422, | ||
2493 | 422, 422, 421, 422, 422, 422, 422, 422, | ||
2494 | 421, 422, 422, 422, 422, 422, 422, 422, | ||
2495 | 421, 422, 422, 422, 421, 422, 422, 422, | ||
2496 | 421, 422, 421, 455, 456, 457, 458, 459, | ||
2497 | 460, 461, 462, 463, 464, 465, 466, 467, | ||
2498 | 468, 469, 470, 471, 472, 473, 474, 475, | ||
2499 | 476, 477, 478, 479, 480, 481, 482, 483, | ||
2500 | 484, 485, 486, 487, 488, 489, 490, 427, | ||
2501 | 491, 492, 493, 494, 495, 496, 427, 472, | ||
2502 | 427, 421, 422, 421, 422, 422, 421, 421, | ||
2503 | 422, 421, 421, 421, 421, 422, 421, 421, | ||
2504 | 421, 421, 421, 422, 421, 421, 421, 421, | ||
2505 | 421, 422, 422, 422, 422, 422, 421, 421, | ||
2506 | 421, 422, 421, 421, 421, 422, 422, 422, | ||
2507 | 421, 421, 421, 422, 422, 421, 421, 421, | ||
2508 | 422, 422, 422, 421, 421, 421, 422, 422, | ||
2509 | 422, 422, 421, 422, 422, 422, 422, 421, | ||
2510 | 421, 421, 421, 421, 422, 422, 422, 422, | ||
2511 | 421, 421, 422, 422, 422, 421, 421, 422, | ||
2512 | 422, 422, 422, 421, 422, 422, 421, 422, | ||
2513 | 422, 421, 421, 421, 422, 422, 422, 421, | ||
2514 | 421, 421, 421, 422, 422, 422, 422, 422, | ||
2515 | 421, 421, 421, 421, 422, 421, 422, 422, | ||
2516 | 421, 422, 422, 421, 422, 421, 422, 422, | ||
2517 | 422, 421, 422, 422, 421, 421, 421, 422, | ||
2518 | 421, 421, 421, 421, 421, 421, 421, 422, | ||
2519 | 422, 422, 422, 421, 422, 422, 422, 422, | ||
2520 | 422, 422, 422, 421, 497, 498, 499, 500, | ||
2521 | 501, 502, 503, 504, 505, 427, 506, 507, | ||
2522 | 508, 509, 510, 421, 422, 421, 421, 421, | ||
2523 | 421, 421, 422, 422, 421, 422, 422, 422, | ||
2524 | 421, 422, 422, 422, 422, 422, 422, 422, | ||
2525 | 422, 422, 422, 421, 422, 422, 422, 421, | ||
2526 | 421, 422, 422, 422, 421, 421, 422, 421, | ||
2527 | 421, 422, 422, 422, 422, 422, 421, 421, | ||
2528 | 421, 421, 422, 422, 422, 422, 422, 422, | ||
2529 | 421, 422, 422, 422, 422, 422, 421, 511, | ||
2530 | 466, 512, 513, 514, 427, 515, 516, 472, | ||
2531 | 427, 421, 422, 422, 422, 422, 421, 421, | ||
2532 | 421, 422, 421, 421, 422, 422, 422, 421, | ||
2533 | 421, 421, 422, 422, 421, 477, 421, 472, | ||
2534 | 427, 427, 517, 421, 427, 421, 422, 472, | ||
2535 | 518, 519, 472, 520, 521, 472, 522, 523, | ||
2536 | 524, 525, 526, 527, 472, 528, 529, 530, | ||
2537 | 472, 531, 532, 533, 491, 534, 535, 536, | ||
2538 | 491, 537, 472, 427, 421, 421, 422, 422, | ||
2539 | 421, 421, 421, 422, 422, 422, 422, 421, | ||
2540 | 422, 422, 421, 421, 421, 421, 422, 422, | ||
2541 | 421, 421, 422, 422, 421, 421, 421, 421, | ||
2542 | 421, 421, 422, 422, 422, 421, 421, 421, | ||
2543 | 422, 421, 421, 421, 422, 422, 421, 422, | ||
2544 | 422, 422, 422, 421, 422, 422, 422, 422, | ||
2545 | 421, 422, 422, 422, 422, 422, 422, 421, | ||
2546 | 421, 421, 422, 422, 422, 422, 421, 538, | ||
2547 | 539, 421, 427, 421, 422, 421, 421, 422, | ||
2548 | 472, 540, 541, 542, 543, 522, 544, 545, | ||
2549 | 546, 547, 548, 549, 550, 551, 552, 553, | ||
2550 | 554, 555, 427, 421, 421, 422, 421, 422, | ||
2551 | 422, 422, 422, 422, 422, 422, 421, 422, | ||
2552 | 422, 422, 421, 422, 421, 421, 422, 421, | ||
2553 | 422, 421, 421, 422, 422, 422, 422, 421, | ||
2554 | 422, 422, 422, 421, 421, 422, 422, 422, | ||
2555 | 422, 421, 422, 422, 421, 421, 422, 422, | ||
2556 | 422, 422, 422, 421, 556, 557, 558, 559, | ||
2557 | 560, 561, 562, 563, 564, 565, 566, 562, | ||
2558 | 568, 569, 570, 571, 567, 421, 572, 573, | ||
2559 | 472, 574, 575, 576, 577, 578, 579, 580, | ||
2560 | 581, 582, 472, 427, 583, 584, 585, 586, | ||
2561 | 472, 587, 588, 589, 590, 591, 592, 593, | ||
2562 | 594, 595, 596, 597, 598, 599, 600, 601, | ||
2563 | 472, 503, 427, 602, 421, 422, 422, 422, | ||
2564 | 422, 422, 421, 421, 421, 422, 421, 422, | ||
2565 | 422, 421, 422, 421, 422, 422, 421, 421, | ||
2566 | 421, 422, 422, 422, 421, 421, 421, 422, | ||
2567 | 422, 422, 421, 421, 421, 421, 422, 421, | ||
2568 | 421, 422, 421, 421, 422, 422, 422, 421, | ||
2569 | 421, 422, 421, 422, 422, 422, 421, 422, | ||
2570 | 422, 422, 422, 422, 422, 421, 421, 421, | ||
2571 | 422, 422, 421, 422, 422, 421, 422, 422, | ||
2572 | 421, 422, 422, 421, 422, 422, 422, 422, | ||
2573 | 422, 422, 422, 421, 422, 421, 422, 421, | ||
2574 | 422, 422, 421, 422, 421, 422, 422, 421, | ||
2575 | 422, 421, 422, 421, 603, 574, 604, 605, | ||
2576 | 606, 607, 608, 609, 610, 611, 612, 455, | ||
2577 | 613, 472, 614, 615, 616, 472, 617, 487, | ||
2578 | 618, 619, 620, 621, 622, 623, 624, 625, | ||
2579 | 472, 421, 421, 421, 422, 422, 422, 421, | ||
2580 | 422, 422, 421, 422, 422, 421, 421, 421, | ||
2581 | 421, 421, 422, 422, 422, 422, 421, 422, | ||
2582 | 422, 422, 422, 422, 422, 421, 421, 421, | ||
2583 | 422, 422, 422, 422, 422, 422, 422, 422, | ||
2584 | 422, 421, 422, 422, 422, 422, 422, 422, | ||
2585 | 422, 422, 421, 422, 422, 421, 421, 421, | ||
2586 | 421, 422, 422, 422, 421, 421, 421, 422, | ||
2587 | 421, 421, 421, 422, 422, 421, 422, 422, | ||
2588 | 422, 421, 422, 421, 421, 421, 422, 422, | ||
2589 | 421, 422, 422, 422, 421, 422, 422, 422, | ||
2590 | 421, 421, 421, 421, 422, 472, 541, 626, | ||
2591 | 627, 427, 472, 427, 421, 421, 422, 421, | ||
2592 | 422, 472, 626, 427, 421, 472, 628, 427, | ||
2593 | 421, 421, 422, 472, 629, 630, 631, 532, | ||
2594 | 632, 633, 472, 634, 635, 636, 427, 421, | ||
2595 | 421, 422, 422, 422, 421, 422, 422, 421, | ||
2596 | 422, 422, 422, 422, 421, 421, 422, 421, | ||
2597 | 421, 422, 422, 421, 422, 421, 472, 427, | ||
2598 | 421, 637, 472, 638, 421, 427, 421, 422, | ||
2599 | 421, 422, 639, 472, 640, 641, 421, 422, | ||
2600 | 421, 421, 421, 422, 422, 422, 422, 421, | ||
2601 | 642, 643, 644, 472, 645, 646, 647, 648, | ||
2602 | 649, 650, 651, 652, 653, 654, 655, 656, | ||
2603 | 657, 658, 427, 421, 422, 422, 422, 421, | ||
2604 | 421, 421, 421, 422, 422, 421, 421, 422, | ||
2605 | 421, 421, 421, 421, 421, 421, 421, 422, | ||
2606 | 421, 422, 421, 421, 421, 421, 421, 421, | ||
2607 | 422, 422, 422, 422, 422, 421, 421, 422, | ||
2608 | 421, 421, 421, 422, 421, 421, 422, 421, | ||
2609 | 421, 422, 421, 421, 422, 421, 421, 421, | ||
2610 | 422, 422, 422, 421, 421, 421, 422, 422, | ||
2611 | 422, 422, 421, 659, 472, 660, 472, 661, | ||
2612 | 662, 663, 664, 427, 421, 422, 422, 422, | ||
2613 | 422, 422, 421, 421, 421, 422, 421, 421, | ||
2614 | 422, 422, 422, 422, 422, 422, 422, 422, | ||
2615 | 422, 422, 421, 422, 422, 422, 422, 422, | ||
2616 | 422, 422, 422, 422, 422, 422, 422, 422, | ||
2617 | 422, 422, 422, 422, 422, 422, 421, 422, | ||
2618 | 422, 422, 422, 422, 421, 665, 472, 427, | ||
2619 | 421, 422, 666, 472, 457, 427, 421, 422, | ||
2620 | 667, 421, 427, 421, 422, 472, 668, 427, | ||
2621 | 421, 421, 422, 669, 421, 472, 670, 427, | ||
2622 | 421, 421, 422, 672, 671, 422, 422, 422, | ||
2623 | 422, 672, 671, 422, 672, 671, 672, 672, | ||
2624 | 422, 672, 671, 422, 672, 422, 672, 671, | ||
2625 | 422, 672, 422, 672, 422, 671, 672, 672, | ||
2626 | 672, 672, 672, 672, 672, 672, 671, 422, | ||
2627 | 422, 672, 672, 422, 672, 422, 672, 671, | ||
2628 | 672, 672, 672, 672, 672, 422, 672, 422, | ||
2629 | 672, 422, 672, 671, 672, 672, 422, 672, | ||
2630 | 422, 672, 671, 672, 672, 672, 672, 672, | ||
2631 | 422, 672, 422, 672, 671, 422, 422, 672, | ||
2632 | 422, 672, 671, 672, 672, 672, 422, 672, | ||
2633 | 422, 672, 422, 672, 422, 672, 671, 672, | ||
2634 | 422, 672, 422, 672, 671, 422, 672, 672, | ||
2635 | 672, 672, 422, 672, 422, 672, 422, 672, | ||
2636 | 422, 672, 422, 672, 422, 672, 671, 422, | ||
2637 | 672, 671, 672, 672, 672, 422, 672, 422, | ||
2638 | 672, 671, 672, 422, 672, 422, 672, 671, | ||
2639 | 422, 672, 672, 672, 672, 422, 672, 422, | ||
2640 | 672, 671, 422, 672, 422, 672, 422, 672, | ||
2641 | 671, 672, 672, 422, 672, 422, 672, 671, | ||
2642 | 422, 672, 422, 672, 422, 672, 422, 671, | ||
2643 | 672, 672, 672, 422, 672, 422, 672, 671, | ||
2644 | 422, 672, 671, 672, 672, 422, 672, 671, | ||
2645 | 672, 672, 672, 422, 672, 672, 672, 672, | ||
2646 | 672, 672, 422, 422, 672, 422, 672, 422, | ||
2647 | 672, 422, 672, 671, 672, 422, 672, 422, | ||
2648 | 672, 671, 422, 672, 671, 672, 422, 672, | ||
2649 | 671, 672, 422, 672, 671, 422, 422, 672, | ||
2650 | 671, 422, 672, 422, 672, 422, 672, 422, | ||
2651 | 672, 422, 672, 422, 671, 672, 672, 422, | ||
2652 | 672, 672, 672, 672, 422, 422, 672, 672, | ||
2653 | 672, 672, 672, 422, 672, 672, 672, 672, | ||
2654 | 672, 671, 422, 672, 672, 422, 672, 422, | ||
2655 | 671, 672, 672, 422, 672, 671, 422, 422, | ||
2656 | 672, 422, 671, 672, 672, 671, 422, 672, | ||
2657 | 422, 671, 672, 671, 422, 672, 422, 672, | ||
2658 | 422, 671, 672, 672, 671, 422, 672, 422, | ||
2659 | 672, 422, 672, 671, 672, 422, 672, 422, | ||
2660 | 672, 671, 422, 672, 671, 422, 422, 672, | ||
2661 | 671, 672, 422, 671, 672, 671, 422, 672, | ||
2662 | 422, 672, 422, 671, 672, 671, 422, 422, | ||
2663 | 672, 671, 672, 422, 672, 422, 672, 671, | ||
2664 | 422, 672, 422, 671, 672, 671, 422, 422, | ||
2665 | 672, 422, 671, 672, 671, 422, 422, 672, | ||
2666 | 671, 672, 422, 672, 671, 672, 422, 672, | ||
2667 | 671, 672, 422, 672, 422, 672, 422, 671, | ||
2668 | 672, 671, 422, 422, 672, 671, 672, 422, | ||
2669 | 672, 422, 672, 671, 422, 672, 671, 672, | ||
2670 | 672, 422, 672, 422, 672, 671, 671, 422, | ||
2671 | 671, 422, 672, 672, 422, 672, 672, 672, | ||
2672 | 672, 672, 672, 672, 671, 422, 672, 672, | ||
2673 | 672, 422, 671, 672, 672, 672, 422, 672, | ||
2674 | 422, 672, 422, 672, 422, 672, 422, 672, | ||
2675 | 671, 422, 422, 672, 671, 672, 422, 672, | ||
2676 | 671, 422, 422, 672, 422, 422, 422, 672, | ||
2677 | 422, 672, 422, 672, 422, 672, 422, 671, | ||
2678 | 422, 672, 422, 672, 422, 671, 672, 671, | ||
2679 | 422, 672, 422, 671, 672, 422, 672, 672, | ||
2680 | 672, 671, 422, 672, 422, 422, 672, 422, | ||
2681 | 671, 672, 672, 671, 422, 672, 672, 672, | ||
2682 | 672, 422, 672, 422, 671, 672, 672, 672, | ||
2683 | 422, 672, 671, 672, 422, 672, 422, 672, | ||
2684 | 422, 672, 422, 672, 671, 672, 672, 422, | ||
2685 | 672, 671, 422, 672, 422, 672, 422, 671, | ||
2686 | 672, 672, 671, 422, 672, 422, 671, 672, | ||
2687 | 671, 422, 672, 671, 422, 672, 422, 672, | ||
2688 | 671, 672, 672, 672, 671, 422, 422, 422, | ||
2689 | 672, 671, 422, 672, 422, 671, 672, 671, | ||
2690 | 422, 672, 422, 672, 422, 671, 672, 672, | ||
2691 | 672, 671, 422, 672, 422, 671, 672, 672, | ||
2692 | 672, 672, 671, 422, 672, 422, 672, 671, | ||
2693 | 422, 422, 672, 422, 672, 671, 672, 422, | ||
2694 | 672, 422, 671, 672, 672, 671, 422, 672, | ||
2695 | 422, 672, 671, 422, 672, 672, 672, 422, | ||
2696 | 672, 422, 671, 422, 672, 671, 672, 422, | ||
2697 | 422, 672, 422, 672, 422, 671, 672, 672, | ||
2698 | 672, 672, 671, 422, 672, 422, 672, 422, | ||
2699 | 672, 422, 672, 422, 672, 671, 672, 672, | ||
2700 | 672, 422, 672, 422, 672, 422, 672, 422, | ||
2701 | 671, 672, 672, 422, 422, 672, 671, 672, | ||
2702 | 422, 672, 672, 671, 422, 672, 422, 672, | ||
2703 | 671, 422, 422, 672, 672, 672, 672, 422, | ||
2704 | 672, 422, 672, 422, 671, 672, 672, 422, | ||
2705 | 671, 672, 671, 422, 672, 422, 671, 672, | ||
2706 | 671, 422, 672, 422, 671, 672, 422, 672, | ||
2707 | 672, 671, 422, 672, 672, 422, 671, 672, | ||
2708 | 671, 422, 672, 422, 672, 671, 672, 422, | ||
2709 | 672, 422, 671, 672, 671, 422, 672, 422, | ||
2710 | 672, 422, 672, 422, 672, 422, 672, 671, | ||
2711 | 673, 671, 674, 675, 676, 677, 678, 679, | ||
2712 | 680, 681, 682, 683, 684, 676, 685, 686, | ||
2713 | 687, 688, 689, 676, 690, 691, 692, 693, | ||
2714 | 694, 695, 696, 697, 698, 699, 700, 701, | ||
2715 | 702, 703, 704, 676, 705, 673, 685, 673, | ||
2716 | 706, 673, 671, 672, 672, 672, 672, 422, | ||
2717 | 671, 672, 672, 671, 422, 672, 671, 422, | ||
2718 | 422, 672, 671, 422, 672, 422, 671, 672, | ||
2719 | 671, 422, 422, 672, 422, 671, 672, 672, | ||
2720 | 671, 422, 672, 672, 672, 671, 422, 672, | ||
2721 | 422, 672, 672, 671, 422, 422, 672, 422, | ||
2722 | 671, 672, 671, 422, 672, 671, 422, 422, | ||
2723 | 672, 422, 672, 671, 422, 672, 422, 422, | ||
2724 | 672, 422, 672, 422, 671, 672, 672, 671, | ||
2725 | 422, 672, 672, 422, 672, 671, 422, 672, | ||
2726 | 422, 672, 671, 422, 672, 422, 671, 422, | ||
2727 | 672, 672, 672, 422, 672, 671, 672, 422, | ||
2728 | 672, 671, 422, 672, 671, 672, 422, 672, | ||
2729 | 671, 422, 672, 671, 422, 672, 422, 672, | ||
2730 | 671, 422, 672, 671, 422, 672, 671, 707, | ||
2731 | 708, 709, 710, 711, 712, 713, 714, 715, | ||
2732 | 716, 717, 718, 678, 719, 720, 721, 722, | ||
2733 | 723, 720, 724, 725, 726, 727, 728, 729, | ||
2734 | 730, 731, 732, 673, 671, 672, 422, 672, | ||
2735 | 671, 672, 422, 672, 671, 672, 422, 672, | ||
2736 | 671, 672, 422, 672, 671, 422, 672, 422, | ||
2737 | 672, 671, 672, 422, 672, 671, 672, 422, | ||
2738 | 422, 422, 672, 671, 672, 422, 672, 671, | ||
2739 | 672, 672, 672, 672, 422, 672, 422, 671, | ||
2740 | 672, 671, 422, 422, 672, 422, 672, 671, | ||
2741 | 672, 422, 672, 671, 422, 672, 671, 672, | ||
2742 | 672, 422, 672, 671, 422, 672, 671, 672, | ||
2743 | 422, 672, 671, 422, 672, 671, 422, 672, | ||
2744 | 671, 422, 672, 671, 672, 671, 422, 422, | ||
2745 | 672, 671, 672, 422, 672, 671, 422, 672, | ||
2746 | 422, 671, 672, 671, 422, 676, 733, 673, | ||
2747 | 676, 734, 676, 735, 685, 673, 671, 672, | ||
2748 | 671, 422, 672, 671, 422, 676, 734, 685, | ||
2749 | 673, 671, 676, 736, 673, 685, 673, 671, | ||
2750 | 672, 671, 422, 676, 737, 694, 738, 720, | ||
2751 | 739, 732, 676, 740, 741, 742, 673, 685, | ||
2752 | 673, 671, 672, 671, 422, 672, 422, 672, | ||
2753 | 671, 422, 672, 422, 672, 422, 671, 672, | ||
2754 | 672, 671, 422, 672, 422, 672, 671, 422, | ||
2755 | 672, 671, 676, 685, 427, 671, 743, 676, | ||
2756 | 744, 685, 673, 671, 427, 672, 671, 422, | ||
2757 | 672, 671, 422, 745, 676, 746, 747, 673, | ||
2758 | 671, 422, 672, 671, 672, 672, 671, 422, | ||
2759 | 422, 672, 422, 672, 671, 676, 748, 749, | ||
2760 | 750, 751, 752, 753, 754, 755, 756, 757, | ||
2761 | 758, 673, 685, 673, 671, 672, 422, 672, | ||
2762 | 672, 672, 672, 672, 672, 672, 422, 672, | ||
2763 | 422, 672, 672, 672, 672, 672, 672, 671, | ||
2764 | 422, 672, 672, 422, 672, 422, 671, 672, | ||
2765 | 422, 672, 672, 672, 422, 672, 672, 422, | ||
2766 | 672, 672, 422, 672, 672, 422, 672, 672, | ||
2767 | 671, 422, 676, 759, 676, 735, 760, 761, | ||
2768 | 762, 673, 685, 673, 671, 672, 671, 422, | ||
2769 | 672, 672, 672, 422, 672, 672, 672, 422, | ||
2770 | 672, 422, 672, 671, 422, 422, 422, 422, | ||
2771 | 672, 672, 422, 422, 422, 422, 422, 672, | ||
2772 | 672, 672, 672, 672, 672, 672, 422, 672, | ||
2773 | 422, 672, 422, 671, 672, 672, 672, 422, | ||
2774 | 672, 422, 672, 671, 685, 427, 763, 676, | ||
2775 | 685, 427, 672, 671, 422, 764, 676, 765, | ||
2776 | 685, 427, 672, 671, 422, 672, 422, 766, | ||
2777 | 685, 673, 671, 427, 672, 671, 422, 676, | ||
2778 | 767, 673, 685, 673, 671, 672, 671, 422, | ||
2779 | 768, 769, 768, 770, 771, 768, 772, 768, | ||
2780 | 773, 768, 771, 774, 775, 774, 777, 776, | ||
2781 | 778, 779, 778, 780, 781, 776, 782, 776, | ||
2782 | 783, 778, 784, 779, 785, 780, 787, 786, | ||
2783 | 788, 789, 789, 786, 790, 786, 791, 788, | ||
2784 | 792, 789, 793, 789, 795, 795, 795, 795, | ||
2785 | 794, 795, 795, 795, 794, 795, 794, 795, | ||
2786 | 795, 794, 794, 794, 794, 794, 794, 795, | ||
2787 | 794, 794, 794, 794, 795, 795, 795, 795, | ||
2788 | 795, 794, 794, 795, 794, 794, 795, 794, | ||
2789 | 795, 794, 794, 795, 794, 794, 794, 795, | ||
2790 | 795, 795, 795, 795, 795, 794, 795, 795, | ||
2791 | 794, 795, 795, 794, 794, 794, 794, 794, | ||
2792 | 794, 795, 795, 794, 794, 795, 794, 795, | ||
2793 | 795, 795, 794, 797, 798, 799, 800, 801, | ||
2794 | 802, 803, 804, 805, 806, 807, 808, 809, | ||
2795 | 810, 811, 812, 813, 814, 815, 816, 817, | ||
2796 | 818, 819, 820, 821, 822, 823, 824, 825, | ||
2797 | 826, 827, 828, 794, 795, 794, 795, 794, | ||
2798 | 795, 795, 794, 795, 795, 794, 794, 794, | ||
2799 | 795, 794, 794, 794, 794, 794, 794, 794, | ||
2800 | 795, 794, 794, 794, 794, 794, 794, 794, | ||
2801 | 795, 795, 795, 795, 795, 795, 795, 795, | ||
2802 | 795, 795, 795, 794, 794, 794, 794, 794, | ||
2803 | 794, 794, 794, 795, 795, 795, 795, 795, | ||
2804 | 795, 795, 795, 795, 794, 794, 794, 794, | ||
2805 | 794, 794, 794, 794, 795, 795, 795, 795, | ||
2806 | 795, 795, 795, 795, 795, 794, 795, 795, | ||
2807 | 795, 795, 795, 795, 795, 795, 794, 795, | ||
2808 | 795, 795, 795, 795, 795, 795, 795, 795, | ||
2809 | 795, 795, 794, 795, 795, 795, 795, 795, | ||
2810 | 795, 794, 795, 795, 795, 795, 795, 795, | ||
2811 | 794, 794, 794, 794, 794, 794, 794, 794, | ||
2812 | 795, 795, 795, 795, 795, 795, 795, 795, | ||
2813 | 794, 795, 795, 795, 795, 795, 795, 795, | ||
2814 | 795, 794, 795, 795, 795, 795, 795, 794, | ||
2815 | 794, 794, 794, 794, 794, 794, 794, 795, | ||
2816 | 795, 795, 795, 795, 795, 794, 795, 795, | ||
2817 | 795, 795, 795, 795, 795, 794, 795, 794, | ||
2818 | 795, 795, 794, 795, 795, 795, 795, 795, | ||
2819 | 795, 795, 795, 795, 795, 795, 795, 795, | ||
2820 | 794, 795, 795, 795, 795, 795, 794, 795, | ||
2821 | 795, 795, 795, 795, 795, 795, 794, 795, | ||
2822 | 795, 795, 794, 795, 795, 795, 794, 795, | ||
2823 | 794, 829, 830, 831, 832, 833, 834, 835, | ||
2824 | 836, 837, 838, 839, 840, 841, 842, 843, | ||
2825 | 844, 845, 846, 847, 848, 849, 850, 851, | ||
2826 | 852, 853, 854, 855, 856, 857, 858, 859, | ||
2827 | 860, 861, 862, 863, 864, 801, 865, 866, | ||
2828 | 867, 868, 869, 870, 801, 846, 801, 794, | ||
2829 | 795, 794, 795, 795, 794, 794, 795, 794, | ||
2830 | 794, 794, 794, 795, 794, 794, 794, 794, | ||
2831 | 794, 795, 794, 794, 794, 794, 794, 795, | ||
2832 | 795, 795, 795, 795, 794, 794, 794, 795, | ||
2833 | 794, 794, 794, 795, 795, 795, 794, 794, | ||
2834 | 794, 795, 795, 794, 794, 794, 795, 795, | ||
2835 | 795, 794, 794, 794, 795, 795, 795, 795, | ||
2836 | 794, 795, 795, 795, 795, 794, 794, 794, | ||
2837 | 794, 794, 795, 795, 795, 795, 794, 794, | ||
2838 | 795, 795, 795, 794, 794, 795, 795, 795, | ||
2839 | 795, 794, 795, 795, 794, 795, 795, 794, | ||
2840 | 794, 794, 795, 795, 795, 794, 794, 794, | ||
2841 | 794, 795, 795, 795, 795, 795, 794, 794, | ||
2842 | 794, 794, 795, 794, 795, 795, 794, 795, | ||
2843 | 795, 794, 795, 794, 795, 795, 795, 794, | ||
2844 | 795, 795, 794, 794, 794, 795, 794, 794, | ||
2845 | 794, 794, 794, 794, 794, 795, 795, 795, | ||
2846 | 795, 794, 795, 795, 795, 795, 795, 795, | ||
2847 | 795, 794, 871, 872, 873, 874, 875, 876, | ||
2848 | 877, 878, 879, 801, 880, 881, 882, 883, | ||
2849 | 884, 794, 795, 794, 794, 794, 794, 794, | ||
2850 | 795, 795, 794, 795, 795, 795, 794, 795, | ||
2851 | 795, 795, 795, 795, 795, 795, 795, 795, | ||
2852 | 795, 794, 795, 795, 795, 794, 794, 795, | ||
2853 | 795, 795, 794, 794, 795, 794, 794, 795, | ||
2854 | 795, 795, 795, 795, 794, 794, 794, 794, | ||
2855 | 795, 795, 795, 795, 795, 795, 794, 795, | ||
2856 | 795, 795, 795, 795, 794, 885, 840, 886, | ||
2857 | 887, 888, 801, 889, 890, 846, 801, 794, | ||
2858 | 795, 795, 795, 795, 794, 794, 794, 795, | ||
2859 | 794, 794, 795, 795, 795, 794, 794, 794, | ||
2860 | 795, 795, 794, 851, 794, 846, 801, 801, | ||
2861 | 891, 794, 801, 794, 795, 846, 892, 893, | ||
2862 | 846, 894, 895, 846, 896, 897, 898, 899, | ||
2863 | 900, 901, 846, 902, 903, 904, 846, 905, | ||
2864 | 906, 907, 865, 908, 909, 910, 865, 911, | ||
2865 | 846, 801, 794, 794, 795, 795, 794, 794, | ||
2866 | 794, 795, 795, 795, 795, 794, 795, 795, | ||
2867 | 794, 794, 794, 794, 795, 795, 794, 794, | ||
2868 | 795, 795, 794, 794, 794, 794, 794, 794, | ||
2869 | 795, 795, 795, 794, 794, 794, 795, 794, | ||
2870 | 794, 794, 795, 795, 794, 795, 795, 795, | ||
2871 | 795, 794, 795, 795, 795, 795, 794, 795, | ||
2872 | 795, 795, 795, 795, 795, 794, 794, 794, | ||
2873 | 795, 795, 795, 795, 794, 912, 913, 794, | ||
2874 | 801, 794, 795, 794, 794, 795, 846, 914, | ||
2875 | 915, 916, 917, 896, 918, 919, 920, 921, | ||
2876 | 922, 923, 924, 925, 926, 927, 928, 929, | ||
2877 | 801, 794, 794, 795, 794, 795, 795, 795, | ||
2878 | 795, 795, 795, 795, 794, 795, 795, 795, | ||
2879 | 794, 795, 794, 794, 795, 794, 795, 794, | ||
2880 | 794, 795, 795, 795, 795, 794, 795, 795, | ||
2881 | 795, 794, 794, 795, 795, 795, 795, 794, | ||
2882 | 795, 795, 794, 794, 795, 795, 795, 795, | ||
2883 | 795, 794, 930, 931, 932, 933, 934, 935, | ||
2884 | 936, 937, 938, 939, 940, 936, 942, 943, | ||
2885 | 944, 945, 941, 794, 946, 947, 846, 948, | ||
2886 | 949, 950, 951, 952, 953, 954, 955, 956, | ||
2887 | 846, 801, 957, 958, 959, 960, 846, 961, | ||
2888 | 962, 963, 964, 965, 966, 967, 968, 969, | ||
2889 | 970, 971, 972, 973, 974, 975, 846, 877, | ||
2890 | 801, 976, 794, 795, 795, 795, 795, 795, | ||
2891 | 794, 794, 794, 795, 794, 795, 795, 794, | ||
2892 | 795, 794, 795, 795, 794, 794, 794, 795, | ||
2893 | 795, 795, 794, 794, 794, 795, 795, 795, | ||
2894 | 794, 794, 794, 794, 795, 794, 794, 795, | ||
2895 | 794, 794, 795, 795, 795, 794, 794, 795, | ||
2896 | 794, 795, 795, 795, 794, 795, 795, 795, | ||
2897 | 795, 795, 795, 794, 794, 794, 795, 795, | ||
2898 | 794, 795, 795, 794, 795, 795, 794, 795, | ||
2899 | 795, 794, 795, 795, 795, 795, 795, 795, | ||
2900 | 795, 794, 795, 794, 795, 794, 795, 795, | ||
2901 | 794, 795, 794, 795, 795, 794, 795, 794, | ||
2902 | 795, 794, 977, 948, 978, 979, 980, 981, | ||
2903 | 982, 983, 984, 985, 986, 829, 987, 846, | ||
2904 | 988, 989, 990, 846, 991, 861, 992, 993, | ||
2905 | 994, 995, 996, 997, 998, 999, 846, 794, | ||
2906 | 794, 794, 795, 795, 795, 794, 795, 795, | ||
2907 | 794, 795, 795, 794, 794, 794, 794, 794, | ||
2908 | 795, 795, 795, 795, 794, 795, 795, 795, | ||
2909 | 795, 795, 795, 794, 794, 794, 795, 795, | ||
2910 | 795, 795, 795, 795, 795, 795, 795, 794, | ||
2911 | 795, 795, 795, 795, 795, 795, 795, 795, | ||
2912 | 794, 795, 795, 794, 794, 794, 794, 795, | ||
2913 | 795, 795, 794, 794, 794, 795, 794, 794, | ||
2914 | 794, 795, 795, 794, 795, 795, 795, 794, | ||
2915 | 795, 794, 794, 794, 795, 795, 794, 795, | ||
2916 | 795, 795, 794, 795, 795, 795, 794, 794, | ||
2917 | 794, 794, 795, 846, 915, 1000, 1001, 801, | ||
2918 | 846, 801, 794, 794, 795, 794, 795, 846, | ||
2919 | 1000, 801, 794, 846, 1002, 801, 794, 794, | ||
2920 | 795, 846, 1003, 1004, 1005, 906, 1006, 1007, | ||
2921 | 846, 1008, 1009, 1010, 801, 794, 794, 795, | ||
2922 | 795, 795, 794, 795, 795, 794, 795, 795, | ||
2923 | 795, 795, 794, 794, 795, 794, 794, 795, | ||
2924 | 795, 794, 795, 794, 846, 801, 794, 1011, | ||
2925 | 846, 1012, 794, 801, 794, 795, 794, 795, | ||
2926 | 1013, 846, 1014, 1015, 794, 795, 794, 794, | ||
2927 | 794, 795, 795, 795, 795, 794, 1016, 1017, | ||
2928 | 1018, 846, 1019, 1020, 1021, 1022, 1023, 1024, | ||
2929 | 1025, 1026, 1027, 1028, 1029, 1030, 1031, 1032, | ||
2930 | 801, 794, 795, 795, 795, 794, 794, 794, | ||
2931 | 794, 795, 795, 794, 794, 795, 794, 794, | ||
2932 | 794, 794, 794, 794, 794, 795, 794, 795, | ||
2933 | 794, 794, 794, 794, 794, 794, 795, 795, | ||
2934 | 795, 795, 795, 794, 794, 795, 794, 794, | ||
2935 | 794, 795, 794, 794, 795, 794, 794, 795, | ||
2936 | 794, 794, 795, 794, 794, 794, 795, 795, | ||
2937 | 795, 794, 794, 794, 795, 795, 795, 795, | ||
2938 | 794, 1033, 846, 1034, 846, 1035, 1036, 1037, | ||
2939 | 1038, 801, 794, 795, 795, 795, 795, 795, | ||
2940 | 794, 794, 794, 795, 794, 794, 795, 795, | ||
2941 | 795, 795, 795, 795, 795, 795, 795, 795, | ||
2942 | 794, 795, 795, 795, 795, 795, 795, 795, | ||
2943 | 795, 795, 795, 795, 795, 795, 795, 795, | ||
2944 | 795, 795, 795, 795, 794, 795, 795, 795, | ||
2945 | 795, 795, 794, 1039, 846, 801, 794, 795, | ||
2946 | 1040, 846, 831, 801, 794, 795, 1041, 794, | ||
2947 | 801, 794, 795, 846, 1042, 801, 794, 794, | ||
2948 | 795, 1043, 794, 846, 1044, 801, 794, 794, | ||
2949 | 795, 1046, 1045, 795, 795, 795, 795, 1046, | ||
2950 | 1045, 795, 1046, 1045, 1046, 1046, 795, 1046, | ||
2951 | 1045, 795, 1046, 795, 1046, 1045, 795, 1046, | ||
2952 | 795, 1046, 795, 1045, 1046, 1046, 1046, 1046, | ||
2953 | 1046, 1046, 1046, 1046, 1045, 795, 795, 1046, | ||
2954 | 1046, 795, 1046, 795, 1046, 1045, 1046, 1046, | ||
2955 | 1046, 1046, 1046, 795, 1046, 795, 1046, 795, | ||
2956 | 1046, 1045, 1046, 1046, 795, 1046, 795, 1046, | ||
2957 | 1045, 1046, 1046, 1046, 1046, 1046, 795, 1046, | ||
2958 | 795, 1046, 1045, 795, 795, 1046, 795, 1046, | ||
2959 | 1045, 1046, 1046, 1046, 795, 1046, 795, 1046, | ||
2960 | 795, 1046, 795, 1046, 1045, 1046, 795, 1046, | ||
2961 | 795, 1046, 1045, 795, 1046, 1046, 1046, 1046, | ||
2962 | 795, 1046, 795, 1046, 795, 1046, 795, 1046, | ||
2963 | 795, 1046, 795, 1046, 1045, 795, 1046, 1045, | ||
2964 | 1046, 1046, 1046, 795, 1046, 795, 1046, 1045, | ||
2965 | 1046, 795, 1046, 795, 1046, 1045, 795, 1046, | ||
2966 | 1046, 1046, 1046, 795, 1046, 795, 1046, 1045, | ||
2967 | 795, 1046, 795, 1046, 795, 1046, 1045, 1046, | ||
2968 | 1046, 795, 1046, 795, 1046, 1045, 795, 1046, | ||
2969 | 795, 1046, 795, 1046, 795, 1045, 1046, 1046, | ||
2970 | 1046, 795, 1046, 795, 1046, 1045, 795, 1046, | ||
2971 | 1045, 1046, 1046, 795, 1046, 1045, 1046, 1046, | ||
2972 | 1046, 795, 1046, 1046, 1046, 1046, 1046, 1046, | ||
2973 | 795, 795, 1046, 795, 1046, 795, 1046, 795, | ||
2974 | 1046, 1045, 1046, 795, 1046, 795, 1046, 1045, | ||
2975 | 795, 1046, 1045, 1046, 795, 1046, 1045, 1046, | ||
2976 | 795, 1046, 1045, 795, 795, 1046, 1045, 795, | ||
2977 | 1046, 795, 1046, 795, 1046, 795, 1046, 795, | ||
2978 | 1046, 795, 1045, 1046, 1046, 795, 1046, 1046, | ||
2979 | 1046, 1046, 795, 795, 1046, 1046, 1046, 1046, | ||
2980 | 1046, 795, 1046, 1046, 1046, 1046, 1046, 1045, | ||
2981 | 795, 1046, 1046, 795, 1046, 795, 1045, 1046, | ||
2982 | 1046, 795, 1046, 1045, 795, 795, 1046, 795, | ||
2983 | 1045, 1046, 1046, 1045, 795, 1046, 795, 1045, | ||
2984 | 1046, 1045, 795, 1046, 795, 1046, 795, 1045, | ||
2985 | 1046, 1046, 1045, 795, 1046, 795, 1046, 795, | ||
2986 | 1046, 1045, 1046, 795, 1046, 795, 1046, 1045, | ||
2987 | 795, 1046, 1045, 795, 795, 1046, 1045, 1046, | ||
2988 | 795, 1045, 1046, 1045, 795, 1046, 795, 1046, | ||
2989 | 795, 1045, 1046, 1045, 795, 795, 1046, 1045, | ||
2990 | 1046, 795, 1046, 795, 1046, 1045, 795, 1046, | ||
2991 | 795, 1045, 1046, 1045, 795, 795, 1046, 795, | ||
2992 | 1045, 1046, 1045, 795, 795, 1046, 1045, 1046, | ||
2993 | 795, 1046, 1045, 1046, 795, 1046, 1045, 1046, | ||
2994 | 795, 1046, 795, 1046, 795, 1045, 1046, 1045, | ||
2995 | 795, 795, 1046, 1045, 1046, 795, 1046, 795, | ||
2996 | 1046, 1045, 795, 1046, 1045, 1046, 1046, 795, | ||
2997 | 1046, 795, 1046, 1045, 1045, 795, 1045, 795, | ||
2998 | 1046, 1046, 795, 1046, 1046, 1046, 1046, 1046, | ||
2999 | 1046, 1046, 1045, 795, 1046, 1046, 1046, 795, | ||
3000 | 1045, 1046, 1046, 1046, 795, 1046, 795, 1046, | ||
3001 | 795, 1046, 795, 1046, 795, 1046, 1045, 795, | ||
3002 | 795, 1046, 1045, 1046, 795, 1046, 1045, 795, | ||
3003 | 795, 1046, 795, 795, 795, 1046, 795, 1046, | ||
3004 | 795, 1046, 795, 1046, 795, 1045, 795, 1046, | ||
3005 | 795, 1046, 795, 1045, 1046, 1045, 795, 1046, | ||
3006 | 795, 1045, 1046, 795, 1046, 1046, 1046, 1045, | ||
3007 | 795, 1046, 795, 795, 1046, 795, 1045, 1046, | ||
3008 | 1046, 1045, 795, 1046, 1046, 1046, 1046, 795, | ||
3009 | 1046, 795, 1045, 1046, 1046, 1046, 795, 1046, | ||
3010 | 1045, 1046, 795, 1046, 795, 1046, 795, 1046, | ||
3011 | 795, 1046, 1045, 1046, 1046, 795, 1046, 1045, | ||
3012 | 795, 1046, 795, 1046, 795, 1045, 1046, 1046, | ||
3013 | 1045, 795, 1046, 795, 1045, 1046, 1045, 795, | ||
3014 | 1046, 1045, 795, 1046, 795, 1046, 1045, 1046, | ||
3015 | 1046, 1046, 1045, 795, 795, 795, 1046, 1045, | ||
3016 | 795, 1046, 795, 1045, 1046, 1045, 795, 1046, | ||
3017 | 795, 1046, 795, 1045, 1046, 1046, 1046, 1045, | ||
3018 | 795, 1046, 795, 1045, 1046, 1046, 1046, 1046, | ||
3019 | 1045, 795, 1046, 795, 1046, 1045, 795, 795, | ||
3020 | 1046, 795, 1046, 1045, 1046, 795, 1046, 795, | ||
3021 | 1045, 1046, 1046, 1045, 795, 1046, 795, 1046, | ||
3022 | 1045, 795, 1046, 1046, 1046, 795, 1046, 795, | ||
3023 | 1045, 795, 1046, 1045, 1046, 795, 795, 1046, | ||
3024 | 795, 1046, 795, 1045, 1046, 1046, 1046, 1046, | ||
3025 | 1045, 795, 1046, 795, 1046, 795, 1046, 795, | ||
3026 | 1046, 795, 1046, 1045, 1046, 1046, 1046, 795, | ||
3027 | 1046, 795, 1046, 795, 1046, 795, 1045, 1046, | ||
3028 | 1046, 795, 795, 1046, 1045, 1046, 795, 1046, | ||
3029 | 1046, 1045, 795, 1046, 795, 1046, 1045, 795, | ||
3030 | 795, 1046, 1046, 1046, 1046, 795, 1046, 795, | ||
3031 | 1046, 795, 1045, 1046, 1046, 795, 1045, 1046, | ||
3032 | 1045, 795, 1046, 795, 1045, 1046, 1045, 795, | ||
3033 | 1046, 795, 1045, 1046, 795, 1046, 1046, 1045, | ||
3034 | 795, 1046, 1046, 795, 1045, 1046, 1045, 795, | ||
3035 | 1046, 795, 1046, 1045, 1046, 795, 1046, 795, | ||
3036 | 1045, 1046, 1045, 795, 1046, 795, 1046, 795, | ||
3037 | 1046, 795, 1046, 795, 1046, 1045, 1047, 1045, | ||
3038 | 1048, 1049, 1050, 1051, 1052, 1053, 1054, 1055, | ||
3039 | 1056, 1057, 1058, 1050, 1059, 1060, 1061, 1062, | ||
3040 | 1063, 1050, 1064, 1065, 1066, 1067, 1068, 1069, | ||
3041 | 1070, 1071, 1072, 1073, 1074, 1075, 1076, 1077, | ||
3042 | 1078, 1050, 1079, 1047, 1059, 1047, 1080, 1047, | ||
3043 | 1045, 1046, 1046, 1046, 1046, 795, 1045, 1046, | ||
3044 | 1046, 1045, 795, 1046, 1045, 795, 795, 1046, | ||
3045 | 1045, 795, 1046, 795, 1045, 1046, 1045, 795, | ||
3046 | 795, 1046, 795, 1045, 1046, 1046, 1045, 795, | ||
3047 | 1046, 1046, 1046, 1045, 795, 1046, 795, 1046, | ||
3048 | 1046, 1045, 795, 795, 1046, 795, 1045, 1046, | ||
3049 | 1045, 795, 1046, 1045, 795, 795, 1046, 795, | ||
3050 | 1046, 1045, 795, 1046, 795, 795, 1046, 795, | ||
3051 | 1046, 795, 1045, 1046, 1046, 1045, 795, 1046, | ||
3052 | 1046, 795, 1046, 1045, 795, 1046, 795, 1046, | ||
3053 | 1045, 795, 1046, 795, 1045, 795, 1046, 1046, | ||
3054 | 1046, 795, 1046, 1045, 1046, 795, 1046, 1045, | ||
3055 | 795, 1046, 1045, 1046, 795, 1046, 1045, 795, | ||
3056 | 1046, 1045, 795, 1046, 795, 1046, 1045, 795, | ||
3057 | 1046, 1045, 795, 1046, 1045, 1081, 1082, 1083, | ||
3058 | 1084, 1085, 1086, 1087, 1088, 1089, 1090, 1091, | ||
3059 | 1092, 1052, 1093, 1094, 1095, 1096, 1097, 1094, | ||
3060 | 1098, 1099, 1100, 1101, 1102, 1103, 1104, 1105, | ||
3061 | 1106, 1047, 1045, 1046, 795, 1046, 1045, 1046, | ||
3062 | 795, 1046, 1045, 1046, 795, 1046, 1045, 1046, | ||
3063 | 795, 1046, 1045, 795, 1046, 795, 1046, 1045, | ||
3064 | 1046, 795, 1046, 1045, 1046, 795, 795, 795, | ||
3065 | 1046, 1045, 1046, 795, 1046, 1045, 1046, 1046, | ||
3066 | 1046, 1046, 795, 1046, 795, 1045, 1046, 1045, | ||
3067 | 795, 795, 1046, 795, 1046, 1045, 1046, 795, | ||
3068 | 1046, 1045, 795, 1046, 1045, 1046, 1046, 795, | ||
3069 | 1046, 1045, 795, 1046, 1045, 1046, 795, 1046, | ||
3070 | 1045, 795, 1046, 1045, 795, 1046, 1045, 795, | ||
3071 | 1046, 1045, 1046, 1045, 795, 795, 1046, 1045, | ||
3072 | 1046, 795, 1046, 1045, 795, 1046, 795, 1045, | ||
3073 | 1046, 1045, 795, 1050, 1107, 1047, 1050, 1108, | ||
3074 | 1050, 1109, 1059, 1047, 1045, 1046, 1045, 795, | ||
3075 | 1046, 1045, 795, 1050, 1108, 1059, 1047, 1045, | ||
3076 | 1050, 1110, 1047, 1059, 1047, 1045, 1046, 1045, | ||
3077 | 795, 1050, 1111, 1068, 1112, 1094, 1113, 1106, | ||
3078 | 1050, 1114, 1115, 1116, 1047, 1059, 1047, 1045, | ||
3079 | 1046, 1045, 795, 1046, 795, 1046, 1045, 795, | ||
3080 | 1046, 795, 1046, 795, 1045, 1046, 1046, 1045, | ||
3081 | 795, 1046, 795, 1046, 1045, 795, 1046, 1045, | ||
3082 | 1050, 1059, 801, 1045, 1117, 1050, 1118, 1059, | ||
3083 | 1047, 1045, 801, 1046, 1045, 795, 1046, 1045, | ||
3084 | 795, 1119, 1050, 1120, 1121, 1047, 1045, 795, | ||
3085 | 1046, 1045, 1046, 1046, 1045, 795, 795, 1046, | ||
3086 | 795, 1046, 1045, 1050, 1122, 1123, 1124, 1125, | ||
3087 | 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1047, | ||
3088 | 1059, 1047, 1045, 1046, 795, 1046, 1046, 1046, | ||
3089 | 1046, 1046, 1046, 1046, 795, 1046, 795, 1046, | ||
3090 | 1046, 1046, 1046, 1046, 1046, 1045, 795, 1046, | ||
3091 | 1046, 795, 1046, 795, 1045, 1046, 795, 1046, | ||
3092 | 1046, 1046, 795, 1046, 1046, 795, 1046, 1046, | ||
3093 | 795, 1046, 1046, 795, 1046, 1046, 1045, 795, | ||
3094 | 1050, 1133, 1050, 1109, 1134, 1135, 1136, 1047, | ||
3095 | 1059, 1047, 1045, 1046, 1045, 795, 1046, 1046, | ||
3096 | 1046, 795, 1046, 1046, 1046, 795, 1046, 795, | ||
3097 | 1046, 1045, 795, 795, 795, 795, 1046, 1046, | ||
3098 | 795, 795, 795, 795, 795, 1046, 1046, 1046, | ||
3099 | 1046, 1046, 1046, 1046, 795, 1046, 795, 1046, | ||
3100 | 795, 1045, 1046, 1046, 1046, 795, 1046, 795, | ||
3101 | 1046, 1045, 1059, 801, 1137, 1050, 1059, 801, | ||
3102 | 1046, 1045, 795, 1138, 1050, 1139, 1059, 801, | ||
3103 | 1046, 1045, 795, 1046, 795, 1140, 1059, 1047, | ||
3104 | 1045, 801, 1046, 1045, 795, 1050, 1141, 1047, | ||
3105 | 1059, 1047, 1045, 1046, 1045, 795, 1142, 1143, | ||
3106 | 1144, 1142, 1145, 1146, 1147, 1148, 1149, 1150, | ||
3107 | 1151, 1152, 1153, 1154, 672, 672, 422, 1155, | ||
3108 | 1156, 1157, 1158, 672, 1161, 1162, 1164, 1165, | ||
3109 | 1166, 1160, 1167, 1168, 1169, 1170, 1171, 1172, | ||
3110 | 1173, 1174, 1175, 1176, 1177, 1178, 1179, 1180, | ||
3111 | 1181, 1182, 1183, 1184, 1185, 1186, 1188, 1189, | ||
3112 | 1190, 1191, 1192, 1193, 672, 1148, 10, 1148, | ||
3113 | 422, 1148, 422, 1160, 1163, 1187, 1194, 1159, | ||
3114 | 1142, 1142, 1195, 1143, 1196, 1198, 1197, 2, | ||
3115 | 1, 1199, 1197, 1200, 1197, 5, 1, 1197, | ||
3116 | 6, 5, 9, 11, 11, 10, 1202, 1203, | ||
3117 | 1204, 1197, 1205, 1206, 1197, 1207, 1197, 422, | ||
3118 | 422, 1209, 1210, 491, 472, 1211, 472, 1212, | ||
3119 | 1213, 1214, 1215, 1216, 1217, 1218, 1219, 1220, | ||
3120 | 1221, 1222, 546, 1223, 522, 1224, 1225, 1226, | ||
3121 | 1227, 1228, 1229, 1230, 1231, 1232, 1233, 1234, | ||
3122 | 1235, 422, 422, 422, 427, 567, 1208, 1236, | ||
3123 | 1197, 1237, 1197, 672, 1238, 422, 422, 422, | ||
3124 | 672, 1238, 672, 672, 422, 1238, 422, 1238, | ||
3125 | 422, 1238, 422, 672, 672, 672, 672, 672, | ||
3126 | 1238, 422, 672, 672, 672, 422, 672, 422, | ||
3127 | 1238, 422, 672, 672, 672, 672, 422, 1238, | ||
3128 | 672, 422, 672, 422, 672, 422, 672, 672, | ||
3129 | 422, 672, 1238, 422, 672, 422, 672, 422, | ||
3130 | 672, 1238, 672, 422, 1238, 672, 422, 672, | ||
3131 | 422, 1238, 672, 672, 672, 672, 672, 1238, | ||
3132 | 422, 422, 672, 422, 672, 1238, 672, 422, | ||
3133 | 1238, 672, 672, 1238, 422, 422, 672, 422, | ||
3134 | 672, 422, 672, 1238, 1239, 1240, 1241, 1242, | ||
3135 | 1243, 1244, 1245, 1246, 1247, 1248, 1249, 717, | ||
3136 | 1250, 1251, 1252, 1253, 1254, 1255, 1256, 1257, | ||
3137 | 1258, 1259, 1260, 1261, 1260, 1262, 1263, 1264, | ||
3138 | 1265, 1266, 673, 1238, 1267, 1268, 1269, 1270, | ||
3139 | 1271, 1272, 1273, 1274, 1275, 1276, 1277, 1278, | ||
3140 | 1279, 1280, 1281, 1282, 1283, 1284, 1285, 727, | ||
3141 | 1286, 1287, 1288, 694, 1289, 1290, 1291, 1292, | ||
3142 | 1293, 1294, 673, 1295, 1296, 1297, 1298, 1299, | ||
3143 | 1300, 1301, 1302, 676, 1303, 673, 676, 1304, | ||
3144 | 1305, 1306, 1307, 685, 1238, 1308, 1309, 1310, | ||
3145 | 1311, 705, 1312, 1313, 685, 1314, 1315, 1316, | ||
3146 | 1317, 1318, 673, 1238, 1319, 1278, 1320, 1321, | ||
3147 | 1322, 685, 1323, 1324, 676, 673, 685, 427, | ||
3148 | 1238, 1288, 673, 676, 685, 427, 685, 427, | ||
3149 | 1325, 685, 1238, 427, 676, 1326, 1327, 676, | ||
3150 | 1328, 1329, 683, 1330, 1331, 1332, 1333, 1334, | ||
3151 | 1284, 1335, 1336, 1337, 1338, 1339, 1340, 1341, | ||
3152 | 1342, 1343, 1344, 1345, 1346, 1303, 1347, 676, | ||
3153 | 685, 427, 1238, 1348, 1349, 685, 673, 1238, | ||
3154 | 427, 673, 1238, 676, 1350, 733, 1351, 1352, | ||
3155 | 1353, 1354, 1355, 1356, 1357, 1358, 673, 1359, | ||
3156 | 1360, 1361, 1362, 1363, 1364, 673, 685, 1238, | ||
3157 | 1366, 1367, 1368, 1369, 1370, 1371, 1372, 1373, | ||
3158 | 1374, 1375, 1376, 1372, 1378, 1379, 1380, 1381, | ||
3159 | 1365, 1377, 1365, 1238, 1365, 1238, 1382, 1382, | ||
3160 | 1383, 1384, 1385, 1386, 1387, 1388, 1389, 1390, | ||
3161 | 1387, 771, 1391, 1391, 1391, 1392, 1393, 1386, | ||
3162 | 1391, 772, 773, 1394, 1391, 771, 1395, 1395, | ||
3163 | 1395, 1397, 1398, 1399, 1395, 1400, 1401, 1402, | ||
3164 | 1395, 1396, 1403, 1403, 1403, 1405, 1406, 1407, | ||
3165 | 1403, 1408, 1409, 1410, 1403, 1404, 1391, 1391, | ||
3166 | 1411, 1412, 1386, 1391, 772, 773, 1394, 1391, | ||
3167 | 771, 1413, 1414, 1415, 771, 1416, 1417, 1418, | ||
3168 | 769, 769, 769, 769, 1420, 1421, 1422, 1396, | ||
3169 | 769, 1423, 1424, 1425, 769, 1419, 770, 770, | ||
3170 | 770, 1427, 1428, 1429, 1396, 770, 1430, 1431, | ||
3171 | 1432, 770, 1426, 769, 769, 769, 1434, 1435, | ||
3172 | 1436, 1404, 769, 1437, 1438, 1439, 769, 1433, | ||
3173 | 1395, 1395, 771, 1440, 1441, 1399, 1395, 1400, | ||
3174 | 1401, 1402, 1395, 1396, 1442, 1443, 1444, 771, | ||
3175 | 1445, 1446, 1447, 770, 770, 770, 770, 1449, | ||
3176 | 1450, 1451, 1404, 770, 1452, 1453, 1454, 770, | ||
3177 | 1448, 1403, 1403, 771, 1455, 1456, 1407, 1403, | ||
3178 | 1408, 1409, 1410, 1403, 1404, 1403, 1403, 1403, | ||
3179 | 1405, 1406, 1407, 771, 1408, 1409, 1410, 1403, | ||
3180 | 1404, 1403, 1403, 1403, 1405, 1406, 1407, 772, | ||
3181 | 1408, 1409, 1410, 1403, 1404, 1403, 1403, 1403, | ||
3182 | 1405, 1406, 1407, 773, 1408, 1409, 1410, 1403, | ||
3183 | 1404, 1395, 1395, 1395, 1397, 1398, 1399, 771, | ||
3184 | 1400, 1401, 1402, 1395, 1396, 1395, 1395, 1395, | ||
3185 | 1397, 1398, 1399, 772, 1400, 1401, 1402, 1395, | ||
3186 | 1396, 1395, 1395, 1395, 1397, 1398, 1399, 773, | ||
3187 | 1400, 1401, 1402, 1395, 1396, 1458, 769, 1460, | ||
3188 | 1459, 1461, 770, 1463, 1462, 771, 1464, 775, | ||
3189 | 1464, 1465, 1464, 777, 1466, 1467, 1468, 1469, | ||
3190 | 1470, 1471, 1472, 1469, 781, 777, 1466, 1474, | ||
3191 | 1475, 1473, 782, 783, 1476, 1473, 781, 1479, | ||
3192 | 1480, 1481, 1482, 1477, 1483, 1484, 1485, 1477, | ||
3193 | 1478, 1488, 1489, 1490, 1491, 1486, 1492, 1493, | ||
3194 | 1494, 1486, 1487, 1496, 1495, 1498, 1497, 781, | ||
3195 | 1499, 782, 1499, 783, 1499, 787, 1500, 1501, | ||
3196 | 1502, 1503, 1504, 1505, 1506, 1503, 789, 787, | ||
3197 | 1500, 1508, 1507, 790, 791, 1509, 1507, 789, | ||
3198 | 1511, 1510, 1513, 1512, 789, 1514, 790, 1514, | ||
3199 | 791, 1514, 795, 1517, 1518, 1520, 1521, 1522, | ||
3200 | 1516, 1523, 1524, 1525, 1526, 1527, 1528, 1529, | ||
3201 | 1530, 1531, 1532, 1533, 1534, 1535, 1536, 1537, | ||
3202 | 1538, 1539, 1540, 1541, 1542, 1544, 1545, 1546, | ||
3203 | 1547, 1548, 1549, 795, 795, 1515, 1516, 1519, | ||
3204 | 1543, 1550, 1515, 1046, 795, 795, 1552, 1553, | ||
3205 | 865, 846, 1554, 846, 1555, 1556, 1557, 1558, | ||
3206 | 1559, 1560, 1561, 1562, 1563, 1564, 1565, 920, | ||
3207 | 1566, 896, 1567, 1568, 1569, 1570, 1571, 1572, | ||
3208 | 1573, 1574, 1575, 1576, 1577, 1578, 795, 795, | ||
3209 | 795, 801, 941, 1551, 1046, 1579, 795, 795, | ||
3210 | 795, 1046, 1579, 1046, 1046, 795, 1579, 795, | ||
3211 | 1579, 795, 1579, 795, 1046, 1046, 1046, 1046, | ||
3212 | 1046, 1579, 795, 1046, 1046, 1046, 795, 1046, | ||
3213 | 795, 1579, 795, 1046, 1046, 1046, 1046, 795, | ||
3214 | 1579, 1046, 795, 1046, 795, 1046, 795, 1046, | ||
3215 | 1046, 795, 1046, 1579, 795, 1046, 795, 1046, | ||
3216 | 795, 1046, 1579, 1046, 795, 1579, 1046, 795, | ||
3217 | 1046, 795, 1579, 1046, 1046, 1046, 1046, 1046, | ||
3218 | 1579, 795, 795, 1046, 795, 1046, 1579, 1046, | ||
3219 | 795, 1579, 1046, 1046, 1579, 795, 795, 1046, | ||
3220 | 795, 1046, 795, 1046, 1579, 1580, 1581, 1582, | ||
3221 | 1583, 1584, 1585, 1586, 1587, 1588, 1589, 1590, | ||
3222 | 1091, 1591, 1592, 1593, 1594, 1595, 1596, 1597, | ||
3223 | 1598, 1599, 1600, 1601, 1602, 1601, 1603, 1604, | ||
3224 | 1605, 1606, 1607, 1047, 1579, 1608, 1609, 1610, | ||
3225 | 1611, 1612, 1613, 1614, 1615, 1616, 1617, 1618, | ||
3226 | 1619, 1620, 1621, 1622, 1623, 1624, 1625, 1626, | ||
3227 | 1101, 1627, 1628, 1629, 1068, 1630, 1631, 1632, | ||
3228 | 1633, 1634, 1635, 1047, 1636, 1637, 1638, 1639, | ||
3229 | 1640, 1641, 1642, 1643, 1050, 1644, 1047, 1050, | ||
3230 | 1645, 1646, 1647, 1648, 1059, 1579, 1649, 1650, | ||
3231 | 1651, 1652, 1079, 1653, 1654, 1059, 1655, 1656, | ||
3232 | 1657, 1658, 1659, 1047, 1579, 1660, 1619, 1661, | ||
3233 | 1662, 1663, 1059, 1664, 1665, 1050, 1047, 1059, | ||
3234 | 801, 1579, 1629, 1047, 1050, 1059, 801, 1059, | ||
3235 | 801, 1666, 1059, 1579, 801, 1050, 1667, 1668, | ||
3236 | 1050, 1669, 1670, 1057, 1671, 1672, 1673, 1674, | ||
3237 | 1675, 1625, 1676, 1677, 1678, 1679, 1680, 1681, | ||
3238 | 1682, 1683, 1684, 1685, 1686, 1687, 1644, 1688, | ||
3239 | 1050, 1059, 801, 1579, 1689, 1690, 1059, 1047, | ||
3240 | 1579, 801, 1047, 1579, 1050, 1691, 1107, 1692, | ||
3241 | 1693, 1694, 1695, 1696, 1697, 1698, 1699, 1047, | ||
3242 | 1700, 1701, 1702, 1703, 1704, 1705, 1047, 1059, | ||
3243 | 1579, 1707, 1708, 1709, 1710, 1711, 1712, 1713, | ||
3244 | 1714, 1715, 1716, 1717, 1713, 1719, 1720, 1721, | ||
3245 | 1722, 1706, 1718, 1706, 1579, 1706, 1579, | ||
3246 | } | ||
3247 | |||
3248 | var _hcltok_trans_targs []int16 = []int16{ | ||
3249 | 1464, 1, 1464, 1464, 1464, 3, 4, 1472, | ||
3250 | 1464, 5, 1473, 6, 7, 9, 10, 287, | ||
3251 | 13, 14, 15, 16, 17, 288, 289, 20, | ||
3252 | 290, 22, 23, 291, 292, 293, 294, 295, | ||
3253 | 296, 297, 298, 299, 300, 329, 349, 354, | ||
3254 | 128, 129, 130, 357, 152, 372, 376, 1464, | ||
3255 | 11, 12, 18, 19, 21, 24, 25, 26, | ||
3256 | 27, 28, 29, 30, 31, 32, 33, 65, | ||
3257 | 106, 121, 132, 155, 171, 284, 34, 35, | ||
3258 | 36, 37, 38, 39, 40, 41, 42, 43, | ||
3259 | 44, 45, 46, 47, 48, 49, 50, 51, | ||
3260 | 52, 53, 54, 55, 56, 57, 58, 59, | ||
3261 | 60, 61, 62, 63, 64, 66, 67, 68, | ||
3262 | 69, 70, 71, 72, 73, 74, 75, 76, | ||
3263 | 77, 78, 79, 80, 81, 82, 83, 84, | ||
3264 | 85, 86, 87, 88, 89, 90, 91, 92, | ||
3265 | 93, 94, 95, 96, 97, 98, 99, 100, | ||
3266 | 101, 102, 103, 104, 105, 107, 108, 109, | ||
3267 | 110, 111, 112, 113, 114, 115, 116, 117, | ||
3268 | 118, 119, 120, 122, 123, 124, 125, 126, | ||
3269 | 127, 131, 133, 134, 135, 136, 137, 138, | ||
3270 | 139, 140, 141, 142, 143, 144, 145, 146, | ||
3271 | 147, 148, 149, 150, 151, 153, 154, 156, | ||
3272 | 157, 158, 159, 160, 161, 162, 163, 164, | ||
3273 | 165, 166, 167, 168, 169, 170, 172, 204, | ||
3274 | 228, 231, 232, 234, 243, 244, 247, 251, | ||
3275 | 269, 276, 278, 280, 282, 173, 174, 175, | ||
3276 | 176, 177, 178, 179, 180, 181, 182, 183, | ||
3277 | 184, 185, 186, 187, 188, 189, 190, 191, | ||
3278 | 192, 193, 194, 195, 196, 197, 198, 199, | ||
3279 | 200, 201, 202, 203, 205, 206, 207, 208, | ||
3280 | 209, 210, 211, 212, 213, 214, 215, 216, | ||
3281 | 217, 218, 219, 220, 221, 222, 223, 224, | ||
3282 | 225, 226, 227, 229, 230, 233, 235, 236, | ||
3283 | 237, 238, 239, 240, 241, 242, 245, 246, | ||
3284 | 248, 249, 250, 252, 253, 254, 255, 256, | ||
3285 | 257, 258, 259, 260, 261, 262, 263, 264, | ||
3286 | 265, 266, 267, 268, 270, 271, 272, 273, | ||
3287 | 274, 275, 277, 279, 281, 283, 285, 286, | ||
3288 | 301, 302, 303, 304, 305, 306, 307, 308, | ||
3289 | 309, 310, 311, 312, 313, 314, 315, 316, | ||
3290 | 317, 318, 319, 320, 321, 322, 323, 324, | ||
3291 | 325, 326, 327, 328, 330, 331, 332, 333, | ||
3292 | 334, 335, 336, 337, 338, 339, 340, 341, | ||
3293 | 342, 343, 344, 345, 346, 347, 348, 350, | ||
3294 | 351, 352, 353, 355, 356, 358, 359, 360, | ||
3295 | 361, 362, 363, 364, 365, 366, 367, 368, | ||
3296 | 369, 370, 371, 373, 374, 375, 377, 383, | ||
3297 | 405, 410, 412, 414, 378, 379, 380, 381, | ||
3298 | 382, 384, 385, 386, 387, 388, 389, 390, | ||
3299 | 391, 392, 393, 394, 395, 396, 397, 398, | ||
3300 | 399, 400, 401, 402, 403, 404, 406, 407, | ||
3301 | 408, 409, 411, 413, 415, 1464, 1477, 438, | ||
3302 | 439, 440, 441, 418, 442, 443, 444, 445, | ||
3303 | 446, 447, 448, 449, 450, 451, 452, 453, | ||
3304 | 454, 455, 456, 457, 458, 459, 460, 461, | ||
3305 | 462, 463, 464, 465, 466, 467, 468, 470, | ||
3306 | 471, 472, 473, 474, 475, 476, 477, 478, | ||
3307 | 479, 480, 481, 482, 483, 484, 485, 486, | ||
3308 | 420, 487, 488, 489, 490, 491, 492, 493, | ||
3309 | 494, 495, 496, 497, 498, 499, 500, 501, | ||
3310 | 502, 503, 504, 419, 505, 506, 507, 508, | ||
3311 | 509, 511, 512, 513, 514, 515, 516, 517, | ||
3312 | 518, 519, 520, 521, 522, 523, 524, 526, | ||
3313 | 527, 528, 529, 530, 531, 535, 537, 538, | ||
3314 | 539, 540, 435, 541, 542, 543, 544, 545, | ||
3315 | 546, 547, 548, 549, 550, 551, 552, 553, | ||
3316 | 554, 555, 557, 558, 560, 561, 562, 563, | ||
3317 | 564, 565, 433, 566, 567, 568, 569, 570, | ||
3318 | 571, 572, 573, 574, 576, 608, 632, 635, | ||
3319 | 636, 638, 647, 648, 651, 655, 673, 533, | ||
3320 | 680, 682, 684, 686, 577, 578, 579, 580, | ||
3321 | 581, 582, 583, 584, 585, 586, 587, 588, | ||
3322 | 589, 590, 591, 592, 593, 594, 595, 596, | ||
3323 | 597, 598, 599, 600, 601, 602, 603, 604, | ||
3324 | 605, 606, 607, 609, 610, 611, 612, 613, | ||
3325 | 614, 615, 616, 617, 618, 619, 620, 621, | ||
3326 | 622, 623, 624, 625, 626, 627, 628, 629, | ||
3327 | 630, 631, 633, 634, 637, 639, 640, 641, | ||
3328 | 642, 643, 644, 645, 646, 649, 650, 652, | ||
3329 | 653, 654, 656, 657, 658, 659, 660, 661, | ||
3330 | 662, 663, 664, 665, 666, 667, 668, 669, | ||
3331 | 670, 671, 672, 674, 675, 676, 677, 678, | ||
3332 | 679, 681, 683, 685, 687, 689, 690, 1464, | ||
3333 | 1464, 691, 828, 829, 760, 830, 831, 832, | ||
3334 | 833, 834, 835, 789, 836, 725, 837, 838, | ||
3335 | 839, 840, 841, 842, 843, 844, 745, 845, | ||
3336 | 846, 847, 848, 849, 850, 851, 852, 853, | ||
3337 | 854, 770, 855, 857, 858, 859, 860, 861, | ||
3338 | 862, 863, 864, 865, 866, 703, 867, 868, | ||
3339 | 869, 870, 871, 872, 873, 874, 875, 741, | ||
3340 | 876, 877, 878, 879, 880, 811, 882, 883, | ||
3341 | 886, 888, 889, 890, 891, 892, 893, 896, | ||
3342 | 897, 899, 900, 901, 903, 904, 905, 906, | ||
3343 | 907, 908, 909, 910, 911, 912, 913, 915, | ||
3344 | 916, 917, 918, 921, 923, 924, 926, 928, | ||
3345 | 1515, 1517, 1518, 1516, 931, 932, 1515, 934, | ||
3346 | 1541, 1541, 1541, 1543, 1544, 1542, 939, 940, | ||
3347 | 1545, 1546, 1550, 1550, 1550, 1551, 946, 947, | ||
3348 | 1552, 1553, 1557, 1558, 1557, 973, 974, 975, | ||
3349 | 976, 953, 977, 978, 979, 980, 981, 982, | ||
3350 | 983, 984, 985, 986, 987, 988, 989, 990, | ||
3351 | 991, 992, 993, 994, 995, 996, 997, 998, | ||
3352 | 999, 1000, 1001, 1002, 1003, 1005, 1006, 1007, | ||
3353 | 1008, 1009, 1010, 1011, 1012, 1013, 1014, 1015, | ||
3354 | 1016, 1017, 1018, 1019, 1020, 1021, 955, 1022, | ||
3355 | 1023, 1024, 1025, 1026, 1027, 1028, 1029, 1030, | ||
3356 | 1031, 1032, 1033, 1034, 1035, 1036, 1037, 1038, | ||
3357 | 1039, 954, 1040, 1041, 1042, 1043, 1044, 1046, | ||
3358 | 1047, 1048, 1049, 1050, 1051, 1052, 1053, 1054, | ||
3359 | 1055, 1056, 1057, 1058, 1059, 1061, 1062, 1063, | ||
3360 | 1064, 1065, 1066, 1070, 1072, 1073, 1074, 1075, | ||
3361 | 970, 1076, 1077, 1078, 1079, 1080, 1081, 1082, | ||
3362 | 1083, 1084, 1085, 1086, 1087, 1088, 1089, 1090, | ||
3363 | 1092, 1093, 1095, 1096, 1097, 1098, 1099, 1100, | ||
3364 | 968, 1101, 1102, 1103, 1104, 1105, 1106, 1107, | ||
3365 | 1108, 1109, 1111, 1143, 1167, 1170, 1171, 1173, | ||
3366 | 1182, 1183, 1186, 1190, 1208, 1068, 1215, 1217, | ||
3367 | 1219, 1221, 1112, 1113, 1114, 1115, 1116, 1117, | ||
3368 | 1118, 1119, 1120, 1121, 1122, 1123, 1124, 1125, | ||
3369 | 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, | ||
3370 | 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, | ||
3371 | 1142, 1144, 1145, 1146, 1147, 1148, 1149, 1150, | ||
3372 | 1151, 1152, 1153, 1154, 1155, 1156, 1157, 1158, | ||
3373 | 1159, 1160, 1161, 1162, 1163, 1164, 1165, 1166, | ||
3374 | 1168, 1169, 1172, 1174, 1175, 1176, 1177, 1178, | ||
3375 | 1179, 1180, 1181, 1184, 1185, 1187, 1188, 1189, | ||
3376 | 1191, 1192, 1193, 1194, 1195, 1196, 1197, 1198, | ||
3377 | 1199, 1200, 1201, 1202, 1203, 1204, 1205, 1206, | ||
3378 | 1207, 1209, 1210, 1211, 1212, 1213, 1214, 1216, | ||
3379 | 1218, 1220, 1222, 1224, 1225, 1557, 1557, 1226, | ||
3380 | 1363, 1364, 1295, 1365, 1366, 1367, 1368, 1369, | ||
3381 | 1370, 1324, 1371, 1260, 1372, 1373, 1374, 1375, | ||
3382 | 1376, 1377, 1378, 1379, 1280, 1380, 1381, 1382, | ||
3383 | 1383, 1384, 1385, 1386, 1387, 1388, 1389, 1305, | ||
3384 | 1390, 1392, 1393, 1394, 1395, 1396, 1397, 1398, | ||
3385 | 1399, 1400, 1401, 1238, 1402, 1403, 1404, 1405, | ||
3386 | 1406, 1407, 1408, 1409, 1410, 1276, 1411, 1412, | ||
3387 | 1413, 1414, 1415, 1346, 1417, 1418, 1421, 1423, | ||
3388 | 1424, 1425, 1426, 1427, 1428, 1431, 1432, 1434, | ||
3389 | 1435, 1436, 1438, 1439, 1440, 1441, 1442, 1443, | ||
3390 | 1444, 1445, 1446, 1447, 1448, 1450, 1451, 1452, | ||
3391 | 1453, 1456, 1458, 1459, 1461, 1463, 1465, 1464, | ||
3392 | 1466, 1467, 1464, 1468, 1464, 1469, 1470, 1471, | ||
3393 | 1474, 1475, 1476, 1464, 1478, 1464, 1479, 1464, | ||
3394 | 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, | ||
3395 | 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, | ||
3396 | 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, | ||
3397 | 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, | ||
3398 | 1512, 1513, 1514, 1464, 1464, 1464, 1464, 1464, | ||
3399 | 2, 1464, 1464, 8, 1464, 1464, 1464, 1464, | ||
3400 | 1464, 416, 417, 421, 422, 423, 424, 425, | ||
3401 | 426, 427, 428, 429, 430, 431, 432, 434, | ||
3402 | 436, 437, 469, 510, 525, 532, 534, 536, | ||
3403 | 556, 559, 575, 688, 1464, 1464, 1464, 692, | ||
3404 | 693, 694, 695, 696, 697, 698, 699, 700, | ||
3405 | 701, 702, 704, 705, 706, 707, 708, 709, | ||
3406 | 710, 711, 712, 713, 714, 715, 716, 717, | ||
3407 | 718, 719, 720, 721, 722, 723, 724, 726, | ||
3408 | 727, 728, 729, 730, 731, 732, 733, 734, | ||
3409 | 735, 736, 737, 738, 739, 740, 742, 743, | ||
3410 | 744, 746, 747, 748, 749, 750, 751, 752, | ||
3411 | 753, 754, 755, 756, 757, 758, 759, 761, | ||
3412 | 762, 763, 764, 765, 766, 767, 768, 769, | ||
3413 | 771, 772, 773, 774, 775, 776, 777, 778, | ||
3414 | 779, 780, 781, 782, 783, 784, 785, 786, | ||
3415 | 787, 788, 790, 791, 792, 793, 794, 795, | ||
3416 | 796, 797, 798, 799, 800, 801, 802, 803, | ||
3417 | 804, 805, 806, 807, 808, 809, 810, 812, | ||
3418 | 813, 814, 815, 816, 817, 818, 819, 820, | ||
3419 | 821, 822, 823, 824, 825, 826, 827, 856, | ||
3420 | 881, 884, 885, 887, 894, 895, 898, 902, | ||
3421 | 914, 919, 920, 922, 925, 927, 1515, 1515, | ||
3422 | 1534, 1536, 1519, 1515, 1538, 1539, 1540, 1515, | ||
3423 | 929, 930, 933, 1515, 1516, 929, 930, 1519, | ||
3424 | 931, 932, 933, 1515, 1516, 929, 930, 1519, | ||
3425 | 931, 932, 933, 1520, 1525, 1521, 1522, 1524, | ||
3426 | 1531, 1532, 1533, 1517, 1521, 1522, 1524, 1531, | ||
3427 | 1532, 1533, 1518, 1523, 1526, 1527, 1528, 1529, | ||
3428 | 1530, 1517, 1521, 1522, 1524, 1531, 1532, 1533, | ||
3429 | 1520, 1525, 1523, 1526, 1527, 1528, 1529, 1530, | ||
3430 | 1518, 1523, 1526, 1527, 1528, 1529, 1530, 1520, | ||
3431 | 1525, 1515, 1535, 1515, 1515, 1537, 1515, 1515, | ||
3432 | 1515, 935, 936, 942, 943, 1541, 1547, 1548, | ||
3433 | 1549, 1541, 937, 938, 941, 1541, 1542, 1541, | ||
3434 | 936, 937, 938, 939, 940, 941, 1541, 1542, | ||
3435 | 1541, 936, 937, 938, 939, 940, 941, 1541, | ||
3436 | 1541, 1541, 1541, 1541, 944, 949, 950, 1550, | ||
3437 | 1554, 1555, 1556, 1550, 945, 948, 1550, 1550, | ||
3438 | 1550, 1550, 1550, 1557, 1559, 1560, 1561, 1562, | ||
3439 | 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, | ||
3440 | 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, | ||
3441 | 1579, 1580, 1581, 1582, 1583, 1584, 1585, 1586, | ||
3442 | 1587, 1588, 1589, 1590, 1591, 1592, 1593, 1557, | ||
3443 | 951, 952, 956, 957, 958, 959, 960, 961, | ||
3444 | 962, 963, 964, 965, 966, 967, 969, 971, | ||
3445 | 972, 1004, 1045, 1060, 1067, 1069, 1071, 1091, | ||
3446 | 1094, 1110, 1223, 1557, 1227, 1228, 1229, 1230, | ||
3447 | 1231, 1232, 1233, 1234, 1235, 1236, 1237, 1239, | ||
3448 | 1240, 1241, 1242, 1243, 1244, 1245, 1246, 1247, | ||
3449 | 1248, 1249, 1250, 1251, 1252, 1253, 1254, 1255, | ||
3450 | 1256, 1257, 1258, 1259, 1261, 1262, 1263, 1264, | ||
3451 | 1265, 1266, 1267, 1268, 1269, 1270, 1271, 1272, | ||
3452 | 1273, 1274, 1275, 1277, 1278, 1279, 1281, 1282, | ||
3453 | 1283, 1284, 1285, 1286, 1287, 1288, 1289, 1290, | ||
3454 | 1291, 1292, 1293, 1294, 1296, 1297, 1298, 1299, | ||
3455 | 1300, 1301, 1302, 1303, 1304, 1306, 1307, 1308, | ||
3456 | 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, | ||
3457 | 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1325, | ||
3458 | 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, | ||
3459 | 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, | ||
3460 | 1342, 1343, 1344, 1345, 1347, 1348, 1349, 1350, | ||
3461 | 1351, 1352, 1353, 1354, 1355, 1356, 1357, 1358, | ||
3462 | 1359, 1360, 1361, 1362, 1391, 1416, 1419, 1420, | ||
3463 | 1422, 1429, 1430, 1433, 1437, 1449, 1454, 1455, | ||
3464 | 1457, 1460, 1462, | ||
3465 | } | ||
3466 | |||
3467 | var _hcltok_trans_actions []byte = []byte{ | ||
3468 | 151, 0, 93, 147, 109, 0, 0, 201, | ||
3469 | 143, 0, 13, 0, 0, 0, 0, 0, | ||
3470 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3471 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3472 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3473 | 0, 0, 0, 0, 0, 0, 0, 123, | ||
3474 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3475 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3476 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3477 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3478 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3479 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3480 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3481 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3482 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3483 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3484 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3485 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3486 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3487 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3488 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3489 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3490 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3491 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3492 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3493 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3494 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3495 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3496 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3497 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3498 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3499 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3500 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3501 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3502 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3503 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3504 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3505 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3506 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3507 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3508 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3509 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3510 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3511 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3512 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3513 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3514 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3515 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3516 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3517 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3518 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3519 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3520 | 0, 0, 0, 0, 0, 145, 198, 0, | ||
3521 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3522 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3523 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3524 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3525 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3526 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3527 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3528 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3529 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3530 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3531 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3532 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3533 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3534 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3535 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3536 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3537 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3538 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3539 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3540 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3541 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3542 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3543 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3544 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3545 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3546 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3547 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3548 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3549 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3550 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3551 | 0, 0, 0, 0, 0, 0, 0, 149, | ||
3552 | 127, 0, 0, 0, 0, 0, 0, 0, | ||
3553 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3554 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3555 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3556 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3557 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3558 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3559 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3560 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3561 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3562 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3563 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3564 | 35, 13, 13, 13, 0, 0, 37, 0, | ||
3565 | 57, 43, 55, 180, 180, 180, 0, 0, | ||
3566 | 0, 0, 77, 63, 75, 186, 0, 0, | ||
3567 | 0, 0, 87, 192, 91, 0, 0, 0, | ||
3568 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3569 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3570 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3571 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3572 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3573 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3574 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3575 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3576 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3577 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3578 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3579 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3580 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3581 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3582 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3583 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3584 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3585 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3586 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3587 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3588 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3589 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3590 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3591 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3592 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3593 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3594 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3595 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3596 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3597 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3598 | 0, 0, 0, 0, 0, 89, 81, 0, | ||
3599 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3600 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3601 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3602 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3603 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3604 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3605 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3606 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3607 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3608 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3609 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3610 | 0, 0, 0, 0, 0, 0, 0, 95, | ||
3611 | 0, 0, 121, 210, 113, 0, 13, 204, | ||
3612 | 13, 0, 0, 115, 0, 117, 0, 125, | ||
3613 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3614 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3615 | 0, 0, 0, 0, 0, 0, 13, 13, | ||
3616 | 13, 207, 207, 207, 207, 207, 207, 13, | ||
3617 | 13, 207, 13, 129, 141, 137, 99, 105, | ||
3618 | 0, 135, 131, 0, 103, 97, 111, 101, | ||
3619 | 133, 0, 0, 0, 0, 0, 0, 0, | ||
3620 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3621 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3622 | 0, 0, 0, 0, 107, 119, 139, 0, | ||
3623 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3624 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3625 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3626 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3627 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3628 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3629 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3630 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3631 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3632 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3633 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3634 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3635 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3636 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3637 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3638 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3639 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3640 | 0, 0, 0, 0, 0, 0, 21, 19, | ||
3641 | 0, 0, 13, 23, 0, 13, 13, 29, | ||
3642 | 0, 0, 0, 153, 174, 1, 1, 174, | ||
3643 | 1, 1, 1, 156, 177, 3, 3, 177, | ||
3644 | 3, 3, 3, 0, 0, 0, 0, 13, | ||
3645 | 13, 13, 13, 174, 1, 1, 174, 174, | ||
3646 | 174, 174, 174, 1, 1, 174, 174, 174, | ||
3647 | 174, 177, 3, 3, 177, 177, 177, 177, | ||
3648 | 1, 1, 0, 0, 13, 13, 13, 13, | ||
3649 | 177, 3, 3, 177, 177, 177, 177, 3, | ||
3650 | 3, 31, 0, 25, 15, 0, 27, 17, | ||
3651 | 33, 0, 0, 0, 0, 45, 0, 183, | ||
3652 | 183, 51, 0, 0, 0, 162, 213, 159, | ||
3653 | 5, 5, 5, 5, 5, 5, 168, 217, | ||
3654 | 165, 7, 7, 7, 7, 7, 7, 47, | ||
3655 | 39, 49, 41, 53, 0, 0, 0, 65, | ||
3656 | 0, 189, 189, 71, 0, 0, 67, 59, | ||
3657 | 69, 61, 73, 79, 0, 0, 0, 0, | ||
3658 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3659 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3660 | 0, 0, 13, 13, 13, 195, 195, 195, | ||
3661 | 195, 195, 195, 13, 13, 195, 13, 83, | ||
3662 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3663 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3664 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3665 | 0, 0, 0, 85, 0, 0, 0, 0, | ||
3666 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3667 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3668 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3669 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3670 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3671 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3672 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3673 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3674 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3675 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3676 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3677 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3678 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3679 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3680 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3681 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3682 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3683 | 0, 0, 0, | ||
3684 | } | ||
3685 | |||
3686 | var _hcltok_to_state_actions []byte = []byte{ | ||
3687 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3688 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3689 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3690 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3691 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3692 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3693 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3694 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3695 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3696 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3697 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3698 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3699 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3700 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3701 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3702 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3703 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3704 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3705 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3706 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3707 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3708 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3709 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3710 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3711 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3712 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3713 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3714 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3715 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3716 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3717 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3718 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3719 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3720 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3721 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3722 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3723 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3724 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3725 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3726 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3727 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3728 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3729 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3730 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3731 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3732 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3733 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3734 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3735 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3736 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3737 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3738 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3739 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3740 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3741 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3742 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3743 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3744 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3745 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3746 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3747 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3748 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3749 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3750 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3751 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3752 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3753 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3754 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3755 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3756 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3757 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3758 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3759 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3760 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3761 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3762 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3763 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3764 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3765 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3766 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3767 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3768 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3769 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3770 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3771 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3772 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3773 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3774 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3775 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3776 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3777 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3778 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3779 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3780 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3781 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3782 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3783 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3784 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3785 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3786 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3787 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3788 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3789 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3790 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3791 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3792 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3793 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3794 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3795 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3796 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3797 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3798 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3799 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3800 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3801 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3802 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3803 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3804 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3805 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3806 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3807 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3808 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3809 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3810 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3811 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3812 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3813 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3814 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3815 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3816 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3817 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3818 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3819 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3820 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3821 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3822 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3823 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3824 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3825 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3826 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3827 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3828 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3829 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3830 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3831 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3832 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3833 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3834 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3835 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3836 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3837 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3838 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3839 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3840 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3841 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3842 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3843 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3844 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3845 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3846 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3847 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3848 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3849 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3850 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3851 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3852 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3853 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3854 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3855 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3856 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3857 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3858 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3859 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3860 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3861 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3862 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3863 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3864 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3865 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3866 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3867 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3868 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3869 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3870 | 9, 0, 0, 0, 0, 0, 0, 0, | ||
3871 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3872 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3873 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3874 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3875 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3876 | 0, 0, 0, 9, 0, 0, 0, 0, | ||
3877 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3878 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3879 | 0, 0, 0, 0, 0, 171, 0, 0, | ||
3880 | 0, 0, 0, 0, 0, 0, 171, 0, | ||
3881 | 0, 0, 0, 0, 0, 9, 0, 0, | ||
3882 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3883 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3884 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3885 | 0, 0, 0, 0, 0, 0, 0, 0, | ||
3886 | 0, 0, | ||
3887 | } | ||
3888 | |||
// _hcltok_from_state_actions is a Ragel-generated dispatch table indexed by
// the current machine state (cs). The generated driver reads it at the top
// of its _resume block: the entry is used as an index into _hcltok_actions,
// whose value there gives the count of "from-state" actions to run before
// examining the next input byte (the visible from-state action, case 6,
// records the token start: ts = p).
//
// All entries are zero except the five scanner entry-point states, which
// carry offset 11. Counting rows (8 states per row), those nonzero slots
// fall at states 1464, 1515, 1541, 1550 and 1557 — exactly the values of
// hcltok_en_main/hcltok_start, hcltok_en_stringTemplate,
// hcltok_en_heredocTemplate, hcltok_en_bareTemplate and
// hcltok_en_identOnly declared below.
//
// NOTE(review): what action list offset 11 denotes is defined by
// _hcltok_actions, which is declared elsewhere in this file — confirm
// there. Do not edit by hand; regenerate from scan_tokens.rl.
var _hcltok_from_state_actions []byte = []byte{
	// states 0-79
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	// states 80-159
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	// states 160-239
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	// states 240-319
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	// states 320-399
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	// states 400-479
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	// states 480-559
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	// states 560-639
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	// states 640-719
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	// states 720-799
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	// states 800-879
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	// states 880-959
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	// states 960-1039
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	// states 1040-1119
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	// states 1120-1199
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	// states 1200-1279
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	// states 1280-1359
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	// states 1360-1439
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	// states 1440-1463
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	// states 1464-1471: entry action at state 1464 (hcltok_en_main / hcltok_start)
	11, 0, 0, 0, 0, 0, 0, 0,
	// states 1472-1511: entry action at state 1515 (hcltok_en_stringTemplate)
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 11, 0, 0, 0, 0,
	// states 1520-1535
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	// states 1536-1543: entry action at state 1541 (hcltok_en_heredocTemplate)
	0, 0, 0, 0, 0, 11, 0, 0,
	// states 1544-1551: entry action at state 1550 (hcltok_en_bareTemplate)
	0, 0, 0, 0, 0, 0, 11, 0,
	// states 1552-1559: entry action at state 1557 (hcltok_en_identOnly)
	0, 0, 0, 0, 0, 11, 0, 0,
	// states 1560-1593
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0,
}
4091 | |||
// _hcltok_eof_trans is a Ragel-generated end-of-file transition table,
// indexed by the current machine state (cs).
//
// NOTE(review): per Ragel's table-based output convention, a nonzero entry
// here is (transition index + 1), fed to the _eof_trans label of the
// generated driver when input runs out in that state, and 0 means the state
// has no EOF transition. The EOF-handling code that consumes this table
// lies outside this chunk — confirm against the _test_eof block there.
// Do not edit by hand; regenerate from scan_tokens.rl.
var _hcltok_eof_trans []int16 = []int16{
	// states 0-7
	0, 1, 4, 1, 1, 9, 9, 9,
	// states 8-415 (all 4)
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	// states 88-167
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	// states 168-247
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	// states 248-327
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	// states 328-407
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	4, 4, 4, 4, 4, 4, 4, 4,
	// states 408-415
	4, 4, 4, 4, 4, 4, 4, 4,
	// states 416-423
	422, 422, 1, 422, 422, 422, 422, 422,
	// states 424-687 (all 422)
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	// states 504-583
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	// states 584-663
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	// states 664-687
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	422, 422, 422, 422, 422, 422, 422, 422,
	// states 688-695
	422, 422, 422, 672, 672, 672, 672, 672,
	// states 696-927 (all 672)
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	// states 776-855
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	// states 856-927
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	672, 672, 672, 672, 672, 672, 672, 672,
	// states 928-959
	672, 769, 769, 769, 769, 769, 775, 775,
	777, 779, 779, 777, 777, 779, 0, 0,
	787, 789, 787, 787, 789, 0, 0, 795,
	795, 797, 795, 795, 795, 795, 795, 795,
	// states 960-1223 (all 795)
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	// states 1040-1119
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	// states 1120-1199
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	// states 1200-1223
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	795, 795, 795, 795, 795, 795, 795, 795,
	// states 1224-1231
	795, 795, 1046, 1046, 1046, 1046, 1046, 1046,
	// states 1232-1463 (all 1046)
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	// states 1312-1391
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	// states 1392-1463
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
	// states 1464-1479 (main-scanner region starting at hcltok_en_main)
	0, 1196, 1197, 1198, 1197, 1198, 1198, 1198,
	1202, 1203, 1198, 1198, 1198, 1209, 1198, 1198,
	// states 1480-1511 (all 1239)
	1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239,
	1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239,
	1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239,
	1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239,
	// states 1512-1559 (template/heredoc/ident entry regions)
	1239, 1239, 1239, 0, 1392, 1396, 1404, 1392,
	1392, 1396, 1396, 1404, 1396, 1392, 1404, 1404,
	1404, 1404, 1404, 1396, 1396, 1396, 1458, 1460,
	1458, 1463, 1465, 1465, 1465, 0, 1474, 1478,
	1487, 1496, 1498, 1500, 1500, 1500, 0, 1508,
	1511, 1513, 1515, 1515, 1515, 0, 1552, 1580,
	// states 1560-1591 (all 1580)
	1580, 1580, 1580, 1580, 1580, 1580, 1580, 1580,
	1580, 1580, 1580, 1580, 1580, 1580, 1580, 1580,
	1580, 1580, 1580, 1580, 1580, 1580, 1580, 1580,
	1580, 1580, 1580, 1580, 1580, 1580, 1580, 1580,
	// states 1592-1593
	1580, 1580,
}
4294 | |||
// Sentinel states for the Ragel-generated scanner: the initial state,
// the first accepting state, and the dead (error) state.
const (
	hcltok_start       int = 1464 // initial state of the machine
	hcltok_first_final int = 1464 // lowest state id that is accepting
	hcltok_error       int = 0    // dead state: scanning cannot continue
)

// Entry-point states selected by scanMode before the machine runs
// (see the cs switch in scanTokens below).
const (
	hcltok_en_stringTemplate  int = 1515 // quoted string template
	hcltok_en_heredocTemplate int = 1541 // heredoc template body
	hcltok_en_bareTemplate    int = 1550 // bare template (scanTemplate mode)
	hcltok_en_identOnly       int = 1557 // identifier-only scanning (scanIdentOnly mode)
	hcltok_en_main            int = 1464 // normal HCL token stream (scanNormal mode)
)
4304 | |||
4305 | // line 16 "scan_tokens.rl" | ||
4306 | |||
4307 | func scanTokens(data []byte, filename string, start hcl.Pos, mode scanMode) []Token { | ||
4308 | f := &tokenAccum{ | ||
4309 | Filename: filename, | ||
4310 | Bytes: data, | ||
4311 | Pos: start, | ||
4312 | } | ||
4313 | |||
4314 | // line 294 "scan_tokens.rl" | ||
4315 | |||
4316 | // Ragel state | ||
4317 | p := 0 // "Pointer" into data | ||
4318 | pe := len(data) // End-of-data "pointer" | ||
4319 | ts := 0 | ||
4320 | te := 0 | ||
4321 | act := 0 | ||
4322 | eof := pe | ||
4323 | var stack []int | ||
4324 | var top int | ||
4325 | |||
4326 | var cs int // current state | ||
4327 | switch mode { | ||
4328 | case scanNormal: | ||
4329 | cs = hcltok_en_main | ||
4330 | case scanTemplate: | ||
4331 | cs = hcltok_en_bareTemplate | ||
4332 | case scanIdentOnly: | ||
4333 | cs = hcltok_en_identOnly | ||
4334 | default: | ||
4335 | panic("invalid scanMode") | ||
4336 | } | ||
4337 | |||
4338 | braces := 0 | ||
4339 | var retBraces []int // stack of brace levels that cause us to use fret | ||
4340 | var heredocs []heredocInProgress // stack of heredocs we're currently processing | ||
4341 | |||
4342 | // line 329 "scan_tokens.rl" | ||
4343 | |||
4344 | // Make Go compiler happy | ||
4345 | _ = ts | ||
4346 | _ = te | ||
4347 | _ = act | ||
4348 | _ = eof | ||
4349 | |||
4350 | token := func(ty TokenType) { | ||
4351 | f.emitToken(ty, ts, te) | ||
4352 | } | ||
4353 | selfToken := func() { | ||
4354 | b := data[ts:te] | ||
4355 | if len(b) != 1 { | ||
4356 | // should never happen | ||
4357 | panic("selfToken only works for single-character tokens") | ||
4358 | } | ||
4359 | f.emitToken(TokenType(b[0]), ts, te) | ||
4360 | } | ||
4361 | |||
4362 | // line 4372 "scan_tokens.go" | ||
4363 | { | ||
4364 | top = 0 | ||
4365 | ts = 0 | ||
4366 | te = 0 | ||
4367 | act = 0 | ||
4368 | } | ||
4369 | |||
4370 | // line 4380 "scan_tokens.go" | ||
4371 | { | ||
4372 | var _klen int | ||
4373 | var _trans int | ||
4374 | var _acts int | ||
4375 | var _nacts uint | ||
4376 | var _keys int | ||
4377 | if p == pe { | ||
4378 | goto _test_eof | ||
4379 | } | ||
4380 | if cs == 0 { | ||
4381 | goto _out | ||
4382 | } | ||
4383 | _resume: | ||
4384 | _acts = int(_hcltok_from_state_actions[cs]) | ||
4385 | _nacts = uint(_hcltok_actions[_acts]) | ||
4386 | _acts++ | ||
4387 | for ; _nacts > 0; _nacts-- { | ||
4388 | _acts++ | ||
4389 | switch _hcltok_actions[_acts-1] { | ||
4390 | case 6: | ||
4391 | // line 1 "NONE" | ||
4392 | |||
4393 | ts = p | ||
4394 | |||
4395 | // line 4404 "scan_tokens.go" | ||
4396 | } | ||
4397 | } | ||
4398 | |||
4399 | _keys = int(_hcltok_key_offsets[cs]) | ||
4400 | _trans = int(_hcltok_index_offsets[cs]) | ||
4401 | |||
4402 | _klen = int(_hcltok_single_lengths[cs]) | ||
4403 | if _klen > 0 { | ||
4404 | _lower := int(_keys) | ||
4405 | var _mid int | ||
4406 | _upper := int(_keys + _klen - 1) | ||
4407 | for { | ||
4408 | if _upper < _lower { | ||
4409 | break | ||
4410 | } | ||
4411 | |||
4412 | _mid = _lower + ((_upper - _lower) >> 1) | ||
4413 | switch { | ||
4414 | case data[p] < _hcltok_trans_keys[_mid]: | ||
4415 | _upper = _mid - 1 | ||
4416 | case data[p] > _hcltok_trans_keys[_mid]: | ||
4417 | _lower = _mid + 1 | ||
4418 | default: | ||
4419 | _trans += int(_mid - int(_keys)) | ||
4420 | goto _match | ||
4421 | } | ||
4422 | } | ||
4423 | _keys += _klen | ||
4424 | _trans += _klen | ||
4425 | } | ||
4426 | |||
4427 | _klen = int(_hcltok_range_lengths[cs]) | ||
4428 | if _klen > 0 { | ||
4429 | _lower := int(_keys) | ||
4430 | var _mid int | ||
4431 | _upper := int(_keys + (_klen << 1) - 2) | ||
4432 | for { | ||
4433 | if _upper < _lower { | ||
4434 | break | ||
4435 | } | ||
4436 | |||
4437 | _mid = _lower + (((_upper - _lower) >> 1) & ^1) | ||
4438 | switch { | ||
4439 | case data[p] < _hcltok_trans_keys[_mid]: | ||
4440 | _upper = _mid - 2 | ||
4441 | case data[p] > _hcltok_trans_keys[_mid+1]: | ||
4442 | _lower = _mid + 2 | ||
4443 | default: | ||
4444 | _trans += int((_mid - int(_keys)) >> 1) | ||
4445 | goto _match | ||
4446 | } | ||
4447 | } | ||
4448 | _trans += _klen | ||
4449 | } | ||
4450 | |||
4451 | _match: | ||
4452 | _trans = int(_hcltok_indicies[_trans]) | ||
4453 | _eof_trans: | ||
4454 | cs = int(_hcltok_trans_targs[_trans]) | ||
4455 | |||
4456 | if _hcltok_trans_actions[_trans] == 0 { | ||
4457 | goto _again | ||
4458 | } | ||
4459 | |||
4460 | _acts = int(_hcltok_trans_actions[_trans]) | ||
4461 | _nacts = uint(_hcltok_actions[_acts]) | ||
4462 | _acts++ | ||
4463 | for ; _nacts > 0; _nacts-- { | ||
4464 | _acts++ | ||
4465 | switch _hcltok_actions[_acts-1] { | ||
4466 | case 0: | ||
4467 | // line 218 "scan_tokens.rl" | ||
4468 | |||
4469 | p-- | ||
4470 | |||
4471 | case 1: | ||
4472 | // line 219 "scan_tokens.rl" | ||
4473 | |||
4474 | p-- | ||
4475 | |||
4476 | case 2: | ||
4477 | // line 224 "scan_tokens.rl" | ||
4478 | |||
4479 | p-- | ||
4480 | |||
4481 | case 3: | ||
4482 | // line 225 "scan_tokens.rl" | ||
4483 | |||
4484 | p-- | ||
4485 | |||
4486 | case 7: | ||
4487 | // line 1 "NONE" | ||
4488 | |||
4489 | te = p + 1 | ||
4490 | |||
4491 | case 8: | ||
4492 | // line 155 "scan_tokens.rl" | ||
4493 | |||
4494 | te = p + 1 | ||
4495 | { | ||
4496 | token(TokenTemplateInterp) | ||
4497 | braces++ | ||
4498 | retBraces = append(retBraces, braces) | ||
4499 | if len(heredocs) > 0 { | ||
4500 | heredocs[len(heredocs)-1].StartOfLine = false | ||
4501 | } | ||
4502 | { | ||
4503 | stack = append(stack, 0) | ||
4504 | stack[top] = cs | ||
4505 | top++ | ||
4506 | cs = 1464 | ||
4507 | goto _again | ||
4508 | } | ||
4509 | } | ||
4510 | case 9: | ||
4511 | // line 165 "scan_tokens.rl" | ||
4512 | |||
4513 | te = p + 1 | ||
4514 | { | ||
4515 | token(TokenTemplateControl) | ||
4516 | braces++ | ||
4517 | retBraces = append(retBraces, braces) | ||
4518 | if len(heredocs) > 0 { | ||
4519 | heredocs[len(heredocs)-1].StartOfLine = false | ||
4520 | } | ||
4521 | { | ||
4522 | stack = append(stack, 0) | ||
4523 | stack[top] = cs | ||
4524 | top++ | ||
4525 | cs = 1464 | ||
4526 | goto _again | ||
4527 | } | ||
4528 | } | ||
4529 | case 10: | ||
4530 | // line 79 "scan_tokens.rl" | ||
4531 | |||
4532 | te = p + 1 | ||
4533 | { | ||
4534 | token(TokenCQuote) | ||
4535 | top-- | ||
4536 | cs = stack[top] | ||
4537 | { | ||
4538 | stack = stack[:len(stack)-1] | ||
4539 | } | ||
4540 | goto _again | ||
4541 | |||
4542 | } | ||
4543 | case 11: | ||
4544 | // line 239 "scan_tokens.rl" | ||
4545 | |||
4546 | te = p + 1 | ||
4547 | { | ||
4548 | token(TokenInvalid) | ||
4549 | } | ||
4550 | case 12: | ||
4551 | // line 240 "scan_tokens.rl" | ||
4552 | |||
4553 | te = p + 1 | ||
4554 | { | ||
4555 | token(TokenBadUTF8) | ||
4556 | } | ||
4557 | case 13: | ||
4558 | // line 155 "scan_tokens.rl" | ||
4559 | |||
4560 | te = p | ||
4561 | p-- | ||
4562 | { | ||
4563 | token(TokenTemplateInterp) | ||
4564 | braces++ | ||
4565 | retBraces = append(retBraces, braces) | ||
4566 | if len(heredocs) > 0 { | ||
4567 | heredocs[len(heredocs)-1].StartOfLine = false | ||
4568 | } | ||
4569 | { | ||
4570 | stack = append(stack, 0) | ||
4571 | stack[top] = cs | ||
4572 | top++ | ||
4573 | cs = 1464 | ||
4574 | goto _again | ||
4575 | } | ||
4576 | } | ||
4577 | case 14: | ||
4578 | // line 165 "scan_tokens.rl" | ||
4579 | |||
4580 | te = p | ||
4581 | p-- | ||
4582 | { | ||
4583 | token(TokenTemplateControl) | ||
4584 | braces++ | ||
4585 | retBraces = append(retBraces, braces) | ||
4586 | if len(heredocs) > 0 { | ||
4587 | heredocs[len(heredocs)-1].StartOfLine = false | ||
4588 | } | ||
4589 | { | ||
4590 | stack = append(stack, 0) | ||
4591 | stack[top] = cs | ||
4592 | top++ | ||
4593 | cs = 1464 | ||
4594 | goto _again | ||
4595 | } | ||
4596 | } | ||
4597 | case 15: | ||
4598 | // line 238 "scan_tokens.rl" | ||
4599 | |||
4600 | te = p | ||
4601 | p-- | ||
4602 | { | ||
4603 | token(TokenQuotedLit) | ||
4604 | } | ||
4605 | case 16: | ||
4606 | // line 239 "scan_tokens.rl" | ||
4607 | |||
4608 | te = p | ||
4609 | p-- | ||
4610 | { | ||
4611 | token(TokenInvalid) | ||
4612 | } | ||
4613 | case 17: | ||
4614 | // line 240 "scan_tokens.rl" | ||
4615 | |||
4616 | te = p | ||
4617 | p-- | ||
4618 | { | ||
4619 | token(TokenBadUTF8) | ||
4620 | } | ||
4621 | case 18: | ||
4622 | // line 238 "scan_tokens.rl" | ||
4623 | |||
4624 | p = (te) - 1 | ||
4625 | { | ||
4626 | token(TokenQuotedLit) | ||
4627 | } | ||
4628 | case 19: | ||
4629 | // line 240 "scan_tokens.rl" | ||
4630 | |||
4631 | p = (te) - 1 | ||
4632 | { | ||
4633 | token(TokenBadUTF8) | ||
4634 | } | ||
4635 | case 20: | ||
4636 | // line 143 "scan_tokens.rl" | ||
4637 | |||
4638 | act = 10 | ||
4639 | case 21: | ||
4640 | // line 248 "scan_tokens.rl" | ||
4641 | |||
4642 | act = 11 | ||
4643 | case 22: | ||
4644 | // line 155 "scan_tokens.rl" | ||
4645 | |||
4646 | te = p + 1 | ||
4647 | { | ||
4648 | token(TokenTemplateInterp) | ||
4649 | braces++ | ||
4650 | retBraces = append(retBraces, braces) | ||
4651 | if len(heredocs) > 0 { | ||
4652 | heredocs[len(heredocs)-1].StartOfLine = false | ||
4653 | } | ||
4654 | { | ||
4655 | stack = append(stack, 0) | ||
4656 | stack[top] = cs | ||
4657 | top++ | ||
4658 | cs = 1464 | ||
4659 | goto _again | ||
4660 | } | ||
4661 | } | ||
4662 | case 23: | ||
4663 | // line 165 "scan_tokens.rl" | ||
4664 | |||
4665 | te = p + 1 | ||
4666 | { | ||
4667 | token(TokenTemplateControl) | ||
4668 | braces++ | ||
4669 | retBraces = append(retBraces, braces) | ||
4670 | if len(heredocs) > 0 { | ||
4671 | heredocs[len(heredocs)-1].StartOfLine = false | ||
4672 | } | ||
4673 | { | ||
4674 | stack = append(stack, 0) | ||
4675 | stack[top] = cs | ||
4676 | top++ | ||
4677 | cs = 1464 | ||
4678 | goto _again | ||
4679 | } | ||
4680 | } | ||
4681 | case 24: | ||
4682 | // line 106 "scan_tokens.rl" | ||
4683 | |||
4684 | te = p + 1 | ||
4685 | { | ||
4686 | // This action is called specificially when a heredoc literal | ||
4687 | // ends with a newline character. | ||
4688 | |||
4689 | // This might actually be our end marker. | ||
4690 | topdoc := &heredocs[len(heredocs)-1] | ||
4691 | if topdoc.StartOfLine { | ||
4692 | maybeMarker := bytes.TrimSpace(data[ts:te]) | ||
4693 | if bytes.Equal(maybeMarker, topdoc.Marker) { | ||
4694 | // We actually emit two tokens here: the end-of-heredoc | ||
4695 | // marker first, and then separately the newline that | ||
4696 | // follows it. This then avoids issues with the closing | ||
4697 | // marker consuming a newline that would normally be used | ||
4698 | // to mark the end of an attribute definition. | ||
4699 | // We might have either a \n sequence or an \r\n sequence | ||
4700 | // here, so we must handle both. | ||
4701 | nls := te - 1 | ||
4702 | nle := te | ||
4703 | te-- | ||
4704 | if data[te-1] == '\r' { | ||
4705 | // back up one more byte | ||
4706 | nls-- | ||
4707 | te-- | ||
4708 | } | ||
4709 | token(TokenCHeredoc) | ||
4710 | ts = nls | ||
4711 | te = nle | ||
4712 | token(TokenNewline) | ||
4713 | heredocs = heredocs[:len(heredocs)-1] | ||
4714 | top-- | ||
4715 | cs = stack[top] | ||
4716 | { | ||
4717 | stack = stack[:len(stack)-1] | ||
4718 | } | ||
4719 | goto _again | ||
4720 | |||
4721 | } | ||
4722 | } | ||
4723 | |||
4724 | topdoc.StartOfLine = true | ||
4725 | token(TokenStringLit) | ||
4726 | } | ||
4727 | case 25: | ||
4728 | // line 248 "scan_tokens.rl" | ||
4729 | |||
4730 | te = p + 1 | ||
4731 | { | ||
4732 | token(TokenBadUTF8) | ||
4733 | } | ||
4734 | case 26: | ||
4735 | // line 155 "scan_tokens.rl" | ||
4736 | |||
4737 | te = p | ||
4738 | p-- | ||
4739 | { | ||
4740 | token(TokenTemplateInterp) | ||
4741 | braces++ | ||
4742 | retBraces = append(retBraces, braces) | ||
4743 | if len(heredocs) > 0 { | ||
4744 | heredocs[len(heredocs)-1].StartOfLine = false | ||
4745 | } | ||
4746 | { | ||
4747 | stack = append(stack, 0) | ||
4748 | stack[top] = cs | ||
4749 | top++ | ||
4750 | cs = 1464 | ||
4751 | goto _again | ||
4752 | } | ||
4753 | } | ||
4754 | case 27: | ||
4755 | // line 165 "scan_tokens.rl" | ||
4756 | |||
4757 | te = p | ||
4758 | p-- | ||
4759 | { | ||
4760 | token(TokenTemplateControl) | ||
4761 | braces++ | ||
4762 | retBraces = append(retBraces, braces) | ||
4763 | if len(heredocs) > 0 { | ||
4764 | heredocs[len(heredocs)-1].StartOfLine = false | ||
4765 | } | ||
4766 | { | ||
4767 | stack = append(stack, 0) | ||
4768 | stack[top] = cs | ||
4769 | top++ | ||
4770 | cs = 1464 | ||
4771 | goto _again | ||
4772 | } | ||
4773 | } | ||
4774 | case 28: | ||
4775 | // line 143 "scan_tokens.rl" | ||
4776 | |||
4777 | te = p | ||
4778 | p-- | ||
4779 | { | ||
4780 | // This action is called when a heredoc literal _doesn't_ end | ||
4781 | // with a newline character, e.g. because we're about to enter | ||
4782 | // an interpolation sequence. | ||
4783 | heredocs[len(heredocs)-1].StartOfLine = false | ||
4784 | token(TokenStringLit) | ||
4785 | } | ||
4786 | case 29: | ||
4787 | // line 248 "scan_tokens.rl" | ||
4788 | |||
4789 | te = p | ||
4790 | p-- | ||
4791 | { | ||
4792 | token(TokenBadUTF8) | ||
4793 | } | ||
4794 | case 30: | ||
4795 | // line 143 "scan_tokens.rl" | ||
4796 | |||
4797 | p = (te) - 1 | ||
4798 | { | ||
4799 | // This action is called when a heredoc literal _doesn't_ end | ||
4800 | // with a newline character, e.g. because we're about to enter | ||
4801 | // an interpolation sequence. | ||
4802 | heredocs[len(heredocs)-1].StartOfLine = false | ||
4803 | token(TokenStringLit) | ||
4804 | } | ||
4805 | case 31: | ||
4806 | // line 1 "NONE" | ||
4807 | |||
4808 | switch act { | ||
4809 | case 0: | ||
4810 | { | ||
4811 | cs = 0 | ||
4812 | goto _again | ||
4813 | } | ||
4814 | case 10: | ||
4815 | { | ||
4816 | p = (te) - 1 | ||
4817 | |||
4818 | // This action is called when a heredoc literal _doesn't_ end | ||
4819 | // with a newline character, e.g. because we're about to enter | ||
4820 | // an interpolation sequence. | ||
4821 | heredocs[len(heredocs)-1].StartOfLine = false | ||
4822 | token(TokenStringLit) | ||
4823 | } | ||
4824 | case 11: | ||
4825 | { | ||
4826 | p = (te) - 1 | ||
4827 | token(TokenBadUTF8) | ||
4828 | } | ||
4829 | } | ||
4830 | |||
4831 | case 32: | ||
4832 | // line 151 "scan_tokens.rl" | ||
4833 | |||
4834 | act = 14 | ||
4835 | case 33: | ||
4836 | // line 255 "scan_tokens.rl" | ||
4837 | |||
4838 | act = 15 | ||
4839 | case 34: | ||
4840 | // line 155 "scan_tokens.rl" | ||
4841 | |||
4842 | te = p + 1 | ||
4843 | { | ||
4844 | token(TokenTemplateInterp) | ||
4845 | braces++ | ||
4846 | retBraces = append(retBraces, braces) | ||
4847 | if len(heredocs) > 0 { | ||
4848 | heredocs[len(heredocs)-1].StartOfLine = false | ||
4849 | } | ||
4850 | { | ||
4851 | stack = append(stack, 0) | ||
4852 | stack[top] = cs | ||
4853 | top++ | ||
4854 | cs = 1464 | ||
4855 | goto _again | ||
4856 | } | ||
4857 | } | ||
4858 | case 35: | ||
4859 | // line 165 "scan_tokens.rl" | ||
4860 | |||
4861 | te = p + 1 | ||
4862 | { | ||
4863 | token(TokenTemplateControl) | ||
4864 | braces++ | ||
4865 | retBraces = append(retBraces, braces) | ||
4866 | if len(heredocs) > 0 { | ||
4867 | heredocs[len(heredocs)-1].StartOfLine = false | ||
4868 | } | ||
4869 | { | ||
4870 | stack = append(stack, 0) | ||
4871 | stack[top] = cs | ||
4872 | top++ | ||
4873 | cs = 1464 | ||
4874 | goto _again | ||
4875 | } | ||
4876 | } | ||
4877 | case 36: | ||
4878 | // line 151 "scan_tokens.rl" | ||
4879 | |||
4880 | te = p + 1 | ||
4881 | { | ||
4882 | token(TokenStringLit) | ||
4883 | } | ||
4884 | case 37: | ||
4885 | // line 255 "scan_tokens.rl" | ||
4886 | |||
4887 | te = p + 1 | ||
4888 | { | ||
4889 | token(TokenBadUTF8) | ||
4890 | } | ||
4891 | case 38: | ||
4892 | // line 155 "scan_tokens.rl" | ||
4893 | |||
4894 | te = p | ||
4895 | p-- | ||
4896 | { | ||
4897 | token(TokenTemplateInterp) | ||
4898 | braces++ | ||
4899 | retBraces = append(retBraces, braces) | ||
4900 | if len(heredocs) > 0 { | ||
4901 | heredocs[len(heredocs)-1].StartOfLine = false | ||
4902 | } | ||
4903 | { | ||
4904 | stack = append(stack, 0) | ||
4905 | stack[top] = cs | ||
4906 | top++ | ||
4907 | cs = 1464 | ||
4908 | goto _again | ||
4909 | } | ||
4910 | } | ||
4911 | case 39: | ||
4912 | // line 165 "scan_tokens.rl" | ||
4913 | |||
4914 | te = p | ||
4915 | p-- | ||
4916 | { | ||
4917 | token(TokenTemplateControl) | ||
4918 | braces++ | ||
4919 | retBraces = append(retBraces, braces) | ||
4920 | if len(heredocs) > 0 { | ||
4921 | heredocs[len(heredocs)-1].StartOfLine = false | ||
4922 | } | ||
4923 | { | ||
4924 | stack = append(stack, 0) | ||
4925 | stack[top] = cs | ||
4926 | top++ | ||
4927 | cs = 1464 | ||
4928 | goto _again | ||
4929 | } | ||
4930 | } | ||
4931 | case 40: | ||
4932 | // line 151 "scan_tokens.rl" | ||
4933 | |||
4934 | te = p | ||
4935 | p-- | ||
4936 | { | ||
4937 | token(TokenStringLit) | ||
4938 | } | ||
4939 | case 41: | ||
4940 | // line 255 "scan_tokens.rl" | ||
4941 | |||
4942 | te = p | ||
4943 | p-- | ||
4944 | { | ||
4945 | token(TokenBadUTF8) | ||
4946 | } | ||
4947 | case 42: | ||
4948 | // line 151 "scan_tokens.rl" | ||
4949 | |||
4950 | p = (te) - 1 | ||
4951 | { | ||
4952 | token(TokenStringLit) | ||
4953 | } | ||
4954 | case 43: | ||
4955 | // line 1 "NONE" | ||
4956 | |||
4957 | switch act { | ||
4958 | case 0: | ||
4959 | { | ||
4960 | cs = 0 | ||
4961 | goto _again | ||
4962 | } | ||
4963 | case 14: | ||
4964 | { | ||
4965 | p = (te) - 1 | ||
4966 | |||
4967 | token(TokenStringLit) | ||
4968 | } | ||
4969 | case 15: | ||
4970 | { | ||
4971 | p = (te) - 1 | ||
4972 | token(TokenBadUTF8) | ||
4973 | } | ||
4974 | } | ||
4975 | |||
4976 | case 44: | ||
4977 | // line 259 "scan_tokens.rl" | ||
4978 | |||
4979 | act = 16 | ||
4980 | case 45: | ||
4981 | // line 260 "scan_tokens.rl" | ||
4982 | |||
4983 | act = 17 | ||
4984 | case 46: | ||
4985 | // line 260 "scan_tokens.rl" | ||
4986 | |||
4987 | te = p + 1 | ||
4988 | { | ||
4989 | token(TokenBadUTF8) | ||
4990 | } | ||
4991 | case 47: | ||
4992 | // line 261 "scan_tokens.rl" | ||
4993 | |||
4994 | te = p + 1 | ||
4995 | { | ||
4996 | token(TokenInvalid) | ||
4997 | } | ||
4998 | case 48: | ||
4999 | // line 259 "scan_tokens.rl" | ||
5000 | |||
5001 | te = p | ||
5002 | p-- | ||
5003 | { | ||
5004 | token(TokenIdent) | ||
5005 | } | ||
5006 | case 49: | ||
5007 | // line 260 "scan_tokens.rl" | ||
5008 | |||
5009 | te = p | ||
5010 | p-- | ||
5011 | { | ||
5012 | token(TokenBadUTF8) | ||
5013 | } | ||
5014 | case 50: | ||
5015 | // line 259 "scan_tokens.rl" | ||
5016 | |||
5017 | p = (te) - 1 | ||
5018 | { | ||
5019 | token(TokenIdent) | ||
5020 | } | ||
5021 | case 51: | ||
5022 | // line 260 "scan_tokens.rl" | ||
5023 | |||
5024 | p = (te) - 1 | ||
5025 | { | ||
5026 | token(TokenBadUTF8) | ||
5027 | } | ||
5028 | case 52: | ||
5029 | // line 1 "NONE" | ||
5030 | |||
5031 | switch act { | ||
5032 | case 16: | ||
5033 | { | ||
5034 | p = (te) - 1 | ||
5035 | token(TokenIdent) | ||
5036 | } | ||
5037 | case 17: | ||
5038 | { | ||
5039 | p = (te) - 1 | ||
5040 | token(TokenBadUTF8) | ||
5041 | } | ||
5042 | } | ||
5043 | |||
5044 | case 53: | ||
5045 | // line 267 "scan_tokens.rl" | ||
5046 | |||
5047 | act = 21 | ||
5048 | case 54: | ||
5049 | // line 269 "scan_tokens.rl" | ||
5050 | |||
5051 | act = 22 | ||
5052 | case 55: | ||
5053 | // line 280 "scan_tokens.rl" | ||
5054 | |||
5055 | act = 32 | ||
5056 | case 56: | ||
5057 | // line 290 "scan_tokens.rl" | ||
5058 | |||
5059 | act = 38 | ||
5060 | case 57: | ||
5061 | // line 291 "scan_tokens.rl" | ||
5062 | |||
5063 | act = 39 | ||
5064 | case 58: | ||
5065 | // line 269 "scan_tokens.rl" | ||
5066 | |||
5067 | te = p + 1 | ||
5068 | { | ||
5069 | token(TokenComment) | ||
5070 | } | ||
5071 | case 59: | ||
5072 | // line 270 "scan_tokens.rl" | ||
5073 | |||
5074 | te = p + 1 | ||
5075 | { | ||
5076 | token(TokenNewline) | ||
5077 | } | ||
5078 | case 60: | ||
5079 | // line 272 "scan_tokens.rl" | ||
5080 | |||
5081 | te = p + 1 | ||
5082 | { | ||
5083 | token(TokenEqualOp) | ||
5084 | } | ||
5085 | case 61: | ||
5086 | // line 273 "scan_tokens.rl" | ||
5087 | |||
5088 | te = p + 1 | ||
5089 | { | ||
5090 | token(TokenNotEqual) | ||
5091 | } | ||
5092 | case 62: | ||
5093 | // line 274 "scan_tokens.rl" | ||
5094 | |||
5095 | te = p + 1 | ||
5096 | { | ||
5097 | token(TokenGreaterThanEq) | ||
5098 | } | ||
5099 | case 63: | ||
5100 | // line 275 "scan_tokens.rl" | ||
5101 | |||
5102 | te = p + 1 | ||
5103 | { | ||
5104 | token(TokenLessThanEq) | ||
5105 | } | ||
5106 | case 64: | ||
5107 | // line 276 "scan_tokens.rl" | ||
5108 | |||
5109 | te = p + 1 | ||
5110 | { | ||
5111 | token(TokenAnd) | ||
5112 | } | ||
5113 | case 65: | ||
5114 | // line 277 "scan_tokens.rl" | ||
5115 | |||
5116 | te = p + 1 | ||
5117 | { | ||
5118 | token(TokenOr) | ||
5119 | } | ||
5120 | case 66: | ||
5121 | // line 278 "scan_tokens.rl" | ||
5122 | |||
5123 | te = p + 1 | ||
5124 | { | ||
5125 | token(TokenEllipsis) | ||
5126 | } | ||
5127 | case 67: | ||
5128 | // line 279 "scan_tokens.rl" | ||
5129 | |||
5130 | te = p + 1 | ||
5131 | { | ||
5132 | token(TokenFatArrow) | ||
5133 | } | ||
5134 | case 68: | ||
5135 | // line 280 "scan_tokens.rl" | ||
5136 | |||
5137 | te = p + 1 | ||
5138 | { | ||
5139 | selfToken() | ||
5140 | } | ||
5141 | case 69: | ||
5142 | // line 175 "scan_tokens.rl" | ||
5143 | |||
5144 | te = p + 1 | ||
5145 | { | ||
5146 | token(TokenOBrace) | ||
5147 | braces++ | ||
5148 | } | ||
5149 | case 70: | ||
5150 | // line 180 "scan_tokens.rl" | ||
5151 | |||
5152 | te = p + 1 | ||
5153 | { | ||
5154 | if len(retBraces) > 0 && retBraces[len(retBraces)-1] == braces { | ||
5155 | token(TokenTemplateSeqEnd) | ||
5156 | braces-- | ||
5157 | retBraces = retBraces[0 : len(retBraces)-1] | ||
5158 | top-- | ||
5159 | cs = stack[top] | ||
5160 | { | ||
5161 | stack = stack[:len(stack)-1] | ||
5162 | } | ||
5163 | goto _again | ||
5164 | |||
5165 | } else { | ||
5166 | token(TokenCBrace) | ||
5167 | braces-- | ||
5168 | } | ||
5169 | } | ||
5170 | case 71: | ||
5171 | // line 192 "scan_tokens.rl" | ||
5172 | |||
5173 | te = p + 1 | ||
5174 | { | ||
5175 | // Only consume from the retBraces stack and return if we are at | ||
5176 | // a suitable brace nesting level, otherwise things will get | ||
5177 | // confused. (Not entering this branch indicates a syntax error, | ||
5178 | // which we will catch in the parser.) | ||
5179 | if len(retBraces) > 0 && retBraces[len(retBraces)-1] == braces { | ||
5180 | token(TokenTemplateSeqEnd) | ||
5181 | braces-- | ||
5182 | retBraces = retBraces[0 : len(retBraces)-1] | ||
5183 | top-- | ||
5184 | cs = stack[top] | ||
5185 | { | ||
5186 | stack = stack[:len(stack)-1] | ||
5187 | } | ||
5188 | goto _again | ||
5189 | |||
5190 | } else { | ||
5191 | // We intentionally generate a TokenTemplateSeqEnd here, | ||
5192 | // even though the user apparently wanted a brace, because | ||
5193 | // we want to allow the parser to catch the incorrect use | ||
5194 | // of a ~} to balance a generic opening brace, rather than | ||
5195 | // a template sequence. | ||
5196 | token(TokenTemplateSeqEnd) | ||
5197 | braces-- | ||
5198 | } | ||
5199 | } | ||
5200 | case 72: | ||
5201 | // line 74 "scan_tokens.rl" | ||
5202 | |||
5203 | te = p + 1 | ||
5204 | { | ||
5205 | token(TokenOQuote) | ||
5206 | { | ||
5207 | stack = append(stack, 0) | ||
5208 | stack[top] = cs | ||
5209 | top++ | ||
5210 | cs = 1515 | ||
5211 | goto _again | ||
5212 | } | ||
5213 | } | ||
5214 | case 73: | ||
5215 | // line 84 "scan_tokens.rl" | ||
5216 | |||
5217 | te = p + 1 | ||
5218 | { | ||
5219 | token(TokenOHeredoc) | ||
5220 | // the token is currently the whole heredoc introducer, like | ||
5221 | // <<EOT or <<-EOT, followed by a newline. We want to extract | ||
5222 | // just the "EOT" portion that we'll use as the closing marker. | ||
5223 | |||
5224 | marker := data[ts+2 : te-1] | ||
5225 | if marker[0] == '-' { | ||
5226 | marker = marker[1:] | ||
5227 | } | ||
5228 | if marker[len(marker)-1] == '\r' { | ||
5229 | marker = marker[:len(marker)-1] | ||
5230 | } | ||
5231 | |||
5232 | heredocs = append(heredocs, heredocInProgress{ | ||
5233 | Marker: marker, | ||
5234 | StartOfLine: true, | ||
5235 | }) | ||
5236 | |||
5237 | { | ||
5238 | stack = append(stack, 0) | ||
5239 | stack[top] = cs | ||
5240 | top++ | ||
5241 | cs = 1541 | ||
5242 | goto _again | ||
5243 | } | ||
5244 | } | ||
5245 | case 74: | ||
5246 | // line 290 "scan_tokens.rl" | ||
5247 | |||
5248 | te = p + 1 | ||
5249 | { | ||
5250 | token(TokenBadUTF8) | ||
5251 | } | ||
5252 | case 75: | ||
5253 | // line 291 "scan_tokens.rl" | ||
5254 | |||
5255 | te = p + 1 | ||
5256 | { | ||
5257 | token(TokenInvalid) | ||
5258 | } | ||
5259 | case 76: | ||
5260 | // line 265 "scan_tokens.rl" | ||
5261 | |||
5262 | te = p | ||
5263 | p-- | ||
5264 | |||
5265 | case 77: | ||
5266 | // line 266 "scan_tokens.rl" | ||
5267 | |||
5268 | te = p | ||
5269 | p-- | ||
5270 | { | ||
5271 | token(TokenNumberLit) | ||
5272 | } | ||
5273 | case 78: | ||
5274 | // line 267 "scan_tokens.rl" | ||
5275 | |||
5276 | te = p | ||
5277 | p-- | ||
5278 | { | ||
5279 | token(TokenIdent) | ||
5280 | } | ||
5281 | case 79: | ||
5282 | // line 269 "scan_tokens.rl" | ||
5283 | |||
5284 | te = p | ||
5285 | p-- | ||
5286 | { | ||
5287 | token(TokenComment) | ||
5288 | } | ||
5289 | case 80: | ||
5290 | // line 280 "scan_tokens.rl" | ||
5291 | |||
5292 | te = p | ||
5293 | p-- | ||
5294 | { | ||
5295 | selfToken() | ||
5296 | } | ||
5297 | case 81: | ||
5298 | // line 290 "scan_tokens.rl" | ||
5299 | |||
5300 | te = p | ||
5301 | p-- | ||
5302 | { | ||
5303 | token(TokenBadUTF8) | ||
5304 | } | ||
5305 | case 82: | ||
5306 | // line 291 "scan_tokens.rl" | ||
5307 | |||
5308 | te = p | ||
5309 | p-- | ||
5310 | { | ||
5311 | token(TokenInvalid) | ||
5312 | } | ||
5313 | case 83: | ||
5314 | // line 266 "scan_tokens.rl" | ||
5315 | |||
5316 | p = (te) - 1 | ||
5317 | { | ||
5318 | token(TokenNumberLit) | ||
5319 | } | ||
5320 | case 84: | ||
5321 | // line 267 "scan_tokens.rl" | ||
5322 | |||
5323 | p = (te) - 1 | ||
5324 | { | ||
5325 | token(TokenIdent) | ||
5326 | } | ||
5327 | case 85: | ||
5328 | // line 280 "scan_tokens.rl" | ||
5329 | |||
5330 | p = (te) - 1 | ||
5331 | { | ||
5332 | selfToken() | ||
5333 | } | ||
5334 | case 86: | ||
5335 | // line 290 "scan_tokens.rl" | ||
5336 | |||
5337 | p = (te) - 1 | ||
5338 | { | ||
5339 | token(TokenBadUTF8) | ||
5340 | } | ||
5341 | case 87: | ||
5342 | // line 1 "NONE" | ||
5343 | |||
5344 | switch act { | ||
5345 | case 21: | ||
5346 | { | ||
5347 | p = (te) - 1 | ||
5348 | token(TokenIdent) | ||
5349 | } | ||
5350 | case 22: | ||
5351 | { | ||
5352 | p = (te) - 1 | ||
5353 | token(TokenComment) | ||
5354 | } | ||
5355 | case 32: | ||
5356 | { | ||
5357 | p = (te) - 1 | ||
5358 | selfToken() | ||
5359 | } | ||
5360 | case 38: | ||
5361 | { | ||
5362 | p = (te) - 1 | ||
5363 | token(TokenBadUTF8) | ||
5364 | } | ||
5365 | case 39: | ||
5366 | { | ||
5367 | p = (te) - 1 | ||
5368 | token(TokenInvalid) | ||
5369 | } | ||
5370 | } | ||
5371 | |||
5372 | // line 5232 "scan_tokens.go" | ||
5373 | } | ||
5374 | } | ||
5375 | |||
5376 | _again: | ||
5377 | _acts = int(_hcltok_to_state_actions[cs]) | ||
5378 | _nacts = uint(_hcltok_actions[_acts]) | ||
5379 | _acts++ | ||
5380 | for ; _nacts > 0; _nacts-- { | ||
5381 | _acts++ | ||
5382 | switch _hcltok_actions[_acts-1] { | ||
5383 | case 4: | ||
5384 | // line 1 "NONE" | ||
5385 | |||
5386 | ts = 0 | ||
5387 | |||
5388 | case 5: | ||
5389 | // line 1 "NONE" | ||
5390 | |||
5391 | act = 0 | ||
5392 | |||
5393 | // line 5252 "scan_tokens.go" | ||
5394 | } | ||
5395 | } | ||
5396 | |||
5397 | if cs == 0 { | ||
5398 | goto _out | ||
5399 | } | ||
5400 | p++ | ||
5401 | if p != pe { | ||
5402 | goto _resume | ||
5403 | } | ||
5404 | _test_eof: | ||
5405 | { | ||
5406 | } | ||
5407 | if p == eof { | ||
5408 | if _hcltok_eof_trans[cs] > 0 { | ||
5409 | _trans = int(_hcltok_eof_trans[cs] - 1) | ||
5410 | goto _eof_trans | ||
5411 | } | ||
5412 | } | ||
5413 | |||
5414 | _out: | ||
5415 | { | ||
5416 | } | ||
5417 | } | ||
5418 | |||
5419 | // line 352 "scan_tokens.rl" | ||
5420 | |||
5421 | // If we fall out here without being in a final state then we've | ||
5422 | // encountered something that the scanner can't match, which we'll | ||
5423 | // deal with as an invalid. | ||
5424 | if cs < hcltok_first_final { | ||
5425 | if mode == scanTemplate && len(stack) == 0 { | ||
5426 | // If we're scanning a bare template then any straggling | ||
5427 | // top-level stuff is actually literal string, rather than | ||
5428 | // invalid. This handles the case where the template ends | ||
5429 | // with a single "$" or "%", which trips us up because we | ||
5430 | // want to see another character to decide if it's a sequence | ||
5431 | // or an escape. | ||
5432 | f.emitToken(TokenStringLit, ts, len(data)) | ||
5433 | } else { | ||
5434 | f.emitToken(TokenInvalid, ts, len(data)) | ||
5435 | } | ||
5436 | } | ||
5437 | |||
5438 | // We always emit a synthetic EOF token at the end, since it gives the | ||
5439 | // parser position information for an "unexpected EOF" diagnostic. | ||
5440 | f.emitToken(TokenEOF, len(data), len(data)) | ||
5441 | |||
5442 | return f.Tokens | ||
5443 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_tokens.rl b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_tokens.rl new file mode 100644 index 0000000..83ef65b --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_tokens.rl | |||
@@ -0,0 +1,376 @@ | |||
1 | |||
2 | package hclsyntax | ||
3 | |||
4 | import ( | ||
5 | "bytes" | ||
6 | |||
7 | "github.com/hashicorp/hcl2/hcl" | ||
8 | ) | ||
9 | |||
10 | // This file is generated from scan_tokens.rl. DO NOT EDIT. | ||
11 | %%{ | ||
12 | # (except you are actually in scan_tokens.rl here, so edit away!) | ||
13 | |||
14 | machine hcltok; | ||
15 | write data; | ||
16 | }%% | ||
17 | |||
18 | func scanTokens(data []byte, filename string, start hcl.Pos, mode scanMode) []Token { | ||
19 | f := &tokenAccum{ | ||
20 | Filename: filename, | ||
21 | Bytes: data, | ||
22 | Pos: start, | ||
23 | } | ||
24 | |||
25 | %%{ | ||
26 | include UnicodeDerived "unicode_derived.rl"; | ||
27 | |||
28 | UTF8Cont = 0x80 .. 0xBF; | ||
29 | AnyUTF8 = ( | ||
30 | 0x00..0x7F | | ||
31 | 0xC0..0xDF . UTF8Cont | | ||
32 | 0xE0..0xEF . UTF8Cont . UTF8Cont | | ||
33 | 0xF0..0xF7 . UTF8Cont . UTF8Cont . UTF8Cont | ||
34 | ); | ||
35 | BrokenUTF8 = any - AnyUTF8; | ||
36 | |||
37 | NumberLitContinue = (digit|'.'|('e'|'E') ('+'|'-')? digit); | ||
38 | NumberLit = digit ("" | (NumberLitContinue - '.') | (NumberLitContinue* (NumberLitContinue - '.'))); | ||
39 | Ident = (ID_Start | '_') (ID_Continue | '-')*; | ||
40 | |||
41 | # Symbols that just represent themselves are handled as a single rule. | ||
42 | SelfToken = "[" | "]" | "(" | ")" | "." | "," | "*" | "/" | "%" | "+" | "-" | "=" | "<" | ">" | "!" | "?" | ":" | "\n" | "&" | "|" | "~" | "^" | ";" | "`"; | ||
43 | |||
44 | EqualOp = "=="; | ||
45 | NotEqual = "!="; | ||
46 | GreaterThanEqual = ">="; | ||
47 | LessThanEqual = "<="; | ||
48 | LogicalAnd = "&&"; | ||
49 | LogicalOr = "||"; | ||
50 | |||
51 | Ellipsis = "..."; | ||
52 | FatArrow = "=>"; | ||
53 | |||
54 | Newline = '\r' ? '\n'; | ||
55 | EndOfLine = Newline; | ||
56 | |||
57 | BeginStringTmpl = '"'; | ||
58 | BeginHeredocTmpl = '<<' ('-')? Ident Newline; | ||
59 | |||
60 | Comment = ( | ||
61 | ("#" (any - EndOfLine)* EndOfLine) | | ||
62 | ("//" (any - EndOfLine)* EndOfLine) | | ||
63 | ("/*" any* "*/") | ||
64 | ); | ||
65 | |||
66 | # Note: hclwrite assumes that only ASCII spaces appear between tokens, | ||
67 | # and uses this assumption to recreate the spaces between tokens by | ||
68 | # looking at byte offset differences. This means it will produce | ||
69 | # incorrect results in the presence of tabs, but that's acceptable | ||
70 | # because the canonical style (which hclwrite itself can impose | ||
71 | # automatically is to never use tabs). | ||
72 | Spaces = (' ' | 0x09)+; | ||
73 | |||
74 | action beginStringTemplate { | ||
75 | token(TokenOQuote); | ||
76 | fcall stringTemplate; | ||
77 | } | ||
78 | |||
79 | action endStringTemplate { | ||
80 | token(TokenCQuote); | ||
81 | fret; | ||
82 | } | ||
83 | |||
84 | action beginHeredocTemplate { | ||
85 | token(TokenOHeredoc); | ||
86 | // the token is currently the whole heredoc introducer, like | ||
87 | // <<EOT or <<-EOT, followed by a newline. We want to extract | ||
88 | // just the "EOT" portion that we'll use as the closing marker. | ||
89 | |||
90 | marker := data[ts+2:te-1] | ||
91 | if marker[0] == '-' { | ||
92 | marker = marker[1:] | ||
93 | } | ||
94 | if marker[len(marker)-1] == '\r' { | ||
95 | marker = marker[:len(marker)-1] | ||
96 | } | ||
97 | |||
98 | heredocs = append(heredocs, heredocInProgress{ | ||
99 | Marker: marker, | ||
100 | StartOfLine: true, | ||
101 | }) | ||
102 | |||
103 | fcall heredocTemplate; | ||
104 | } | ||
105 | |||
106 | action heredocLiteralEOL { | ||
107 | // This action is called specificially when a heredoc literal | ||
108 | // ends with a newline character. | ||
109 | |||
110 | // This might actually be our end marker. | ||
111 | topdoc := &heredocs[len(heredocs)-1] | ||
112 | if topdoc.StartOfLine { | ||
113 | maybeMarker := bytes.TrimSpace(data[ts:te]) | ||
114 | if bytes.Equal(maybeMarker, topdoc.Marker) { | ||
115 | // We actually emit two tokens here: the end-of-heredoc | ||
116 | // marker first, and then separately the newline that | ||
117 | // follows it. This then avoids issues with the closing | ||
118 | // marker consuming a newline that would normally be used | ||
119 | // to mark the end of an attribute definition. | ||
120 | // We might have either a \n sequence or an \r\n sequence | ||
121 | // here, so we must handle both. | ||
122 | nls := te-1 | ||
123 | nle := te | ||
124 | te-- | ||
125 | if data[te-1] == '\r' { | ||
126 | // back up one more byte | ||
127 | nls-- | ||
128 | te-- | ||
129 | } | ||
130 | token(TokenCHeredoc); | ||
131 | ts = nls | ||
132 | te = nle | ||
133 | token(TokenNewline); | ||
134 | heredocs = heredocs[:len(heredocs)-1] | ||
135 | fret; | ||
136 | } | ||
137 | } | ||
138 | |||
139 | topdoc.StartOfLine = true; | ||
140 | token(TokenStringLit); | ||
141 | } | ||
142 | |||
143 | action heredocLiteralMidline { | ||
144 | // This action is called when a heredoc literal _doesn't_ end | ||
145 | // with a newline character, e.g. because we're about to enter | ||
146 | // an interpolation sequence. | ||
147 | heredocs[len(heredocs)-1].StartOfLine = false; | ||
148 | token(TokenStringLit); | ||
149 | } | ||
150 | |||
151 | action bareTemplateLiteral { | ||
152 | token(TokenStringLit); | ||
153 | } | ||
154 | |||
155 | action beginTemplateInterp { | ||
156 | token(TokenTemplateInterp); | ||
157 | braces++; | ||
158 | retBraces = append(retBraces, braces); | ||
159 | if len(heredocs) > 0 { | ||
160 | heredocs[len(heredocs)-1].StartOfLine = false; | ||
161 | } | ||
162 | fcall main; | ||
163 | } | ||
164 | |||
165 | action beginTemplateControl { | ||
166 | token(TokenTemplateControl); | ||
167 | braces++; | ||
168 | retBraces = append(retBraces, braces); | ||
169 | if len(heredocs) > 0 { | ||
170 | heredocs[len(heredocs)-1].StartOfLine = false; | ||
171 | } | ||
172 | fcall main; | ||
173 | } | ||
174 | |||
175 | action openBrace { | ||
176 | token(TokenOBrace); | ||
177 | braces++; | ||
178 | } | ||
179 | |||
180 | action closeBrace { | ||
181 | if len(retBraces) > 0 && retBraces[len(retBraces)-1] == braces { | ||
182 | token(TokenTemplateSeqEnd); | ||
183 | braces--; | ||
184 | retBraces = retBraces[0:len(retBraces)-1] | ||
185 | fret; | ||
186 | } else { | ||
187 | token(TokenCBrace); | ||
188 | braces--; | ||
189 | } | ||
190 | } | ||
191 | |||
192 | action closeTemplateSeqEatWhitespace { | ||
193 | // Only consume from the retBraces stack and return if we are at | ||
194 | // a suitable brace nesting level, otherwise things will get | ||
195 | // confused. (Not entering this branch indicates a syntax error, | ||
196 | // which we will catch in the parser.) | ||
197 | if len(retBraces) > 0 && retBraces[len(retBraces)-1] == braces { | ||
198 | token(TokenTemplateSeqEnd); | ||
199 | braces--; | ||
200 | retBraces = retBraces[0:len(retBraces)-1] | ||
201 | fret; | ||
202 | } else { | ||
203 | // We intentionally generate a TokenTemplateSeqEnd here, | ||
204 | // even though the user apparently wanted a brace, because | ||
205 | // we want to allow the parser to catch the incorrect use | ||
206 | // of a ~} to balance a generic opening brace, rather than | ||
207 | // a template sequence. | ||
208 | token(TokenTemplateSeqEnd); | ||
209 | braces--; | ||
210 | } | ||
211 | } | ||
212 | |||
213 | TemplateInterp = "${" ("~")?; | ||
214 | TemplateControl = "%{" ("~")?; | ||
215 | EndStringTmpl = '"'; | ||
216 | StringLiteralChars = (AnyUTF8 - ("\r"|"\n")); | ||
217 | TemplateStringLiteral = ( | ||
218 | ('$' ^'{' %{ fhold; }) | | ||
219 | ('%' ^'{' %{ fhold; }) | | ||
220 | ('\\' StringLiteralChars) | | ||
221 | (StringLiteralChars - ("$" | '%' | '"')) | ||
222 | )+; | ||
223 | HeredocStringLiteral = ( | ||
224 | ('$' ^'{' %{ fhold; }) | | ||
225 | ('%' ^'{' %{ fhold; }) | | ||
226 | (StringLiteralChars - ("$" | '%')) | ||
227 | )*; | ||
228 | BareStringLiteral = ( | ||
229 | ('$' ^'{') | | ||
230 | ('%' ^'{') | | ||
231 | (StringLiteralChars - ("$" | '%')) | ||
232 | )* Newline?; | ||
233 | |||
234 | stringTemplate := |* | ||
235 | TemplateInterp => beginTemplateInterp; | ||
236 | TemplateControl => beginTemplateControl; | ||
237 | EndStringTmpl => endStringTemplate; | ||
238 | TemplateStringLiteral => { token(TokenQuotedLit); }; | ||
239 | AnyUTF8 => { token(TokenInvalid); }; | ||
240 | BrokenUTF8 => { token(TokenBadUTF8); }; | ||
241 | *|; | ||
242 | |||
243 | heredocTemplate := |* | ||
244 | TemplateInterp => beginTemplateInterp; | ||
245 | TemplateControl => beginTemplateControl; | ||
246 | HeredocStringLiteral EndOfLine => heredocLiteralEOL; | ||
247 | HeredocStringLiteral => heredocLiteralMidline; | ||
248 | BrokenUTF8 => { token(TokenBadUTF8); }; | ||
249 | *|; | ||
250 | |||
251 | bareTemplate := |* | ||
252 | TemplateInterp => beginTemplateInterp; | ||
253 | TemplateControl => beginTemplateControl; | ||
254 | BareStringLiteral => bareTemplateLiteral; | ||
255 | BrokenUTF8 => { token(TokenBadUTF8); }; | ||
256 | *|; | ||
257 | |||
258 | identOnly := |* | ||
259 | Ident => { token(TokenIdent) }; | ||
260 | BrokenUTF8 => { token(TokenBadUTF8) }; | ||
261 | AnyUTF8 => { token(TokenInvalid) }; | ||
262 | *|; | ||
263 | |||
264 | main := |* | ||
265 | Spaces => {}; | ||
266 | NumberLit => { token(TokenNumberLit) }; | ||
267 | Ident => { token(TokenIdent) }; | ||
268 | |||
269 | Comment => { token(TokenComment) }; | ||
270 | Newline => { token(TokenNewline) }; | ||
271 | |||
272 | EqualOp => { token(TokenEqualOp); }; | ||
273 | NotEqual => { token(TokenNotEqual); }; | ||
274 | GreaterThanEqual => { token(TokenGreaterThanEq); }; | ||
275 | LessThanEqual => { token(TokenLessThanEq); }; | ||
276 | LogicalAnd => { token(TokenAnd); }; | ||
277 | LogicalOr => { token(TokenOr); }; | ||
278 | Ellipsis => { token(TokenEllipsis); }; | ||
279 | FatArrow => { token(TokenFatArrow); }; | ||
280 | SelfToken => { selfToken() }; | ||
281 | |||
282 | "{" => openBrace; | ||
283 | "}" => closeBrace; | ||
284 | |||
285 | "~}" => closeTemplateSeqEatWhitespace; | ||
286 | |||
287 | BeginStringTmpl => beginStringTemplate; | ||
288 | BeginHeredocTmpl => beginHeredocTemplate; | ||
289 | |||
290 | BrokenUTF8 => { token(TokenBadUTF8) }; | ||
291 | AnyUTF8 => { token(TokenInvalid) }; | ||
292 | *|; | ||
293 | |||
294 | }%% | ||
295 | |||
296 | // Ragel state | ||
297 | p := 0 // "Pointer" into data | ||
298 | pe := len(data) // End-of-data "pointer" | ||
299 | ts := 0 | ||
300 | te := 0 | ||
301 | act := 0 | ||
302 | eof := pe | ||
303 | var stack []int | ||
304 | var top int | ||
305 | |||
306 | var cs int // current state | ||
307 | switch mode { | ||
308 | case scanNormal: | ||
309 | cs = hcltok_en_main | ||
310 | case scanTemplate: | ||
311 | cs = hcltok_en_bareTemplate | ||
312 | case scanIdentOnly: | ||
313 | cs = hcltok_en_identOnly | ||
314 | default: | ||
315 | panic("invalid scanMode") | ||
316 | } | ||
317 | |||
318 | braces := 0 | ||
319 | var retBraces []int // stack of brace levels that cause us to use fret | ||
320 | var heredocs []heredocInProgress // stack of heredocs we're currently processing | ||
321 | |||
322 | %%{ | ||
323 | prepush { | ||
324 | stack = append(stack, 0); | ||
325 | } | ||
326 | postpop { | ||
327 | stack = stack[:len(stack)-1]; | ||
328 | } | ||
329 | }%% | ||
330 | |||
331 | // Make Go compiler happy | ||
332 | _ = ts | ||
333 | _ = te | ||
334 | _ = act | ||
335 | _ = eof | ||
336 | |||
337 | token := func (ty TokenType) { | ||
338 | f.emitToken(ty, ts, te) | ||
339 | } | ||
340 | selfToken := func () { | ||
341 | b := data[ts:te] | ||
342 | if len(b) != 1 { | ||
343 | // should never happen | ||
344 | panic("selfToken only works for single-character tokens") | ||
345 | } | ||
346 | f.emitToken(TokenType(b[0]), ts, te) | ||
347 | } | ||
348 | |||
349 | %%{ | ||
350 | write init nocs; | ||
351 | write exec; | ||
352 | }%% | ||
353 | |||
354 | // If we fall out here without being in a final state then we've | ||
355 | // encountered something that the scanner can't match, which we'll | ||
356 | // deal with as an invalid. | ||
357 | if cs < hcltok_first_final { | ||
358 | if mode == scanTemplate && len(stack) == 0 { | ||
359 | // If we're scanning a bare template then any straggling | ||
360 | // top-level stuff is actually literal string, rather than | ||
361 | // invalid. This handles the case where the template ends | ||
362 | // with a single "$" or "%", which trips us up because we | ||
363 | // want to see another character to decide if it's a sequence | ||
364 | // or an escape. | ||
365 | f.emitToken(TokenStringLit, ts, len(data)) | ||
366 | } else { | ||
367 | f.emitToken(TokenInvalid, ts, len(data)) | ||
368 | } | ||
369 | } | ||
370 | |||
371 | // We always emit a synthetic EOF token at the end, since it gives the | ||
372 | // parser position information for an "unexpected EOF" diagnostic. | ||
373 | f.emitToken(TokenEOF, len(data), len(data)) | ||
374 | |||
375 | return f.Tokens | ||
376 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/spec.md b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/spec.md new file mode 100644 index 0000000..49b9a3e --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/spec.md | |||
@@ -0,0 +1,923 @@ | |||
1 | # HCL Native Syntax Specification | ||
2 | |||
3 | This is the specification of the syntax and semantics of the native syntax | ||
4 | for HCL. HCL is a system for defining configuration languages for applications. | ||
5 | The HCL information model is designed to support multiple concrete syntaxes | ||
6 | for configuration, but this native syntax is considered the primary format | ||
7 | and is optimized for human authoring and maintenance, as opposed to machine | ||
8 | generation of configuration. | ||
9 | |||
10 | The language consists of three integrated sub-languages: | ||
11 | |||
12 | * The _structural_ language defines the overall hierarchical configuration | ||
13 | structure, and is a serialization of HCL bodies, blocks and attributes. | ||
14 | |||
15 | * The _expression_ language is used to express attribute values, either as | ||
16 | literals or as derivations of other values. | ||
17 | |||
18 | * The _template_ language is used to compose values together into strings, | ||
19 | as one of several types of expression in the expression language. | ||
20 | |||
21 | In normal use these three sub-languages are used together within configuration | ||
22 | files to describe an overall configuration, with the structural language | ||
23 | being used at the top level. The expression and template languages can also | ||
24 | be used in isolation, to implement features such as REPLs, debuggers, and | ||
25 | integration into more limited HCL syntaxes such as the JSON profile. | ||
26 | |||
27 | ## Syntax Notation | ||
28 | |||
29 | Within this specification a semi-formal notation is used to illustrate the | ||
30 | details of syntax. This notation is intended for human consumption rather | ||
31 | than machine consumption, with the following conventions: | ||
32 | |||
33 | * A naked name starting with an uppercase letter is a global production, | ||
34 | common to all of the syntax specifications in this document. | ||
35 | * A naked name starting with a lowercase letter is a local production, | ||
36 | meaningful only within the specification where it is defined. | ||
37 | * Double and single quotes (`"` and `'`) are used to mark literal character | ||
38 | sequences, which may be either punctuation markers or keywords. | ||
39 | * The default operator for combining items, which has no punctuation, | ||
40 | is concatenation. | ||
41 | * The symbol `|` indicates that any one of its left and right operands may | ||
42 | be present. | ||
43 | * The `*` symbol indicates zero or more repetitions of the item to its left. | ||
44 | * The `?` symbol indicates zero or one of the item to its left. | ||
45 | * Parentheses (`(` and `)`) are used to group items together to apply | ||
46 | the `|`, `*` and `?` operators to them collectively. | ||
47 | |||
48 | The grammar notation does not fully describe the language. The prose may | ||
49 | augment or conflict with the illustrated grammar. In case of conflict, prose | ||
50 | has priority. | ||
51 | |||
52 | ## Source Code Representation | ||
53 | |||
54 | Source code is unicode text expressed in the UTF-8 encoding. The language | ||
55 | itself does not perform unicode normalization, so syntax features such as | ||
56 | identifiers are sequences of unicode code points and so e.g. a precombined | ||
57 | accented character is distinct from a letter associated with a combining | ||
58 | accent. (String literals have some special handling with regard to Unicode | ||
59 | normalization which will be covered later in the relevant section.) | ||
60 | |||
61 | UTF-8 encoded Unicode byte order marks are not permitted. Invalid or | ||
62 | non-normalized UTF-8 encoding is always a parse error. | ||
63 | |||
64 | ## Lexical Elements | ||
65 | |||
66 | ### Comments and Whitespace | ||
67 | |||
68 | Comments and Whitespace are recognized as lexical elements but are ignored | ||
69 | except as described below. | ||
70 | |||
71 | Whitespace is defined as a sequence of zero or more space characters | ||
72 | (U+0020). Newline sequences (either U+000A or U+000D followed by U+000A) | ||
73 | are _not_ considered whitespace but are ignored as such in certain contexts. | ||
74 | |||
75 | Horizontal tab characters (U+0009) are not considered to be whitespace and | ||
76 | are not valid within HCL native syntax. | ||
77 | |||
78 | Comments serve as program documentation and come in two forms: | ||
79 | |||
80 | * _Line comments_ start with either the `//` or `#` sequences and end with | ||
81 | the next newline sequence. A line comment is considered equivalent to a | ||
82 | newline sequence. | ||
83 | |||
84 | * _Inline comments_ start with the `/*` sequence and end with the `*/` | ||
85 | sequence, and may have any characters within except the ending sequence. | ||
86 | An inline comment is considered equivalent to a whitespace sequence. | ||
87 | |||
88 | Comments and whitespace cannot begin within other comments, or within | ||
89 | template literals except inside an interpolation sequence or template directive. | ||
90 | |||
91 | ### Identifiers | ||
92 | |||
93 | Identifiers name entities such as blocks, attributes and expression variables. | ||
94 | Identifiers are interpreted as per [UAX #31][UAX31] Section 2. Specifically, | ||
95 | their syntax is defined in terms of the `ID_Start` and `ID_Continue` | ||
96 | character properties as follows: | ||
97 | |||
98 | ```ebnf | ||
99 | Identifier = ID_Start (ID_Continue | '-')*; | ||
100 | ``` | ||
101 | |||
102 | The Unicode specification provides the normative requirements for identifier | ||
103 | parsing. Non-normatively, the spirit of this specification is that `ID_Start` | ||
104 | consists of Unicode letter and certain unambiguous punctuation tokens, while | ||
105 | `ID_Continue` augments that set with Unicode digits, combining marks, etc. | ||
106 | |||
107 | The dash character `-` is additionally allowed in identifiers, even though | ||
108 | that is not part of the unicode `ID_Continue` definition. This is to allow | ||
109 | attribute names and block type names to contain dashes, although underscores | ||
110 | as word separators are considered the idiomatic usage. | ||
111 | |||
112 | [UAX31]: http://unicode.org/reports/tr31/ "Unicode Identifier and Pattern Syntax" | ||
113 | |||
114 | ### Keywords | ||
115 | |||
116 | There are no globally-reserved words, but in some contexts certain identifiers | ||
117 | are reserved to function as keywords. These are discussed further in the | ||
118 | relevant documentation sections that follow. In such situations, the | ||
119 | identifier's role as a keyword supersedes any other valid interpretation that | ||
120 | may be possible. Outside of these specific situations, the keywords have no | ||
121 | special meaning and are interpreted as regular identifiers. | ||
122 | |||
123 | ### Operators and Delimiters | ||
124 | |||
125 | The following character sequences represent operators, delimiters, and other | ||
126 | special tokens: | ||
127 | |||
128 | ``` | ||
129 | + && == < : { [ ( ${ | ||
130 | - || != > ? } ] ) %{ | ||
131 | * ! <= = . | ||
132 | / >= => , | ||
133 | % ... | ||
134 | ``` | ||
135 | |||
136 | ### Numeric Literals | ||
137 | |||
138 | A numeric literal is a decimal representation of a | ||
139 | real number. It has an integer part, a fractional part, | ||
140 | and an exponent part. | ||
141 | |||
142 | ```ebnf | ||
143 | NumericLit = decimal+ ("." decimal+)? (expmark decimal+)?; | ||
144 | decimal = '0' .. '9'; | ||
145 | expmark = ('e' | 'E') ("+" | "-")?; | ||
146 | ``` | ||
147 | |||
148 | ## Structural Elements | ||
149 | |||
150 | The structural language consists of syntax representing the following | ||
151 | constructs: | ||
152 | |||
153 | * _Attributes_, which assign a value to a specified name. | ||
154 | * _Blocks_, which create a child body annotated by a type and optional labels. | ||
155 | * _Body Content_, which consists of a collection of attributes and blocks. | ||
156 | |||
157 | These constructs correspond to the similarly-named concepts in the | ||
158 | language-agnostic HCL information model. | ||
159 | |||
160 | ```ebnf | ||
161 | ConfigFile = Body; | ||
162 | Body = (Attribute | Block)*; | ||
163 | Attribute = Identifier "=" Expression Newline; | ||
164 | Block = Identifier (StringLit|Identifier)* "{" Newline Body "}" Newline; | ||
165 | ``` | ||
166 | |||
167 | ### Configuration Files | ||
168 | |||
169 | A _configuration file_ is a sequence of characters whose top-level is | ||
170 | interpreted as a Body. | ||
171 | |||
172 | ### Bodies | ||
173 | |||
174 | A _body_ is a collection of associated attributes and blocks. The meaning of | ||
175 | this association is defined by the calling application. | ||
176 | |||
177 | ### Attribute Definitions | ||
178 | |||
179 | An _attribute definition_ assigns a value to a particular attribute name within | ||
180 | a body. Each distinct attribute name may be defined no more than once within a | ||
181 | single body. | ||
182 | |||
183 | The attribute value is given as an expression, which is retained literally | ||
184 | for later evaluation by the calling application. | ||
185 | |||
186 | ### Blocks | ||
187 | |||
188 | A _block_ creates a child body that is annotated with a block _type_ and | ||
189 | zero or more block _labels_. Blocks create a structural hierarchy which can be | ||
190 | interpreted by the calling application. | ||
191 | |||
192 | Block labels can either be quoted literal strings or naked identifiers. | ||
193 | |||
194 | ## Expressions | ||
195 | |||
196 | The expression sub-language is used within attribute definitions to specify | ||
197 | values. | ||
198 | |||
199 | ```ebnf | ||
200 | Expression = ( | ||
201 | ExprTerm | | ||
202 | Operation | | ||
203 | Conditional | ||
204 | ); | ||
205 | ``` | ||
206 | |||
207 | ### Types | ||
208 | |||
209 | The value types used within the expression language are those defined by the | ||
210 | syntax-agnostic HCL information model. An expression may return any valid | ||
211 | type, but only a subset of the available types have first-class syntax. | ||
212 | A calling application may make other types available via _variables_ and | ||
213 | _functions_. | ||
214 | |||
215 | ### Expression Terms | ||
216 | |||
217 | Expression _terms_ are the operands for unary and binary expressions, as well | ||
218 | as acting as expressions in their own right. | ||
219 | |||
220 | ```ebnf | ||
221 | ExprTerm = ( | ||
222 | LiteralValue | | ||
223 | CollectionValue | | ||
224 | TemplateExpr | | ||
225 | VariableExpr | | ||
226 | FunctionCall | | ||
227 | ForExpr | | ||
228 | ExprTerm Index | | ||
229 | ExprTerm GetAttr | | ||
230 | ExprTerm Splat | | ||
231 | "(" Expression ")" | ||
232 | ); | ||
233 | ``` | ||
234 | |||
235 | The productions for these different term types are given in their corresponding | ||
236 | sections. | ||
237 | |||
238 | Between the `(` and `)` characters denoting a sub-expression, newline | ||
239 | characters are ignored as whitespace. | ||
240 | |||
241 | ### Literal Values | ||
242 | |||
243 | A _literal value_ immediately represents a particular value of a primitive | ||
244 | type. | ||
245 | |||
246 | ```ebnf | ||
247 | LiteralValue = ( | ||
248 | NumericLit | | ||
249 | "true" | | ||
250 | "false" | | ||
251 | "null" | ||
252 | ); | ||
253 | ``` | ||
254 | |||
255 | * Numeric literals represent values of type _number_. | ||
256 | * The `true` and `false` keywords represent values of type _bool_. | ||
257 | * The `null` keyword represents a null value of the dynamic pseudo-type. | ||
258 | |||
259 | String literals are not directly available in the expression sub-language, but | ||
260 | are available via the template sub-language, which can in turn be incorporated | ||
261 | via _template expressions_. | ||
262 | |||
263 | ### Collection Values | ||
264 | |||
265 | A _collection value_ combines zero or more other expressions to produce a | ||
266 | collection value. | ||
267 | |||
268 | ```ebnf | ||
269 | CollectionValue = tuple | object; | ||
270 | tuple = "[" ( | ||
271 | (Expression ("," Expression)* ","?)? | ||
272 | ) "]"; | ||
273 | object = "{" ( | ||
274 | (objectelem ("," objectelem)* ","?)? | ||
275 | ) "}"; | ||
276 | objectelem = (Identifier | Expression) "=" Expression; | ||
277 | ``` | ||
278 | |||
279 | Only tuple and object values can be directly constructed via native syntax. | ||
280 | Tuple and object values can in turn be converted to list, set and map values | ||
281 | with other operations, which behaves as defined by the syntax-agnostic HCL | ||
282 | information model. | ||
283 | |||
284 | When specifying an object element, an identifier is interpreted as a literal | ||
285 | attribute name as opposed to a variable reference. To populate an item key | ||
286 | from a variable, use parentheses to disambiguate: | ||
287 | |||
288 | * `{foo = "baz"}` is interpreted as an attribute literally named `foo`. | ||
289 | * `{(foo) = "baz"}` is interpreted as an attribute whose name is taken | ||
290 | from the variable named `foo`. | ||
291 | |||
292 | Between the open and closing delimiters of these sequences, newline sequences | ||
293 | are ignored as whitespace. | ||
294 | |||
295 | There is a syntax ambiguity between _for expressions_ and collection values | ||
296 | whose first element is a reference to a variable named `for`. The | ||
297 | _for expression_ interpretation has priority, so to produce a tuple whose | ||
298 | first element is the value of a variable named `for`, or an object with a | ||
299 | key named `for`, use parentheses to disambiguate: | ||
300 | |||
301 | * `[for, foo, baz]` is a syntax error. | ||
302 | * `[(for), foo, baz]` is a tuple whose first element is the value of variable | ||
303 | `for`. | ||
304 | * `{for: 1, baz: 2}` is a syntax error. | ||
305 | * `{(for): 1, baz: 2}` is an object with an attribute literally named `for`. | ||
306 | * `{baz: 2, for: 1}` is equivalent to the previous example, and resolves the | ||
307 | ambiguity by reordering. | ||
308 | |||
309 | ### Template Expressions | ||
310 | |||
311 | A _template expression_ embeds a program written in the template sub-language | ||
312 | as an expression. Template expressions come in two forms: | ||
313 | |||
314 | * A _quoted_ template expression is delimited by quote characters (`"`) and | ||
315 | defines a template as a single-line expression with escape characters. | ||
316 | * A _heredoc_ template expression is introduced by a `<<` sequence and | ||
317 | defines a template via a multi-line sequence terminated by a user-chosen | ||
318 | delimiter. | ||
319 | |||
320 | In both cases the template interpolation and directive syntax is available for | ||
321 | use within the delimiters, and any text outside of these special sequences is | ||
322 | interpreted as a literal string. | ||
323 | |||
324 | In _quoted_ template expressions any literal string sequences within the | ||
325 | template behave in a special way: literal newline sequences are not permitted | ||
326 | and instead _escape sequences_ can be included, starting with the | ||
327 | backslash `\`: | ||
328 | |||
329 | ``` | ||
330 | \n Unicode newline control character | ||
331 | \r Unicode carriage return control character | ||
332 | \t Unicode tab control character | ||
333 | \" Literal quote mark, used to prevent interpretation as end of string | ||
334 | \\ Literal backslash, used to prevent interpretation as escape sequence | ||
335 | \uNNNN Unicode character from Basic Multilingual Plane (NNNN is four hexadecimal digits) | ||
336 | \UNNNNNNNN Unicode character from supplementary planes (NNNNNNNN is eight hexadecimal digits) | ||
337 | ``` | ||
338 | |||
339 | The _heredoc_ template expression type is introduced by either `<<` or `<<-`, | ||
340 | followed by an identifier. The template expression ends when the given | ||
341 | identifier subsequently appears again on a line of its own. | ||
342 | |||
343 | If a heredoc template is introduced with the `<<-` symbol, any literal string | ||
344 | at the start of each line is analyzed to find the minimum number of leading | ||
345 | spaces, and then that number of prefix spaces is removed from all line-leading | ||
346 | literal strings. The final closing marker may also have an arbitrary number | ||
347 | of spaces preceding it on its line. | ||
348 | |||
349 | ```ebnf | ||
350 | TemplateExpr = quotedTemplate | heredocTemplate; | ||
351 | quotedTemplate = (as defined in prose above); | ||
352 | heredocTemplate = ( | ||
353 | ("<<" | "<<-") Identifier Newline | ||
354 | (content as defined in prose above) | ||
355 | Identifier Newline | ||
356 | ); | ||
357 | ``` | ||
358 | |||
359 | A quoted template expression containing only a single literal string serves | ||
360 | as a syntax for defining literal string _expressions_. In certain contexts | ||
361 | the template syntax is restricted in this manner: | ||
362 | |||
363 | ```ebnf | ||
364 | StringLit = '"' (quoted literals as defined in prose above) '"'; | ||
365 | ``` | ||
366 | |||
367 | The `StringLit` production permits the escape sequences discussed for quoted | ||
368 | template expressions as above, but does _not_ permit template interpolation | ||
369 | or directive sequences. | ||
370 | |||
371 | ### Variables and Variable Expressions | ||
372 | |||
373 | A _variable_ is a value that has been assigned a symbolic name. Variables are | ||
374 | made available for use in expressions by the calling application, by populating | ||
375 | the _global scope_ used for expression evaluation. | ||
376 | |||
377 | Variables can also be created by expressions themselves, which always creates | ||
378 | a _child scope_ that incorporates the variables from its parent scope but | ||
379 | (re-)defines zero or more names with new values. | ||
380 | |||
381 | The value of a variable is accessed using a _variable expression_, which is | ||
382 | a standalone `Identifier` whose name corresponds to a defined variable: | ||
383 | |||
384 | ```ebnf | ||
385 | VariableExpr = Identifier; | ||
386 | ``` | ||
387 | |||
388 | Variables in a particular scope are immutable, but child scopes may _hide_ | ||
389 | a variable from an ancestor scope by defining a new variable of the same name. | ||
390 | When looking up variables, the most locally-defined variable of the given name | ||
391 | is used, and ancestor-scoped variables of the same name cannot be accessed. | ||
392 | |||
393 | No direct syntax is provided for declaring or assigning variables, but other | ||
394 | expression constructs implicitly create child scopes and define variables as | ||
395 | part of their evaluation. | ||
396 | |||
397 | ### Functions and Function Calls | ||
398 | |||
399 | A _function_ is an operation that has been assigned a symbolic name. Functions | ||
400 | are made available for use in expressions by the calling application, by | ||
401 | populating the _function table_ used for expression evaluation. | ||
402 | |||
403 | The namespace of functions is distinct from the namespace of variables. A | ||
404 | function and a variable may share the same name with no implication that they | ||
405 | are in any way related. | ||
406 | |||
407 | A function can be executed via a _function call_ expression: | ||
408 | |||
409 | ```ebnf | ||
410 | FunctionCall = Identifier "(" Arguments ")"; | ||
411 | Arguments = ( | ||
412 | () | | ||
413 | (Expression ("," Expression)* ("," | "...")?) | ||
414 | ); | ||
415 | ``` | ||
416 | |||
417 | The definition of functions and the semantics of calling them are defined by | ||
418 | the language-agnostic HCL information model. The given arguments are mapped | ||
419 | onto the function's _parameters_ and the result of a function call expression | ||
420 | is the return value of the named function when given those arguments. | ||
421 | |||
422 | If the final argument expression is followed by the ellipsis symbol (`...`), | ||
423 | the final argument expression must evaluate to either a list or tuple value. | ||
424 | The elements of the value are each mapped to a single parameter of the | ||
425 | named function, beginning at the first parameter remaining after all other | ||
426 | argument expressions have been mapped. | ||
427 | |||
428 | Within the parentheses that delimit the function arguments, newline sequences | ||
429 | are ignored as whitespace. | ||
430 | |||
431 | ### For Expressions | ||
432 | |||
433 | A _for expression_ is a construct for constructing a collection by projecting | ||
434 | the items from another collection. | ||
435 | |||
436 | ```ebnf | ||
437 | ForExpr = forTupleExpr | forObjectExpr; | ||
438 | forTupleExpr = "[" forIntro Expression forCond? "]"; | ||
439 | forObjectExpr = "{" forIntro Expression "=>" Expression "..."? forCond? "}"; | ||
440 | forIntro = "for" Identifier ("," Identifier)? "in" Expression ":"; | ||
441 | forCond = "if" Expression; | ||
442 | ``` | ||
443 | |||
444 | The punctuation used to delimit a for expression decides whether it will produce | ||
445 | a tuple value (`[` and `]`) or an object value (`{` and `}`). | ||
446 | |||
447 | The "introduction" is equivalent in both cases: the keyword `for` followed by | ||
448 | either one or two identifiers separated by a comma which define the temporary | ||
449 | variable names used for iteration, followed by the keyword `in` and then | ||
450 | an expression that must evaluate to a value that can be iterated. The | ||
451 | introduction is then terminated by the colon (`:`) symbol. | ||
452 | |||
453 | If only one identifier is provided, it is the name of a variable that will | ||
454 | be temporarily assigned the value of each element during iteration. If both | ||
455 | are provided, the first is the key and the second is the value. | ||
456 | |||
457 | Tuple, object, list, map, and set types are iterable. The type of collection | ||
458 | used defines how the key and value variables are populated: | ||
459 | |||
460 | * For tuple and list types, the _key_ is the zero-based index into the | ||
461 | sequence for each element, and the _value_ is the element value. The | ||
462 | elements are visited in index order. | ||
463 | * For object and map types, the _key_ is the string attribute name or element | ||
464 | key, and the _value_ is the attribute or element value. The elements are | ||
465 | visited in the order defined by a lexicographic sort of the attribute names | ||
466 | or keys. | ||
467 | * For set types, the _key_ and _value_ are both the element value. The elements | ||
468 | are visited in an undefined but consistent order. | ||
469 | |||
470 | The expression after the colon and (in the case of object `for`) the expression | ||
471 | after the `=>` are both evaluated once for each element of the source | ||
472 | collection, in a local scope that defines the key and value variable names | ||
473 | specified. | ||
474 | |||
475 | The results of evaluating these expressions for each input element are used | ||
476 | to populate an element in the new collection. In the case of tuple `for`, the | ||
477 | single expression becomes an element, appending values to the tuple in visit | ||
478 | order. In the case of object `for`, the pair of expressions is used as an | ||
479 | attribute name and value respectively, creating an element in the resulting | ||
480 | object. | ||
481 | |||
482 | In the case of object `for`, it is an error if two input elements produce | ||
483 | the same result from the attribute name expression, since duplicate | ||
484 | attributes are not possible. If the ellipsis symbol (`...`) appears | ||
485 | immediately after the value expression, this activates the grouping mode in | ||
486 | which each value in the resulting object is a _tuple_ of all of the values | ||
487 | that were produced against each distinct key. | ||
488 | |||
489 | * `[for v in ["a", "b"]: v]` returns `["a", "b"]`. | ||
490 | * `[for i, v in ["a", "b"]: i]` returns `[0, 1]`. | ||
491 | * `{for i, v in ["a", "b"]: v => i}` returns `{a = 0, b = 1}`. | ||
492 | * `{for i, v in ["a", "a", "b"]: v => i}` produces an error, because attribute | ||
493 | `a` is defined twice. | ||
494 | * `{for i, v in ["a", "a", "b"]: v => i...}` returns `{a = [0, 1], b = [2]}`. | ||
495 | |||
496 | If the `if` keyword is used after the element expression(s), it applies an | ||
497 | additional predicate that can be used to conditionally filter elements from | ||
498 | the source collection from consideration. The expression following `if` is | ||
499 | evaluated once for each source element, in the same scope used for the | ||
500 | element expression(s). It must evaluate to a boolean value; if `true`, the | ||
501 | element will be evaluated as normal, while if `false` the element will be | ||
502 | skipped. | ||
503 | |||
504 | * `[for i, v in ["a", "b", "c"]: v if i < 2]` returns `["a", "b"]`. | ||
505 | |||
506 | If the collection value, element expression(s) or condition expression return | ||
507 | unknown values that are otherwise type-valid, the result is a value of the | ||
508 | dynamic pseudo-type. | ||
509 | |||
510 | ### Index Operator | ||
511 | |||
512 | The _index_ operator returns the value of a single element of a collection | ||
513 | value. It is a postfix operator and can be applied to any value that has | ||
514 | a tuple, object, map, or list type. | ||
515 | |||
516 | ```ebnf | ||
517 | Index = "[" Expression "]"; | ||
518 | ``` | ||
519 | |||
520 | The expression delimited by the brackets is the _key_ by which an element | ||
521 | will be looked up. | ||
522 | |||
523 | If the index operator is applied to a value of tuple or list type, the | ||
524 | key expression must be a non-negative integer number representing the | ||
525 | zero-based element index to access. If applied to a value of object or map | ||
526 | type, the key expression must be a string representing the attribute name | ||
527 | or element key. If the given key value is not of the appropriate type, a | ||
528 | conversion is attempted using the conversion rules from the HCL | ||
529 | syntax-agnostic information model. | ||
530 | |||
531 | An error is produced if the given key expression does not correspond to | ||
532 | an element in the collection, either because it is of an unconvertible type, | ||
533 | because it is outside the range of elements for a tuple or list, or because | ||
534 | the given attribute or key does not exist. | ||
535 | |||
536 | If either the collection or the key are an unknown value of an | ||
537 | otherwise-suitable type, the return value is an unknown value whose type | ||
538 | matches what type would be returned given known values, or a value of the | ||
539 | dynamic pseudo-type if type information alone cannot determine a suitable | ||
540 | return type. | ||
541 | |||
542 | Within the brackets that delimit the index key, newline sequences are ignored | ||
543 | as whitespace. | ||
544 | |||
545 | ### Attribute Access Operator | ||
546 | |||
547 | The _attribute access_ operator returns the value of a single attribute in | ||
548 | an object value. It is a postfix operator and can be applied to any value | ||
549 | that has an object type. | ||
550 | |||
551 | ```ebnf | ||
552 | GetAttr = "." Identifier; | ||
553 | ``` | ||
554 | |||
555 | The given identifier is interpreted as the name of the attribute to access. | ||
556 | An error is produced if the object to which the operator is applied does not | ||
557 | have an attribute with the given name. | ||
558 | |||
559 | If the object is an unknown value of a type that has the attribute named, the | ||
560 | result is an unknown value of the attribute's type. | ||
561 | |||
562 | ### Splat Operators | ||
563 | |||
564 | The _splat operators_ allow convenient access to attributes or elements of | ||
565 | elements in a tuple, list, or set value. | ||
566 | |||
567 | There are two kinds of "splat" operator: | ||
568 | |||
569 | * The _attribute-only_ splat operator supports only attribute lookups into | ||
570 | the elements from a list, but supports an arbitrary number of them. | ||
571 | |||
572 | * The _full_ splat operator additionally supports indexing into the elements | ||
573 | from a list, and allows any combination of attribute access and index | ||
574 | operations. | ||
575 | |||
576 | ```ebnf | ||
577 | Splat = attrSplat | fullSplat; | ||
578 | attrSplat = "." "*" GetAttr*; | ||
579 | fullSplat = "[" "*" "]" (GetAttr | Index)*; | ||
580 | ``` | ||
581 | |||
582 | The splat operators can be thought of as shorthands for common operations that | ||
583 | could otherwise be performed using _for expressions_: | ||
584 | |||
585 | * `tuple.*.foo.bar[0]` is approximately equivalent to | ||
586 | `[for v in tuple: v.foo.bar][0]`. | ||
587 | * `tuple[*].foo.bar[0]` is approximately equivalent to | ||
588 | `[for v in tuple: v.foo.bar[0]]` | ||
589 | |||
590 | Note the difference in how the trailing index operator is interpreted in | ||
591 | each case. This different interpretation is the key difference between the | ||
592 | _attribute-only_ and _full_ splat operators. | ||
593 | |||
594 | Splat operators have one additional behavior compared to the equivalent | ||
595 | _for expressions_ shown above: if a splat operator is applied to a value that | ||
596 | is _not_ of tuple, list, or set type, the value is coerced automatically into | ||
597 | a single-value list of the value type: | ||
598 | |||
599 | * `any_object.*.id` is equivalent to `[any_object.id]`, assuming that `any_object` | ||
600 | is a single object. | ||
601 | * `any_number.*` is equivalent to `[any_number]`, assuming that `any_number` | ||
602 | is a single number. | ||
603 | |||
604 | If the left operand of a splat operator is an unknown value of any type, the | ||
605 | result is a value of the dynamic pseudo-type. | ||
606 | |||
607 | ### Operations | ||
608 | |||
609 | Operations apply a particular operator to either one or two expression terms. | ||
610 | |||
611 | ```ebnf | ||
612 | Operation = unaryOp | binaryOp; | ||
613 | unaryOp = ("-" | "!") ExprTerm; | ||
614 | binaryOp = ExprTerm binaryOperator ExprTerm; | ||
615 | binaryOperator = compareOperator | arithmeticOperator | logicOperator; | ||
616 | compareOperator = "==" | "!=" | "<" | ">" | "<=" | ">="; | ||
617 | arithmeticOperator = "+" | "-" | "*" | "/" | "%"; | ||
618 | logicOperator = "&&" | "||" | "!"; | ||
619 | ``` | ||
620 | |||
621 | The unary operators have the highest precedence. | ||
622 | |||
623 | The binary operators are grouped into the following precedence levels: | ||
624 | |||
625 | ``` | ||
626 | Level Operators | ||
627 | 6 * / % | ||
628 | 5 + - | ||
629 | 4 > >= < <= | ||
630 | 3 == != | ||
631 | 2 && | ||
632 | 1 || | ||
633 | ``` | ||
634 | |||
635 | Higher values of "level" bind tighter. Operators within the same precedence | ||
636 | level have left-to-right associativity. For example, `x / y * z` is equivalent | ||
637 | to `(x / y) * z`. | ||
638 | |||
639 | ### Comparison Operators | ||
640 | |||
641 | Comparison operators always produce boolean values, as a result of testing | ||
642 | the relationship between two values. | ||
643 | |||
644 | The two equality operators apply to values of any type: | ||
645 | |||
646 | ``` | ||
647 | a == b equal | ||
648 | a != b not equal | ||
649 | ``` | ||
650 | |||
651 | Two values are equal if they are of identical types and their values are | ||
652 | equal as defined in the HCL syntax-agnostic information model. The equality | ||
653 | operators are commutative and opposite, such that `(a == b) == !(a != b)` | ||
654 | and `(a == b) == (b == a)` for all values `a` and `b`. | ||
655 | |||
656 | The four numeric comparison operators apply only to numbers: | ||
657 | |||
658 | ``` | ||
659 | a < b less than | ||
660 | a <= b less than or equal to | ||
661 | a > b greater than | ||
662 | a >= b greater than or equal to | ||
663 | ``` | ||
664 | |||
665 | If either operand of a comparison operator is a correctly-typed unknown value | ||
666 | or a value of the dynamic pseudo-type, the result is an unknown boolean. | ||
667 | |||
668 | ### Arithmetic Operators | ||
669 | |||
670 | Arithmetic operators apply only to number values and always produce number | ||
671 | values as results. | ||
672 | |||
673 | ``` | ||
674 | a + b sum (addition) | ||
675 | a - b difference (subtraction) | ||
676 | a * b product (multiplication) | ||
677 | a / b quotient (division) | ||
678 | a % b remainder (modulo) | ||
679 | -a negation | ||
680 | ``` | ||
681 | |||
682 | Arithmetic operations are considered to be performed in an arbitrary-precision | ||
683 | number space. | ||
684 | |||
685 | If either operand of an arithmetic operator is an unknown number or a value | ||
686 | of the dynamic pseudo-type, the result is an unknown number. | ||
687 | |||
688 | ### Logic Operators | ||
689 | |||
690 | Logic operators apply only to boolean values and always produce boolean values | ||
691 | as results. | ||
692 | |||
693 | ``` | ||
694 | a && b logical AND | ||
695 | a || b logical OR | ||
696 | !a logical NOT | ||
697 | ``` | ||
698 | |||
699 | If either operand of a logic operator is an unknown bool value or a value | ||
700 | of the dynamic pseudo-type, the result is an unknown bool value. | ||
701 | |||
702 | ### Conditional Operator | ||
703 | |||
704 | The conditional operator allows selecting from one of two expressions based on | ||
705 | the outcome of a boolean expression. | ||
706 | |||
707 | ```ebnf | ||
708 | Conditional = Expression "?" Expression ":" Expression; | ||
709 | ``` | ||
710 | |||
711 | The first expression is the _predicate_, which is evaluated and must produce | ||
712 | a boolean result. If the predicate value is `true`, the result of the second | ||
713 | expression is the result of the conditional. If the predicate value is | ||
714 | `false`, the result of the third expression is the result of the conditional. | ||
715 | |||
716 | The second and third expressions must be of the same type or must be able to | ||
717 | unify into a common type using the type unification rules defined in the | ||
718 | HCL syntax-agnostic information model. This unified type is the result type | ||
719 | of the conditional, with both expressions converted as necessary to the | ||
720 | unified type. | ||
721 | |||
722 | If the predicate is an unknown boolean value or a value of the dynamic | ||
723 | pseudo-type then the result is an unknown value of the unified type of the | ||
724 | other two expressions. | ||
725 | |||
726 | If either the second or third expressions produce errors when evaluated, | ||
727 | these errors are passed through only if the erroneous expression is selected. | ||
728 | This allows for expressions such as | ||
729 | `length(some_list) > 0 ? some_list[0] : default` (given some suitable `length` | ||
730 | function) without producing an error when the predicate is `false`. | ||
731 | |||
732 | ## Templates | ||
733 | |||
734 | The template sub-language is used within template expressions to concisely | ||
735 | combine strings and other values to produce other strings. It can also be | ||
736 | used in isolation as a standalone template language. | ||
737 | |||
738 | ```ebnf | ||
739 | Template = ( | ||
740 | TemplateLiteral | | ||
741 | TemplateInterpolation | | ||
742 | TemplateDirective | ||
743 | )* | ||
744 | TemplateDirective = TemplateIf | TemplateFor; | ||
745 | ``` | ||
746 | |||
747 | A template behaves like an expression that always returns a string value. | ||
748 | The different elements of the template are evaluated and combined into a | ||
749 | single string to return. If any of the elements produce an unknown string | ||
750 | or a value of the dynamic pseudo-type, the result is an unknown string. | ||
751 | |||
752 | An important use-case for standalone templates is to enable the use of | ||
753 | expressions in alternative HCL syntaxes where a native expression grammar is | ||
754 | not available. For example, the HCL JSON profile treats the values of JSON | ||
755 | strings as standalone templates when attributes are evaluated in expression | ||
756 | mode. | ||
757 | |||
758 | ### Template Literals | ||
759 | |||
760 | A template literal is a literal sequence of characters to include in the | ||
761 | resulting string. When the template sub-language is used standalone, a | ||
762 | template literal can contain any unicode character, with the exception | ||
763 | of the sequences that introduce interpolations and directives, and for the | ||
764 | sequences that escape those introductions. | ||
765 | |||
766 | The interpolation and directive introductions are escaped by doubling their | ||
767 | leading characters. The `${` sequence is escaped as `$${` and the `%{` | ||
768 | sequence is escaped as `%%{`. | ||
769 | |||
770 | When the template sub-language is embedded in the expression language via | ||
771 | _template expressions_, additional constraints and transforms are applied to | ||
template literals as described in the definition of template expressions.
773 | |||
774 | The value of a template literal can be modified by _strip markers_ in any | ||
775 | interpolations or directives that are adjacent to it. A strip marker is | ||
776 | a tilde (`~`) placed immediately after the opening `{` or before the closing | ||
777 | `}` of a template sequence: | ||
778 | |||
779 | * `hello ${~ "world" }` produces `"helloworld"`. | ||
780 | * `%{ if true ~} hello %{~ endif }` produces `"hello"`. | ||
781 | |||
782 | When a strip marker is present, any spaces adjacent to it in the corresponding | ||
783 | string literal (if any) are removed before producing the final value. Space | ||
784 | characters are interpreted as per Unicode's definition. | ||
785 | |||
786 | Stripping is done at syntax level rather than value level. Values returned | ||
787 | by interpolations or directives are not subject to stripping: | ||
788 | |||
789 | * `${"hello" ~}${" world"}` produces `"hello world"`, and not `"helloworld"`, | ||
790 | because the space is not in a template literal directly adjacent to the | ||
791 | strip marker. | ||
792 | |||
793 | ### Template Interpolations | ||
794 | |||
795 | An _interpolation sequence_ evaluates an expression (written in the | ||
796 | expression sub-language), converts the result to a string value, and | ||
797 | replaces itself with the resulting string. | ||
798 | |||
799 | ```ebnf | ||
800 | TemplateInterpolation = ("${" | "${~") Expression ("}" | "~}"; | ||
801 | ``` | ||
802 | |||
803 | If the expression result cannot be converted to a string, an error is | ||
804 | produced. | ||
805 | |||
806 | ### Template If Directive | ||
807 | |||
808 | The template `if` directive is the template equivalent of the | ||
809 | _conditional expression_, allowing selection of one of two sub-templates based | ||
810 | on the value of a predicate expression. | ||
811 | |||
812 | ```ebnf | ||
813 | TemplateIf = ( | ||
814 | ("%{" | "%{~") "if" Expression ("}" | "~}") | ||
815 | Template | ||
816 | ( | ||
817 | ("%{" | "%{~") "else" ("}" | "~}") | ||
818 | Template | ||
819 | )? | ||
820 | ("%{" | "%{~") "endif" ("}" | "~}") | ||
821 | ); | ||
822 | ``` | ||
823 | |||
824 | The evaluation of the `if` directive is equivalent to the conditional | ||
825 | expression, with the following exceptions: | ||
826 | |||
827 | * The two sub-templates always produce strings, and thus the result value is | ||
828 | also always a string. | ||
829 | * The `else` clause may be omitted, in which case the conditional's third | ||
830 | expression result is implied to be the empty string. | ||
831 | |||
832 | ### Template For Directive | ||
833 | |||
834 | The template `for` directive is the template equivalent of the _for expression_, | ||
835 | producing zero or more copies of its sub-template based on the elements of | ||
836 | a collection. | ||
837 | |||
838 | ```ebnf | ||
839 | TemplateFor = ( | ||
840 | ("%{" | "%{~") "for" Identifier ("," Identifier) "in" Expression ("}" | "~}") | ||
841 | Template | ||
842 | ("%{" | "%{~") "endfor" ("}" | "~}") | ||
843 | ); | ||
844 | ``` | ||
845 | |||
846 | The evaluation of the `for` directive is equivalent to the _for expression_ | ||
847 | when producing a tuple, with the following exceptions: | ||
848 | |||
849 | * The sub-template always produces a string. | ||
850 | * There is no equivalent of the "if" clause on the for expression. | ||
851 | * The elements of the resulting tuple are all converted to strings and | ||
852 | concatenated to produce a flat string result. | ||
853 | |||
854 | ### Template Interpolation Unwrapping | ||
855 | |||
856 | As a special case, a template that consists only of a single interpolation, | ||
857 | with no surrounding literals, directives or other interpolations, is | ||
858 | "unwrapped". In this case, the result of the interpolation expression is | ||
859 | returned verbatim, without conversion to string. | ||
860 | |||
861 | This special case exists primarily to enable the native template language | ||
862 | to be used inside strings in alternative HCL syntaxes that lack a first-class | ||
863 | template or expression syntax. Unwrapping allows arbitrary expressions to be | ||
864 | used to populate attributes when strings in such languages are interpreted | ||
865 | as templates. | ||
866 | |||
867 | * `${true}` produces the boolean value `true` | ||
868 | * `${"${true}"}` produces the boolean value `true`, because both the inner | ||
869 | and outer interpolations are subject to unwrapping. | ||
870 | * `hello ${true}` produces the string `"hello true"` | ||
871 | * `${""}${true}` produces the string `"true"` because there are two | ||
872 | interpolation sequences, even though one produces an empty result. | ||
* `%{ for v in [true] }${v}%{ endfor }` produces the string `true` because
874 | the presence of the `for` directive circumvents the unwrapping even though | ||
875 | the final result is a single value. | ||
876 | |||
877 | In some contexts this unwrapping behavior may be circumvented by the calling | ||
878 | application, by converting the final template result to string. This is | ||
879 | necessary, for example, if a standalone template is being used to produce | ||
880 | the direct contents of a file, since the result in that case must always be a | ||
881 | string. | ||
882 | |||
883 | ## Static Analysis | ||
884 | |||
885 | The HCL static analysis operations are implemented for some expression types | ||
886 | in the native syntax, as described in the following sections. | ||
887 | |||
888 | A goal for static analysis of the native syntax is for the interpretation to | ||
889 | be as consistent as possible with the dynamic evaluation interpretation of | ||
890 | the given expression, though some deviations are intentionally made in order | ||
891 | to maximize the potential for analysis. | ||
892 | |||
893 | ### Static List | ||
894 | |||
895 | The tuple construction syntax can be interpreted as a static list. All of | ||
896 | the expression elements given are returned as the static list elements, | ||
897 | with no further interpretation. | ||
898 | |||
899 | ### Static Map | ||
900 | |||
901 | The object construction syntax can be interpreted as a static map. All of the | ||
902 | key/value pairs given are returned as the static pairs, with no further | ||
903 | interpretation. | ||
904 | |||
905 | The usual requirement that an attribute name be interpretable as a string | ||
does not apply to this static analysis, allowing callers to provide map-like
907 | constructs with different key types by building on the map syntax. | ||
908 | |||
909 | ### Static Call | ||
910 | |||
911 | The function call syntax can be interpreted as a static call. The called | ||
912 | function name is returned verbatim and the given argument expressions are | ||
913 | returned as the static arguments, with no further interpretation. | ||
914 | |||
915 | ### Static Traversal | ||
916 | |||
917 | A variable expression and any attached attribute access operations and | ||
918 | constant index operations can be interpreted as a static traversal. | ||
919 | |||
920 | The keywords `true`, `false` and `null` can also be interpreted as | ||
921 | static traversals, behaving as if they were references to variables of those | ||
922 | names, to allow callers to redefine the meaning of those keywords in certain | ||
923 | contexts. | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/structure.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/structure.go new file mode 100644 index 0000000..d69f65b --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/structure.go | |||
@@ -0,0 +1,379 @@ | |||
1 | package hclsyntax | ||
2 | |||
3 | import ( | ||
4 | "fmt" | ||
5 | "strings" | ||
6 | |||
7 | "github.com/hashicorp/hcl2/hcl" | ||
8 | ) | ||
9 | |||
10 | // AsHCLBlock returns the block data expressed as a *hcl.Block. | ||
11 | func (b *Block) AsHCLBlock() *hcl.Block { | ||
12 | lastHeaderRange := b.TypeRange | ||
13 | if len(b.LabelRanges) > 0 { | ||
14 | lastHeaderRange = b.LabelRanges[len(b.LabelRanges)-1] | ||
15 | } | ||
16 | |||
17 | return &hcl.Block{ | ||
18 | Type: b.Type, | ||
19 | Labels: b.Labels, | ||
20 | Body: b.Body, | ||
21 | |||
22 | DefRange: hcl.RangeBetween(b.TypeRange, lastHeaderRange), | ||
23 | TypeRange: b.TypeRange, | ||
24 | LabelRanges: b.LabelRanges, | ||
25 | } | ||
26 | } | ||
27 | |||
// Body is the implementation of hcl.Body for the HCL native syntax.
type Body struct {
	Attributes Attributes
	Blocks     Blocks

	// These are used with PartialContent to produce a "remaining items"
	// body to return. They are nil on all bodies fresh out of the parser.
	hiddenAttrs  map[string]struct{}
	hiddenBlocks map[string]struct{}

	SrcRange hcl.Range
	EndRange hcl.Range // Final token of the body, for reporting missing items
}

// Assert at compile time that *Body implements hcl.Body.
var assertBodyImplBody hcl.Body = &Body{}
44 | |||
// walkChildNodes visits the body's attribute and block collections with
// w, storing back any replacement nodes that w returns.
func (b *Body) walkChildNodes(w internalWalkFunc) {
	b.Attributes = w(b.Attributes).(Attributes)
	b.Blocks = w(b.Blocks).(Blocks)
}

// Range returns the source range of the entire body.
func (b *Body) Range() hcl.Range {
	return b.SrcRange
}
53 | |||
// Content extracts the full content of the body according to the given
// schema, producing error diagnostics for any attributes or blocks that
// are present but not described by the schema.
func (b *Body) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostics) {
	content, remainHCL, diags := b.PartialContent(schema)

	// Now we'll see if anything actually remains, to produce errors about
	// extraneous items.
	remain := remainHCL.(*Body)

	for name, attr := range b.Attributes {
		if _, hidden := remain.hiddenAttrs[name]; !hidden {
			// This attribute wasn't consumed by PartialContent, so it's
			// unexpected. Suggest a schema attribute that isn't already
			// defined, in case the user made a typo.
			var suggestions []string
			for _, attrS := range schema.Attributes {
				if _, defined := content.Attributes[attrS.Name]; defined {
					continue
				}
				suggestions = append(suggestions, attrS.Name)
			}
			suggestion := nameSuggestion(name, suggestions)
			if suggestion != "" {
				suggestion = fmt.Sprintf(" Did you mean %q?", suggestion)
			} else {
				// Is there a block of the same name?
				for _, blockS := range schema.Blocks {
					if blockS.Type == name {
						suggestion = fmt.Sprintf(" Did you mean to define a block of type %q?", name)
						break
					}
				}
			}

			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Unsupported attribute",
				Detail:   fmt.Sprintf("An attribute named %q is not expected here.%s", name, suggestion),
				Subject:  &attr.NameRange,
			})
		}
	}

	for _, block := range b.Blocks {
		blockTy := block.Type
		if _, hidden := remain.hiddenBlocks[blockTy]; !hidden {
			// This block type wasn't consumed by PartialContent. Suggest
			// a similarly-named block type, or an attribute of the same
			// name, in case of a typo.
			var suggestions []string
			for _, blockS := range schema.Blocks {
				suggestions = append(suggestions, blockS.Type)
			}
			suggestion := nameSuggestion(blockTy, suggestions)
			if suggestion != "" {
				suggestion = fmt.Sprintf(" Did you mean %q?", suggestion)
			} else {
				// Is there an attribute of the same name?
				for _, attrS := range schema.Attributes {
					if attrS.Name == blockTy {
						suggestion = fmt.Sprintf(" Did you mean to define attribute %q?", blockTy)
						break
					}
				}
			}

			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Unsupported block type",
				Detail:   fmt.Sprintf("Blocks of type %q are not expected here.%s", blockTy, suggestion),
				Subject:  &block.TypeRange,
			})
		}
	}

	return content, diags
}
123 | |||
// PartialContent extracts the content described by the given schema and
// returns it along with a "remaining" body containing everything not
// consumed. Unlike Content, it produces no diagnostics for extraneous
// items, so it can be called repeatedly with different schemas.
func (b *Body) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) {
	attrs := make(hcl.Attributes)
	var blocks hcl.Blocks
	var diags hcl.Diagnostics
	hiddenAttrs := make(map[string]struct{})
	hiddenBlocks := make(map[string]struct{})

	// Carry over anything already hidden by a previous PartialContent
	// call on an earlier "remaining" body.
	if b.hiddenAttrs != nil {
		for k, v := range b.hiddenAttrs {
			hiddenAttrs[k] = v
		}
	}
	if b.hiddenBlocks != nil {
		for k, v := range b.hiddenBlocks {
			hiddenBlocks[k] = v
		}
	}

	for _, attrS := range schema.Attributes {
		name := attrS.Name
		attr, exists := b.Attributes[name]
		_, hidden := hiddenAttrs[name]
		if hidden || !exists {
			if attrS.Required {
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Missing required attribute",
					Detail:   fmt.Sprintf("The attribute %q is required, but no definition was found.", attrS.Name),
					Subject:  b.MissingItemRange().Ptr(),
				})
			}
			continue
		}

		// Consume the attribute: hide it from the remaining body and
		// include it in the returned content.
		hiddenAttrs[name] = struct{}{}
		attrs[name] = attr.AsHCLAttribute()
	}

	blocksWanted := make(map[string]hcl.BlockHeaderSchema)
	for _, blockS := range schema.Blocks {
		blocksWanted[blockS.Type] = blockS
	}

	for _, block := range b.Blocks {
		if _, hidden := hiddenBlocks[block.Type]; hidden {
			continue
		}
		blockS, wanted := blocksWanted[block.Type]
		if !wanted {
			continue
		}

		// Too many labels: report at the first extraneous label.
		if len(block.Labels) > len(blockS.LabelNames) {
			name := block.Type
			if len(blockS.LabelNames) == 0 {
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  fmt.Sprintf("Extraneous label for %s", name),
					Detail: fmt.Sprintf(
						"No labels are expected for %s blocks.", name,
					),
					Subject: block.LabelRanges[0].Ptr(),
					Context: hcl.RangeBetween(block.TypeRange, block.OpenBraceRange).Ptr(),
				})
			} else {
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  fmt.Sprintf("Extraneous label for %s", name),
					Detail: fmt.Sprintf(
						"Only %d labels (%s) are expected for %s blocks.",
						len(blockS.LabelNames), strings.Join(blockS.LabelNames, ", "), name,
					),
					Subject: block.LabelRanges[len(blockS.LabelNames)].Ptr(),
					Context: hcl.RangeBetween(block.TypeRange, block.OpenBraceRange).Ptr(),
				})
			}
			continue
		}

		// Too few labels: report at the open brace, since there is no
		// label token to point at.
		if len(block.Labels) < len(blockS.LabelNames) {
			name := block.Type
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  fmt.Sprintf("Missing %s for %s", blockS.LabelNames[len(block.Labels)], name),
				Detail: fmt.Sprintf(
					"All %s blocks must have %d labels (%s).",
					name, len(blockS.LabelNames), strings.Join(blockS.LabelNames, ", "),
				),
				Subject: &block.OpenBraceRange,
				Context: hcl.RangeBetween(block.TypeRange, block.OpenBraceRange).Ptr(),
			})
			continue
		}

		blocks = append(blocks, block.AsHCLBlock())
	}

	// We hide blocks only after we've processed all of them, since otherwise
	// we can't process more than one of the same type.
	for _, blockS := range schema.Blocks {
		hiddenBlocks[blockS.Type] = struct{}{}
	}

	remain := &Body{
		Attributes: b.Attributes,
		Blocks:     b.Blocks,

		hiddenAttrs:  hiddenAttrs,
		hiddenBlocks: hiddenBlocks,

		SrcRange: b.SrcRange,
		EndRange: b.EndRange,
	}

	return &hcl.BodyContent{
		Attributes: attrs,
		Blocks:     blocks,

		MissingItemRange: b.MissingItemRange(),
	}, remain, diags
}
245 | |||
246 | func (b *Body) JustAttributes() (hcl.Attributes, hcl.Diagnostics) { | ||
247 | attrs := make(hcl.Attributes) | ||
248 | var diags hcl.Diagnostics | ||
249 | |||
250 | if len(b.Blocks) > 0 { | ||
251 | example := b.Blocks[0] | ||
252 | diags = append(diags, &hcl.Diagnostic{ | ||
253 | Severity: hcl.DiagError, | ||
254 | Summary: fmt.Sprintf("Unexpected %s block", example.Type), | ||
255 | Detail: "Blocks are not allowed here.", | ||
256 | Context: &example.TypeRange, | ||
257 | }) | ||
258 | // we will continue processing anyway, and return the attributes | ||
259 | // we are able to find so that certain analyses can still be done | ||
260 | // in the face of errors. | ||
261 | } | ||
262 | |||
263 | if b.Attributes == nil { | ||
264 | return attrs, diags | ||
265 | } | ||
266 | |||
267 | for name, attr := range b.Attributes { | ||
268 | if _, hidden := b.hiddenAttrs[name]; hidden { | ||
269 | continue | ||
270 | } | ||
271 | attrs[name] = attr.AsHCLAttribute() | ||
272 | } | ||
273 | |||
274 | return attrs, diags | ||
275 | } | ||
276 | |||
// MissingItemRange returns the range where a missing item (such as a
// required attribute that wasn't found) should be reported: the final
// token of the body.
func (b *Body) MissingItemRange() hcl.Range {
	return b.EndRange
}
280 | |||
// Attributes is the collection of attribute definitions within a body.
type Attributes map[string]*Attribute

// walkChildNodes visits each attribute with w, storing back any
// replacement nodes that w returns.
func (a Attributes) walkChildNodes(w internalWalkFunc) {
	for k, attr := range a {
		a[k] = w(attr).(*Attribute)
	}
}
289 | |||
290 | // Range returns the range of some arbitrary point within the set of | ||
291 | // attributes, or an invalid range if there are no attributes. | ||
292 | // | ||
293 | // This is provided only to complete the Node interface, but has no practical | ||
294 | // use. | ||
295 | func (a Attributes) Range() hcl.Range { | ||
296 | // An attributes doesn't really have a useful range to report, since | ||
297 | // it's just a grouping construct. So we'll arbitrarily take the | ||
298 | // range of one of the attributes, or produce an invalid range if we have | ||
299 | // none. In practice, there's little reason to ask for the range of | ||
300 | // an Attributes. | ||
301 | for _, attr := range a { | ||
302 | return attr.Range() | ||
303 | } | ||
304 | return hcl.Range{ | ||
305 | Filename: "<unknown>", | ||
306 | } | ||
307 | } | ||
308 | |||
// Attribute represents a single attribute definition within a body.
type Attribute struct {
	Name string
	Expr Expression

	SrcRange    hcl.Range
	NameRange   hcl.Range
	EqualsRange hcl.Range
}

// walkChildNodes visits the attribute's expression with w, storing back
// any replacement node that w returns.
func (a *Attribute) walkChildNodes(w internalWalkFunc) {
	a.Expr = w(a.Expr).(Expression)
}

// Range returns the source range of the entire attribute definition.
func (a *Attribute) Range() hcl.Range {
	return a.SrcRange
}
326 | |||
327 | // AsHCLAttribute returns the block data expressed as a *hcl.Attribute. | ||
328 | func (a *Attribute) AsHCLAttribute() *hcl.Attribute { | ||
329 | return &hcl.Attribute{ | ||
330 | Name: a.Name, | ||
331 | Expr: a.Expr, | ||
332 | |||
333 | Range: a.SrcRange, | ||
334 | NameRange: a.NameRange, | ||
335 | } | ||
336 | } | ||
337 | |||
// Blocks is the list of nested blocks within a body.
type Blocks []*Block

// walkChildNodes visits each block with w, storing back any replacement
// nodes that w returns.
func (bs Blocks) walkChildNodes(w internalWalkFunc) {
	for i, block := range bs {
		bs[i] = w(block).(*Block)
	}
}
346 | |||
347 | // Range returns the range of some arbitrary point within the list of | ||
348 | // blocks, or an invalid range if there are no blocks. | ||
349 | // | ||
350 | // This is provided only to complete the Node interface, but has no practical | ||
351 | // use. | ||
352 | func (bs Blocks) Range() hcl.Range { | ||
353 | if len(bs) > 0 { | ||
354 | return bs[0].Range() | ||
355 | } | ||
356 | return hcl.Range{ | ||
357 | Filename: "<unknown>", | ||
358 | } | ||
359 | } | ||
360 | |||
// Block represents a nested block structure within a body.
type Block struct {
	Type   string
	Labels []string
	Body   *Body

	TypeRange       hcl.Range
	LabelRanges     []hcl.Range
	OpenBraceRange  hcl.Range
	CloseBraceRange hcl.Range
}

// walkChildNodes visits the block's body with w, storing back any
// replacement node that w returns.
func (b *Block) walkChildNodes(w internalWalkFunc) {
	b.Body = w(b.Body).(*Body)
}

// Range returns the source range from the block's type keyword through
// its closing brace.
func (b *Block) Range() hcl.Range {
	return hcl.RangeBetween(b.TypeRange, b.CloseBraceRange)
}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/token.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/token.go new file mode 100644 index 0000000..bcaa15f --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/token.go | |||
@@ -0,0 +1,272 @@ | |||
1 | package hclsyntax | ||
2 | |||
3 | import ( | ||
4 | "fmt" | ||
5 | |||
6 | "github.com/apparentlymart/go-textseg/textseg" | ||
7 | "github.com/hashicorp/hcl2/hcl" | ||
8 | ) | ||
9 | |||
// Token represents a sequence of bytes from some HCL code that has been
// tagged with a type and its range within the source file.
type Token struct {
	Type  TokenType
	Bytes []byte
	Range hcl.Range
}

// Tokens is a slice of Token.
type Tokens []Token

// TokenType is an enumeration used for the Type field on Token.
type TokenType rune
23 | |||
const (
	// Single-character tokens are represented by their own character, for
	// convenience in producing these within the scanner. However, the values
	// are otherwise arbitrary and just intended to be mnemonic for humans
	// who might see them in debug output.

	// Bracketing and quoting delimiters.
	TokenOBrace   TokenType = '{'
	TokenCBrace   TokenType = '}'
	TokenOBrack   TokenType = '['
	TokenCBrack   TokenType = ']'
	TokenOParen   TokenType = '('
	TokenCParen   TokenType = ')'
	TokenOQuote   TokenType = '«'
	TokenCQuote   TokenType = '»'
	TokenOHeredoc TokenType = 'H'
	TokenCHeredoc TokenType = 'h'

	// Arithmetic operators.
	TokenStar    TokenType = '*'
	TokenSlash   TokenType = '/'
	TokenPlus    TokenType = '+'
	TokenMinus   TokenType = '-'
	TokenPercent TokenType = '%'

	// Assignment and comparison operators.
	TokenEqual         TokenType = '='
	TokenEqualOp       TokenType = '≔'
	TokenNotEqual      TokenType = '≠'
	TokenLessThan      TokenType = '<'
	TokenLessThanEq    TokenType = '≤'
	TokenGreaterThan   TokenType = '>'
	TokenGreaterThanEq TokenType = '≥'

	// Logical operators.
	TokenAnd  TokenType = '∧'
	TokenOr   TokenType = '∨'
	TokenBang TokenType = '!'

	TokenDot   TokenType = '.'
	TokenComma TokenType = ','

	TokenEllipsis TokenType = '…'
	TokenFatArrow TokenType = '⇒'

	TokenQuestion TokenType = '?'
	TokenColon    TokenType = ':'

	// Template sequence delimiters.
	TokenTemplateInterp  TokenType = '∫'
	TokenTemplateControl TokenType = 'λ'
	TokenTemplateSeqEnd  TokenType = '∎'

	// Literals and identifiers.
	TokenQuotedLit TokenType = 'Q' // might contain backslash escapes
	TokenStringLit TokenType = 'S' // cannot contain backslash escapes
	TokenNumberLit TokenType = 'N'
	TokenIdent     TokenType = 'I'

	TokenComment TokenType = 'C'

	TokenNewline TokenType = '\n'
	TokenEOF     TokenType = '␄'

	// The rest are not used in the language but recognized by the scanner so
	// we can generate good diagnostics in the parser when users try to write
	// things that might work in other languages they are familiar with, or
	// simply make incorrect assumptions about the HCL language.

	TokenBitwiseAnd TokenType = '&'
	TokenBitwiseOr  TokenType = '|'
	TokenBitwiseNot TokenType = '~'
	TokenBitwiseXor TokenType = '^'
	TokenStarStar   TokenType = '➚'
	TokenBacktick   TokenType = '`'
	TokenSemicolon  TokenType = ';'
	TokenTabs       TokenType = '␉'
	TokenInvalid    TokenType = '�'
	TokenBadUTF8    TokenType = '💩'

	// TokenNil is a placeholder for when a token is required but none is
	// available, e.g. when reporting errors. The scanner will never produce
	// this as part of a token stream.
	TokenNil TokenType = '\x00'
)
103 | |||
104 | func (t TokenType) GoString() string { | ||
105 | return fmt.Sprintf("hclsyntax.%s", t.String()) | ||
106 | } | ||
107 | |||
// scanMode selects between the scanner's operating modes.
type scanMode int

const (
	scanNormal scanMode = iota
	scanTemplate
	scanIdentOnly
)

// tokenAccum accumulates tokens emitted during scanning, tracking the
// current source position (Pos) as tokens are appended.
type tokenAccum struct {
	Filename string
	Bytes    []byte
	Pos      hcl.Pos
	Tokens   []Token
}
122 | |||
// emitToken appends a token of the given type covering the source bytes
// between startOfs and endOfs, computing its source range and advancing
// the accumulator's position to the end of the token.
func (f *tokenAccum) emitToken(ty TokenType, startOfs, endOfs int) {
	// Walk through our buffer to figure out how much we need to adjust
	// the start pos to get our end pos.

	start := f.Pos
	start.Column += startOfs - f.Pos.Byte // Safe because only ASCII spaces can be in the offset
	start.Byte = startOfs

	end := start
	end.Byte = endOfs
	b := f.Bytes[startOfs:endOfs]
	for len(b) > 0 {
		// Advance one grapheme cluster at a time so that columns count
		// visible characters rather than bytes.
		advance, seq, _ := textseg.ScanGraphemeClusters(b, true)
		if (len(seq) == 1 && seq[0] == '\n') || (len(seq) == 2 && seq[0] == '\r' && seq[1] == '\n') {
			// Both LF and CRLF line endings reset the column and bump the line.
			end.Line++
			end.Column = 1
		} else {
			end.Column++
		}
		b = b[advance:]
	}

	f.Pos = end

	f.Tokens = append(f.Tokens, Token{
		Type:  ty,
		Bytes: f.Bytes[startOfs:endOfs],
		Range: hcl.Range{
			Filename: f.Filename,
			Start:    start,
			End:      end,
		},
	})
}
157 | |||
// heredocInProgress holds scanner state for a heredoc that has been
// opened but not yet terminated.
type heredocInProgress struct {
	Marker      []byte // presumably the heredoc's terminating marker bytes — confirm against scanner usage
	StartOfLine bool
}
162 | |||
163 | // checkInvalidTokens does a simple pass across the given tokens and generates | ||
164 | // diagnostics for tokens that should _never_ appear in HCL source. This | ||
165 | // is intended to avoid the need for the parser to have special support | ||
166 | // for them all over. | ||
167 | // | ||
168 | // Returns a diagnostics with no errors if everything seems acceptable. | ||
169 | // Otherwise, returns zero or more error diagnostics, though tries to limit | ||
170 | // repetition of the same information. | ||
171 | func checkInvalidTokens(tokens Tokens) hcl.Diagnostics { | ||
172 | var diags hcl.Diagnostics | ||
173 | |||
174 | toldBitwise := 0 | ||
175 | toldExponent := 0 | ||
176 | toldBacktick := 0 | ||
177 | toldSemicolon := 0 | ||
178 | toldTabs := 0 | ||
179 | toldBadUTF8 := 0 | ||
180 | |||
181 | for _, tok := range tokens { | ||
182 | switch tok.Type { | ||
183 | case TokenBitwiseAnd, TokenBitwiseOr, TokenBitwiseXor, TokenBitwiseNot: | ||
184 | if toldBitwise < 4 { | ||
185 | var suggestion string | ||
186 | switch tok.Type { | ||
187 | case TokenBitwiseAnd: | ||
188 | suggestion = " Did you mean boolean AND (\"&&\")?" | ||
189 | case TokenBitwiseOr: | ||
190 | suggestion = " Did you mean boolean OR (\"&&\")?" | ||
191 | case TokenBitwiseNot: | ||
192 | suggestion = " Did you mean boolean NOT (\"!\")?" | ||
193 | } | ||
194 | |||
195 | diags = append(diags, &hcl.Diagnostic{ | ||
196 | Severity: hcl.DiagError, | ||
197 | Summary: "Unsupported operator", | ||
198 | Detail: fmt.Sprintf("Bitwise operators are not supported.%s", suggestion), | ||
199 | Subject: &tok.Range, | ||
200 | }) | ||
201 | toldBitwise++ | ||
202 | } | ||
203 | case TokenStarStar: | ||
204 | if toldExponent < 1 { | ||
205 | diags = append(diags, &hcl.Diagnostic{ | ||
206 | Severity: hcl.DiagError, | ||
207 | Summary: "Unsupported operator", | ||
208 | Detail: "\"**\" is not a supported operator. Exponentiation is not supported as an operator.", | ||
209 | Subject: &tok.Range, | ||
210 | }) | ||
211 | |||
212 | toldExponent++ | ||
213 | } | ||
214 | case TokenBacktick: | ||
215 | // Only report for alternating (even) backticks, so we won't report both start and ends of the same | ||
216 | // backtick-quoted string. | ||
217 | if toldExponent < 4 && (toldExponent%2) == 0 { | ||
218 | diags = append(diags, &hcl.Diagnostic{ | ||
219 | Severity: hcl.DiagError, | ||
220 | Summary: "Invalid character", | ||
221 | Detail: "The \"`\" character is not valid. To create a multi-line string, use the \"heredoc\" syntax, like \"<<EOT\".", | ||
222 | Subject: &tok.Range, | ||
223 | }) | ||
224 | |||
225 | toldBacktick++ | ||
226 | } | ||
227 | case TokenSemicolon: | ||
228 | if toldSemicolon < 1 { | ||
229 | diags = append(diags, &hcl.Diagnostic{ | ||
230 | Severity: hcl.DiagError, | ||
231 | Summary: "Invalid character", | ||
232 | Detail: "The \";\" character is not valid. Use newlines to separate attributes and blocks, and commas to separate items in collection values.", | ||
233 | Subject: &tok.Range, | ||
234 | }) | ||
235 | |||
236 | toldSemicolon++ | ||
237 | } | ||
238 | case TokenTabs: | ||
239 | if toldTabs < 1 { | ||
240 | diags = append(diags, &hcl.Diagnostic{ | ||
241 | Severity: hcl.DiagError, | ||
242 | Summary: "Invalid character", | ||
243 | Detail: "Tab characters may not be used. The recommended indentation style is two spaces per indent.", | ||
244 | Subject: &tok.Range, | ||
245 | }) | ||
246 | |||
247 | toldTabs++ | ||
248 | } | ||
249 | case TokenBadUTF8: | ||
250 | if toldBadUTF8 < 1 { | ||
251 | diags = append(diags, &hcl.Diagnostic{ | ||
252 | Severity: hcl.DiagError, | ||
253 | Summary: "Invalid character encoding", | ||
254 | Detail: "All input files must be UTF-8 encoded. Ensure that UTF-8 encoding is selected in your editor.", | ||
255 | Subject: &tok.Range, | ||
256 | }) | ||
257 | |||
258 | toldBadUTF8++ | ||
259 | } | ||
260 | case TokenInvalid: | ||
261 | diags = append(diags, &hcl.Diagnostic{ | ||
262 | Severity: hcl.DiagError, | ||
263 | Summary: "Invalid character", | ||
264 | Detail: "This character is not used within the language.", | ||
265 | Subject: &tok.Range, | ||
266 | }) | ||
267 | |||
268 | toldTabs++ | ||
269 | } | ||
270 | } | ||
271 | return diags | ||
272 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/token_type_string.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/token_type_string.go new file mode 100644 index 0000000..93de7ee --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/token_type_string.go | |||
@@ -0,0 +1,69 @@ | |||
1 | // Code generated by "stringer -type TokenType -output token_type_string.go"; DO NOT EDIT. | ||
2 | |||
3 | package hclsyntax | ||
4 | |||
5 | import "strconv" | ||
6 | |||
// _TokenType_name concatenates every token type name into one string;
// _TokenType_map below slices into it so all names share one backing
// allocation. (Generated by stringer — edits here are lost on regen.)
const _TokenType_name = "TokenNilTokenNewlineTokenBangTokenPercentTokenBitwiseAndTokenOParenTokenCParenTokenStarTokenPlusTokenCommaTokenMinusTokenDotTokenSlashTokenColonTokenSemicolonTokenLessThanTokenEqualTokenGreaterThanTokenQuestionTokenCommentTokenOHeredocTokenIdentTokenNumberLitTokenQuotedLitTokenStringLitTokenOBrackTokenCBrackTokenBitwiseXorTokenBacktickTokenCHeredocTokenOBraceTokenBitwiseOrTokenCBraceTokenBitwiseNotTokenOQuoteTokenCQuoteTokenTemplateControlTokenEllipsisTokenFatArrowTokenTemplateSeqEndTokenAndTokenOrTokenTemplateInterpTokenEqualOpTokenNotEqualTokenLessThanEqTokenGreaterThanEqTokenEOFTokenTabsTokenStarStarTokenInvalidTokenBadUTF8"

// _TokenType_map maps each TokenType value (a rune; keys here are the
// rune codepoints) to the slice of _TokenType_name holding its name.
var _TokenType_map = map[TokenType]string{
	0: _TokenType_name[0:8],
	10: _TokenType_name[8:20],
	33: _TokenType_name[20:29],
	37: _TokenType_name[29:41],
	38: _TokenType_name[41:56],
	40: _TokenType_name[56:67],
	41: _TokenType_name[67:78],
	42: _TokenType_name[78:87],
	43: _TokenType_name[87:96],
	44: _TokenType_name[96:106],
	45: _TokenType_name[106:116],
	46: _TokenType_name[116:124],
	47: _TokenType_name[124:134],
	58: _TokenType_name[134:144],
	59: _TokenType_name[144:158],
	60: _TokenType_name[158:171],
	61: _TokenType_name[171:181],
	62: _TokenType_name[181:197],
	63: _TokenType_name[197:210],
	67: _TokenType_name[210:222],
	72: _TokenType_name[222:235],
	73: _TokenType_name[235:245],
	78: _TokenType_name[245:259],
	81: _TokenType_name[259:273],
	83: _TokenType_name[273:287],
	91: _TokenType_name[287:298],
	93: _TokenType_name[298:309],
	94: _TokenType_name[309:324],
	96: _TokenType_name[324:337],
	104: _TokenType_name[337:350],
	123: _TokenType_name[350:361],
	124: _TokenType_name[361:375],
	125: _TokenType_name[375:386],
	126: _TokenType_name[386:401],
	171: _TokenType_name[401:412],
	187: _TokenType_name[412:423],
	955: _TokenType_name[423:443],
	8230: _TokenType_name[443:456],
	8658: _TokenType_name[456:469],
	8718: _TokenType_name[469:488],
	8743: _TokenType_name[488:496],
	8744: _TokenType_name[496:503],
	8747: _TokenType_name[503:522],
	8788: _TokenType_name[522:534],
	8800: _TokenType_name[534:547],
	8804: _TokenType_name[547:562],
	8805: _TokenType_name[562:580],
	9220: _TokenType_name[580:588],
	9225: _TokenType_name[588:597],
	10138: _TokenType_name[597:610],
	65533: _TokenType_name[610:622],
	128169: _TokenType_name[622:634],
}
63 | |||
// String returns the name of the token type (e.g. "TokenIdent"), or a
// fallback of the form "TokenType(n)" for values with no entry in the
// generated lookup table. (Generated by stringer — do not hand-edit.)
func (i TokenType) String() string {
	if str, ok := _TokenType_map[i]; ok {
		return str
	}
	return "TokenType(" + strconv.FormatInt(int64(i), 10) + ")"
}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/unicode2ragel.rb b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/unicode2ragel.rb new file mode 100644 index 0000000..422e4e5 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/unicode2ragel.rb | |||
@@ -0,0 +1,335 @@ | |||
1 | #!/usr/bin/env ruby | ||
2 | # | ||
# This script has been updated to accept more command-line arguments:
4 | # | ||
5 | # -u, --url URL to process | ||
6 | # -m, --machine Machine name | ||
7 | # -p, --properties Properties to add to the machine | ||
8 | # -o, --output Write output to file | ||
9 | # | ||
10 | # Updated by: Marty Schoch <marty.schoch@gmail.com> | ||
11 | # | ||
12 | # This script uses the unicode spec to generate a Ragel state machine | ||
13 | # that recognizes unicode alphanumeric characters. It generates 5 | ||
14 | # character classes: uupper, ulower, ualpha, udigit, and ualnum. | ||
15 | # Currently supported encodings are UTF-8 [default] and UCS-4. | ||
16 | # | ||
17 | # Usage: unicode2ragel.rb [options] | ||
18 | # -e, --encoding [ucs4 | utf8] Data encoding | ||
19 | # -h, --help Show this message | ||
20 | # | ||
21 | # This script was originally written as part of the Ferret search | ||
22 | # engine library. | ||
23 | # | ||
24 | # Author: Rakan El-Khalil <rakan@well.com> | ||
25 | |||
26 | require 'optparse' | ||
27 | require 'open-uri' | ||
28 | |||
# Supported input encodings, and the Ragel alphtype emitted for each.
ENCODINGS = [ :utf8, :ucs4 ]
ALPHTYPES = { :utf8 => "byte", :ucs4 => "rune" }
# Unicode data file processed when no --url is given.
DEFAULT_CHART_URL = "http://www.unicode.org/Public/5.1.0/ucd/DerivedCoreProperties.txt"
DEFAULT_MACHINE_NAME= "WChar"

###
# Display vars & default option

TOTAL_WIDTH = 80 # target width of each generated output line
RANGE_WIDTH = 23 # minimum column width reserved for a range expression
@encoding = :utf8
@chart_url = DEFAULT_CHART_URL
machine_name = DEFAULT_MACHINE_NAME
properties = []
@output = $stdout # replaced by a File when --output is given
44 | |||
45 | ### | ||
46 | # Option parsing | ||
47 | |||
48 | cli_opts = OptionParser.new do |opts| | ||
49 | opts.on("-e", "--encoding [ucs4 | utf8]", "Data encoding") do |o| | ||
50 | @encoding = o.downcase.to_sym | ||
51 | end | ||
52 | opts.on("-h", "--help", "Show this message") do | ||
53 | puts opts | ||
54 | exit | ||
55 | end | ||
56 | opts.on("-u", "--url URL", "URL to process") do |o| | ||
57 | @chart_url = o | ||
58 | end | ||
59 | opts.on("-m", "--machine MACHINE_NAME", "Machine name") do |o| | ||
60 | machine_name = o | ||
61 | end | ||
62 | opts.on("-p", "--properties x,y,z", Array, "Properties to add to machine") do |o| | ||
63 | properties = o | ||
64 | end | ||
65 | opts.on("-o", "--output FILE", "output file") do |o| | ||
66 | @output = File.new(o, "w+") | ||
67 | end | ||
68 | end | ||
69 | |||
70 | cli_opts.parse(ARGV) | ||
71 | unless ENCODINGS.member? @encoding | ||
72 | puts "Invalid encoding: #{@encoding}" | ||
73 | puts cli_opts | ||
74 | exit | ||
75 | end | ||
76 | |||
77 | ## | ||
78 | # Downloads the document at url and yields every alpha line's hex | ||
79 | # range and description. | ||
80 | |||
81 | def each_alpha( url, property ) | ||
82 | open( url ) do |file| | ||
83 | file.each_line do |line| | ||
84 | next if line =~ /^#/; | ||
85 | next if line !~ /; #{property} #/; | ||
86 | |||
87 | range, description = line.split(/;/) | ||
88 | range.strip! | ||
89 | description.gsub!(/.*#/, '').strip! | ||
90 | |||
91 | if range =~ /\.\./ | ||
92 | start, stop = range.split '..' | ||
93 | else start = stop = range | ||
94 | end | ||
95 | |||
96 | yield start.hex .. stop.hex, description | ||
97 | end | ||
98 | end | ||
99 | end | ||
100 | |||
101 | ### | ||
102 | # Formats to hex at minimum width | ||
103 | |||
104 | def to_hex( n ) | ||
105 | r = "%0X" % n | ||
106 | r = "0#{r}" unless (r.length % 2).zero? | ||
107 | r | ||
108 | end | ||
109 | |||
110 | ### | ||
111 | # UCS4 is just a straight hex conversion of the unicode codepoint. | ||
112 | |||
113 | def to_ucs4( range ) | ||
114 | rangestr = "0x" + to_hex(range.begin) | ||
115 | rangestr << "..0x" + to_hex(range.end) if range.begin != range.end | ||
116 | [ rangestr ] | ||
117 | end | ||
118 | |||
119 | ## | ||
120 | # 0x00 - 0x7f -> 0zzzzzzz[7] | ||
121 | # 0x80 - 0x7ff -> 110yyyyy[5] 10zzzzzz[6] | ||
122 | # 0x800 - 0xffff -> 1110xxxx[4] 10yyyyyy[6] 10zzzzzz[6] | ||
123 | # 0x010000 - 0x10ffff -> 11110www[3] 10xxxxxx[6] 10yyyyyy[6] 10zzzzzz[6] | ||
124 | |||
125 | UTF8_BOUNDARIES = [0x7f, 0x7ff, 0xffff, 0x10ffff] | ||
126 | |||
127 | def to_utf8_enc( n ) | ||
128 | r = 0 | ||
129 | if n <= 0x7f | ||
130 | r = n | ||
131 | elsif n <= 0x7ff | ||
132 | y = 0xc0 | (n >> 6) | ||
133 | z = 0x80 | (n & 0x3f) | ||
134 | r = y << 8 | z | ||
135 | elsif n <= 0xffff | ||
136 | x = 0xe0 | (n >> 12) | ||
137 | y = 0x80 | (n >> 6) & 0x3f | ||
138 | z = 0x80 | n & 0x3f | ||
139 | r = x << 16 | y << 8 | z | ||
140 | elsif n <= 0x10ffff | ||
141 | w = 0xf0 | (n >> 18) | ||
142 | x = 0x80 | (n >> 12) & 0x3f | ||
143 | y = 0x80 | (n >> 6) & 0x3f | ||
144 | z = 0x80 | n & 0x3f | ||
145 | r = w << 24 | x << 16 | y << 8 | z | ||
146 | end | ||
147 | |||
148 | to_hex(r) | ||
149 | end | ||
150 | |||
##
# Inverse of to_utf8_enc: takes a packed big-endian UTF-8 byte sequence
# expressed as a hex string and recovers the original codepoint by
# stripping the UTF-8 framing bits from each byte.
#
# FIX: the 3-byte branch previously combined the payload as
# `x << 10 | y << 6 | z`; per the UTF-8 layout (1110xxxx 10yyyyyy
# 10zzzzzz) the leading 4 bits sit above two 6-bit groups, so the shift
# must be 12. (Dormant bug: only reachable via the commented-out
# is_valid? check in generate_machine.)

def from_utf8_enc( n )
  n = n.hex
  r = 0
  if n <= 0x7f
    # 1 byte: 0zzzzzzz
    r = n
  elsif n <= 0xdfff
    # 2 bytes: 110yyyyy 10zzzzzz
    y = (n >> 8) & 0x1f
    z = n & 0x3f
    r = y << 6 | z
  elsif n <= 0xefffff
    # 3 bytes: 1110xxxx 10yyyyyy 10zzzzzz
    x = (n >> 16) & 0x0f
    y = (n >> 8) & 0x3f
    z = n & 0x3f
    r = x << 12 | y << 6 | z
  elsif n <= 0xf7ffffff
    # 4 bytes: 11110www 10xxxxxx 10yyyyyy 10zzzzzz
    w = (n >> 24) & 0x07
    x = (n >> 16) & 0x3f
    y = (n >> 8) & 0x3f
    z = n & 0x3f
    r = w << 18 | x << 12 | y << 6 | z
  end
  r
end
174 | |||
175 | ### | ||
176 | # Given a range, splits it up into ranges that can be continuously | ||
177 | # encoded into utf8. Eg: 0x00 .. 0xff => [0x00..0x7f, 0x80..0xff] | ||
178 | # This is not strictly needed since the current [5.1] unicode standard | ||
179 | # doesn't have ranges that straddle utf8 boundaries. This is included | ||
180 | # for completeness as there is no telling if that will ever change. | ||
181 | |||
182 | def utf8_ranges( range ) | ||
183 | ranges = [] | ||
184 | UTF8_BOUNDARIES.each do |max| | ||
185 | if range.begin <= max | ||
186 | if range.end <= max | ||
187 | ranges << range | ||
188 | return ranges | ||
189 | end | ||
190 | |||
191 | ranges << (range.begin .. max) | ||
192 | range = (max + 1) .. range.end | ||
193 | end | ||
194 | end | ||
195 | ranges | ||
196 | end | ||
197 | |||
# Recursively renders the inclusive byte range [start, stop] — each a hex
# string with one two-digit pair per byte — as a flat list of Ragel
# byte-range expressions (one string per output line).
def build_range( start, stop )
  size = start.size/2
  left = size - 1
  return [""] if size < 1

  # Leading byte of each bound.
  a = start[0..1]
  b = stop[0..1]

  ###
  # Shared prefix

  if a == b
    return build_range(start[2..-1], stop[2..-1]).map do |elt|
      "0x#{a} " + elt
    end
  end

  ###
  # Unshared prefix, end of run

  return ["0x#{a}..0x#{b} "] if left.zero?

  ###
  # Unshared prefix, not end of run
  # Range can be 0x123456..0x56789A
  # Which is equivalent to:
  # 0x123456 .. 0x12FFFF
  # 0x130000 .. 0x55FFFF
  # 0x560000 .. 0x56789A

  ret = []
  ret << build_range(start, a + "FF" * left)

  ###
  # Only generate middle range if need be.

  if a.hex+1 != b.hex
    max = to_hex(b.hex - 1)
    # NOTE(review): when b == "FF" the middle range runs all the way up,
    # since the final range below is skipped in that case — confirm.
    max = "FF" if b == "FF"
    ret << "0x#{to_hex(a.hex+1)}..0x#{max} " + "0x00..0xFF " * left
  end

  ###
  # Don't generate last range if it is covered by first range

  ret << build_range(b + "00" * left, stop) unless b == "FF"
  # flatten! returns the flattened array (nil only when already flat;
  # ret always holds at least one nested array here, so this is safe).
  ret.flatten!
end
246 | |||
# Renders a codepoint range as UTF-8 Ragel expressions: split the range
# at encoding-length boundaries, encode each piece's bounds, and expand
# them into byte-range lines.
def to_utf8( range )
  lines = []
  utf8_ranges( range ).each do |piece|
    lines.concat( build_range( to_utf8_enc( piece.begin ), to_utf8_enc( piece.end ) ) )
  end
  lines
end
254 | |||
255 | ## | ||
256 | # Perform a 3-way comparison of the number of codepoints advertised by | ||
257 | # the unicode spec for the given range, the originally parsed range, | ||
258 | # and the resulting utf8 encoded range. | ||
259 | |||
260 | def count_codepoints( code ) | ||
261 | code.split(' ').inject(1) do |acc, elt| | ||
262 | if elt =~ /0x(.+)\.\.0x(.+)/ | ||
263 | if @encoding == :utf8 | ||
264 | acc * (from_utf8_enc($2) - from_utf8_enc($1) + 1) | ||
265 | else | ||
266 | acc * ($2.hex - $1.hex + 1) | ||
267 | end | ||
268 | else | ||
269 | acc | ||
270 | end | ||
271 | end | ||
272 | end | ||
273 | |||
# Performs the 3-way comparison: the codepoint count advertised in the
# description (the "[N]" annotation, defaulting to 1), the width of the
# parsed range, and the total covered by the generated expressions must
# all agree.
def is_valid?( range, desc, codes )
  spec_count = 1
  spec_count = $1.to_i if desc =~ /\[(\d+)\]/
  range_count = range.end - range.begin + 1

  sum = codes.inject(0) { |acc, elt| acc + count_codepoints(elt) }
  sum == spec_count and sum == range_count
end
282 | |||
283 | ## | ||
284 | # Generate the state maching to stdout | ||
285 | |||
286 | def generate_machine( name, property ) | ||
287 | pipe = " " | ||
288 | @output.puts " #{name} = " | ||
289 | each_alpha( @chart_url, property ) do |range, desc| | ||
290 | |||
291 | codes = (@encoding == :ucs4) ? to_ucs4(range) : to_utf8(range) | ||
292 | |||
293 | #raise "Invalid encoding of range #{range}: #{codes.inspect}" unless | ||
294 | # is_valid? range, desc, codes | ||
295 | |||
296 | range_width = codes.map { |a| a.size }.max | ||
297 | range_width = RANGE_WIDTH if range_width < RANGE_WIDTH | ||
298 | |||
299 | desc_width = TOTAL_WIDTH - RANGE_WIDTH - 11 | ||
300 | desc_width -= (range_width - RANGE_WIDTH) if range_width > RANGE_WIDTH | ||
301 | |||
302 | if desc.size > desc_width | ||
303 | desc = desc[0..desc_width - 4] + "..." | ||
304 | end | ||
305 | |||
306 | codes.each_with_index do |r, idx| | ||
307 | desc = "" unless idx.zero? | ||
308 | code = "%-#{range_width}s" % r | ||
309 | @output.puts " #{pipe} #{code} ##{desc}" | ||
310 | pipe = "|" | ||
311 | end | ||
312 | end | ||
313 | @output.puts " ;" | ||
314 | @output.puts "" | ||
315 | end | ||
316 | |||
# Emit the generated Ragel file: provenance header and machine opener,
# one character-class definition per requested property, then the
# closing delimiter. Heredoc contents are part of the output.
@output.puts <<EOF
# The following Ragel file was autogenerated with #{$0}
# from: #{@chart_url}
#
# It defines #{properties}.
#
# To use this, make sure that your alphtype is set to #{ALPHTYPES[@encoding]},
# and that your input is in #{@encoding}.

%%{
  machine #{machine_name};

EOF

properties.each { |x| generate_machine( x, x ) }

@output.puts <<EOF
}%%
EOF
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/unicode_derived.rl b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/unicode_derived.rl new file mode 100644 index 0000000..612ad62 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/unicode_derived.rl | |||
@@ -0,0 +1,2135 @@ | |||
1 | # The following Ragel file was autogenerated with unicode2ragel.rb | ||
2 | # from: http://www.unicode.org/Public/9.0.0/ucd/DerivedCoreProperties.txt | ||
3 | # | ||
4 | # It defines ["ID_Start", "ID_Continue"]. | ||
5 | # | ||
6 | # To use this, make sure that your alphtype is set to byte, | ||
7 | # and that your input is in utf8. | ||
8 | |||
9 | %%{ | ||
10 | machine UnicodeDerived; | ||
11 | |||
12 | ID_Start = | ||
13 | 0x41..0x5A #L& [26] LATIN CAPITAL LETTER A..LATIN CAPI... | ||
14 | | 0x61..0x7A #L& [26] LATIN SMALL LETTER A..LATIN SMALL ... | ||
15 | | 0xC2 0xAA #Lo FEMININE ORDINAL INDICATOR | ||
16 | | 0xC2 0xB5 #L& MICRO SIGN | ||
17 | | 0xC2 0xBA #Lo MASCULINE ORDINAL INDICATOR | ||
18 | | 0xC3 0x80..0x96 #L& [23] LATIN CAPITAL LETTER A WITH GRAVE.... | ||
19 | | 0xC3 0x98..0xB6 #L& [31] LATIN CAPITAL LETTER O WITH STROKE... | ||
20 | | 0xC3 0xB8..0xFF #L& [195] LATIN SMALL LETTER O WITH STROKE..... | ||
21 | | 0xC4..0xC5 0x00..0xFF # | ||
22 | | 0xC6 0x00..0xBA # | ||
23 | | 0xC6 0xBB #Lo LATIN LETTER TWO WITH STROKE | ||
24 | | 0xC6 0xBC..0xBF #L& [4] LATIN CAPITAL LETTER TONE FIVE..LA... | ||
25 | | 0xC7 0x80..0x83 #Lo [4] LATIN LETTER DENTAL CLICK..LATIN L... | ||
26 | | 0xC7 0x84..0xFF #L& [208] LATIN CAPITAL LETTER DZ WITH CARON... | ||
27 | | 0xC8..0xC9 0x00..0xFF # | ||
28 | | 0xCA 0x00..0x93 # | ||
29 | | 0xCA 0x94 #Lo LATIN LETTER GLOTTAL STOP | ||
30 | | 0xCA 0x95..0xAF #L& [27] LATIN LETTER PHARYNGEAL VOICED FRI... | ||
31 | | 0xCA 0xB0..0xFF #Lm [18] MODIFIER LETTER SMALL H..MODIFIER ... | ||
32 | | 0xCB 0x00..0x81 # | ||
33 | | 0xCB 0x86..0x91 #Lm [12] MODIFIER LETTER CIRCUMFLEX ACCENT.... | ||
34 | | 0xCB 0xA0..0xA4 #Lm [5] MODIFIER LETTER SMALL GAMMA..MODIF... | ||
35 | | 0xCB 0xAC #Lm MODIFIER LETTER VOICING | ||
36 | | 0xCB 0xAE #Lm MODIFIER LETTER DOUBLE APOSTROPHE | ||
37 | | 0xCD 0xB0..0xB3 #L& [4] GREEK CAPITAL LETTER HETA..GREEK S... | ||
38 | | 0xCD 0xB4 #Lm GREEK NUMERAL SIGN | ||
39 | | 0xCD 0xB6..0xB7 #L& [2] GREEK CAPITAL LETTER PAMPHYLIAN DI... | ||
40 | | 0xCD 0xBA #Lm GREEK YPOGEGRAMMENI | ||
41 | | 0xCD 0xBB..0xBD #L& [3] GREEK SMALL REVERSED LUNATE SIGMA ... | ||
42 | | 0xCD 0xBF #L& GREEK CAPITAL LETTER YOT | ||
43 | | 0xCE 0x86 #L& GREEK CAPITAL LETTER ALPHA WITH TONOS | ||
44 | | 0xCE 0x88..0x8A #L& [3] GREEK CAPITAL LETTER EPSILON WITH ... | ||
45 | | 0xCE 0x8C #L& GREEK CAPITAL LETTER OMICRON WITH ... | ||
46 | | 0xCE 0x8E..0xA1 #L& [20] GREEK CAPITAL LETTER UPSILON WITH ... | ||
47 | | 0xCE 0xA3..0xFF #L& [83] GREEK CAPITAL LETTER SIGMA..GREEK ... | ||
48 | | 0xCF 0x00..0xB5 # | ||
49 | | 0xCF 0xB7..0xFF #L& [139] GREEK CAPITAL LETTER SHO..CYRILLIC... | ||
50 | | 0xD0..0xD1 0x00..0xFF # | ||
51 | | 0xD2 0x00..0x81 # | ||
52 | | 0xD2 0x8A..0xFF #L& [166] CYRILLIC CAPITAL LETTER SHORT I WI... | ||
53 | | 0xD3..0xD3 0x00..0xFF # | ||
54 | | 0xD4 0x00..0xAF # | ||
55 | | 0xD4 0xB1..0xFF #L& [38] ARMENIAN CAPITAL LETTER AYB..ARMEN... | ||
56 | | 0xD5 0x00..0x96 # | ||
57 | | 0xD5 0x99 #Lm ARMENIAN MODIFIER LETTER LEFT HALF... | ||
58 | | 0xD5 0xA1..0xFF #L& [39] ARMENIAN SMALL LETTER AYB..ARMENIA... | ||
59 | | 0xD6 0x00..0x87 # | ||
60 | | 0xD7 0x90..0xAA #Lo [27] HEBREW LETTER ALEF..HEBREW LETTER TAV | ||
61 | | 0xD7 0xB0..0xB2 #Lo [3] HEBREW LIGATURE YIDDISH DOUBLE VAV... | ||
62 | | 0xD8 0xA0..0xBF #Lo [32] ARABIC LETTER KASHMIRI YEH..ARABIC... | ||
63 | | 0xD9 0x80 #Lm ARABIC TATWEEL | ||
64 | | 0xD9 0x81..0x8A #Lo [10] ARABIC LETTER FEH..ARABIC LETTER YEH | ||
65 | | 0xD9 0xAE..0xAF #Lo [2] ARABIC LETTER DOTLESS BEH..ARABIC ... | ||
66 | | 0xD9 0xB1..0xFF #Lo [99] ARABIC LETTER ALEF WASLA..ARABIC L... | ||
67 | | 0xDA..0xDA 0x00..0xFF # | ||
68 | | 0xDB 0x00..0x93 # | ||
69 | | 0xDB 0x95 #Lo ARABIC LETTER AE | ||
70 | | 0xDB 0xA5..0xA6 #Lm [2] ARABIC SMALL WAW..ARABIC SMALL YEH | ||
71 | | 0xDB 0xAE..0xAF #Lo [2] ARABIC LETTER DAL WITH INVERTED V.... | ||
72 | | 0xDB 0xBA..0xBC #Lo [3] ARABIC LETTER SHEEN WITH DOT BELOW... | ||
73 | | 0xDB 0xBF #Lo ARABIC LETTER HEH WITH INVERTED V | ||
74 | | 0xDC 0x90 #Lo SYRIAC LETTER ALAPH | ||
75 | | 0xDC 0x92..0xAF #Lo [30] SYRIAC LETTER BETH..SYRIAC LETTER ... | ||
76 | | 0xDD 0x8D..0xFF #Lo [89] SYRIAC LETTER SOGDIAN ZHAIN..THAAN... | ||
77 | | 0xDE 0x00..0xA5 # | ||
78 | | 0xDE 0xB1 #Lo THAANA LETTER NAA | ||
79 | | 0xDF 0x8A..0xAA #Lo [33] NKO LETTER A..NKO LETTER JONA RA | ||
80 | | 0xDF 0xB4..0xB5 #Lm [2] NKO HIGH TONE APOSTROPHE..NKO LOW ... | ||
81 | | 0xDF 0xBA #Lm NKO LAJANYALAN | ||
82 | | 0xE0 0xA0 0x80..0x95 #Lo [22] SAMARITAN LETTER ALAF..SAMARITAN L... | ||
83 | | 0xE0 0xA0 0x9A #Lm SAMARITAN MODIFIER LETTER EPENTHET... | ||
84 | | 0xE0 0xA0 0xA4 #Lm SAMARITAN MODIFIER LETTER SHORT A | ||
85 | | 0xE0 0xA0 0xA8 #Lm SAMARITAN MODIFIER LETTER I | ||
86 | | 0xE0 0xA1 0x80..0x98 #Lo [25] MANDAIC LETTER HALQA..MANDAIC LETT... | ||
87 | | 0xE0 0xA2 0xA0..0xB4 #Lo [21] ARABIC LETTER BEH WITH SMALL V BEL... | ||
88 | | 0xE0 0xA2 0xB6..0xBD #Lo [8] ARABIC LETTER BEH WITH SMALL MEEM ... | ||
89 | | 0xE0 0xA4 0x84..0xB9 #Lo [54] DEVANAGARI LETTER SHORT A..DEVANAG... | ||
90 | | 0xE0 0xA4 0xBD #Lo DEVANAGARI SIGN AVAGRAHA | ||
91 | | 0xE0 0xA5 0x90 #Lo DEVANAGARI OM | ||
92 | | 0xE0 0xA5 0x98..0xA1 #Lo [10] DEVANAGARI LETTER QA..DEVANAGARI L... | ||
93 | | 0xE0 0xA5 0xB1 #Lm DEVANAGARI SIGN HIGH SPACING DOT | ||
94 | | 0xE0 0xA5 0xB2..0xFF #Lo [15] DEVANAGARI LETTER CANDRA A..BENGAL... | ||
95 | | 0xE0 0xA6 0x00..0x80 # | ||
96 | | 0xE0 0xA6 0x85..0x8C #Lo [8] BENGALI LETTER A..BENGALI LETTER V... | ||
97 | | 0xE0 0xA6 0x8F..0x90 #Lo [2] BENGALI LETTER E..BENGALI LETTER AI | ||
98 | | 0xE0 0xA6 0x93..0xA8 #Lo [22] BENGALI LETTER O..BENGALI LETTER NA | ||
99 | | 0xE0 0xA6 0xAA..0xB0 #Lo [7] BENGALI LETTER PA..BENGALI LETTER RA | ||
100 | | 0xE0 0xA6 0xB2 #Lo BENGALI LETTER LA | ||
101 | | 0xE0 0xA6 0xB6..0xB9 #Lo [4] BENGALI LETTER SHA..BENGALI LETTER HA | ||
102 | | 0xE0 0xA6 0xBD #Lo BENGALI SIGN AVAGRAHA | ||
103 | | 0xE0 0xA7 0x8E #Lo BENGALI LETTER KHANDA TA | ||
104 | | 0xE0 0xA7 0x9C..0x9D #Lo [2] BENGALI LETTER RRA..BENGALI LETTER... | ||
105 | | 0xE0 0xA7 0x9F..0xA1 #Lo [3] BENGALI LETTER YYA..BENGALI LETTER... | ||
106 | | 0xE0 0xA7 0xB0..0xB1 #Lo [2] BENGALI LETTER RA WITH MIDDLE DIAG... | ||
107 | | 0xE0 0xA8 0x85..0x8A #Lo [6] GURMUKHI LETTER A..GURMUKHI LETTER UU | ||
108 | | 0xE0 0xA8 0x8F..0x90 #Lo [2] GURMUKHI LETTER EE..GURMUKHI LETTE... | ||
109 | | 0xE0 0xA8 0x93..0xA8 #Lo [22] GURMUKHI LETTER OO..GURMUKHI LETTE... | ||
110 | | 0xE0 0xA8 0xAA..0xB0 #Lo [7] GURMUKHI LETTER PA..GURMUKHI LETTE... | ||
111 | | 0xE0 0xA8 0xB2..0xB3 #Lo [2] GURMUKHI LETTER LA..GURMUKHI LETTE... | ||
112 | | 0xE0 0xA8 0xB5..0xB6 #Lo [2] GURMUKHI LETTER VA..GURMUKHI LETTE... | ||
113 | | 0xE0 0xA8 0xB8..0xB9 #Lo [2] GURMUKHI LETTER SA..GURMUKHI LETTE... | ||
114 | | 0xE0 0xA9 0x99..0x9C #Lo [4] GURMUKHI LETTER KHHA..GURMUKHI LET... | ||
115 | | 0xE0 0xA9 0x9E #Lo GURMUKHI LETTER FA | ||
116 | | 0xE0 0xA9 0xB2..0xB4 #Lo [3] GURMUKHI IRI..GURMUKHI EK ONKAR | ||
117 | | 0xE0 0xAA 0x85..0x8D #Lo [9] GUJARATI LETTER A..GUJARATI VOWEL ... | ||
118 | | 0xE0 0xAA 0x8F..0x91 #Lo [3] GUJARATI LETTER E..GUJARATI VOWEL ... | ||
119 | | 0xE0 0xAA 0x93..0xA8 #Lo [22] GUJARATI LETTER O..GUJARATI LETTER NA | ||
120 | | 0xE0 0xAA 0xAA..0xB0 #Lo [7] GUJARATI LETTER PA..GUJARATI LETTE... | ||
121 | | 0xE0 0xAA 0xB2..0xB3 #Lo [2] GUJARATI LETTER LA..GUJARATI LETTE... | ||
122 | | 0xE0 0xAA 0xB5..0xB9 #Lo [5] GUJARATI LETTER VA..GUJARATI LETTE... | ||
123 | | 0xE0 0xAA 0xBD #Lo GUJARATI SIGN AVAGRAHA | ||
124 | | 0xE0 0xAB 0x90 #Lo GUJARATI OM | ||
125 | | 0xE0 0xAB 0xA0..0xA1 #Lo [2] GUJARATI LETTER VOCALIC RR..GUJARA... | ||
126 | | 0xE0 0xAB 0xB9 #Lo GUJARATI LETTER ZHA | ||
127 | | 0xE0 0xAC 0x85..0x8C #Lo [8] ORIYA LETTER A..ORIYA LETTER VOCAL... | ||
128 | | 0xE0 0xAC 0x8F..0x90 #Lo [2] ORIYA LETTER E..ORIYA LETTER AI | ||
129 | | 0xE0 0xAC 0x93..0xA8 #Lo [22] ORIYA LETTER O..ORIYA LETTER NA | ||
130 | | 0xE0 0xAC 0xAA..0xB0 #Lo [7] ORIYA LETTER PA..ORIYA LETTER RA | ||
131 | | 0xE0 0xAC 0xB2..0xB3 #Lo [2] ORIYA LETTER LA..ORIYA LETTER LLA | ||
132 | | 0xE0 0xAC 0xB5..0xB9 #Lo [5] ORIYA LETTER VA..ORIYA LETTER HA | ||
133 | | 0xE0 0xAC 0xBD #Lo ORIYA SIGN AVAGRAHA | ||
134 | | 0xE0 0xAD 0x9C..0x9D #Lo [2] ORIYA LETTER RRA..ORIYA LETTER RHA | ||
135 | | 0xE0 0xAD 0x9F..0xA1 #Lo [3] ORIYA LETTER YYA..ORIYA LETTER VOC... | ||
136 | | 0xE0 0xAD 0xB1 #Lo ORIYA LETTER WA | ||
137 | | 0xE0 0xAE 0x83 #Lo TAMIL SIGN VISARGA | ||
138 | | 0xE0 0xAE 0x85..0x8A #Lo [6] TAMIL LETTER A..TAMIL LETTER UU | ||
139 | | 0xE0 0xAE 0x8E..0x90 #Lo [3] TAMIL LETTER E..TAMIL LETTER AI | ||
140 | | 0xE0 0xAE 0x92..0x95 #Lo [4] TAMIL LETTER O..TAMIL LETTER KA | ||
141 | | 0xE0 0xAE 0x99..0x9A #Lo [2] TAMIL LETTER NGA..TAMIL LETTER CA | ||
142 | | 0xE0 0xAE 0x9C #Lo TAMIL LETTER JA | ||
143 | | 0xE0 0xAE 0x9E..0x9F #Lo [2] TAMIL LETTER NYA..TAMIL LETTER TTA | ||
144 | | 0xE0 0xAE 0xA3..0xA4 #Lo [2] TAMIL LETTER NNA..TAMIL LETTER TA | ||
145 | | 0xE0 0xAE 0xA8..0xAA #Lo [3] TAMIL LETTER NA..TAMIL LETTER PA | ||
146 | | 0xE0 0xAE 0xAE..0xB9 #Lo [12] TAMIL LETTER MA..TAMIL LETTER HA | ||
147 | | 0xE0 0xAF 0x90 #Lo TAMIL OM | ||
148 | | 0xE0 0xB0 0x85..0x8C #Lo [8] TELUGU LETTER A..TELUGU LETTER VOC... | ||
149 | | 0xE0 0xB0 0x8E..0x90 #Lo [3] TELUGU LETTER E..TELUGU LETTER AI | ||
150 | | 0xE0 0xB0 0x92..0xA8 #Lo [23] TELUGU LETTER O..TELUGU LETTER NA | ||
151 | | 0xE0 0xB0 0xAA..0xB9 #Lo [16] TELUGU LETTER PA..TELUGU LETTER HA | ||
152 | | 0xE0 0xB0 0xBD #Lo TELUGU SIGN AVAGRAHA | ||
153 | | 0xE0 0xB1 0x98..0x9A #Lo [3] TELUGU LETTER TSA..TELUGU LETTER RRRA | ||
154 | | 0xE0 0xB1 0xA0..0xA1 #Lo [2] TELUGU LETTER VOCALIC RR..TELUGU L... | ||
155 | | 0xE0 0xB2 0x80 #Lo KANNADA SIGN SPACING CANDRABINDU | ||
156 | | 0xE0 0xB2 0x85..0x8C #Lo [8] KANNADA LETTER A..KANNADA LETTER V... | ||
157 | | 0xE0 0xB2 0x8E..0x90 #Lo [3] KANNADA LETTER E..KANNADA LETTER AI | ||
158 | | 0xE0 0xB2 0x92..0xA8 #Lo [23] KANNADA LETTER O..KANNADA LETTER NA | ||
159 | | 0xE0 0xB2 0xAA..0xB3 #Lo [10] KANNADA LETTER PA..KANNADA LETTER LLA | ||
160 | | 0xE0 0xB2 0xB5..0xB9 #Lo [5] KANNADA LETTER VA..KANNADA LETTER HA | ||
161 | | 0xE0 0xB2 0xBD #Lo KANNADA SIGN AVAGRAHA | ||
162 | | 0xE0 0xB3 0x9E #Lo KANNADA LETTER FA | ||
163 | | 0xE0 0xB3 0xA0..0xA1 #Lo [2] KANNADA LETTER VOCALIC RR..KANNADA... | ||
164 | | 0xE0 0xB3 0xB1..0xB2 #Lo [2] KANNADA SIGN JIHVAMULIYA..KANNADA ... | ||
165 | | 0xE0 0xB4 0x85..0x8C #Lo [8] MALAYALAM LETTER A..MALAYALAM LETT... | ||
166 | | 0xE0 0xB4 0x8E..0x90 #Lo [3] MALAYALAM LETTER E..MALAYALAM LETT... | ||
167 | | 0xE0 0xB4 0x92..0xBA #Lo [41] MALAYALAM LETTER O..MALAYALAM LETT... | ||
168 | | 0xE0 0xB4 0xBD #Lo MALAYALAM SIGN AVAGRAHA | ||
169 | | 0xE0 0xB5 0x8E #Lo MALAYALAM LETTER DOT REPH | ||
170 | | 0xE0 0xB5 0x94..0x96 #Lo [3] MALAYALAM LETTER CHILLU M..MALAYAL... | ||
171 | | 0xE0 0xB5 0x9F..0xA1 #Lo [3] MALAYALAM LETTER ARCHAIC II..MALAY... | ||
172 | | 0xE0 0xB5 0xBA..0xBF #Lo [6] MALAYALAM LETTER CHILLU NN..MALAYA... | ||
173 | | 0xE0 0xB6 0x85..0x96 #Lo [18] SINHALA LETTER AYANNA..SINHALA LET... | ||
174 | | 0xE0 0xB6 0x9A..0xB1 #Lo [24] SINHALA LETTER ALPAPRAANA KAYANNA.... | ||
175 | | 0xE0 0xB6 0xB3..0xBB #Lo [9] SINHALA LETTER SANYAKA DAYANNA..SI... | ||
176 | | 0xE0 0xB6 0xBD #Lo SINHALA LETTER DANTAJA LAYANNA | ||
177 | | 0xE0 0xB7 0x80..0x86 #Lo [7] SINHALA LETTER VAYANNA..SINHALA LE... | ||
178 | | 0xE0 0xB8 0x81..0xB0 #Lo [48] THAI CHARACTER KO KAI..THAI CHARAC... | ||
179 | | 0xE0 0xB8 0xB2..0xB3 #Lo [2] THAI CHARACTER SARA AA..THAI CHARA... | ||
180 | | 0xE0 0xB9 0x80..0x85 #Lo [6] THAI CHARACTER SARA E..THAI CHARAC... | ||
181 | | 0xE0 0xB9 0x86 #Lm THAI CHARACTER MAIYAMOK | ||
182 | | 0xE0 0xBA 0x81..0x82 #Lo [2] LAO LETTER KO..LAO LETTER KHO SUNG | ||
183 | | 0xE0 0xBA 0x84 #Lo LAO LETTER KHO TAM | ||
184 | | 0xE0 0xBA 0x87..0x88 #Lo [2] LAO LETTER NGO..LAO LETTER CO | ||
185 | | 0xE0 0xBA 0x8A #Lo LAO LETTER SO TAM | ||
186 | | 0xE0 0xBA 0x8D #Lo LAO LETTER NYO | ||
187 | | 0xE0 0xBA 0x94..0x97 #Lo [4] LAO LETTER DO..LAO LETTER THO TAM | ||
188 | | 0xE0 0xBA 0x99..0x9F #Lo [7] LAO LETTER NO..LAO LETTER FO SUNG | ||
189 | | 0xE0 0xBA 0xA1..0xA3 #Lo [3] LAO LETTER MO..LAO LETTER LO LING | ||
190 | | 0xE0 0xBA 0xA5 #Lo LAO LETTER LO LOOT | ||
191 | | 0xE0 0xBA 0xA7 #Lo LAO LETTER WO | ||
192 | | 0xE0 0xBA 0xAA..0xAB #Lo [2] LAO LETTER SO SUNG..LAO LETTER HO ... | ||
193 | | 0xE0 0xBA 0xAD..0xB0 #Lo [4] LAO LETTER O..LAO VOWEL SIGN A | ||
194 | | 0xE0 0xBA 0xB2..0xB3 #Lo [2] LAO VOWEL SIGN AA..LAO VOWEL SIGN AM | ||
195 | | 0xE0 0xBA 0xBD #Lo LAO SEMIVOWEL SIGN NYO | ||
196 | | 0xE0 0xBB 0x80..0x84 #Lo [5] LAO VOWEL SIGN E..LAO VOWEL SIGN AI | ||
197 | | 0xE0 0xBB 0x86 #Lm LAO KO LA | ||
198 | | 0xE0 0xBB 0x9C..0x9F #Lo [4] LAO HO NO..LAO LETTER KHMU NYO | ||
199 | | 0xE0 0xBC 0x80 #Lo TIBETAN SYLLABLE OM | ||
200 | | 0xE0 0xBD 0x80..0x87 #Lo [8] TIBETAN LETTER KA..TIBETAN LETTER JA | ||
201 | | 0xE0 0xBD 0x89..0xAC #Lo [36] TIBETAN LETTER NYA..TIBETAN LETTER... | ||
202 | | 0xE0 0xBE 0x88..0x8C #Lo [5] TIBETAN SIGN LCE TSA CAN..TIBETAN ... | ||
203 | | 0xE1 0x80 0x80..0xAA #Lo [43] MYANMAR LETTER KA..MYANMAR LETTER AU | ||
204 | | 0xE1 0x80 0xBF #Lo MYANMAR LETTER GREAT SA | ||
205 | | 0xE1 0x81 0x90..0x95 #Lo [6] MYANMAR LETTER SHA..MYANMAR LETTER... | ||
206 | | 0xE1 0x81 0x9A..0x9D #Lo [4] MYANMAR LETTER MON NGA..MYANMAR LE... | ||
207 | | 0xE1 0x81 0xA1 #Lo MYANMAR LETTER SGAW KAREN SHA | ||
208 | | 0xE1 0x81 0xA5..0xA6 #Lo [2] MYANMAR LETTER WESTERN PWO KAREN T... | ||
209 | | 0xE1 0x81 0xAE..0xB0 #Lo [3] MYANMAR LETTER EASTERN PWO KAREN N... | ||
210 | | 0xE1 0x81 0xB5..0xFF #Lo [13] MYANMAR LETTER SHAN KA..MYANMAR LE... | ||
211 | | 0xE1 0x82 0x00..0x81 # | ||
212 | | 0xE1 0x82 0x8E #Lo MYANMAR LETTER RUMAI PALAUNG FA | ||
213 | | 0xE1 0x82 0xA0..0xFF #L& [38] GEORGIAN CAPITAL LETTER AN..GEORGI... | ||
214 | | 0xE1 0x83 0x00..0x85 # | ||
215 | | 0xE1 0x83 0x87 #L& GEORGIAN CAPITAL LETTER YN | ||
216 | | 0xE1 0x83 0x8D #L& GEORGIAN CAPITAL LETTER AEN | ||
217 | | 0xE1 0x83 0x90..0xBA #Lo [43] GEORGIAN LETTER AN..GEORGIAN LETTE... | ||
218 | | 0xE1 0x83 0xBC #Lm MODIFIER LETTER GEORGIAN NAR | ||
219 | | 0xE1 0x83 0xBD..0xFF #Lo [332] GEORGIAN LETTER AEN..ETHIOPIC ... | ||
220 | | 0xE1 0x84..0x88 0x00..0xFF # | ||
221 | | 0xE1 0x89 0x00..0x88 # | ||
222 | | 0xE1 0x89 0x8A..0x8D #Lo [4] ETHIOPIC SYLLABLE QWI..ETHIOPIC SY... | ||
223 | | 0xE1 0x89 0x90..0x96 #Lo [7] ETHIOPIC SYLLABLE QHA..ETHIOPIC SY... | ||
224 | | 0xE1 0x89 0x98 #Lo ETHIOPIC SYLLABLE QHWA | ||
225 | | 0xE1 0x89 0x9A..0x9D #Lo [4] ETHIOPIC SYLLABLE QHWI..ETHIOPIC S... | ||
226 | | 0xE1 0x89 0xA0..0xFF #Lo [41] ETHIOPIC SYLLABLE BA..ETHIOPIC SYL... | ||
227 | | 0xE1 0x8A 0x00..0x88 # | ||
228 | | 0xE1 0x8A 0x8A..0x8D #Lo [4] ETHIOPIC SYLLABLE XWI..ETHIOPIC SY... | ||
229 | | 0xE1 0x8A 0x90..0xB0 #Lo [33] ETHIOPIC SYLLABLE NA..ETHIOPIC SYL... | ||
230 | | 0xE1 0x8A 0xB2..0xB5 #Lo [4] ETHIOPIC SYLLABLE KWI..ETHIOPIC SY... | ||
231 | | 0xE1 0x8A 0xB8..0xBE #Lo [7] ETHIOPIC SYLLABLE KXA..ETHIOPIC SY... | ||
232 | | 0xE1 0x8B 0x80 #Lo ETHIOPIC SYLLABLE KXWA | ||
233 | | 0xE1 0x8B 0x82..0x85 #Lo [4] ETHIOPIC SYLLABLE KXWI..ETHIOPIC S... | ||
234 | | 0xE1 0x8B 0x88..0x96 #Lo [15] ETHIOPIC SYLLABLE WA..ETHIOPIC SYL... | ||
235 | | 0xE1 0x8B 0x98..0xFF #Lo [57] ETHIOPIC SYLLABLE ZA..ETHIOPIC SYL... | ||
236 | | 0xE1 0x8C 0x00..0x90 # | ||
237 | | 0xE1 0x8C 0x92..0x95 #Lo [4] ETHIOPIC SYLLABLE GWI..ETHIOPIC SY... | ||
238 | | 0xE1 0x8C 0x98..0xFF #Lo [67] ETHIOPIC SYLLABLE GGA..ETHIOPIC SY... | ||
239 | | 0xE1 0x8D 0x00..0x9A # | ||
240 | | 0xE1 0x8E 0x80..0x8F #Lo [16] ETHIOPIC SYLLABLE SEBATBEIT MWA..E... | ||
241 | | 0xE1 0x8E 0xA0..0xFF #L& [86] CHEROKEE LETTER A..CHEROKEE LETTER MV | ||
242 | | 0xE1 0x8F 0x00..0xB5 # | ||
243 | | 0xE1 0x8F 0xB8..0xBD #L& [6] CHEROKEE SMALL LETTER YE..CHEROKEE... | ||
244 | | 0xE1 0x90 0x81..0xFF #Lo [620] CANADIAN SYLLABICS E..CANADIAN... | ||
245 | | 0xE1 0x91..0x98 0x00..0xFF # | ||
246 | | 0xE1 0x99 0x00..0xAC # | ||
247 | | 0xE1 0x99 0xAF..0xBF #Lo [17] CANADIAN SYLLABICS QAI..CANADIAN S... | ||
248 | | 0xE1 0x9A 0x81..0x9A #Lo [26] OGHAM LETTER BEITH..OGHAM LETTER P... | ||
249 | | 0xE1 0x9A 0xA0..0xFF #Lo [75] RUNIC LETTER FEHU FEOH FE F..RUNIC... | ||
250 | | 0xE1 0x9B 0x00..0xAA # | ||
251 | | 0xE1 0x9B 0xAE..0xB0 #Nl [3] RUNIC ARLAUG SYMBOL..RUNIC BELGTHO... | ||
252 | | 0xE1 0x9B 0xB1..0xB8 #Lo [8] RUNIC LETTER K..RUNIC LETTER FRANK... | ||
253 | | 0xE1 0x9C 0x80..0x8C #Lo [13] TAGALOG LETTER A..TAGALOG LETTER YA | ||
254 | | 0xE1 0x9C 0x8E..0x91 #Lo [4] TAGALOG LETTER LA..TAGALOG LETTER HA | ||
255 | | 0xE1 0x9C 0xA0..0xB1 #Lo [18] HANUNOO LETTER A..HANUNOO LETTER HA | ||
256 | | 0xE1 0x9D 0x80..0x91 #Lo [18] BUHID LETTER A..BUHID LETTER HA | ||
257 | | 0xE1 0x9D 0xA0..0xAC #Lo [13] TAGBANWA LETTER A..TAGBANWA LETTER YA | ||
258 | | 0xE1 0x9D 0xAE..0xB0 #Lo [3] TAGBANWA LETTER LA..TAGBANWA LETTE... | ||
259 | | 0xE1 0x9E 0x80..0xB3 #Lo [52] KHMER LETTER KA..KHMER INDEPENDENT... | ||
260 | | 0xE1 0x9F 0x97 #Lm KHMER SIGN LEK TOO | ||
261 | | 0xE1 0x9F 0x9C #Lo KHMER SIGN AVAKRAHASANYA | ||
262 | | 0xE1 0xA0 0xA0..0xFF #Lo [35] MONGOLIAN LETTER A..MONGOLIAN LETT... | ||
263 | | 0xE1 0xA1 0x00..0x82 # | ||
264 | | 0xE1 0xA1 0x83 #Lm MONGOLIAN LETTER TODO LONG VOWEL SIGN | ||
265 | | 0xE1 0xA1 0x84..0xB7 #Lo [52] MONGOLIAN LETTER TODO E..MONGOLIAN... | ||
266 | | 0xE1 0xA2 0x80..0x84 #Lo [5] MONGOLIAN LETTER ALI GALI ANUSVARA... | ||
267 | | 0xE1 0xA2 0x85..0x86 #Mn [2] MONGOLIAN LETTER ALI GALI BALUDA..... | ||
268 | | 0xE1 0xA2 0x87..0xA8 #Lo [34] MONGOLIAN LETTER ALI GALI A..MONGO... | ||
269 | | 0xE1 0xA2 0xAA #Lo MONGOLIAN LETTER MANCHU ALI GALI LHA | ||
270 | | 0xE1 0xA2 0xB0..0xFF #Lo [70] CANADIAN SYLLABICS OY..CANADIAN SY... | ||
271 | | 0xE1 0xA3 0x00..0xB5 # | ||
272 | | 0xE1 0xA4 0x80..0x9E #Lo [31] LIMBU VOWEL-CARRIER LETTER..LIMBU ... | ||
273 | | 0xE1 0xA5 0x90..0xAD #Lo [30] TAI LE LETTER KA..TAI LE LETTER AI | ||
274 | | 0xE1 0xA5 0xB0..0xB4 #Lo [5] TAI LE LETTER TONE-2..TAI LE LETTE... | ||
275 | | 0xE1 0xA6 0x80..0xAB #Lo [44] NEW TAI LUE LETTER HIGH QA..NEW TA... | ||
276 | | 0xE1 0xA6 0xB0..0xFF #Lo [26] NEW TAI LUE VOWEL SIGN VOWEL SHORT... | ||
277 | | 0xE1 0xA7 0x00..0x89 # | ||
278 | | 0xE1 0xA8 0x80..0x96 #Lo [23] BUGINESE LETTER KA..BUGINESE LETTE... | ||
279 | | 0xE1 0xA8 0xA0..0xFF #Lo [53] TAI THAM LETTER HIGH KA..TAI THAM ... | ||
280 | | 0xE1 0xA9 0x00..0x94 # | ||
281 | | 0xE1 0xAA 0xA7 #Lm TAI THAM SIGN MAI YAMOK | ||
282 | | 0xE1 0xAC 0x85..0xB3 #Lo [47] BALINESE LETTER AKARA..BALINESE LE... | ||
283 | | 0xE1 0xAD 0x85..0x8B #Lo [7] BALINESE LETTER KAF SASAK..BALINES... | ||
284 | | 0xE1 0xAE 0x83..0xA0 #Lo [30] SUNDANESE LETTER A..SUNDANESE LETT... | ||
285 | | 0xE1 0xAE 0xAE..0xAF #Lo [2] SUNDANESE LETTER KHA..SUNDANESE LE... | ||
286 | | 0xE1 0xAE 0xBA..0xFF #Lo [44] SUNDANESE AVAGRAHA..BATAK LETTER U | ||
287 | | 0xE1 0xAF 0x00..0xA5 # | ||
288 | | 0xE1 0xB0 0x80..0xA3 #Lo [36] LEPCHA LETTER KA..LEPCHA LETTER A | ||
289 | | 0xE1 0xB1 0x8D..0x8F #Lo [3] LEPCHA LETTER TTA..LEPCHA LETTER DDA | ||
290 | | 0xE1 0xB1 0x9A..0xB7 #Lo [30] OL CHIKI LETTER LA..OL CHIKI LETTE... | ||
291 | | 0xE1 0xB1 0xB8..0xBD #Lm [6] OL CHIKI MU TTUDDAG..OL CHIKI AHAD | ||
292 | | 0xE1 0xB2 0x80..0x88 #L& [9] CYRILLIC SMALL LETTER ROUNDED VE..... | ||
293 | | 0xE1 0xB3 0xA9..0xAC #Lo [4] VEDIC SIGN ANUSVARA ANTARGOMUKHA..... | ||
294 | | 0xE1 0xB3 0xAE..0xB1 #Lo [4] VEDIC SIGN HEXIFORM LONG ANUSVARA.... | ||
295 | | 0xE1 0xB3 0xB5..0xB6 #Lo [2] VEDIC SIGN JIHVAMULIYA..VEDIC SIGN... | ||
296 | | 0xE1 0xB4 0x80..0xAB #L& [44] LATIN LETTER SMALL CAPITAL A..CYRI... | ||
297 | | 0xE1 0xB4 0xAC..0xFF #Lm [63] MODIFIER LETTER CAPITAL A..GREEK S... | ||
298 | | 0xE1 0xB5 0x00..0xAA # | ||
299 | | 0xE1 0xB5 0xAB..0xB7 #L& [13] LATIN SMALL LETTER UE..LATIN SMALL... | ||
300 | | 0xE1 0xB5 0xB8 #Lm MODIFIER LETTER CYRILLIC EN | ||
301 | | 0xE1 0xB5 0xB9..0xFF #L& [34] LATIN SMALL LETTER INSULAR G..LATI... | ||
302 | | 0xE1 0xB6 0x00..0x9A # | ||
303 | | 0xE1 0xB6 0x9B..0xBF #Lm [37] MODIFIER LETTER SMALL TURNED ALPHA... | ||
304 | | 0xE1 0xB8 0x80..0xFF #L& [278] LATIN CAPITAL LETTER A WITH RI... | ||
305 | | 0xE1 0xB9..0xBB 0x00..0xFF # | ||
306 | | 0xE1 0xBC 0x00..0x95 # | ||
307 | | 0xE1 0xBC 0x98..0x9D #L& [6] GREEK CAPITAL LETTER EPSILON WITH ... | ||
308 | | 0xE1 0xBC 0xA0..0xFF #L& [38] GREEK SMALL LETTER ETA WITH PSILI.... | ||
309 | | 0xE1 0xBD 0x00..0x85 # | ||
310 | | 0xE1 0xBD 0x88..0x8D #L& [6] GREEK CAPITAL LETTER OMICRON WITH ... | ||
311 | | 0xE1 0xBD 0x90..0x97 #L& [8] GREEK SMALL LETTER UPSILON WITH PS... | ||
312 | | 0xE1 0xBD 0x99 #L& GREEK CAPITAL LETTER UPSILON WITH ... | ||
313 | | 0xE1 0xBD 0x9B #L& GREEK CAPITAL LETTER UPSILON WITH ... | ||
314 | | 0xE1 0xBD 0x9D #L& GREEK CAPITAL LETTER UPSILON WITH ... | ||
315 | | 0xE1 0xBD 0x9F..0xBD #L& [31] GREEK CAPITAL LETTER UPSILON WITH ... | ||
316 | | 0xE1 0xBE 0x80..0xB4 #L& [53] GREEK SMALL LETTER ALPHA WITH PSIL... | ||
317 | | 0xE1 0xBE 0xB6..0xBC #L& [7] GREEK SMALL LETTER ALPHA WITH PERI... | ||
318 | | 0xE1 0xBE 0xBE #L& GREEK PROSGEGRAMMENI | ||
319 | | 0xE1 0xBF 0x82..0x84 #L& [3] GREEK SMALL LETTER ETA WITH VARIA ... | ||
320 | | 0xE1 0xBF 0x86..0x8C #L& [7] GREEK SMALL LETTER ETA WITH PERISP... | ||
321 | | 0xE1 0xBF 0x90..0x93 #L& [4] GREEK SMALL LETTER IOTA WITH VRACH... | ||
322 | | 0xE1 0xBF 0x96..0x9B #L& [6] GREEK SMALL LETTER IOTA WITH PERIS... | ||
323 | | 0xE1 0xBF 0xA0..0xAC #L& [13] GREEK SMALL LETTER UPSILON WITH VR... | ||
324 | | 0xE1 0xBF 0xB2..0xB4 #L& [3] GREEK SMALL LETTER OMEGA WITH VARI... | ||
325 | | 0xE1 0xBF 0xB6..0xBC #L& [7] GREEK SMALL LETTER OMEGA WITH PERI... | ||
326 | | 0xE2 0x81 0xB1 #Lm SUPERSCRIPT LATIN SMALL LETTER I | ||
327 | | 0xE2 0x81 0xBF #Lm SUPERSCRIPT LATIN SMALL LETTER N | ||
328 | | 0xE2 0x82 0x90..0x9C #Lm [13] LATIN SUBSCRIPT SMALL LETTER A..LA... | ||
329 | | 0xE2 0x84 0x82 #L& DOUBLE-STRUCK CAPITAL C | ||
330 | | 0xE2 0x84 0x87 #L& EULER CONSTANT | ||
331 | | 0xE2 0x84 0x8A..0x93 #L& [10] SCRIPT SMALL G..SCRIPT SMALL L | ||
332 | | 0xE2 0x84 0x95 #L& DOUBLE-STRUCK CAPITAL N | ||
333 | | 0xE2 0x84 0x98 #Sm SCRIPT CAPITAL P | ||
334 | | 0xE2 0x84 0x99..0x9D #L& [5] DOUBLE-STRUCK CAPITAL P..DOUBLE-ST... | ||
335 | | 0xE2 0x84 0xA4 #L& DOUBLE-STRUCK CAPITAL Z | ||
336 | | 0xE2 0x84 0xA6 #L& OHM SIGN | ||
337 | | 0xE2 0x84 0xA8 #L& BLACK-LETTER CAPITAL Z | ||
338 | | 0xE2 0x84 0xAA..0xAD #L& [4] KELVIN SIGN..BLACK-LETTER CAPITAL C | ||
339 | | 0xE2 0x84 0xAE #So ESTIMATED SYMBOL | ||
340 | | 0xE2 0x84 0xAF..0xB4 #L& [6] SCRIPT SMALL E..SCRIPT SMALL O | ||
341 | | 0xE2 0x84 0xB5..0xB8 #Lo [4] ALEF SYMBOL..DALET SYMBOL | ||
342 | | 0xE2 0x84 0xB9 #L& INFORMATION SOURCE | ||
343 | | 0xE2 0x84 0xBC..0xBF #L& [4] DOUBLE-STRUCK SMALL PI..DOUBLE-STR... | ||
344 | | 0xE2 0x85 0x85..0x89 #L& [5] DOUBLE-STRUCK ITALIC CAPITAL D..DO... | ||
345 | | 0xE2 0x85 0x8E #L& TURNED SMALL F | ||
346 | | 0xE2 0x85 0xA0..0xFF #Nl [35] ROMAN NUMERAL ONE..ROMAN NUMERAL T... | ||
347 | | 0xE2 0x86 0x00..0x82 # | ||
348 | | 0xE2 0x86 0x83..0x84 #L& [2] ROMAN NUMERAL REVERSED ONE HUNDRED... | ||
349 | | 0xE2 0x86 0x85..0x88 #Nl [4] ROMAN NUMERAL SIX LATE FORM..ROMAN... | ||
350 | | 0xE2 0xB0 0x80..0xAE #L& [47] GLAGOLITIC CAPITAL LETTER AZU..GLA... | ||
351 | | 0xE2 0xB0 0xB0..0xFF #L& [47] GLAGOLITIC SMALL LETTER AZU..GLAGO... | ||
352 | | 0xE2 0xB1 0x00..0x9E # | ||
353 | | 0xE2 0xB1 0xA0..0xBB #L& [28] LATIN CAPITAL LETTER L WITH DOUBLE... | ||
354 | | 0xE2 0xB1 0xBC..0xBD #Lm [2] LATIN SUBSCRIPT SMALL LETTER J..MO... | ||
355 | | 0xE2 0xB1 0xBE..0xFF #L& [103] LATIN CAPITAL LETTER S WITH SW... | ||
356 | | 0xE2 0xB2..0xB2 0x00..0xFF # | ||
357 | | 0xE2 0xB3 0x00..0xA4 # | ||
358 | | 0xE2 0xB3 0xAB..0xAE #L& [4] COPTIC CAPITAL LETTER CRYPTOGRAMMI... | ||
359 | | 0xE2 0xB3 0xB2..0xB3 #L& [2] COPTIC CAPITAL LETTER BOHAIRIC KHE... | ||
360 | | 0xE2 0xB4 0x80..0xA5 #L& [38] GEORGIAN SMALL LETTER AN..GEORGIAN... | ||
361 | | 0xE2 0xB4 0xA7 #L& GEORGIAN SMALL LETTER YN | ||
362 | | 0xE2 0xB4 0xAD #L& GEORGIAN SMALL LETTER AEN | ||
363 | | 0xE2 0xB4 0xB0..0xFF #Lo [56] TIFINAGH LETTER YA..TIFINAGH LETTE... | ||
364 | | 0xE2 0xB5 0x00..0xA7 # | ||
365 | | 0xE2 0xB5 0xAF #Lm TIFINAGH MODIFIER LETTER LABIALIZA... | ||
366 | | 0xE2 0xB6 0x80..0x96 #Lo [23] ETHIOPIC SYLLABLE LOA..ETHIOPIC SY... | ||
367 | | 0xE2 0xB6 0xA0..0xA6 #Lo [7] ETHIOPIC SYLLABLE SSA..ETHIOPIC SY... | ||
368 | | 0xE2 0xB6 0xA8..0xAE #Lo [7] ETHIOPIC SYLLABLE CCA..ETHIOPIC SY... | ||
369 | | 0xE2 0xB6 0xB0..0xB6 #Lo [7] ETHIOPIC SYLLABLE ZZA..ETHIOPIC SY... | ||
370 | | 0xE2 0xB6 0xB8..0xBE #Lo [7] ETHIOPIC SYLLABLE CCHA..ETHIOPIC S... | ||
371 | | 0xE2 0xB7 0x80..0x86 #Lo [7] ETHIOPIC SYLLABLE QYA..ETHIOPIC SY... | ||
372 | | 0xE2 0xB7 0x88..0x8E #Lo [7] ETHIOPIC SYLLABLE KYA..ETHIOPIC SY... | ||
373 | | 0xE2 0xB7 0x90..0x96 #Lo [7] ETHIOPIC SYLLABLE XYA..ETHIOPIC SY... | ||
374 | | 0xE2 0xB7 0x98..0x9E #Lo [7] ETHIOPIC SYLLABLE GYA..ETHIOPIC SY... | ||
375 | | 0xE3 0x80 0x85 #Lm IDEOGRAPHIC ITERATION MARK | ||
376 | | 0xE3 0x80 0x86 #Lo IDEOGRAPHIC CLOSING MARK | ||
377 | | 0xE3 0x80 0x87 #Nl IDEOGRAPHIC NUMBER ZERO | ||
378 | | 0xE3 0x80 0xA1..0xA9 #Nl [9] HANGZHOU NUMERAL ONE..HANGZHOU NUM... | ||
379 | | 0xE3 0x80 0xB1..0xB5 #Lm [5] VERTICAL KANA REPEAT MARK..VERTICA... | ||
380 | | 0xE3 0x80 0xB8..0xBA #Nl [3] HANGZHOU NUMERAL TEN..HANGZHOU NUM... | ||
381 | | 0xE3 0x80 0xBB #Lm VERTICAL IDEOGRAPHIC ITERATION MARK | ||
382 | | 0xE3 0x80 0xBC #Lo MASU MARK | ||
383 | | 0xE3 0x81 0x81..0xFF #Lo [86] HIRAGANA LETTER SMALL A..HIRAGANA ... | ||
384 | | 0xE3 0x82 0x00..0x96 # | ||
385 | | 0xE3 0x82 0x9B..0x9C #Sk [2] KATAKANA-HIRAGANA VOICED SOUND MAR... | ||
386 | | 0xE3 0x82 0x9D..0x9E #Lm [2] HIRAGANA ITERATION MARK..HIRAGANA ... | ||
387 | | 0xE3 0x82 0x9F #Lo HIRAGANA DIGRAPH YORI | ||
388 | | 0xE3 0x82 0xA1..0xFF #Lo [90] KATAKANA LETTER SMALL A..KATAKANA ... | ||
389 | | 0xE3 0x83 0x00..0xBA # | ||
390 | | 0xE3 0x83 0xBC..0xBE #Lm [3] KATAKANA-HIRAGANA PROLONGED SOUND ... | ||
391 | | 0xE3 0x83 0xBF #Lo KATAKANA DIGRAPH KOTO | ||
392 | | 0xE3 0x84 0x85..0xAD #Lo [41] BOPOMOFO LETTER B..BOPOMOFO LETTER IH | ||
393 | | 0xE3 0x84 0xB1..0xFF #Lo [94] HANGUL LETTER KIYEOK..HANGUL L... | ||
394 | | 0xE3 0x85..0x85 0x00..0xFF # | ||
395 | | 0xE3 0x86 0x00..0x8E # | ||
396 | | 0xE3 0x86 0xA0..0xBA #Lo [27] BOPOMOFO LETTER BU..BOPOMOFO LETTE... | ||
397 | | 0xE3 0x87 0xB0..0xBF #Lo [16] KATAKANA LETTER SMALL KU..KATAKANA... | ||
398 | | 0xE3 0x90 0x80..0xFF #Lo [6582] CJK UNIFIED IDEOGRAPH-3400..C... | ||
399 | | 0xE3 0x91..0xFF 0x00..0xFF # | ||
400 | | 0xE4 0x00 0x00..0xFF # | ||
401 | | 0xE4 0x01..0xB5 0x00..0xFF # | ||
402 | | 0xE4 0xB6 0x00..0xB5 # | ||
403 | | 0xE4 0xB8 0x80..0xFF #Lo [20950] CJK UNIFIED IDEOGRAPH-... | ||
404 | | 0xE4 0xB9..0xFF 0x00..0xFF # | ||
405 | | 0xE5..0xE8 0x00..0xFF 0x00..0xFF # | ||
406 | | 0xE9 0x00 0x00..0xFF # | ||
407 | | 0xE9 0x01..0xBE 0x00..0xFF # | ||
408 | | 0xE9 0xBF 0x00..0x95 # | ||
409 | | 0xEA 0x80 0x80..0x94 #Lo [21] YI SYLLABLE IT..YI SYLLABLE E | ||
410 | | 0xEA 0x80 0x95 #Lm YI SYLLABLE WU | ||
411 | | 0xEA 0x80 0x96..0xFF #Lo [1143] YI SYLLABLE BIT..YI SYLLABLE YYR | ||
412 | | 0xEA 0x81..0x91 0x00..0xFF # | ||
413 | | 0xEA 0x92 0x00..0x8C # | ||
414 | | 0xEA 0x93 0x90..0xB7 #Lo [40] LISU LETTER BA..LISU LETTER OE | ||
415 | | 0xEA 0x93 0xB8..0xBD #Lm [6] LISU LETTER TONE MYA TI..LISU LETT... | ||
416 | | 0xEA 0x94 0x80..0xFF #Lo [268] VAI SYLLABLE EE..VAI SYLLABLE NG | ||
417 | | 0xEA 0x95..0x97 0x00..0xFF # | ||
418 | | 0xEA 0x98 0x00..0x8B # | ||
419 | | 0xEA 0x98 0x8C #Lm VAI SYLLABLE LENGTHENER | ||
420 | | 0xEA 0x98 0x90..0x9F #Lo [16] VAI SYLLABLE NDOLE FA..VAI SYMBOL ... | ||
421 | | 0xEA 0x98 0xAA..0xAB #Lo [2] VAI SYLLABLE NDOLE MA..VAI SYLLABL... | ||
422 | | 0xEA 0x99 0x80..0xAD #L& [46] CYRILLIC CAPITAL LETTER ZEMLYA..CY... | ||
423 | | 0xEA 0x99 0xAE #Lo CYRILLIC LETTER MULTIOCULAR O | ||
424 | | 0xEA 0x99 0xBF #Lm CYRILLIC PAYEROK | ||
425 | | 0xEA 0x9A 0x80..0x9B #L& [28] CYRILLIC CAPITAL LETTER DWE..CYRIL... | ||
426 | | 0xEA 0x9A 0x9C..0x9D #Lm [2] MODIFIER LETTER CYRILLIC HARD SIGN... | ||
427 | | 0xEA 0x9A 0xA0..0xFF #Lo [70] BAMUM LETTER A..BAMUM LETTER KI | ||
428 | | 0xEA 0x9B 0x00..0xA5 # | ||
429 | | 0xEA 0x9B 0xA6..0xAF #Nl [10] BAMUM LETTER MO..BAMUM LETTER KOGHOM | ||
430 | | 0xEA 0x9C 0x97..0x9F #Lm [9] MODIFIER LETTER DOT VERTICAL BAR..... | ||
431 | | 0xEA 0x9C 0xA2..0xFF #L& [78] LATIN CAPITAL LETTER EGYPTOLOGICAL... | ||
432 | | 0xEA 0x9D 0x00..0xAF # | ||
433 | | 0xEA 0x9D 0xB0 #Lm MODIFIER LETTER US | ||
434 | | 0xEA 0x9D 0xB1..0xFF #L& [23] LATIN SMALL LETTER DUM..LATIN SMAL... | ||
435 | | 0xEA 0x9E 0x00..0x87 # | ||
436 | | 0xEA 0x9E 0x88 #Lm MODIFIER LETTER LOW CIRCUMFLEX ACCENT | ||
437 | | 0xEA 0x9E 0x8B..0x8E #L& [4] LATIN CAPITAL LETTER SALTILLO..LAT... | ||
438 | | 0xEA 0x9E 0x8F #Lo LATIN LETTER SINOLOGICAL DOT | ||
439 | | 0xEA 0x9E 0x90..0xAE #L& [31] LATIN CAPITAL LETTER N WITH DESCEN... | ||
440 | | 0xEA 0x9E 0xB0..0xB7 #L& [8] LATIN CAPITAL LETTER TURNED K..LAT... | ||
441 | | 0xEA 0x9F 0xB7 #Lo LATIN EPIGRAPHIC LETTER SIDEWAYS I | ||
442 | | 0xEA 0x9F 0xB8..0xB9 #Lm [2] MODIFIER LETTER CAPITAL H WITH STR... | ||
443 | | 0xEA 0x9F 0xBA #L& LATIN LETTER SMALL CAPITAL TURNED M | ||
444 | | 0xEA 0x9F 0xBB..0xFF #Lo [7] LATIN EPIGRAPHIC LETTER REVERSED F... | ||
445 | | 0xEA 0xA0 0x00..0x81 # | ||
446 | | 0xEA 0xA0 0x83..0x85 #Lo [3] SYLOTI NAGRI LETTER U..SYLOTI NAGR... | ||
447 | | 0xEA 0xA0 0x87..0x8A #Lo [4] SYLOTI NAGRI LETTER KO..SYLOTI NAG... | ||
448 | | 0xEA 0xA0 0x8C..0xA2 #Lo [23] SYLOTI NAGRI LETTER CO..SYLOTI NAG... | ||
449 | | 0xEA 0xA1 0x80..0xB3 #Lo [52] PHAGS-PA LETTER KA..PHAGS-PA LETTE... | ||
450 | | 0xEA 0xA2 0x82..0xB3 #Lo [50] SAURASHTRA LETTER A..SAURASHTRA LE... | ||
451 | | 0xEA 0xA3 0xB2..0xB7 #Lo [6] DEVANAGARI SIGN SPACING CANDRABIND... | ||
452 | | 0xEA 0xA3 0xBB #Lo DEVANAGARI HEADSTROKE | ||
453 | | 0xEA 0xA3 0xBD #Lo DEVANAGARI JAIN OM | ||
454 | | 0xEA 0xA4 0x8A..0xA5 #Lo [28] KAYAH LI LETTER KA..KAYAH LI LETTE... | ||
455 | | 0xEA 0xA4 0xB0..0xFF #Lo [23] REJANG LETTER KA..REJANG LETTER A | ||
456 | | 0xEA 0xA5 0x00..0x86 # | ||
457 | | 0xEA 0xA5 0xA0..0xBC #Lo [29] HANGUL CHOSEONG TIKEUT-MIEUM..HANG... | ||
458 | | 0xEA 0xA6 0x84..0xB2 #Lo [47] JAVANESE LETTER A..JAVANESE LETTER HA | ||
459 | | 0xEA 0xA7 0x8F #Lm JAVANESE PANGRANGKEP | ||
460 | | 0xEA 0xA7 0xA0..0xA4 #Lo [5] MYANMAR LETTER SHAN GHA..MYANMAR L... | ||
461 | | 0xEA 0xA7 0xA6 #Lm MYANMAR MODIFIER LETTER SHAN REDUP... | ||
462 | | 0xEA 0xA7 0xA7..0xAF #Lo [9] MYANMAR LETTER TAI LAING NYA..MYAN... | ||
463 | | 0xEA 0xA7 0xBA..0xBE #Lo [5] MYANMAR LETTER TAI LAING LLA..MYAN... | ||
464 | | 0xEA 0xA8 0x80..0xA8 #Lo [41] CHAM LETTER A..CHAM LETTER HA | ||
465 | | 0xEA 0xA9 0x80..0x82 #Lo [3] CHAM LETTER FINAL K..CHAM LETTER F... | ||
466 | | 0xEA 0xA9 0x84..0x8B #Lo [8] CHAM LETTER FINAL CH..CHAM LETTER ... | ||
467 | | 0xEA 0xA9 0xA0..0xAF #Lo [16] MYANMAR LETTER KHAMTI GA..MYANMAR ... | ||
468 | | 0xEA 0xA9 0xB0 #Lm MYANMAR MODIFIER LETTER KHAMTI RED... | ||
469 | | 0xEA 0xA9 0xB1..0xB6 #Lo [6] MYANMAR LETTER KHAMTI XA..MYANMAR ... | ||
470 | | 0xEA 0xA9 0xBA #Lo MYANMAR LETTER AITON RA | ||
471 | | 0xEA 0xA9 0xBE..0xFF #Lo [50] MYANMAR LETTER SHWE PALAUNG CHA..T... | ||
472 | | 0xEA 0xAA 0x00..0xAF # | ||
473 | | 0xEA 0xAA 0xB1 #Lo TAI VIET VOWEL AA | ||
474 | | 0xEA 0xAA 0xB5..0xB6 #Lo [2] TAI VIET VOWEL E..TAI VIET VOWEL O | ||
475 | | 0xEA 0xAA 0xB9..0xBD #Lo [5] TAI VIET VOWEL UEA..TAI VIET VOWEL AN | ||
476 | | 0xEA 0xAB 0x80 #Lo TAI VIET TONE MAI NUENG | ||
477 | | 0xEA 0xAB 0x82 #Lo TAI VIET TONE MAI SONG | ||
478 | | 0xEA 0xAB 0x9B..0x9C #Lo [2] TAI VIET SYMBOL KON..TAI VIET SYMB... | ||
479 | | 0xEA 0xAB 0x9D #Lm TAI VIET SYMBOL SAM | ||
480 | | 0xEA 0xAB 0xA0..0xAA #Lo [11] MEETEI MAYEK LETTER E..MEETEI MAYE... | ||
481 | | 0xEA 0xAB 0xB2 #Lo MEETEI MAYEK ANJI | ||
482 | | 0xEA 0xAB 0xB3..0xB4 #Lm [2] MEETEI MAYEK SYLLABLE REPETITION M... | ||
483 | | 0xEA 0xAC 0x81..0x86 #Lo [6] ETHIOPIC SYLLABLE TTHU..ETHIOPIC S... | ||
484 | | 0xEA 0xAC 0x89..0x8E #Lo [6] ETHIOPIC SYLLABLE DDHU..ETHIOPIC S... | ||
485 | | 0xEA 0xAC 0x91..0x96 #Lo [6] ETHIOPIC SYLLABLE DZU..ETHIOPIC SY... | ||
486 | | 0xEA 0xAC 0xA0..0xA6 #Lo [7] ETHIOPIC SYLLABLE CCHHA..ETHIOPIC ... | ||
487 | | 0xEA 0xAC 0xA8..0xAE #Lo [7] ETHIOPIC SYLLABLE BBA..ETHIOPIC SY... | ||
488 | | 0xEA 0xAC 0xB0..0xFF #L& [43] LATIN SMALL LETTER BARRED ALPHA..L... | ||
489 | | 0xEA 0xAD 0x00..0x9A # | ||
490 | | 0xEA 0xAD 0x9C..0x9F #Lm [4] MODIFIER LETTER SMALL HENG..MODIFI... | ||
491 | | 0xEA 0xAD 0xA0..0xA5 #L& [6] LATIN SMALL LETTER SAKHA YAT..GREE... | ||
492 | | 0xEA 0xAD 0xB0..0xFF #L& [80] CHEROKEE SMALL LETTER A..CHEROKEE ... | ||
493 | | 0xEA 0xAE 0x00..0xBF # | ||
494 | | 0xEA 0xAF 0x80..0xA2 #Lo [35] MEETEI MAYEK LETTER KOK..MEETEI MA... | ||
495 | | 0xEA 0xB0 0x80..0xFF #Lo [11172] HANGUL SYLLABLE GA..HA... | ||
496 | | 0xEA 0xB1..0xFF 0x00..0xFF # | ||
497 | | 0xEB..0xEC 0x00..0xFF 0x00..0xFF # | ||
498 | | 0xED 0x00 0x00..0xFF # | ||
499 | | 0xED 0x01..0x9D 0x00..0xFF # | ||
500 | | 0xED 0x9E 0x00..0xA3 # | ||
501 | | 0xED 0x9E 0xB0..0xFF #Lo [23] HANGUL JUNGSEONG O-YEO..HANGUL JUN... | ||
502 | | 0xED 0x9F 0x00..0x86 # | ||
503 | | 0xED 0x9F 0x8B..0xBB #Lo [49] HANGUL JONGSEONG NIEUN-RIEUL..HANG... | ||
504 | | 0xEF 0xA4 0x80..0xFF #Lo [366] CJK COMPATIBILITY IDEOGRAPH-F9... | ||
505 | | 0xEF 0xA5..0xA8 0x00..0xFF # | ||
506 | | 0xEF 0xA9 0x00..0xAD # | ||
507 | | 0xEF 0xA9 0xB0..0xFF #Lo [106] CJK COMPATIBILITY IDEOGRAPH-FA... | ||
508 | | 0xEF 0xAA..0xAA 0x00..0xFF # | ||
509 | | 0xEF 0xAB 0x00..0x99 # | ||
510 | | 0xEF 0xAC 0x80..0x86 #L& [7] LATIN SMALL LIGATURE FF..LATIN SMA... | ||
511 | | 0xEF 0xAC 0x93..0x97 #L& [5] ARMENIAN SMALL LIGATURE MEN NOW..A... | ||
512 | | 0xEF 0xAC 0x9D #Lo HEBREW LETTER YOD WITH HIRIQ | ||
513 | | 0xEF 0xAC 0x9F..0xA8 #Lo [10] HEBREW LIGATURE YIDDISH YOD YOD PA... | ||
514 | | 0xEF 0xAC 0xAA..0xB6 #Lo [13] HEBREW LETTER SHIN WITH SHIN DOT..... | ||
515 | | 0xEF 0xAC 0xB8..0xBC #Lo [5] HEBREW LETTER TET WITH DAGESH..HEB... | ||
516 | | 0xEF 0xAC 0xBE #Lo HEBREW LETTER MEM WITH DAGESH | ||
517 | | 0xEF 0xAD 0x80..0x81 #Lo [2] HEBREW LETTER NUN WITH DAGESH..HEB... | ||
518 | | 0xEF 0xAD 0x83..0x84 #Lo [2] HEBREW LETTER FINAL PE WITH DAGESH... | ||
519 | | 0xEF 0xAD 0x86..0xFF #Lo [108] HEBREW LETTER TSADI WITH DAGESH..A... | ||
520 | | 0xEF 0xAE 0x00..0xB1 # | ||
521 | | 0xEF 0xAF 0x93..0xFF #Lo [363] ARABIC LETTER NG ISOLATED FORM... | ||
522 | | 0xEF 0xB0..0xB3 0x00..0xFF # | ||
523 | | 0xEF 0xB4 0x00..0xBD # | ||
524 | | 0xEF 0xB5 0x90..0xFF #Lo [64] ARABIC LIGATURE TEH WITH JEEM WITH... | ||
525 | | 0xEF 0xB6 0x00..0x8F # | ||
526 | | 0xEF 0xB6 0x92..0xFF #Lo [54] ARABIC LIGATURE MEEM WITH JEEM WIT... | ||
527 | | 0xEF 0xB7 0x00..0x87 # | ||
528 | | 0xEF 0xB7 0xB0..0xBB #Lo [12] ARABIC LIGATURE SALLA USED AS KORA... | ||
529 | | 0xEF 0xB9 0xB0..0xB4 #Lo [5] ARABIC FATHATAN ISOLATED FORM..ARA... | ||
530 | | 0xEF 0xB9 0xB6..0xFF #Lo [135] ARABIC FATHA ISOLATED FORM..AR... | ||
531 | | 0xEF 0xBA..0xBA 0x00..0xFF # | ||
532 | | 0xEF 0xBB 0x00..0xBC # | ||
533 | | 0xEF 0xBC 0xA1..0xBA #L& [26] FULLWIDTH LATIN CAPITAL LETTER A..... | ||
534 | | 0xEF 0xBD 0x81..0x9A #L& [26] FULLWIDTH LATIN SMALL LETTER A..FU... | ||
535 | | 0xEF 0xBD 0xA6..0xAF #Lo [10] HALFWIDTH KATAKANA LETTER WO..HALF... | ||
536 | | 0xEF 0xBD 0xB0 #Lm HALFWIDTH KATAKANA-HIRAGANA PROLON... | ||
537 | | 0xEF 0xBD 0xB1..0xFF #Lo [45] HALFWIDTH KATAKANA LETTER A..HALFW... | ||
538 | | 0xEF 0xBE 0x00..0x9D # | ||
539 | | 0xEF 0xBE 0x9E..0x9F #Lm [2] HALFWIDTH KATAKANA VOICED SOUND MA... | ||
540 | | 0xEF 0xBE 0xA0..0xBE #Lo [31] HALFWIDTH HANGUL FILLER..HALFWIDTH... | ||
541 | | 0xEF 0xBF 0x82..0x87 #Lo [6] HALFWIDTH HANGUL LETTER A..HALFWID... | ||
542 | | 0xEF 0xBF 0x8A..0x8F #Lo [6] HALFWIDTH HANGUL LETTER YEO..HALFW... | ||
543 | | 0xEF 0xBF 0x92..0x97 #Lo [6] HALFWIDTH HANGUL LETTER YO..HALFWI... | ||
544 | | 0xEF 0xBF 0x9A..0x9C #Lo [3] HALFWIDTH HANGUL LETTER EU..HALFWI... | ||
545 | | 0xF0 0x90 0x80 0x80..0x8B #Lo [12] LINEAR B SYLLABLE B008 A..LINEA... | ||
546 | | 0xF0 0x90 0x80 0x8D..0xA6 #Lo [26] LINEAR B SYLLABLE B036 JO..LINE... | ||
547 | | 0xF0 0x90 0x80 0xA8..0xBA #Lo [19] LINEAR B SYLLABLE B060 RA..LINE... | ||
548 | | 0xF0 0x90 0x80 0xBC..0xBD #Lo [2] LINEAR B SYLLABLE B017 ZA..LINE... | ||
549 | | 0xF0 0x90 0x80 0xBF..0xFF #Lo [15] LINEAR B SYLLABLE B020 ZO..LINE... | ||
550 | | 0xF0 0x90 0x81 0x00..0x8D # | ||
551 | | 0xF0 0x90 0x81 0x90..0x9D #Lo [14] LINEAR B SYMBOL B018..LINEAR B ... | ||
552 | | 0xF0 0x90 0x82 0x80..0xFF #Lo [123] LINEAR B IDEOGRAM B100 MAN..LIN... | ||
553 | | 0xF0 0x90 0x83 0x00..0xBA # | ||
554 | | 0xF0 0x90 0x85 0x80..0xB4 #Nl [53] GREEK ACROPHONIC ATTIC ONE QUAR... | ||
555 | | 0xF0 0x90 0x8A 0x80..0x9C #Lo [29] LYCIAN LETTER A..LYCIAN LETTER X | ||
556 | | 0xF0 0x90 0x8A 0xA0..0xFF #Lo [49] CARIAN LETTER A..CARIAN LETTER ... | ||
557 | | 0xF0 0x90 0x8B 0x00..0x90 # | ||
558 | | 0xF0 0x90 0x8C 0x80..0x9F #Lo [32] OLD ITALIC LETTER A..OLD ITALIC... | ||
559 | | 0xF0 0x90 0x8C 0xB0..0xFF #Lo [17] GOTHIC LETTER AHSA..GOTHIC LETT... | ||
560 | | 0xF0 0x90 0x8D 0x00..0x80 # | ||
561 | | 0xF0 0x90 0x8D 0x81 #Nl GOTHIC LETTER NINETY | ||
562 | | 0xF0 0x90 0x8D 0x82..0x89 #Lo [8] GOTHIC LETTER RAIDA..GOTHIC LET... | ||
563 | | 0xF0 0x90 0x8D 0x8A #Nl GOTHIC LETTER NINE HUNDRED | ||
564 | | 0xF0 0x90 0x8D 0x90..0xB5 #Lo [38] OLD PERMIC LETTER AN..OLD PERMI... | ||
565 | | 0xF0 0x90 0x8E 0x80..0x9D #Lo [30] UGARITIC LETTER ALPA..UGARITIC ... | ||
566 | | 0xF0 0x90 0x8E 0xA0..0xFF #Lo [36] OLD PERSIAN SIGN A..OLD PERSIAN... | ||
567 | | 0xF0 0x90 0x8F 0x00..0x83 # | ||
568 | | 0xF0 0x90 0x8F 0x88..0x8F #Lo [8] OLD PERSIAN SIGN AURAMAZDAA..OL... | ||
569 | | 0xF0 0x90 0x8F 0x91..0x95 #Nl [5] OLD PERSIAN NUMBER ONE..OLD PER... | ||
570 | | 0xF0 0x90 0x90 0x80..0xFF #L& [80] DESERET CAPITAL LETTER LONG I..... | ||
571 | | 0xF0 0x90 0x91 0x00..0x8F # | ||
572 | | 0xF0 0x90 0x91 0x90..0xFF #Lo [78] SHAVIAN LETTER PEEP..OSMANYA LE... | ||
573 | | 0xF0 0x90 0x92 0x00..0x9D # | ||
574 | | 0xF0 0x90 0x92 0xB0..0xFF #L& [36] OSAGE CAPITAL LETTER A..OSAGE C... | ||
575 | | 0xF0 0x90 0x93 0x00..0x93 # | ||
576 | | 0xF0 0x90 0x93 0x98..0xBB #L& [36] OSAGE SMALL LETTER A..OSAGE SMA... | ||
577 | | 0xF0 0x90 0x94 0x80..0xA7 #Lo [40] ELBASAN LETTER A..ELBASAN LETTE... | ||
578 | | 0xF0 0x90 0x94 0xB0..0xFF #Lo [52] CAUCASIAN ALBANIAN LETTER ALT..... | ||
579 | | 0xF0 0x90 0x95 0x00..0xA3 # | ||
580 | | 0xF0 0x90 0x98 0x80..0xFF #Lo [311] LINEAR A SIGN AB001..LINE... | ||
581 | | 0xF0 0x90 0x99..0x9B 0x00..0xFF # | ||
582 | | 0xF0 0x90 0x9C 0x00..0xB6 # | ||
583 | | 0xF0 0x90 0x9D 0x80..0x95 #Lo [22] LINEAR A SIGN A701 A..LINEAR A ... | ||
584 | | 0xF0 0x90 0x9D 0xA0..0xA7 #Lo [8] LINEAR A SIGN A800..LINEAR A SI... | ||
585 | | 0xF0 0x90 0xA0 0x80..0x85 #Lo [6] CYPRIOT SYLLABLE A..CYPRIOT SYL... | ||
586 | | 0xF0 0x90 0xA0 0x88 #Lo CYPRIOT SYLLABLE JO | ||
587 | | 0xF0 0x90 0xA0 0x8A..0xB5 #Lo [44] CYPRIOT SYLLABLE KA..CYPRIOT SY... | ||
588 | | 0xF0 0x90 0xA0 0xB7..0xB8 #Lo [2] CYPRIOT SYLLABLE XA..CYPRIOT SY... | ||
589 | | 0xF0 0x90 0xA0 0xBC #Lo CYPRIOT SYLLABLE ZA | ||
590 | | 0xF0 0x90 0xA0 0xBF..0xFF #Lo [23] CYPRIOT SYLLABLE ZO..IMPERIAL A... | ||
591 | | 0xF0 0x90 0xA1 0x00..0x95 # | ||
592 | | 0xF0 0x90 0xA1 0xA0..0xB6 #Lo [23] PALMYRENE LETTER ALEPH..PALMYRE... | ||
593 | | 0xF0 0x90 0xA2 0x80..0x9E #Lo [31] NABATAEAN LETTER FINAL ALEPH..N... | ||
594 | | 0xF0 0x90 0xA3 0xA0..0xB2 #Lo [19] HATRAN LETTER ALEPH..HATRAN LET... | ||
595 | | 0xF0 0x90 0xA3 0xB4..0xB5 #Lo [2] HATRAN LETTER SHIN..HATRAN LETT... | ||
596 | | 0xF0 0x90 0xA4 0x80..0x95 #Lo [22] PHOENICIAN LETTER ALF..PHOENICI... | ||
597 | | 0xF0 0x90 0xA4 0xA0..0xB9 #Lo [26] LYDIAN LETTER A..LYDIAN LETTER C | ||
598 | | 0xF0 0x90 0xA6 0x80..0xB7 #Lo [56] MEROITIC HIEROGLYPHIC LETTER A.... | ||
599 | | 0xF0 0x90 0xA6 0xBE..0xBF #Lo [2] MEROITIC CURSIVE LOGOGRAM RMT..... | ||
600 | | 0xF0 0x90 0xA8 0x80 #Lo KHAROSHTHI LETTER A | ||
601 | | 0xF0 0x90 0xA8 0x90..0x93 #Lo [4] KHAROSHTHI LETTER KA..KHAROSHTH... | ||
602 | | 0xF0 0x90 0xA8 0x95..0x97 #Lo [3] KHAROSHTHI LETTER CA..KHAROSHTH... | ||
603 | | 0xF0 0x90 0xA8 0x99..0xB3 #Lo [27] KHAROSHTHI LETTER NYA..KHAROSHT... | ||
604 | | 0xF0 0x90 0xA9 0xA0..0xBC #Lo [29] OLD SOUTH ARABIAN LETTER HE..OL... | ||
605 | | 0xF0 0x90 0xAA 0x80..0x9C #Lo [29] OLD NORTH ARABIAN LETTER HEH..O... | ||
606 | | 0xF0 0x90 0xAB 0x80..0x87 #Lo [8] MANICHAEAN LETTER ALEPH..MANICH... | ||
607 | | 0xF0 0x90 0xAB 0x89..0xA4 #Lo [28] MANICHAEAN LETTER ZAYIN..MANICH... | ||
608 | | 0xF0 0x90 0xAC 0x80..0xB5 #Lo [54] AVESTAN LETTER A..AVESTAN LETTE... | ||
609 | | 0xF0 0x90 0xAD 0x80..0x95 #Lo [22] INSCRIPTIONAL PARTHIAN LETTER A... | ||
610 | | 0xF0 0x90 0xAD 0xA0..0xB2 #Lo [19] INSCRIPTIONAL PAHLAVI LETTER AL... | ||
611 | | 0xF0 0x90 0xAE 0x80..0x91 #Lo [18] PSALTER PAHLAVI LETTER ALEPH..P... | ||
612 | | 0xF0 0x90 0xB0 0x80..0xFF #Lo [73] OLD TURKIC LETTER ORKHON A..OLD... | ||
613 | | 0xF0 0x90 0xB1 0x00..0x88 # | ||
614 | | 0xF0 0x90 0xB2 0x80..0xB2 #L& [51] OLD HUNGARIAN CAPITAL LETTER A.... | ||
615 | | 0xF0 0x90 0xB3 0x80..0xB2 #L& [51] OLD HUNGARIAN SMALL LETTER A..O... | ||
616 | | 0xF0 0x91 0x80 0x83..0xB7 #Lo [53] BRAHMI SIGN JIHVAMULIYA..BRAHMI... | ||
617 | | 0xF0 0x91 0x82 0x83..0xAF #Lo [45] KAITHI LETTER A..KAITHI LETTER HA | ||
618 | | 0xF0 0x91 0x83 0x90..0xA8 #Lo [25] SORA SOMPENG LETTER SAH..SORA S... | ||
619 | | 0xF0 0x91 0x84 0x83..0xA6 #Lo [36] CHAKMA LETTER AA..CHAKMA LETTER... | ||
620 | | 0xF0 0x91 0x85 0x90..0xB2 #Lo [35] MAHAJANI LETTER A..MAHAJANI LET... | ||
621 | | 0xF0 0x91 0x85 0xB6 #Lo MAHAJANI LIGATURE SHRI | ||
622 | | 0xF0 0x91 0x86 0x83..0xB2 #Lo [48] SHARADA LETTER A..SHARADA LETTE... | ||
623 | | 0xF0 0x91 0x87 0x81..0x84 #Lo [4] SHARADA SIGN AVAGRAHA..SHARADA OM | ||
624 | | 0xF0 0x91 0x87 0x9A #Lo SHARADA EKAM | ||
625 | | 0xF0 0x91 0x87 0x9C #Lo SHARADA HEADSTROKE | ||
626 | | 0xF0 0x91 0x88 0x80..0x91 #Lo [18] KHOJKI LETTER A..KHOJKI LETTER JJA | ||
627 | | 0xF0 0x91 0x88 0x93..0xAB #Lo [25] KHOJKI LETTER NYA..KHOJKI LETTE... | ||
628 | | 0xF0 0x91 0x8A 0x80..0x86 #Lo [7] MULTANI LETTER A..MULTANI LETTE... | ||
629 | | 0xF0 0x91 0x8A 0x88 #Lo MULTANI LETTER GHA | ||
630 | | 0xF0 0x91 0x8A 0x8A..0x8D #Lo [4] MULTANI LETTER CA..MULTANI LETT... | ||
631 | | 0xF0 0x91 0x8A 0x8F..0x9D #Lo [15] MULTANI LETTER NYA..MULTANI LET... | ||
632 | | 0xF0 0x91 0x8A 0x9F..0xA8 #Lo [10] MULTANI LETTER BHA..MULTANI LET... | ||
633 | | 0xF0 0x91 0x8A 0xB0..0xFF #Lo [47] KHUDAWADI LETTER A..KHUDAWADI L... | ||
634 | | 0xF0 0x91 0x8B 0x00..0x9E # | ||
635 | | 0xF0 0x91 0x8C 0x85..0x8C #Lo [8] GRANTHA LETTER A..GRANTHA LETTE... | ||
636 | | 0xF0 0x91 0x8C 0x8F..0x90 #Lo [2] GRANTHA LETTER EE..GRANTHA LETT... | ||
637 | | 0xF0 0x91 0x8C 0x93..0xA8 #Lo [22] GRANTHA LETTER OO..GRANTHA LETT... | ||
638 | | 0xF0 0x91 0x8C 0xAA..0xB0 #Lo [7] GRANTHA LETTER PA..GRANTHA LETT... | ||
639 | | 0xF0 0x91 0x8C 0xB2..0xB3 #Lo [2] GRANTHA LETTER LA..GRANTHA LETT... | ||
640 | | 0xF0 0x91 0x8C 0xB5..0xB9 #Lo [5] GRANTHA LETTER VA..GRANTHA LETT... | ||
641 | | 0xF0 0x91 0x8C 0xBD #Lo GRANTHA SIGN AVAGRAHA | ||
642 | | 0xF0 0x91 0x8D 0x90 #Lo GRANTHA OM | ||
643 | | 0xF0 0x91 0x8D 0x9D..0xA1 #Lo [5] GRANTHA SIGN PLUTA..GRANTHA LET... | ||
644 | | 0xF0 0x91 0x90 0x80..0xB4 #Lo [53] NEWA LETTER A..NEWA LETTER HA | ||
645 | | 0xF0 0x91 0x91 0x87..0x8A #Lo [4] NEWA SIGN AVAGRAHA..NEWA SIDDHI | ||
646 | | 0xF0 0x91 0x92 0x80..0xAF #Lo [48] TIRHUTA ANJI..TIRHUTA LETTER HA | ||
647 | | 0xF0 0x91 0x93 0x84..0x85 #Lo [2] TIRHUTA SIGN AVAGRAHA..TIRHUTA ... | ||
648 | | 0xF0 0x91 0x93 0x87 #Lo TIRHUTA OM | ||
649 | | 0xF0 0x91 0x96 0x80..0xAE #Lo [47] SIDDHAM LETTER A..SIDDHAM LETTE... | ||
650 | | 0xF0 0x91 0x97 0x98..0x9B #Lo [4] SIDDHAM LETTER THREE-CIRCLE ALT... | ||
651 | | 0xF0 0x91 0x98 0x80..0xAF #Lo [48] MODI LETTER A..MODI LETTER LLA | ||
652 | | 0xF0 0x91 0x99 0x84 #Lo MODI SIGN HUVA | ||
653 | | 0xF0 0x91 0x9A 0x80..0xAA #Lo [43] TAKRI LETTER A..TAKRI LETTER RRA | ||
654 | | 0xF0 0x91 0x9C 0x80..0x99 #Lo [26] AHOM LETTER KA..AHOM LETTER JHA | ||
655 | | 0xF0 0x91 0xA2 0xA0..0xFF #L& [64] WARANG CITI CAPITAL LETTER NGAA... | ||
656 | | 0xF0 0x91 0xA3 0x00..0x9F # | ||
657 | | 0xF0 0x91 0xA3 0xBF #Lo WARANG CITI OM | ||
658 | | 0xF0 0x91 0xAB 0x80..0xB8 #Lo [57] PAU CIN HAU LETTER PA..PAU CIN ... | ||
659 | | 0xF0 0x91 0xB0 0x80..0x88 #Lo [9] BHAIKSUKI LETTER A..BHAIKSUKI L... | ||
660 | | 0xF0 0x91 0xB0 0x8A..0xAE #Lo [37] BHAIKSUKI LETTER E..BHAIKSUKI L... | ||
661 | | 0xF0 0x91 0xB1 0x80 #Lo BHAIKSUKI SIGN AVAGRAHA | ||
662 | | 0xF0 0x91 0xB1 0xB2..0xFF #Lo [30] MARCHEN LETTER KA..MARCHEN LETT... | ||
663 | | 0xF0 0x91 0xB2 0x00..0x8F # | ||
664 | | 0xF0 0x92 0x80 0x80..0xFF #Lo [922] CUNEIFORM SIGN A..CUNEIFO... | ||
665 | | 0xF0 0x92 0x81..0x8D 0x00..0xFF # | ||
666 | | 0xF0 0x92 0x8E 0x00..0x99 # | ||
667 | | 0xF0 0x92 0x90 0x80..0xFF #Nl [111] CUNEIFORM NUMERIC SIGN TWO ASH.... | ||
668 | | 0xF0 0x92 0x91 0x00..0xAE # | ||
669 | | 0xF0 0x92 0x92 0x80..0xFF #Lo [196] CUNEIFORM SIGN AB TIMES N... | ||
670 | | 0xF0 0x92 0x93..0x94 0x00..0xFF # | ||
671 | | 0xF0 0x92 0x95 0x00..0x83 # | ||
672 | | 0xF0 0x93 0x80 0x80..0xFF #Lo [1071] EGYPTIAN HIEROGLYPH A001... | ||
673 | | 0xF0 0x93 0x81..0x8F 0x00..0xFF # | ||
674 | | 0xF0 0x93 0x90 0x00..0xAE # | ||
675 | | 0xF0 0x94 0x90 0x80..0xFF #Lo [583] ANATOLIAN HIEROGLYPH A001... | ||
676 | | 0xF0 0x94 0x91..0x98 0x00..0xFF # | ||
677 | | 0xF0 0x94 0x99 0x00..0x86 # | ||
678 | | 0xF0 0x96 0xA0 0x80..0xFF #Lo [569] BAMUM LETTER PHASE-A NGKU... | ||
679 | | 0xF0 0x96 0xA1..0xA7 0x00..0xFF # | ||
680 | | 0xF0 0x96 0xA8 0x00..0xB8 # | ||
681 | | 0xF0 0x96 0xA9 0x80..0x9E #Lo [31] MRO LETTER TA..MRO LETTER TEK | ||
682 | | 0xF0 0x96 0xAB 0x90..0xAD #Lo [30] BASSA VAH LETTER ENNI..BASSA VA... | ||
683 | | 0xF0 0x96 0xAC 0x80..0xAF #Lo [48] PAHAWH HMONG VOWEL KEEB..PAHAWH... | ||
684 | | 0xF0 0x96 0xAD 0x80..0x83 #Lm [4] PAHAWH HMONG SIGN VOS SEEV..PAH... | ||
685 | | 0xF0 0x96 0xAD 0xA3..0xB7 #Lo [21] PAHAWH HMONG SIGN VOS LUB..PAHA... | ||
686 | | 0xF0 0x96 0xAD 0xBD..0xFF #Lo [19] PAHAWH HMONG CLAN SIGN TSHEEJ..... | ||
687 | | 0xF0 0x96 0xAE 0x00..0x8F # | ||
688 | | 0xF0 0x96 0xBC 0x80..0xFF #Lo [69] MIAO LETTER PA..MIAO LETTER HHA | ||
689 | | 0xF0 0x96 0xBD 0x00..0x84 # | ||
690 | | 0xF0 0x96 0xBD 0x90 #Lo MIAO LETTER NASALIZATION | ||
691 | | 0xF0 0x96 0xBE 0x93..0x9F #Lm [13] MIAO LETTER TONE-2..MIAO LETTER... | ||
692 | | 0xF0 0x96 0xBF 0xA0 #Lm TANGUT ITERATION MARK | ||
693 | | 0xF0 0x97 0x80 0x80..0xFF #Lo [6125] TANGUT IDEOGRAPH-17000..... | ||
694 | | 0xF0 0x97 0x81..0xFF 0x00..0xFF # | ||
695 | | 0xF0 0x98 0x00 0x00..0xFF # | ||
696 | | 0xF0 0x98 0x01..0x9E 0x00..0xFF # | ||
697 | | 0xF0 0x98 0x9F 0x00..0xAC # | ||
698 | | 0xF0 0x98 0xA0 0x80..0xFF #Lo [755] TANGUT COMPONENT-001..TAN... | ||
699 | | 0xF0 0x98 0xA1..0xAA 0x00..0xFF # | ||
700 | | 0xF0 0x98 0xAB 0x00..0xB2 # | ||
701 | | 0xF0 0x9B 0x80 0x80..0x81 #Lo [2] KATAKANA LETTER ARCHAIC E..HIRA... | ||
702 | | 0xF0 0x9B 0xB0 0x80..0xFF #Lo [107] DUPLOYAN LETTER H..DUPLOYAN LET... | ||
703 | | 0xF0 0x9B 0xB1 0x00..0xAA # | ||
704 | | 0xF0 0x9B 0xB1 0xB0..0xBC #Lo [13] DUPLOYAN AFFIX LEFT HORIZONTAL ... | ||
705 | | 0xF0 0x9B 0xB2 0x80..0x88 #Lo [9] DUPLOYAN AFFIX HIGH ACUTE..DUPL... | ||
706 | | 0xF0 0x9B 0xB2 0x90..0x99 #Lo [10] DUPLOYAN AFFIX LOW ACUTE..DUPLO... | ||
707 | | 0xF0 0x9D 0x90 0x80..0xFF #L& [85] MATHEMATICAL BOLD CAPITAL A..MA... | ||
708 | | 0xF0 0x9D 0x91 0x00..0x94 # | ||
709 | | 0xF0 0x9D 0x91 0x96..0xFF #L& [71] MATHEMATICAL ITALIC SMALL I..MA... | ||
710 | | 0xF0 0x9D 0x92 0x00..0x9C # | ||
711 | | 0xF0 0x9D 0x92 0x9E..0x9F #L& [2] MATHEMATICAL SCRIPT CAPITAL C..... | ||
712 | | 0xF0 0x9D 0x92 0xA2 #L& MATHEMATICAL SCRIPT CAPITAL G | ||
713 | | 0xF0 0x9D 0x92 0xA5..0xA6 #L& [2] MATHEMATICAL SCRIPT CAPITAL J..... | ||
714 | | 0xF0 0x9D 0x92 0xA9..0xAC #L& [4] MATHEMATICAL SCRIPT CAPITAL N..... | ||
715 | | 0xF0 0x9D 0x92 0xAE..0xB9 #L& [12] MATHEMATICAL SCRIPT CAPITAL S..... | ||
716 | | 0xF0 0x9D 0x92 0xBB #L& MATHEMATICAL SCRIPT SMALL F | ||
717 | | 0xF0 0x9D 0x92 0xBD..0xFF #L& [7] MATHEMATICAL SCRIPT SMALL H..MA... | ||
718 | | 0xF0 0x9D 0x93 0x00..0x83 # | ||
719 | | 0xF0 0x9D 0x93 0x85..0xFF #L& [65] MATHEMATICAL SCRIPT SMALL P..MA... | ||
720 | | 0xF0 0x9D 0x94 0x00..0x85 # | ||
721 | | 0xF0 0x9D 0x94 0x87..0x8A #L& [4] MATHEMATICAL FRAKTUR CAPITAL D.... | ||
722 | | 0xF0 0x9D 0x94 0x8D..0x94 #L& [8] MATHEMATICAL FRAKTUR CAPITAL J.... | ||
723 | | 0xF0 0x9D 0x94 0x96..0x9C #L& [7] MATHEMATICAL FRAKTUR CAPITAL S.... | ||
724 | | 0xF0 0x9D 0x94 0x9E..0xB9 #L& [28] MATHEMATICAL FRAKTUR SMALL A..M... | ||
725 | | 0xF0 0x9D 0x94 0xBB..0xBE #L& [4] MATHEMATICAL DOUBLE-STRUCK CAPI... | ||
726 | | 0xF0 0x9D 0x95 0x80..0x84 #L& [5] MATHEMATICAL DOUBLE-STRUCK CAPI... | ||
727 | | 0xF0 0x9D 0x95 0x86 #L& MATHEMATICAL DOUBLE-STRUCK CAPITAL O | ||
728 | | 0xF0 0x9D 0x95 0x8A..0x90 #L& [7] MATHEMATICAL DOUBLE-STRUCK CAPI... | ||
729 | | 0xF0 0x9D 0x95 0x92..0xFF #L& [340] MATHEMATICAL DOUBLE-STRUC... | ||
730 | | 0xF0 0x9D 0x96..0x99 0x00..0xFF # | ||
731 | | 0xF0 0x9D 0x9A 0x00..0xA5 # | ||
732 | | 0xF0 0x9D 0x9A 0xA8..0xFF #L& [25] MATHEMATICAL BOLD CAPITAL ALPHA... | ||
733 | | 0xF0 0x9D 0x9B 0x00..0x80 # | ||
734 | | 0xF0 0x9D 0x9B 0x82..0x9A #L& [25] MATHEMATICAL BOLD SMALL ALPHA..... | ||
735 | | 0xF0 0x9D 0x9B 0x9C..0xBA #L& [31] MATHEMATICAL BOLD EPSILON SYMBO... | ||
736 | | 0xF0 0x9D 0x9B 0xBC..0xFF #L& [25] MATHEMATICAL ITALIC SMALL ALPHA... | ||
737 | | 0xF0 0x9D 0x9C 0x00..0x94 # | ||
738 | | 0xF0 0x9D 0x9C 0x96..0xB4 #L& [31] MATHEMATICAL ITALIC EPSILON SYM... | ||
739 | | 0xF0 0x9D 0x9C 0xB6..0xFF #L& [25] MATHEMATICAL BOLD ITALIC SMALL ... | ||
740 | | 0xF0 0x9D 0x9D 0x00..0x8E # | ||
741 | | 0xF0 0x9D 0x9D 0x90..0xAE #L& [31] MATHEMATICAL BOLD ITALIC EPSILO... | ||
742 | | 0xF0 0x9D 0x9D 0xB0..0xFF #L& [25] MATHEMATICAL SANS-SERIF BOLD SM... | ||
743 | | 0xF0 0x9D 0x9E 0x00..0x88 # | ||
744 | | 0xF0 0x9D 0x9E 0x8A..0xA8 #L& [31] MATHEMATICAL SANS-SERIF BOLD EP... | ||
745 | | 0xF0 0x9D 0x9E 0xAA..0xFF #L& [25] MATHEMATICAL SANS-SERIF BOLD IT... | ||
746 | | 0xF0 0x9D 0x9F 0x00..0x82 # | ||
747 | | 0xF0 0x9D 0x9F 0x84..0x8B #L& [8] MATHEMATICAL SANS-SERIF BOLD IT... | ||
748 | | 0xF0 0x9E 0xA0 0x80..0xFF #Lo [197] MENDE KIKAKUI SYLLABLE M0... | ||
749 | | 0xF0 0x9E 0xA1..0xA2 0x00..0xFF # | ||
750 | | 0xF0 0x9E 0xA3 0x00..0x84 # | ||
751 | | 0xF0 0x9E 0xA4 0x80..0xFF #L& [68] ADLAM CAPITAL LETTER ALIF..ADLA... | ||
752 | | 0xF0 0x9E 0xA5 0x00..0x83 # | ||
753 | | 0xF0 0x9E 0xB8 0x80..0x83 #Lo [4] ARABIC MATHEMATICAL ALEF..ARABI... | ||
754 | | 0xF0 0x9E 0xB8 0x85..0x9F #Lo [27] ARABIC MATHEMATICAL WAW..ARABIC... | ||
755 | | 0xF0 0x9E 0xB8 0xA1..0xA2 #Lo [2] ARABIC MATHEMATICAL INITIAL BEH... | ||
756 | | 0xF0 0x9E 0xB8 0xA4 #Lo ARABIC MATHEMATICAL INITIAL HEH | ||
757 | | 0xF0 0x9E 0xB8 0xA7 #Lo ARABIC MATHEMATICAL INITIAL HAH | ||
758 | | 0xF0 0x9E 0xB8 0xA9..0xB2 #Lo [10] ARABIC MATHEMATICAL INITIAL YEH... | ||
759 | | 0xF0 0x9E 0xB8 0xB4..0xB7 #Lo [4] ARABIC MATHEMATICAL INITIAL SHE... | ||
760 | | 0xF0 0x9E 0xB8 0xB9 #Lo ARABIC MATHEMATICAL INITIAL DAD | ||
761 | | 0xF0 0x9E 0xB8 0xBB #Lo ARABIC MATHEMATICAL INITIAL GHAIN | ||
762 | | 0xF0 0x9E 0xB9 0x82 #Lo ARABIC MATHEMATICAL TAILED JEEM | ||
763 | | 0xF0 0x9E 0xB9 0x87 #Lo ARABIC MATHEMATICAL TAILED HAH | ||
764 | | 0xF0 0x9E 0xB9 0x89 #Lo ARABIC MATHEMATICAL TAILED YEH | ||
765 | | 0xF0 0x9E 0xB9 0x8B #Lo ARABIC MATHEMATICAL TAILED LAM | ||
766 | | 0xF0 0x9E 0xB9 0x8D..0x8F #Lo [3] ARABIC MATHEMATICAL TAILED NOON... | ||
767 | | 0xF0 0x9E 0xB9 0x91..0x92 #Lo [2] ARABIC MATHEMATICAL TAILED SAD.... | ||
768 | | 0xF0 0x9E 0xB9 0x94 #Lo ARABIC MATHEMATICAL TAILED SHEEN | ||
769 | | 0xF0 0x9E 0xB9 0x97 #Lo ARABIC MATHEMATICAL TAILED KHAH | ||
770 | | 0xF0 0x9E 0xB9 0x99 #Lo ARABIC MATHEMATICAL TAILED DAD | ||
771 | | 0xF0 0x9E 0xB9 0x9B #Lo ARABIC MATHEMATICAL TAILED GHAIN | ||
772 | | 0xF0 0x9E 0xB9 0x9D #Lo ARABIC MATHEMATICAL TAILED DOTLESS... | ||
773 | | 0xF0 0x9E 0xB9 0x9F #Lo ARABIC MATHEMATICAL TAILED DOTLESS... | ||
774 | | 0xF0 0x9E 0xB9 0xA1..0xA2 #Lo [2] ARABIC MATHEMATICAL STRETCHED B... | ||
775 | | 0xF0 0x9E 0xB9 0xA4 #Lo ARABIC MATHEMATICAL STRETCHED HEH | ||
776 | | 0xF0 0x9E 0xB9 0xA7..0xAA #Lo [4] ARABIC MATHEMATICAL STRETCHED H... | ||
777 | | 0xF0 0x9E 0xB9 0xAC..0xB2 #Lo [7] ARABIC MATHEMATICAL STRETCHED M... | ||
778 | | 0xF0 0x9E 0xB9 0xB4..0xB7 #Lo [4] ARABIC MATHEMATICAL STRETCHED S... | ||
779 | | 0xF0 0x9E 0xB9 0xB9..0xBC #Lo [4] ARABIC MATHEMATICAL STRETCHED D... | ||
780 | | 0xF0 0x9E 0xB9 0xBE #Lo ARABIC MATHEMATICAL STRETCHED DOTL... | ||
781 | | 0xF0 0x9E 0xBA 0x80..0x89 #Lo [10] ARABIC MATHEMATICAL LOOPED ALEF... | ||
782 | | 0xF0 0x9E 0xBA 0x8B..0x9B #Lo [17] ARABIC MATHEMATICAL LOOPED LAM.... | ||
783 | | 0xF0 0x9E 0xBA 0xA1..0xA3 #Lo [3] ARABIC MATHEMATICAL DOUBLE-STRU... | ||
784 | | 0xF0 0x9E 0xBA 0xA5..0xA9 #Lo [5] ARABIC MATHEMATICAL DOUBLE-STRU... | ||
785 | | 0xF0 0x9E 0xBA 0xAB..0xBB #Lo [17] ARABIC MATHEMATICAL DOUBLE-STRU... | ||
786 | | 0xF0 0xA0 0x80 0x80..0xFF #Lo [42711] CJK UNIFIED IDEOG... | ||
787 | | 0xF0 0xA0 0x81..0xFF 0x00..0xFF # | ||
788 | | 0xF0 0xA1..0xA9 0x00..0xFF 0x00..0xFF # | ||
789 | | 0xF0 0xAA 0x00 0x00..0xFF # | ||
790 | | 0xF0 0xAA 0x01..0x9A 0x00..0xFF # | ||
791 | | 0xF0 0xAA 0x9B 0x00..0x96 # | ||
792 | | 0xF0 0xAA 0x9C 0x80..0xFF #Lo [4149] CJK UNIFIED IDEOGRAPH-2A... | ||
793 | | 0xF0 0xAA 0x9D..0xFF 0x00..0xFF # | ||
794 | | 0xF0 0xAB 0x00 0x00..0xFF # | ||
795 | | 0xF0 0xAB 0x01..0x9B 0x00..0xFF # | ||
796 | | 0xF0 0xAB 0x9C 0x00..0xB4 # | ||
797 | | 0xF0 0xAB 0x9D 0x80..0xFF #Lo [222] CJK UNIFIED IDEOGRAPH-2B7... | ||
798 | | 0xF0 0xAB 0x9E..0x9F 0x00..0xFF # | ||
799 | | 0xF0 0xAB 0xA0 0x00..0x9D # | ||
800 | | 0xF0 0xAB 0xA0 0xA0..0xFF #Lo [5762] CJK UNIFIED IDEOGRAPH-2B... | ||
801 | | 0xF0 0xAB 0xA1..0xFF 0x00..0xFF # | ||
802 | | 0xF0 0xAC 0x00 0x00..0xFF # | ||
803 | | 0xF0 0xAC 0x01..0xB9 0x00..0xFF # | ||
804 | | 0xF0 0xAC 0xBA 0x00..0xA1 # | ||
805 | | 0xF0 0xAF 0xA0 0x80..0xFF #Lo [542] CJK COMPATIBILITY IDEOGRA... | ||
806 | | 0xF0 0xAF 0xA1..0xA7 0x00..0xFF # | ||
807 | | 0xF0 0xAF 0xA8 0x00..0x9D # | ||
808 | ; | ||
809 | |||
810 | ID_Continue = | ||
811 | 0x30..0x39 #Nd [10] DIGIT ZERO..DIGIT NINE | ||
812 | | 0x41..0x5A #L& [26] LATIN CAPITAL LETTER A..LATIN CAPI... | ||
813 | | 0x5F #Pc LOW LINE | ||
814 | | 0x61..0x7A #L& [26] LATIN SMALL LETTER A..LATIN SMALL ... | ||
815 | | 0xC2 0xAA #Lo FEMININE ORDINAL INDICATOR | ||
816 | | 0xC2 0xB5 #L& MICRO SIGN | ||
817 | | 0xC2 0xB7 #Po MIDDLE DOT | ||
818 | | 0xC2 0xBA #Lo MASCULINE ORDINAL INDICATOR | ||
819 | | 0xC3 0x80..0x96 #L& [23] LATIN CAPITAL LETTER A WITH GRAVE.... | ||
820 | | 0xC3 0x98..0xB6 #L& [31] LATIN CAPITAL LETTER O WITH STROKE... | ||
821 | | 0xC3 0xB8..0xFF #L& [195] LATIN SMALL LETTER O WITH STROKE..... | ||
822 | | 0xC4..0xC5 0x00..0xFF # | ||
823 | | 0xC6 0x00..0xBA # | ||
824 | | 0xC6 0xBB #Lo LATIN LETTER TWO WITH STROKE | ||
825 | | 0xC6 0xBC..0xBF #L& [4] LATIN CAPITAL LETTER TONE FIVE..LA... | ||
826 | | 0xC7 0x80..0x83 #Lo [4] LATIN LETTER DENTAL CLICK..LATIN L... | ||
827 | | 0xC7 0x84..0xFF #L& [208] LATIN CAPITAL LETTER DZ WITH CARON... | ||
828 | | 0xC8..0xC9 0x00..0xFF # | ||
829 | | 0xCA 0x00..0x93 # | ||
830 | | 0xCA 0x94 #Lo LATIN LETTER GLOTTAL STOP | ||
831 | | 0xCA 0x95..0xAF #L& [27] LATIN LETTER PHARYNGEAL VOICED FRI... | ||
832 | | 0xCA 0xB0..0xFF #Lm [18] MODIFIER LETTER SMALL H..MODIFIER ... | ||
833 | | 0xCB 0x00..0x81 # | ||
834 | | 0xCB 0x86..0x91 #Lm [12] MODIFIER LETTER CIRCUMFLEX ACCENT.... | ||
835 | | 0xCB 0xA0..0xA4 #Lm [5] MODIFIER LETTER SMALL GAMMA..MODIF... | ||
836 | | 0xCB 0xAC #Lm MODIFIER LETTER VOICING | ||
837 | | 0xCB 0xAE #Lm MODIFIER LETTER DOUBLE APOSTROPHE | ||
838 | | 0xCC 0x80..0xFF #Mn [112] COMBINING GRAVE ACCENT..COMBINING ... | ||
839 | | 0xCD 0x00..0xAF # | ||
840 | | 0xCD 0xB0..0xB3 #L& [4] GREEK CAPITAL LETTER HETA..GREEK S... | ||
841 | | 0xCD 0xB4 #Lm GREEK NUMERAL SIGN | ||
842 | | 0xCD 0xB6..0xB7 #L& [2] GREEK CAPITAL LETTER PAMPHYLIAN DI... | ||
843 | | 0xCD 0xBA #Lm GREEK YPOGEGRAMMENI | ||
844 | | 0xCD 0xBB..0xBD #L& [3] GREEK SMALL REVERSED LUNATE SIGMA ... | ||
845 | | 0xCD 0xBF #L& GREEK CAPITAL LETTER YOT | ||
846 | | 0xCE 0x86 #L& GREEK CAPITAL LETTER ALPHA WITH TONOS | ||
847 | | 0xCE 0x87 #Po GREEK ANO TELEIA | ||
848 | | 0xCE 0x88..0x8A #L& [3] GREEK CAPITAL LETTER EPSILON WITH ... | ||
849 | | 0xCE 0x8C #L& GREEK CAPITAL LETTER OMICRON WITH ... | ||
850 | | 0xCE 0x8E..0xA1 #L& [20] GREEK CAPITAL LETTER UPSILON WITH ... | ||
851 | | 0xCE 0xA3..0xFF #L& [83] GREEK CAPITAL LETTER SIGMA..GREEK ... | ||
852 | | 0xCF 0x00..0xB5 # | ||
853 | | 0xCF 0xB7..0xFF #L& [139] GREEK CAPITAL LETTER SHO..CYRILLIC... | ||
854 | | 0xD0..0xD1 0x00..0xFF # | ||
855 | | 0xD2 0x00..0x81 # | ||
856 | | 0xD2 0x83..0x87 #Mn [5] COMBINING CYRILLIC TITLO..COMBININ... | ||
857 | | 0xD2 0x8A..0xFF #L& [166] CYRILLIC CAPITAL LETTER SHORT I WI... | ||
858 | | 0xD3..0xD3 0x00..0xFF # | ||
859 | | 0xD4 0x00..0xAF # | ||
860 | | 0xD4 0xB1..0xFF #L& [38] ARMENIAN CAPITAL LETTER AYB..ARMEN... | ||
861 | | 0xD5 0x00..0x96 # | ||
862 | | 0xD5 0x99 #Lm ARMENIAN MODIFIER LETTER LEFT HALF... | ||
863 | | 0xD5 0xA1..0xFF #L& [39] ARMENIAN SMALL LETTER AYB..ARMENIA... | ||
864 | | 0xD6 0x00..0x87 # | ||
865 | | 0xD6 0x91..0xBD #Mn [45] HEBREW ACCENT ETNAHTA..HEBREW POIN... | ||
866 | | 0xD6 0xBF #Mn HEBREW POINT RAFE | ||
867 | | 0xD7 0x81..0x82 #Mn [2] HEBREW POINT SHIN DOT..HEBREW POIN... | ||
868 | | 0xD7 0x84..0x85 #Mn [2] HEBREW MARK UPPER DOT..HEBREW MARK... | ||
869 | | 0xD7 0x87 #Mn HEBREW POINT QAMATS QATAN | ||
870 | | 0xD7 0x90..0xAA #Lo [27] HEBREW LETTER ALEF..HEBREW LETTER TAV | ||
871 | | 0xD7 0xB0..0xB2 #Lo [3] HEBREW LIGATURE YIDDISH DOUBLE VAV... | ||
872 | | 0xD8 0x90..0x9A #Mn [11] ARABIC SIGN SALLALLAHOU ALAYHE WAS... | ||
873 | | 0xD8 0xA0..0xBF #Lo [32] ARABIC LETTER KASHMIRI YEH..ARABIC... | ||
874 | | 0xD9 0x80 #Lm ARABIC TATWEEL | ||
875 | | 0xD9 0x81..0x8A #Lo [10] ARABIC LETTER FEH..ARABIC LETTER YEH | ||
876 | | 0xD9 0x8B..0x9F #Mn [21] ARABIC FATHATAN..ARABIC WAVY HAMZA... | ||
877 | | 0xD9 0xA0..0xA9 #Nd [10] ARABIC-INDIC DIGIT ZERO..ARABIC-IN... | ||
878 | | 0xD9 0xAE..0xAF #Lo [2] ARABIC LETTER DOTLESS BEH..ARABIC ... | ||
879 | | 0xD9 0xB0 #Mn ARABIC LETTER SUPERSCRIPT ALEF | ||
880 | | 0xD9 0xB1..0xFF #Lo [99] ARABIC LETTER ALEF WASLA..ARABIC L... | ||
881 | | 0xDA..0xDA 0x00..0xFF # | ||
882 | | 0xDB 0x00..0x93 # | ||
883 | | 0xDB 0x95 #Lo ARABIC LETTER AE | ||
884 | | 0xDB 0x96..0x9C #Mn [7] ARABIC SMALL HIGH LIGATURE SAD WIT... | ||
885 | | 0xDB 0x9F..0xA4 #Mn [6] ARABIC SMALL HIGH ROUNDED ZERO..AR... | ||
886 | | 0xDB 0xA5..0xA6 #Lm [2] ARABIC SMALL WAW..ARABIC SMALL YEH | ||
887 | | 0xDB 0xA7..0xA8 #Mn [2] ARABIC SMALL HIGH YEH..ARABIC SMAL... | ||
888 | | 0xDB 0xAA..0xAD #Mn [4] ARABIC EMPTY CENTRE LOW STOP..ARAB... | ||
889 | | 0xDB 0xAE..0xAF #Lo [2] ARABIC LETTER DAL WITH INVERTED V.... | ||
890 | | 0xDB 0xB0..0xB9 #Nd [10] EXTENDED ARABIC-INDIC DIGIT ZERO..... | ||
891 | | 0xDB 0xBA..0xBC #Lo [3] ARABIC LETTER SHEEN WITH DOT BELOW... | ||
892 | | 0xDB 0xBF #Lo ARABIC LETTER HEH WITH INVERTED V | ||
893 | | 0xDC 0x90 #Lo SYRIAC LETTER ALAPH | ||
894 | | 0xDC 0x91 #Mn SYRIAC LETTER SUPERSCRIPT ALAPH | ||
895 | | 0xDC 0x92..0xAF #Lo [30] SYRIAC LETTER BETH..SYRIAC LETTER ... | ||
896 | | 0xDC 0xB0..0xFF #Mn [27] SYRIAC PTHAHA ABOVE..SYRIAC BARREKH | ||
897 | | 0xDD 0x00..0x8A # | ||
898 | | 0xDD 0x8D..0xFF #Lo [89] SYRIAC LETTER SOGDIAN ZHAIN..THAAN... | ||
899 | | 0xDE 0x00..0xA5 # | ||
900 | | 0xDE 0xA6..0xB0 #Mn [11] THAANA ABAFILI..THAANA SUKUN | ||
901 | | 0xDE 0xB1 #Lo THAANA LETTER NAA | ||
902 | | 0xDF 0x80..0x89 #Nd [10] NKO DIGIT ZERO..NKO DIGIT NINE | ||
903 | | 0xDF 0x8A..0xAA #Lo [33] NKO LETTER A..NKO LETTER JONA RA | ||
904 | | 0xDF 0xAB..0xB3 #Mn [9] NKO COMBINING SHORT HIGH TONE..NKO... | ||
905 | | 0xDF 0xB4..0xB5 #Lm [2] NKO HIGH TONE APOSTROPHE..NKO LOW ... | ||
906 | | 0xDF 0xBA #Lm NKO LAJANYALAN | ||
907 | | 0xE0 0xA0 0x80..0x95 #Lo [22] SAMARITAN LETTER ALAF..SAMARITAN L... | ||
908 | | 0xE0 0xA0 0x96..0x99 #Mn [4] SAMARITAN MARK IN..SAMARITAN MARK ... | ||
909 | | 0xE0 0xA0 0x9A #Lm SAMARITAN MODIFIER LETTER EPENTHET... | ||
910 | | 0xE0 0xA0 0x9B..0xA3 #Mn [9] SAMARITAN MARK EPENTHETIC YUT..SAM... | ||
911 | | 0xE0 0xA0 0xA4 #Lm SAMARITAN MODIFIER LETTER SHORT A | ||
912 | | 0xE0 0xA0 0xA5..0xA7 #Mn [3] SAMARITAN VOWEL SIGN SHORT A..SAMA... | ||
913 | | 0xE0 0xA0 0xA8 #Lm SAMARITAN MODIFIER LETTER I | ||
914 | | 0xE0 0xA0 0xA9..0xAD #Mn [5] SAMARITAN VOWEL SIGN LONG I..SAMAR... | ||
915 | | 0xE0 0xA1 0x80..0x98 #Lo [25] MANDAIC LETTER HALQA..MANDAIC LETT... | ||
916 | | 0xE0 0xA1 0x99..0x9B #Mn [3] MANDAIC AFFRICATION MARK..MANDAIC ... | ||
917 | | 0xE0 0xA2 0xA0..0xB4 #Lo [21] ARABIC LETTER BEH WITH SMALL V BEL... | ||
918 | | 0xE0 0xA2 0xB6..0xBD #Lo [8] ARABIC LETTER BEH WITH SMALL MEEM ... | ||
919 | | 0xE0 0xA3 0x94..0xA1 #Mn [14] ARABIC SMALL HIGH WORD AR-RUB..ARA... | ||
920 | | 0xE0 0xA3 0xA3..0xFF #Mn [32] ARABIC TURNED DAMMA BELOW..DEVANAG... | ||
921 | | 0xE0 0xA4 0x00..0x82 # | ||
922 | | 0xE0 0xA4 0x83 #Mc DEVANAGARI SIGN VISARGA | ||
923 | | 0xE0 0xA4 0x84..0xB9 #Lo [54] DEVANAGARI LETTER SHORT A..DEVANAG... | ||
924 | | 0xE0 0xA4 0xBA #Mn DEVANAGARI VOWEL SIGN OE | ||
925 | | 0xE0 0xA4 0xBB #Mc DEVANAGARI VOWEL SIGN OOE | ||
926 | | 0xE0 0xA4 0xBC #Mn DEVANAGARI SIGN NUKTA | ||
927 | | 0xE0 0xA4 0xBD #Lo DEVANAGARI SIGN AVAGRAHA | ||
928 | | 0xE0 0xA4 0xBE..0xFF #Mc [3] DEVANAGARI VOWEL SIGN AA..DEVANAGA... | ||
929 | | 0xE0 0xA5 0x00..0x80 # | ||
930 | | 0xE0 0xA5 0x81..0x88 #Mn [8] DEVANAGARI VOWEL SIGN U..DEVANAGAR... | ||
931 | | 0xE0 0xA5 0x89..0x8C #Mc [4] DEVANAGARI VOWEL SIGN CANDRA O..DE... | ||
932 | | 0xE0 0xA5 0x8D #Mn DEVANAGARI SIGN VIRAMA | ||
933 | | 0xE0 0xA5 0x8E..0x8F #Mc [2] DEVANAGARI VOWEL SIGN PRISHTHAMATR... | ||
934 | | 0xE0 0xA5 0x90 #Lo DEVANAGARI OM | ||
935 | | 0xE0 0xA5 0x91..0x97 #Mn [7] DEVANAGARI STRESS SIGN UDATTA..DEV... | ||
936 | | 0xE0 0xA5 0x98..0xA1 #Lo [10] DEVANAGARI LETTER QA..DEVANAGARI L... | ||
937 | | 0xE0 0xA5 0xA2..0xA3 #Mn [2] DEVANAGARI VOWEL SIGN VOCALIC L..D... | ||
938 | | 0xE0 0xA5 0xA6..0xAF #Nd [10] DEVANAGARI DIGIT ZERO..DEVANAGARI ... | ||
939 | | 0xE0 0xA5 0xB1 #Lm DEVANAGARI SIGN HIGH SPACING DOT | ||
940 | | 0xE0 0xA5 0xB2..0xFF #Lo [15] DEVANAGARI LETTER CANDRA A..BENGAL... | ||
941 | | 0xE0 0xA6 0x00..0x80 # | ||
942 | | 0xE0 0xA6 0x81 #Mn BENGALI SIGN CANDRABINDU | ||
943 | | 0xE0 0xA6 0x82..0x83 #Mc [2] BENGALI SIGN ANUSVARA..BENGALI SIG... | ||
944 | | 0xE0 0xA6 0x85..0x8C #Lo [8] BENGALI LETTER A..BENGALI LETTER V... | ||
945 | | 0xE0 0xA6 0x8F..0x90 #Lo [2] BENGALI LETTER E..BENGALI LETTER AI | ||
946 | | 0xE0 0xA6 0x93..0xA8 #Lo [22] BENGALI LETTER O..BENGALI LETTER NA | ||
947 | | 0xE0 0xA6 0xAA..0xB0 #Lo [7] BENGALI LETTER PA..BENGALI LETTER RA | ||
948 | | 0xE0 0xA6 0xB2 #Lo BENGALI LETTER LA | ||
949 | | 0xE0 0xA6 0xB6..0xB9 #Lo [4] BENGALI LETTER SHA..BENGALI LETTER HA | ||
950 | | 0xE0 0xA6 0xBC #Mn BENGALI SIGN NUKTA | ||
951 | | 0xE0 0xA6 0xBD #Lo BENGALI SIGN AVAGRAHA | ||
952 | | 0xE0 0xA6 0xBE..0xFF #Mc [3] BENGALI VOWEL SIGN AA..BENGALI VOW... | ||
953 | | 0xE0 0xA7 0x00..0x80 # | ||
954 | | 0xE0 0xA7 0x81..0x84 #Mn [4] BENGALI VOWEL SIGN U..BENGALI VOWE... | ||
955 | | 0xE0 0xA7 0x87..0x88 #Mc [2] BENGALI VOWEL SIGN E..BENGALI VOWE... | ||
956 | | 0xE0 0xA7 0x8B..0x8C #Mc [2] BENGALI VOWEL SIGN O..BENGALI VOWE... | ||
957 | | 0xE0 0xA7 0x8D #Mn BENGALI SIGN VIRAMA | ||
958 | | 0xE0 0xA7 0x8E #Lo BENGALI LETTER KHANDA TA | ||
959 | | 0xE0 0xA7 0x97 #Mc BENGALI AU LENGTH MARK | ||
960 | | 0xE0 0xA7 0x9C..0x9D #Lo [2] BENGALI LETTER RRA..BENGALI LETTER... | ||
961 | | 0xE0 0xA7 0x9F..0xA1 #Lo [3] BENGALI LETTER YYA..BENGALI LETTER... | ||
962 | | 0xE0 0xA7 0xA2..0xA3 #Mn [2] BENGALI VOWEL SIGN VOCALIC L..BENG... | ||
963 | | 0xE0 0xA7 0xA6..0xAF #Nd [10] BENGALI DIGIT ZERO..BENGALI DIGIT ... | ||
964 | | 0xE0 0xA7 0xB0..0xB1 #Lo [2] BENGALI LETTER RA WITH MIDDLE DIAG... | ||
965 | | 0xE0 0xA8 0x81..0x82 #Mn [2] GURMUKHI SIGN ADAK BINDI..GURMUKHI... | ||
966 | | 0xE0 0xA8 0x83 #Mc GURMUKHI SIGN VISARGA | ||
967 | | 0xE0 0xA8 0x85..0x8A #Lo [6] GURMUKHI LETTER A..GURMUKHI LETTER UU | ||
968 | | 0xE0 0xA8 0x8F..0x90 #Lo [2] GURMUKHI LETTER EE..GURMUKHI LETTE... | ||
969 | | 0xE0 0xA8 0x93..0xA8 #Lo [22] GURMUKHI LETTER OO..GURMUKHI LETTE... | ||
970 | | 0xE0 0xA8 0xAA..0xB0 #Lo [7] GURMUKHI LETTER PA..GURMUKHI LETTE... | ||
971 | | 0xE0 0xA8 0xB2..0xB3 #Lo [2] GURMUKHI LETTER LA..GURMUKHI LETTE... | ||
972 | | 0xE0 0xA8 0xB5..0xB6 #Lo [2] GURMUKHI LETTER VA..GURMUKHI LETTE... | ||
973 | | 0xE0 0xA8 0xB8..0xB9 #Lo [2] GURMUKHI LETTER SA..GURMUKHI LETTE... | ||
974 | | 0xE0 0xA8 0xBC #Mn GURMUKHI SIGN NUKTA | ||
975 | | 0xE0 0xA8 0xBE..0xFF #Mc [3] GURMUKHI VOWEL SIGN AA..GURMUKHI V... | ||
976 | | 0xE0 0xA9 0x00..0x80 # | ||
977 | | 0xE0 0xA9 0x81..0x82 #Mn [2] GURMUKHI VOWEL SIGN U..GURMUKHI VO... | ||
978 | | 0xE0 0xA9 0x87..0x88 #Mn [2] GURMUKHI VOWEL SIGN EE..GURMUKHI V... | ||
979 | | 0xE0 0xA9 0x8B..0x8D #Mn [3] GURMUKHI VOWEL SIGN OO..GURMUKHI S... | ||
980 | | 0xE0 0xA9 0x91 #Mn GURMUKHI SIGN UDAAT | ||
981 | | 0xE0 0xA9 0x99..0x9C #Lo [4] GURMUKHI LETTER KHHA..GURMUKHI LET... | ||
982 | | 0xE0 0xA9 0x9E #Lo GURMUKHI LETTER FA | ||
983 | | 0xE0 0xA9 0xA6..0xAF #Nd [10] GURMUKHI DIGIT ZERO..GURMUKHI DIGI... | ||
984 | | 0xE0 0xA9 0xB0..0xB1 #Mn [2] GURMUKHI TIPPI..GURMUKHI ADDAK | ||
985 | | 0xE0 0xA9 0xB2..0xB4 #Lo [3] GURMUKHI IRI..GURMUKHI EK ONKAR | ||
986 | | 0xE0 0xA9 0xB5 #Mn GURMUKHI SIGN YAKASH | ||
987 | | 0xE0 0xAA 0x81..0x82 #Mn [2] GUJARATI SIGN CANDRABINDU..GUJARAT... | ||
988 | | 0xE0 0xAA 0x83 #Mc GUJARATI SIGN VISARGA | ||
989 | | 0xE0 0xAA 0x85..0x8D #Lo [9] GUJARATI LETTER A..GUJARATI VOWEL ... | ||
990 | | 0xE0 0xAA 0x8F..0x91 #Lo [3] GUJARATI LETTER E..GUJARATI VOWEL ... | ||
991 | | 0xE0 0xAA 0x93..0xA8 #Lo [22] GUJARATI LETTER O..GUJARATI LETTER NA | ||
992 | | 0xE0 0xAA 0xAA..0xB0 #Lo [7] GUJARATI LETTER PA..GUJARATI LETTE... | ||
993 | | 0xE0 0xAA 0xB2..0xB3 #Lo [2] GUJARATI LETTER LA..GUJARATI LETTE... | ||
994 | | 0xE0 0xAA 0xB5..0xB9 #Lo [5] GUJARATI LETTER VA..GUJARATI LETTE... | ||
995 | | 0xE0 0xAA 0xBC #Mn GUJARATI SIGN NUKTA | ||
996 | | 0xE0 0xAA 0xBD #Lo GUJARATI SIGN AVAGRAHA | ||
997 | | 0xE0 0xAA 0xBE..0xFF #Mc [3] GUJARATI VOWEL SIGN AA..GUJARATI V... | ||
998 | | 0xE0 0xAB 0x00..0x80 # | ||
999 | | 0xE0 0xAB 0x81..0x85 #Mn [5] GUJARATI VOWEL SIGN U..GUJARATI VO... | ||
1000 | | 0xE0 0xAB 0x87..0x88 #Mn [2] GUJARATI VOWEL SIGN E..GUJARATI VO... | ||
1001 | | 0xE0 0xAB 0x89 #Mc GUJARATI VOWEL SIGN CANDRA O | ||
1002 | | 0xE0 0xAB 0x8B..0x8C #Mc [2] GUJARATI VOWEL SIGN O..GUJARATI VO... | ||
1003 | | 0xE0 0xAB 0x8D #Mn GUJARATI SIGN VIRAMA | ||
1004 | | 0xE0 0xAB 0x90 #Lo GUJARATI OM | ||
1005 | | 0xE0 0xAB 0xA0..0xA1 #Lo [2] GUJARATI LETTER VOCALIC RR..GUJARA... | ||
1006 | | 0xE0 0xAB 0xA2..0xA3 #Mn [2] GUJARATI VOWEL SIGN VOCALIC L..GUJ... | ||
1007 | | 0xE0 0xAB 0xA6..0xAF #Nd [10] GUJARATI DIGIT ZERO..GUJARATI DIGI... | ||
1008 | | 0xE0 0xAB 0xB9 #Lo GUJARATI LETTER ZHA | ||
1009 | | 0xE0 0xAC 0x81 #Mn ORIYA SIGN CANDRABINDU | ||
1010 | | 0xE0 0xAC 0x82..0x83 #Mc [2] ORIYA SIGN ANUSVARA..ORIYA SIGN VI... | ||
1011 | | 0xE0 0xAC 0x85..0x8C #Lo [8] ORIYA LETTER A..ORIYA LETTER VOCAL... | ||
1012 | | 0xE0 0xAC 0x8F..0x90 #Lo [2] ORIYA LETTER E..ORIYA LETTER AI | ||
1013 | | 0xE0 0xAC 0x93..0xA8 #Lo [22] ORIYA LETTER O..ORIYA LETTER NA | ||
1014 | | 0xE0 0xAC 0xAA..0xB0 #Lo [7] ORIYA LETTER PA..ORIYA LETTER RA | ||
1015 | | 0xE0 0xAC 0xB2..0xB3 #Lo [2] ORIYA LETTER LA..ORIYA LETTER LLA | ||
1016 | | 0xE0 0xAC 0xB5..0xB9 #Lo [5] ORIYA LETTER VA..ORIYA LETTER HA | ||
1017 | | 0xE0 0xAC 0xBC #Mn ORIYA SIGN NUKTA | ||
1018 | | 0xE0 0xAC 0xBD #Lo ORIYA SIGN AVAGRAHA | ||
1019 | | 0xE0 0xAC 0xBE #Mc ORIYA VOWEL SIGN AA | ||
1020 | | 0xE0 0xAC 0xBF #Mn ORIYA VOWEL SIGN I | ||
1021 | | 0xE0 0xAD 0x80 #Mc ORIYA VOWEL SIGN II | ||
1022 | | 0xE0 0xAD 0x81..0x84 #Mn [4] ORIYA VOWEL SIGN U..ORIYA VOWEL SI... | ||
1023 | | 0xE0 0xAD 0x87..0x88 #Mc [2] ORIYA VOWEL SIGN E..ORIYA VOWEL SI... | ||
1024 | | 0xE0 0xAD 0x8B..0x8C #Mc [2] ORIYA VOWEL SIGN O..ORIYA VOWEL SI... | ||
1025 | | 0xE0 0xAD 0x8D #Mn ORIYA SIGN VIRAMA | ||
1026 | | 0xE0 0xAD 0x96 #Mn ORIYA AI LENGTH MARK | ||
1027 | | 0xE0 0xAD 0x97 #Mc ORIYA AU LENGTH MARK | ||
1028 | | 0xE0 0xAD 0x9C..0x9D #Lo [2] ORIYA LETTER RRA..ORIYA LETTER RHA | ||
1029 | | 0xE0 0xAD 0x9F..0xA1 #Lo [3] ORIYA LETTER YYA..ORIYA LETTER VOC... | ||
1030 | | 0xE0 0xAD 0xA2..0xA3 #Mn [2] ORIYA VOWEL SIGN VOCALIC L..ORIYA ... | ||
1031 | | 0xE0 0xAD 0xA6..0xAF #Nd [10] ORIYA DIGIT ZERO..ORIYA DIGIT NINE | ||
1032 | | 0xE0 0xAD 0xB1 #Lo ORIYA LETTER WA | ||
1033 | | 0xE0 0xAE 0x82 #Mn TAMIL SIGN ANUSVARA | ||
1034 | | 0xE0 0xAE 0x83 #Lo TAMIL SIGN VISARGA | ||
1035 | | 0xE0 0xAE 0x85..0x8A #Lo [6] TAMIL LETTER A..TAMIL LETTER UU | ||
1036 | | 0xE0 0xAE 0x8E..0x90 #Lo [3] TAMIL LETTER E..TAMIL LETTER AI | ||
1037 | | 0xE0 0xAE 0x92..0x95 #Lo [4] TAMIL LETTER O..TAMIL LETTER KA | ||
1038 | | 0xE0 0xAE 0x99..0x9A #Lo [2] TAMIL LETTER NGA..TAMIL LETTER CA | ||
1039 | | 0xE0 0xAE 0x9C #Lo TAMIL LETTER JA | ||
1040 | | 0xE0 0xAE 0x9E..0x9F #Lo [2] TAMIL LETTER NYA..TAMIL LETTER TTA | ||
1041 | | 0xE0 0xAE 0xA3..0xA4 #Lo [2] TAMIL LETTER NNA..TAMIL LETTER TA | ||
1042 | | 0xE0 0xAE 0xA8..0xAA #Lo [3] TAMIL LETTER NA..TAMIL LETTER PA | ||
1043 | | 0xE0 0xAE 0xAE..0xB9 #Lo [12] TAMIL LETTER MA..TAMIL LETTER HA | ||
1044 | | 0xE0 0xAE 0xBE..0xBF #Mc [2] TAMIL VOWEL SIGN AA..TAMIL VOWEL S... | ||
1045 | | 0xE0 0xAF 0x80 #Mn TAMIL VOWEL SIGN II | ||
1046 | | 0xE0 0xAF 0x81..0x82 #Mc [2] TAMIL VOWEL SIGN U..TAMIL VOWEL SI... | ||
1047 | | 0xE0 0xAF 0x86..0x88 #Mc [3] TAMIL VOWEL SIGN E..TAMIL VOWEL SI... | ||
1048 | | 0xE0 0xAF 0x8A..0x8C #Mc [3] TAMIL VOWEL SIGN O..TAMIL VOWEL SI... | ||
1049 | | 0xE0 0xAF 0x8D #Mn TAMIL SIGN VIRAMA | ||
1050 | | 0xE0 0xAF 0x90 #Lo TAMIL OM | ||
1051 | | 0xE0 0xAF 0x97 #Mc TAMIL AU LENGTH MARK | ||
1052 | | 0xE0 0xAF 0xA6..0xAF #Nd [10] TAMIL DIGIT ZERO..TAMIL DIGIT NINE | ||
1053 | | 0xE0 0xB0 0x80 #Mn TELUGU SIGN COMBINING CANDRABINDU ... | ||
1054 | | 0xE0 0xB0 0x81..0x83 #Mc [3] TELUGU SIGN CANDRABINDU..TELUGU SI... | ||
1055 | | 0xE0 0xB0 0x85..0x8C #Lo [8] TELUGU LETTER A..TELUGU LETTER VOC... | ||
1056 | | 0xE0 0xB0 0x8E..0x90 #Lo [3] TELUGU LETTER E..TELUGU LETTER AI | ||
1057 | | 0xE0 0xB0 0x92..0xA8 #Lo [23] TELUGU LETTER O..TELUGU LETTER NA | ||
1058 | | 0xE0 0xB0 0xAA..0xB9 #Lo [16] TELUGU LETTER PA..TELUGU LETTER HA | ||
1059 | | 0xE0 0xB0 0xBD #Lo TELUGU SIGN AVAGRAHA | ||
1060 | | 0xE0 0xB0 0xBE..0xFF #Mn [3] TELUGU VOWEL SIGN AA..TELUGU VOWEL... | ||
1061 | | 0xE0 0xB1 0x00..0x80 # | ||
1062 | | 0xE0 0xB1 0x81..0x84 #Mc [4] TELUGU VOWEL SIGN U..TELUGU VOWEL ... | ||
1063 | | 0xE0 0xB1 0x86..0x88 #Mn [3] TELUGU VOWEL SIGN E..TELUGU VOWEL ... | ||
1064 | | 0xE0 0xB1 0x8A..0x8D #Mn [4] TELUGU VOWEL SIGN O..TELUGU SIGN V... | ||
1065 | | 0xE0 0xB1 0x95..0x96 #Mn [2] TELUGU LENGTH MARK..TELUGU AI LENG... | ||
1066 | | 0xE0 0xB1 0x98..0x9A #Lo [3] TELUGU LETTER TSA..TELUGU LETTER RRRA | ||
1067 | | 0xE0 0xB1 0xA0..0xA1 #Lo [2] TELUGU LETTER VOCALIC RR..TELUGU L... | ||
1068 | | 0xE0 0xB1 0xA2..0xA3 #Mn [2] TELUGU VOWEL SIGN VOCALIC L..TELUG... | ||
1069 | | 0xE0 0xB1 0xA6..0xAF #Nd [10] TELUGU DIGIT ZERO..TELUGU DIGIT NINE | ||
1070 | | 0xE0 0xB2 0x80 #Lo KANNADA SIGN SPACING CANDRABINDU | ||
1071 | | 0xE0 0xB2 0x81 #Mn KANNADA SIGN CANDRABINDU | ||
1072 | | 0xE0 0xB2 0x82..0x83 #Mc [2] KANNADA SIGN ANUSVARA..KANNADA SIG... | ||
1073 | | 0xE0 0xB2 0x85..0x8C #Lo [8] KANNADA LETTER A..KANNADA LETTER V... | ||
1074 | | 0xE0 0xB2 0x8E..0x90 #Lo [3] KANNADA LETTER E..KANNADA LETTER AI | ||
1075 | | 0xE0 0xB2 0x92..0xA8 #Lo [23] KANNADA LETTER O..KANNADA LETTER NA | ||
1076 | | 0xE0 0xB2 0xAA..0xB3 #Lo [10] KANNADA LETTER PA..KANNADA LETTER LLA | ||
1077 | | 0xE0 0xB2 0xB5..0xB9 #Lo [5] KANNADA LETTER VA..KANNADA LETTER HA | ||
1078 | | 0xE0 0xB2 0xBC #Mn KANNADA SIGN NUKTA | ||
1079 | | 0xE0 0xB2 0xBD #Lo KANNADA SIGN AVAGRAHA | ||
1080 | | 0xE0 0xB2 0xBE #Mc KANNADA VOWEL SIGN AA | ||
1081 | | 0xE0 0xB2 0xBF #Mn KANNADA VOWEL SIGN I | ||
1082 | | 0xE0 0xB3 0x80..0x84 #Mc [5] KANNADA VOWEL SIGN II..KANNADA VOW... | ||
1083 | | 0xE0 0xB3 0x86 #Mn KANNADA VOWEL SIGN E | ||
1084 | | 0xE0 0xB3 0x87..0x88 #Mc [2] KANNADA VOWEL SIGN EE..KANNADA VOW... | ||
1085 | | 0xE0 0xB3 0x8A..0x8B #Mc [2] KANNADA VOWEL SIGN O..KANNADA VOWE... | ||
1086 | | 0xE0 0xB3 0x8C..0x8D #Mn [2] KANNADA VOWEL SIGN AU..KANNADA SIG... | ||
1087 | | 0xE0 0xB3 0x95..0x96 #Mc [2] KANNADA LENGTH MARK..KANNADA AI LE... | ||
1088 | | 0xE0 0xB3 0x9E #Lo KANNADA LETTER FA | ||
1089 | | 0xE0 0xB3 0xA0..0xA1 #Lo [2] KANNADA LETTER VOCALIC RR..KANNADA... | ||
1090 | | 0xE0 0xB3 0xA2..0xA3 #Mn [2] KANNADA VOWEL SIGN VOCALIC L..KANN... | ||
1091 | | 0xE0 0xB3 0xA6..0xAF #Nd [10] KANNADA DIGIT ZERO..KANNADA DIGIT ... | ||
1092 | | 0xE0 0xB3 0xB1..0xB2 #Lo [2] KANNADA SIGN JIHVAMULIYA..KANNADA ... | ||
1093 | | 0xE0 0xB4 0x81 #Mn MALAYALAM SIGN CANDRABINDU | ||
1094 | | 0xE0 0xB4 0x82..0x83 #Mc [2] MALAYALAM SIGN ANUSVARA..MALAYALAM... | ||
1095 | | 0xE0 0xB4 0x85..0x8C #Lo [8] MALAYALAM LETTER A..MALAYALAM LETT... | ||
1096 | | 0xE0 0xB4 0x8E..0x90 #Lo [3] MALAYALAM LETTER E..MALAYALAM LETT... | ||
1097 | | 0xE0 0xB4 0x92..0xBA #Lo [41] MALAYALAM LETTER O..MALAYALAM LETT... | ||
1098 | | 0xE0 0xB4 0xBD #Lo MALAYALAM SIGN AVAGRAHA | ||
1099 | | 0xE0 0xB4 0xBE..0xFF #Mc [3] MALAYALAM VOWEL SIGN AA..MALAYALAM... | ||
1100 | | 0xE0 0xB5 0x00..0x80 # | ||
1101 | | 0xE0 0xB5 0x81..0x84 #Mn [4] MALAYALAM VOWEL SIGN U..MALAYALAM ... | ||
1102 | | 0xE0 0xB5 0x86..0x88 #Mc [3] MALAYALAM VOWEL SIGN E..MALAYALAM ... | ||
1103 | | 0xE0 0xB5 0x8A..0x8C #Mc [3] MALAYALAM VOWEL SIGN O..MALAYALAM ... | ||
1104 | | 0xE0 0xB5 0x8D #Mn MALAYALAM SIGN VIRAMA | ||
1105 | | 0xE0 0xB5 0x8E #Lo MALAYALAM LETTER DOT REPH | ||
1106 | | 0xE0 0xB5 0x94..0x96 #Lo [3] MALAYALAM LETTER CHILLU M..MALAYAL... | ||
1107 | | 0xE0 0xB5 0x97 #Mc MALAYALAM AU LENGTH MARK | ||
1108 | | 0xE0 0xB5 0x9F..0xA1 #Lo [3] MALAYALAM LETTER ARCHAIC II..MALAY... | ||
1109 | | 0xE0 0xB5 0xA2..0xA3 #Mn [2] MALAYALAM VOWEL SIGN VOCALIC L..MA... | ||
1110 | | 0xE0 0xB5 0xA6..0xAF #Nd [10] MALAYALAM DIGIT ZERO..MALAYALAM DI... | ||
1111 | | 0xE0 0xB5 0xBA..0xBF #Lo [6] MALAYALAM LETTER CHILLU NN..MALAYA... | ||
1112 | | 0xE0 0xB6 0x82..0x83 #Mc [2] SINHALA SIGN ANUSVARAYA..SINHALA S... | ||
1113 | | 0xE0 0xB6 0x85..0x96 #Lo [18] SINHALA LETTER AYANNA..SINHALA LET... | ||
1114 | | 0xE0 0xB6 0x9A..0xB1 #Lo [24] SINHALA LETTER ALPAPRAANA KAYANNA.... | ||
1115 | | 0xE0 0xB6 0xB3..0xBB #Lo [9] SINHALA LETTER SANYAKA DAYANNA..SI... | ||
1116 | | 0xE0 0xB6 0xBD #Lo SINHALA LETTER DANTAJA LAYANNA | ||
1117 | | 0xE0 0xB7 0x80..0x86 #Lo [7] SINHALA LETTER VAYANNA..SINHALA LE... | ||
1118 | | 0xE0 0xB7 0x8A #Mn SINHALA SIGN AL-LAKUNA | ||
1119 | | 0xE0 0xB7 0x8F..0x91 #Mc [3] SINHALA VOWEL SIGN AELA-PILLA..SIN... | ||
1120 | | 0xE0 0xB7 0x92..0x94 #Mn [3] SINHALA VOWEL SIGN KETTI IS-PILLA.... | ||
1121 | | 0xE0 0xB7 0x96 #Mn SINHALA VOWEL SIGN DIGA PAA-PILLA | ||
1122 | | 0xE0 0xB7 0x98..0x9F #Mc [8] SINHALA VOWEL SIGN GAETTA-PILLA..S... | ||
1123 | | 0xE0 0xB7 0xA6..0xAF #Nd [10] SINHALA LITH DIGIT ZERO..SINHALA L... | ||
1124 | | 0xE0 0xB7 0xB2..0xB3 #Mc [2] SINHALA VOWEL SIGN DIGA GAETTA-PIL... | ||
1125 | | 0xE0 0xB8 0x81..0xB0 #Lo [48] THAI CHARACTER KO KAI..THAI CHARAC... | ||
1126 | | 0xE0 0xB8 0xB1 #Mn THAI CHARACTER MAI HAN-AKAT | ||
1127 | | 0xE0 0xB8 0xB2..0xB3 #Lo [2] THAI CHARACTER SARA AA..THAI CHARA... | ||
1128 | | 0xE0 0xB8 0xB4..0xBA #Mn [7] THAI CHARACTER SARA I..THAI CHARAC... | ||
1129 | | 0xE0 0xB9 0x80..0x85 #Lo [6] THAI CHARACTER SARA E..THAI CHARAC... | ||
1130 | | 0xE0 0xB9 0x86 #Lm THAI CHARACTER MAIYAMOK | ||
1131 | | 0xE0 0xB9 0x87..0x8E #Mn [8] THAI CHARACTER MAITAIKHU..THAI CHA... | ||
1132 | | 0xE0 0xB9 0x90..0x99 #Nd [10] THAI DIGIT ZERO..THAI DIGIT NINE | ||
1133 | | 0xE0 0xBA 0x81..0x82 #Lo [2] LAO LETTER KO..LAO LETTER KHO SUNG | ||
1134 | | 0xE0 0xBA 0x84 #Lo LAO LETTER KHO TAM | ||
1135 | | 0xE0 0xBA 0x87..0x88 #Lo [2] LAO LETTER NGO..LAO LETTER CO | ||
1136 | | 0xE0 0xBA 0x8A #Lo LAO LETTER SO TAM | ||
1137 | | 0xE0 0xBA 0x8D #Lo LAO LETTER NYO | ||
1138 | | 0xE0 0xBA 0x94..0x97 #Lo [4] LAO LETTER DO..LAO LETTER THO TAM | ||
1139 | | 0xE0 0xBA 0x99..0x9F #Lo [7] LAO LETTER NO..LAO LETTER FO SUNG | ||
1140 | | 0xE0 0xBA 0xA1..0xA3 #Lo [3] LAO LETTER MO..LAO LETTER LO LING | ||
1141 | | 0xE0 0xBA 0xA5 #Lo LAO LETTER LO LOOT | ||
1142 | | 0xE0 0xBA 0xA7 #Lo LAO LETTER WO | ||
1143 | | 0xE0 0xBA 0xAA..0xAB #Lo [2] LAO LETTER SO SUNG..LAO LETTER HO ... | ||
1144 | | 0xE0 0xBA 0xAD..0xB0 #Lo [4] LAO LETTER O..LAO VOWEL SIGN A | ||
1145 | | 0xE0 0xBA 0xB1 #Mn LAO VOWEL SIGN MAI KAN | ||
1146 | | 0xE0 0xBA 0xB2..0xB3 #Lo [2] LAO VOWEL SIGN AA..LAO VOWEL SIGN AM | ||
1147 | | 0xE0 0xBA 0xB4..0xB9 #Mn [6] LAO VOWEL SIGN I..LAO VOWEL SIGN UU | ||
1148 | | 0xE0 0xBA 0xBB..0xBC #Mn [2] LAO VOWEL SIGN MAI KON..LAO SEMIVO... | ||
1149 | | 0xE0 0xBA 0xBD #Lo LAO SEMIVOWEL SIGN NYO | ||
1150 | | 0xE0 0xBB 0x80..0x84 #Lo [5] LAO VOWEL SIGN E..LAO VOWEL SIGN AI | ||
1151 | | 0xE0 0xBB 0x86 #Lm LAO KO LA | ||
1152 | | 0xE0 0xBB 0x88..0x8D #Mn [6] LAO TONE MAI EK..LAO NIGGAHITA | ||
1153 | | 0xE0 0xBB 0x90..0x99 #Nd [10] LAO DIGIT ZERO..LAO DIGIT NINE | ||
1154 | | 0xE0 0xBB 0x9C..0x9F #Lo [4] LAO HO NO..LAO LETTER KHMU NYO | ||
1155 | | 0xE0 0xBC 0x80 #Lo TIBETAN SYLLABLE OM | ||
1156 | | 0xE0 0xBC 0x98..0x99 #Mn [2] TIBETAN ASTROLOGICAL SIGN -KHYUD P... | ||
1157 | | 0xE0 0xBC 0xA0..0xA9 #Nd [10] TIBETAN DIGIT ZERO..TIBETAN DIGIT ... | ||
1158 | | 0xE0 0xBC 0xB5 #Mn TIBETAN MARK NGAS BZUNG NYI ZLA | ||
1159 | | 0xE0 0xBC 0xB7 #Mn TIBETAN MARK NGAS BZUNG SGOR RTAGS | ||
1160 | | 0xE0 0xBC 0xB9 #Mn TIBETAN MARK TSA -PHRU | ||
1161 | | 0xE0 0xBC 0xBE..0xBF #Mc [2] TIBETAN SIGN YAR TSHES..TIBETAN SI... | ||
1162 | | 0xE0 0xBD 0x80..0x87 #Lo [8] TIBETAN LETTER KA..TIBETAN LETTER JA | ||
1163 | | 0xE0 0xBD 0x89..0xAC #Lo [36] TIBETAN LETTER NYA..TIBETAN LETTER... | ||
1164 | | 0xE0 0xBD 0xB1..0xBE #Mn [14] TIBETAN VOWEL SIGN AA..TIBETAN SIG... | ||
1165 | | 0xE0 0xBD 0xBF #Mc TIBETAN SIGN RNAM BCAD | ||
1166 | | 0xE0 0xBE 0x80..0x84 #Mn [5] TIBETAN VOWEL SIGN REVERSED I..TIB... | ||
1167 | | 0xE0 0xBE 0x86..0x87 #Mn [2] TIBETAN SIGN LCI RTAGS..TIBETAN SI... | ||
1168 | | 0xE0 0xBE 0x88..0x8C #Lo [5] TIBETAN SIGN LCE TSA CAN..TIBETAN ... | ||
1169 | | 0xE0 0xBE 0x8D..0x97 #Mn [11] TIBETAN SUBJOINED SIGN LCE TSA CAN... | ||
1170 | | 0xE0 0xBE 0x99..0xBC #Mn [36] TIBETAN SUBJOINED LETTER NYA..TIBE... | ||
1171 | | 0xE0 0xBF 0x86 #Mn TIBETAN SYMBOL PADMA GDAN | ||
1172 | | 0xE1 0x80 0x80..0xAA #Lo [43] MYANMAR LETTER KA..MYANMAR LETTER AU | ||
1173 | | 0xE1 0x80 0xAB..0xAC #Mc [2] MYANMAR VOWEL SIGN TALL AA..MYANMA... | ||
1174 | | 0xE1 0x80 0xAD..0xB0 #Mn [4] MYANMAR VOWEL SIGN I..MYANMAR VOWE... | ||
1175 | | 0xE1 0x80 0xB1 #Mc MYANMAR VOWEL SIGN E | ||
1176 | | 0xE1 0x80 0xB2..0xB7 #Mn [6] MYANMAR VOWEL SIGN AI..MYANMAR SIG... | ||
1177 | | 0xE1 0x80 0xB8 #Mc MYANMAR SIGN VISARGA | ||
1178 | | 0xE1 0x80 0xB9..0xBA #Mn [2] MYANMAR SIGN VIRAMA..MYANMAR SIGN ... | ||
1179 | | 0xE1 0x80 0xBB..0xBC #Mc [2] MYANMAR CONSONANT SIGN MEDIAL YA..... | ||
1180 | | 0xE1 0x80 0xBD..0xBE #Mn [2] MYANMAR CONSONANT SIGN MEDIAL WA..... | ||
1181 | | 0xE1 0x80 0xBF #Lo MYANMAR LETTER GREAT SA | ||
1182 | | 0xE1 0x81 0x80..0x89 #Nd [10] MYANMAR DIGIT ZERO..MYANMAR DIGIT ... | ||
1183 | | 0xE1 0x81 0x90..0x95 #Lo [6] MYANMAR LETTER SHA..MYANMAR LETTER... | ||
1184 | | 0xE1 0x81 0x96..0x97 #Mc [2] MYANMAR VOWEL SIGN VOCALIC R..MYAN... | ||
1185 | | 0xE1 0x81 0x98..0x99 #Mn [2] MYANMAR VOWEL SIGN VOCALIC L..MYAN... | ||
1186 | | 0xE1 0x81 0x9A..0x9D #Lo [4] MYANMAR LETTER MON NGA..MYANMAR LE... | ||
1187 | | 0xE1 0x81 0x9E..0xA0 #Mn [3] MYANMAR CONSONANT SIGN MON MEDIAL ... | ||
1188 | | 0xE1 0x81 0xA1 #Lo MYANMAR LETTER SGAW KAREN SHA | ||
1189 | | 0xE1 0x81 0xA2..0xA4 #Mc [3] MYANMAR VOWEL SIGN SGAW KAREN EU..... | ||
1190 | | 0xE1 0x81 0xA5..0xA6 #Lo [2] MYANMAR LETTER WESTERN PWO KAREN T... | ||
1191 | | 0xE1 0x81 0xA7..0xAD #Mc [7] MYANMAR VOWEL SIGN WESTERN PWO KAR... | ||
1192 | | 0xE1 0x81 0xAE..0xB0 #Lo [3] MYANMAR LETTER EASTERN PWO KAREN N... | ||
1193 | | 0xE1 0x81 0xB1..0xB4 #Mn [4] MYANMAR VOWEL SIGN GEBA KAREN I..M... | ||
1194 | | 0xE1 0x81 0xB5..0xFF #Lo [13] MYANMAR LETTER SHAN KA..MYANMAR LE... | ||
1195 | | 0xE1 0x82 0x00..0x81 # | ||
1196 | | 0xE1 0x82 0x82 #Mn MYANMAR CONSONANT SIGN SHAN MEDIAL WA | ||
1197 | | 0xE1 0x82 0x83..0x84 #Mc [2] MYANMAR VOWEL SIGN SHAN AA..MYANMA... | ||
1198 | | 0xE1 0x82 0x85..0x86 #Mn [2] MYANMAR VOWEL SIGN SHAN E ABOVE..M... | ||
1199 | | 0xE1 0x82 0x87..0x8C #Mc [6] MYANMAR SIGN SHAN TONE-2..MYANMAR ... | ||
1200 | | 0xE1 0x82 0x8D #Mn MYANMAR SIGN SHAN COUNCIL EMPHATIC... | ||
1201 | | 0xE1 0x82 0x8E #Lo MYANMAR LETTER RUMAI PALAUNG FA | ||
1202 | | 0xE1 0x82 0x8F #Mc MYANMAR SIGN RUMAI PALAUNG TONE-5 | ||
1203 | | 0xE1 0x82 0x90..0x99 #Nd [10] MYANMAR SHAN DIGIT ZERO..MYANMAR S... | ||
1204 | | 0xE1 0x82 0x9A..0x9C #Mc [3] MYANMAR SIGN KHAMTI TONE-1..MYANMA... | ||
1205 | | 0xE1 0x82 0x9D #Mn MYANMAR VOWEL SIGN AITON AI | ||
1206 | | 0xE1 0x82 0xA0..0xFF #L& [38] GEORGIAN CAPITAL LETTER AN..GEORGI... | ||
1207 | | 0xE1 0x83 0x00..0x85 # | ||
1208 | | 0xE1 0x83 0x87 #L& GEORGIAN CAPITAL LETTER YN | ||
1209 | | 0xE1 0x83 0x8D #L& GEORGIAN CAPITAL LETTER AEN | ||
1210 | | 0xE1 0x83 0x90..0xBA #Lo [43] GEORGIAN LETTER AN..GEORGIAN LETTE... | ||
1211 | | 0xE1 0x83 0xBC #Lm MODIFIER LETTER GEORGIAN NAR | ||
1212 | | 0xE1 0x83 0xBD..0xFF #Lo [332] GEORGIAN LETTER AEN..ETHIOPIC ... | ||
1213 | | 0xE1 0x84..0x88 0x00..0xFF # | ||
1214 | | 0xE1 0x89 0x00..0x88 # | ||
1215 | | 0xE1 0x89 0x8A..0x8D #Lo [4] ETHIOPIC SYLLABLE QWI..ETHIOPIC SY... | ||
1216 | | 0xE1 0x89 0x90..0x96 #Lo [7] ETHIOPIC SYLLABLE QHA..ETHIOPIC SY... | ||
1217 | | 0xE1 0x89 0x98 #Lo ETHIOPIC SYLLABLE QHWA | ||
1218 | | 0xE1 0x89 0x9A..0x9D #Lo [4] ETHIOPIC SYLLABLE QHWI..ETHIOPIC S... | ||
1219 | | 0xE1 0x89 0xA0..0xFF #Lo [41] ETHIOPIC SYLLABLE BA..ETHIOPIC SYL... | ||
1220 | | 0xE1 0x8A 0x00..0x88 # | ||
1221 | | 0xE1 0x8A 0x8A..0x8D #Lo [4] ETHIOPIC SYLLABLE XWI..ETHIOPIC SY... | ||
1222 | | 0xE1 0x8A 0x90..0xB0 #Lo [33] ETHIOPIC SYLLABLE NA..ETHIOPIC SYL... | ||
1223 | | 0xE1 0x8A 0xB2..0xB5 #Lo [4] ETHIOPIC SYLLABLE KWI..ETHIOPIC SY... | ||
1224 | | 0xE1 0x8A 0xB8..0xBE #Lo [7] ETHIOPIC SYLLABLE KXA..ETHIOPIC SY... | ||
1225 | | 0xE1 0x8B 0x80 #Lo ETHIOPIC SYLLABLE KXWA | ||
1226 | | 0xE1 0x8B 0x82..0x85 #Lo [4] ETHIOPIC SYLLABLE KXWI..ETHIOPIC S... | ||
1227 | | 0xE1 0x8B 0x88..0x96 #Lo [15] ETHIOPIC SYLLABLE WA..ETHIOPIC SYL... | ||
1228 | | 0xE1 0x8B 0x98..0xFF #Lo [57] ETHIOPIC SYLLABLE ZA..ETHIOPIC SYL... | ||
1229 | | 0xE1 0x8C 0x00..0x90 # | ||
1230 | | 0xE1 0x8C 0x92..0x95 #Lo [4] ETHIOPIC SYLLABLE GWI..ETHIOPIC SY... | ||
1231 | | 0xE1 0x8C 0x98..0xFF #Lo [67] ETHIOPIC SYLLABLE GGA..ETHIOPIC SY... | ||
1232 | | 0xE1 0x8D 0x00..0x9A # | ||
1233 | | 0xE1 0x8D 0x9D..0x9F #Mn [3] ETHIOPIC COMBINING GEMINATION AND ... | ||
1234 | | 0xE1 0x8D 0xA9..0xB1 #No [9] ETHIOPIC DIGIT ONE..ETHIOPIC DIGIT... | ||
1235 | | 0xE1 0x8E 0x80..0x8F #Lo [16] ETHIOPIC SYLLABLE SEBATBEIT MWA..E... | ||
1236 | | 0xE1 0x8E 0xA0..0xFF #L& [86] CHEROKEE LETTER A..CHEROKEE LETTER MV | ||
1237 | | 0xE1 0x8F 0x00..0xB5 # | ||
1238 | | 0xE1 0x8F 0xB8..0xBD #L& [6] CHEROKEE SMALL LETTER YE..CHEROKEE... | ||
1239 | | 0xE1 0x90 0x81..0xFF #Lo [620] CANADIAN SYLLABICS E..CANADIAN... | ||
1240 | | 0xE1 0x91..0x98 0x00..0xFF # | ||
1241 | | 0xE1 0x99 0x00..0xAC # | ||
1242 | | 0xE1 0x99 0xAF..0xBF #Lo [17] CANADIAN SYLLABICS QAI..CANADIAN S... | ||
1243 | | 0xE1 0x9A 0x81..0x9A #Lo [26] OGHAM LETTER BEITH..OGHAM LETTER P... | ||
1244 | | 0xE1 0x9A 0xA0..0xFF #Lo [75] RUNIC LETTER FEHU FEOH FE F..RUNIC... | ||
1245 | | 0xE1 0x9B 0x00..0xAA # | ||
1246 | | 0xE1 0x9B 0xAE..0xB0 #Nl [3] RUNIC ARLAUG SYMBOL..RUNIC BELGTHO... | ||
1247 | | 0xE1 0x9B 0xB1..0xB8 #Lo [8] RUNIC LETTER K..RUNIC LETTER FRANK... | ||
1248 | | 0xE1 0x9C 0x80..0x8C #Lo [13] TAGALOG LETTER A..TAGALOG LETTER YA | ||
1249 | | 0xE1 0x9C 0x8E..0x91 #Lo [4] TAGALOG LETTER LA..TAGALOG LETTER HA | ||
1250 | | 0xE1 0x9C 0x92..0x94 #Mn [3] TAGALOG VOWEL SIGN I..TAGALOG SIGN... | ||
1251 | | 0xE1 0x9C 0xA0..0xB1 #Lo [18] HANUNOO LETTER A..HANUNOO LETTER HA | ||
1252 | | 0xE1 0x9C 0xB2..0xB4 #Mn [3] HANUNOO VOWEL SIGN I..HANUNOO SIGN... | ||
1253 | | 0xE1 0x9D 0x80..0x91 #Lo [18] BUHID LETTER A..BUHID LETTER HA | ||
1254 | | 0xE1 0x9D 0x92..0x93 #Mn [2] BUHID VOWEL SIGN I..BUHID VOWEL SI... | ||
1255 | | 0xE1 0x9D 0xA0..0xAC #Lo [13] TAGBANWA LETTER A..TAGBANWA LETTER YA | ||
1256 | | 0xE1 0x9D 0xAE..0xB0 #Lo [3] TAGBANWA LETTER LA..TAGBANWA LETTE... | ||
1257 | | 0xE1 0x9D 0xB2..0xB3 #Mn [2] TAGBANWA VOWEL SIGN I..TAGBANWA VO... | ||
1258 | | 0xE1 0x9E 0x80..0xB3 #Lo [52] KHMER LETTER KA..KHMER INDEPENDENT... | ||
1259 | | 0xE1 0x9E 0xB4..0xB5 #Mn [2] KHMER VOWEL INHERENT AQ..KHMER VOW... | ||
1260 | | 0xE1 0x9E 0xB6 #Mc KHMER VOWEL SIGN AA | ||
1261 | | 0xE1 0x9E 0xB7..0xBD #Mn [7] KHMER VOWEL SIGN I..KHMER VOWEL SI... | ||
1262 | | 0xE1 0x9E 0xBE..0xFF #Mc [8] KHMER VOWEL SIGN OE..KHMER VOWEL S... | ||
1263 | | 0xE1 0x9F 0x00..0x85 # | ||
1264 | | 0xE1 0x9F 0x86 #Mn KHMER SIGN NIKAHIT | ||
1265 | | 0xE1 0x9F 0x87..0x88 #Mc [2] KHMER SIGN REAHMUK..KHMER SIGN YUU... | ||
1266 | | 0xE1 0x9F 0x89..0x93 #Mn [11] KHMER SIGN MUUSIKATOAN..KHMER SIGN... | ||
1267 | | 0xE1 0x9F 0x97 #Lm KHMER SIGN LEK TOO | ||
1268 | | 0xE1 0x9F 0x9C #Lo KHMER SIGN AVAKRAHASANYA | ||
1269 | | 0xE1 0x9F 0x9D #Mn KHMER SIGN ATTHACAN | ||
1270 | | 0xE1 0x9F 0xA0..0xA9 #Nd [10] KHMER DIGIT ZERO..KHMER DIGIT NINE | ||
1271 | | 0xE1 0xA0 0x8B..0x8D #Mn [3] MONGOLIAN FREE VARIATION SELECTOR ... | ||
1272 | | 0xE1 0xA0 0x90..0x99 #Nd [10] MONGOLIAN DIGIT ZERO..MONGOLIAN DI... | ||
1273 | | 0xE1 0xA0 0xA0..0xFF #Lo [35] MONGOLIAN LETTER A..MONGOLIAN LETT... | ||
1274 | | 0xE1 0xA1 0x00..0x82 # | ||
1275 | | 0xE1 0xA1 0x83 #Lm MONGOLIAN LETTER TODO LONG VOWEL SIGN | ||
1276 | | 0xE1 0xA1 0x84..0xB7 #Lo [52] MONGOLIAN LETTER TODO E..MONGOLIAN... | ||
1277 | | 0xE1 0xA2 0x80..0x84 #Lo [5] MONGOLIAN LETTER ALI GALI ANUSVARA... | ||
1278 | | 0xE1 0xA2 0x85..0x86 #Mn [2] MONGOLIAN LETTER ALI GALI BALUDA..... | ||
1279 | | 0xE1 0xA2 0x87..0xA8 #Lo [34] MONGOLIAN LETTER ALI GALI A..MONGO... | ||
1280 | | 0xE1 0xA2 0xA9 #Mn MONGOLIAN LETTER ALI GALI DAGALGA | ||
1281 | | 0xE1 0xA2 0xAA #Lo MONGOLIAN LETTER MANCHU ALI GALI LHA | ||
1282 | | 0xE1 0xA2 0xB0..0xFF #Lo [70] CANADIAN SYLLABICS OY..CANADIAN SY... | ||
1283 | | 0xE1 0xA3 0x00..0xB5 # | ||
1284 | | 0xE1 0xA4 0x80..0x9E #Lo [31] LIMBU VOWEL-CARRIER LETTER..LIMBU ... | ||
1285 | | 0xE1 0xA4 0xA0..0xA2 #Mn [3] LIMBU VOWEL SIGN A..LIMBU VOWEL SI... | ||
1286 | | 0xE1 0xA4 0xA3..0xA6 #Mc [4] LIMBU VOWEL SIGN EE..LIMBU VOWEL S... | ||
1287 | | 0xE1 0xA4 0xA7..0xA8 #Mn [2] LIMBU VOWEL SIGN E..LIMBU VOWEL SI... | ||
1288 | | 0xE1 0xA4 0xA9..0xAB #Mc [3] LIMBU SUBJOINED LETTER YA..LIMBU S... | ||
1289 | | 0xE1 0xA4 0xB0..0xB1 #Mc [2] LIMBU SMALL LETTER KA..LIMBU SMALL... | ||
1290 | | 0xE1 0xA4 0xB2 #Mn LIMBU SMALL LETTER ANUSVARA | ||
1291 | | 0xE1 0xA4 0xB3..0xB8 #Mc [6] LIMBU SMALL LETTER TA..LIMBU SMALL... | ||
1292 | | 0xE1 0xA4 0xB9..0xBB #Mn [3] LIMBU SIGN MUKPHRENG..LIMBU SIGN SA-I | ||
1293 | | 0xE1 0xA5 0x86..0x8F #Nd [10] LIMBU DIGIT ZERO..LIMBU DIGIT NINE | ||
1294 | | 0xE1 0xA5 0x90..0xAD #Lo [30] TAI LE LETTER KA..TAI LE LETTER AI | ||
1295 | | 0xE1 0xA5 0xB0..0xB4 #Lo [5] TAI LE LETTER TONE-2..TAI LE LETTE... | ||
1296 | | 0xE1 0xA6 0x80..0xAB #Lo [44] NEW TAI LUE LETTER HIGH QA..NEW TA... | ||
1297 | | 0xE1 0xA6 0xB0..0xFF #Lo [26] NEW TAI LUE VOWEL SIGN VOWEL SHORT... | ||
1298 | | 0xE1 0xA7 0x00..0x89 # | ||
1299 | | 0xE1 0xA7 0x90..0x99 #Nd [10] NEW TAI LUE DIGIT ZERO..NEW TAI LU... | ||
1300 | | 0xE1 0xA7 0x9A #No NEW TAI LUE THAM DIGIT ONE | ||
1301 | | 0xE1 0xA8 0x80..0x96 #Lo [23] BUGINESE LETTER KA..BUGINESE LETTE... | ||
1302 | | 0xE1 0xA8 0x97..0x98 #Mn [2] BUGINESE VOWEL SIGN I..BUGINESE VO... | ||
1303 | | 0xE1 0xA8 0x99..0x9A #Mc [2] BUGINESE VOWEL SIGN E..BUGINESE VO... | ||
1304 | | 0xE1 0xA8 0x9B #Mn BUGINESE VOWEL SIGN AE | ||
1305 | | 0xE1 0xA8 0xA0..0xFF #Lo [53] TAI THAM LETTER HIGH KA..TAI THAM ... | ||
1306 | | 0xE1 0xA9 0x00..0x94 # | ||
1307 | | 0xE1 0xA9 0x95 #Mc TAI THAM CONSONANT SIGN MEDIAL RA | ||
1308 | | 0xE1 0xA9 0x96 #Mn TAI THAM CONSONANT SIGN MEDIAL LA | ||
1309 | | 0xE1 0xA9 0x97 #Mc TAI THAM CONSONANT SIGN LA TANG LAI | ||
1310 | | 0xE1 0xA9 0x98..0x9E #Mn [7] TAI THAM SIGN MAI KANG LAI..TAI TH... | ||
1311 | | 0xE1 0xA9 0xA0 #Mn TAI THAM SIGN SAKOT | ||
1312 | | 0xE1 0xA9 0xA1 #Mc TAI THAM VOWEL SIGN A | ||
1313 | | 0xE1 0xA9 0xA2 #Mn TAI THAM VOWEL SIGN MAI SAT | ||
1314 | | 0xE1 0xA9 0xA3..0xA4 #Mc [2] TAI THAM VOWEL SIGN AA..TAI THAM V... | ||
1315 | | 0xE1 0xA9 0xA5..0xAC #Mn [8] TAI THAM VOWEL SIGN I..TAI THAM VO... | ||
1316 | | 0xE1 0xA9 0xAD..0xB2 #Mc [6] TAI THAM VOWEL SIGN OY..TAI THAM V... | ||
1317 | | 0xE1 0xA9 0xB3..0xBC #Mn [10] TAI THAM VOWEL SIGN OA ABOVE..TAI ... | ||
1318 | | 0xE1 0xA9 0xBF #Mn TAI THAM COMBINING CRYPTOGRAMMIC DOT | ||
1319 | | 0xE1 0xAA 0x80..0x89 #Nd [10] TAI THAM HORA DIGIT ZERO..TAI THAM... | ||
1320 | | 0xE1 0xAA 0x90..0x99 #Nd [10] TAI THAM THAM DIGIT ZERO..TAI THAM... | ||
1321 | | 0xE1 0xAA 0xA7 #Lm TAI THAM SIGN MAI YAMOK | ||
1322 | | 0xE1 0xAA 0xB0..0xBD #Mn [14] COMBINING DOUBLED CIRCUMFLEX ACCEN... | ||
1323 | | 0xE1 0xAC 0x80..0x83 #Mn [4] BALINESE SIGN ULU RICEM..BALINESE ... | ||
1324 | | 0xE1 0xAC 0x84 #Mc BALINESE SIGN BISAH | ||
1325 | | 0xE1 0xAC 0x85..0xB3 #Lo [47] BALINESE LETTER AKARA..BALINESE LE... | ||
1326 | | 0xE1 0xAC 0xB4 #Mn BALINESE SIGN REREKAN | ||
1327 | | 0xE1 0xAC 0xB5 #Mc BALINESE VOWEL SIGN TEDUNG | ||
1328 | | 0xE1 0xAC 0xB6..0xBA #Mn [5] BALINESE VOWEL SIGN ULU..BALINESE ... | ||
1329 | | 0xE1 0xAC 0xBB #Mc BALINESE VOWEL SIGN RA REPA TEDUNG | ||
1330 | | 0xE1 0xAC 0xBC #Mn BALINESE VOWEL SIGN LA LENGA | ||
1331 | | 0xE1 0xAC 0xBD..0xFF #Mc [5] BALINESE VOWEL SIGN LA LENGA TEDUN... | ||
1332 | | 0xE1 0xAD 0x00..0x81 # | ||
1333 | | 0xE1 0xAD 0x82 #Mn BALINESE VOWEL SIGN PEPET | ||
1334 | | 0xE1 0xAD 0x83..0x84 #Mc [2] BALINESE VOWEL SIGN PEPET TEDUNG..... | ||
1335 | | 0xE1 0xAD 0x85..0x8B #Lo [7] BALINESE LETTER KAF SASAK..BALINES... | ||
1336 | | 0xE1 0xAD 0x90..0x99 #Nd [10] BALINESE DIGIT ZERO..BALINESE DIGI... | ||
1337 | | 0xE1 0xAD 0xAB..0xB3 #Mn [9] BALINESE MUSICAL SYMBOL COMBINING ... | ||
1338 | | 0xE1 0xAE 0x80..0x81 #Mn [2] SUNDANESE SIGN PANYECEK..SUNDANESE... | ||
1339 | | 0xE1 0xAE 0x82 #Mc SUNDANESE SIGN PANGWISAD | ||
1340 | | 0xE1 0xAE 0x83..0xA0 #Lo [30] SUNDANESE LETTER A..SUNDANESE LETT... | ||
1341 | | 0xE1 0xAE 0xA1 #Mc SUNDANESE CONSONANT SIGN PAMINGKAL | ||
1342 | | 0xE1 0xAE 0xA2..0xA5 #Mn [4] SUNDANESE CONSONANT SIGN PANYAKRA.... | ||
1343 | | 0xE1 0xAE 0xA6..0xA7 #Mc [2] SUNDANESE VOWEL SIGN PANAELAENG..S... | ||
1344 | | 0xE1 0xAE 0xA8..0xA9 #Mn [2] SUNDANESE VOWEL SIGN PAMEPET..SUND... | ||
1345 | | 0xE1 0xAE 0xAA #Mc SUNDANESE SIGN PAMAAEH | ||
1346 | | 0xE1 0xAE 0xAB..0xAD #Mn [3] SUNDANESE SIGN VIRAMA..SUNDANESE C... | ||
1347 | | 0xE1 0xAE 0xAE..0xAF #Lo [2] SUNDANESE LETTER KHA..SUNDANESE LE... | ||
1348 | | 0xE1 0xAE 0xB0..0xB9 #Nd [10] SUNDANESE DIGIT ZERO..SUNDANESE DI... | ||
1349 | | 0xE1 0xAE 0xBA..0xFF #Lo [44] SUNDANESE AVAGRAHA..BATAK LETTER U | ||
1350 | | 0xE1 0xAF 0x00..0xA5 # | ||
1351 | | 0xE1 0xAF 0xA6 #Mn BATAK SIGN TOMPI | ||
1352 | | 0xE1 0xAF 0xA7 #Mc BATAK VOWEL SIGN E | ||
1353 | | 0xE1 0xAF 0xA8..0xA9 #Mn [2] BATAK VOWEL SIGN PAKPAK E..BATAK V... | ||
1354 | | 0xE1 0xAF 0xAA..0xAC #Mc [3] BATAK VOWEL SIGN I..BATAK VOWEL SI... | ||
1355 | | 0xE1 0xAF 0xAD #Mn BATAK VOWEL SIGN KARO O | ||
1356 | | 0xE1 0xAF 0xAE #Mc BATAK VOWEL SIGN U | ||
1357 | | 0xE1 0xAF 0xAF..0xB1 #Mn [3] BATAK VOWEL SIGN U FOR SIMALUNGUN ... | ||
1358 | | 0xE1 0xAF 0xB2..0xB3 #Mc [2] BATAK PANGOLAT..BATAK PANONGONAN | ||
1359 | | 0xE1 0xB0 0x80..0xA3 #Lo [36] LEPCHA LETTER KA..LEPCHA LETTER A | ||
1360 | | 0xE1 0xB0 0xA4..0xAB #Mc [8] LEPCHA SUBJOINED LETTER YA..LEPCHA... | ||
1361 | | 0xE1 0xB0 0xAC..0xB3 #Mn [8] LEPCHA VOWEL SIGN E..LEPCHA CONSON... | ||
1362 | | 0xE1 0xB0 0xB4..0xB5 #Mc [2] LEPCHA CONSONANT SIGN NYIN-DO..LEP... | ||
1363 | | 0xE1 0xB0 0xB6..0xB7 #Mn [2] LEPCHA SIGN RAN..LEPCHA SIGN NUKTA | ||
1364 | | 0xE1 0xB1 0x80..0x89 #Nd [10] LEPCHA DIGIT ZERO..LEPCHA DIGIT NINE | ||
1365 | | 0xE1 0xB1 0x8D..0x8F #Lo [3] LEPCHA LETTER TTA..LEPCHA LETTER DDA | ||
1366 | | 0xE1 0xB1 0x90..0x99 #Nd [10] OL CHIKI DIGIT ZERO..OL CHIKI DIGI... | ||
1367 | | 0xE1 0xB1 0x9A..0xB7 #Lo [30] OL CHIKI LETTER LA..OL CHIKI LETTE... | ||
1368 | | 0xE1 0xB1 0xB8..0xBD #Lm [6] OL CHIKI MU TTUDDAG..OL CHIKI AHAD | ||
1369 | | 0xE1 0xB2 0x80..0x88 #L& [9] CYRILLIC SMALL LETTER ROUNDED VE..... | ||
1370 | | 0xE1 0xB3 0x90..0x92 #Mn [3] VEDIC TONE KARSHANA..VEDIC TONE PR... | ||
1371 | | 0xE1 0xB3 0x94..0xA0 #Mn [13] VEDIC SIGN YAJURVEDIC MIDLINE SVAR... | ||
1372 | | 0xE1 0xB3 0xA1 #Mc VEDIC TONE ATHARVAVEDIC INDEPENDEN... | ||
1373 | | 0xE1 0xB3 0xA2..0xA8 #Mn [7] VEDIC SIGN VISARGA SVARITA..VEDIC ... | ||
1374 | | 0xE1 0xB3 0xA9..0xAC #Lo [4] VEDIC SIGN ANUSVARA ANTARGOMUKHA..... | ||
1375 | | 0xE1 0xB3 0xAD #Mn VEDIC SIGN TIRYAK | ||
1376 | | 0xE1 0xB3 0xAE..0xB1 #Lo [4] VEDIC SIGN HEXIFORM LONG ANUSVARA.... | ||
1377 | | 0xE1 0xB3 0xB2..0xB3 #Mc [2] VEDIC SIGN ARDHAVISARGA..VEDIC SIG... | ||
1378 | | 0xE1 0xB3 0xB4 #Mn VEDIC TONE CANDRA ABOVE | ||
1379 | | 0xE1 0xB3 0xB5..0xB6 #Lo [2] VEDIC SIGN JIHVAMULIYA..VEDIC SIGN... | ||
1380 | | 0xE1 0xB3 0xB8..0xB9 #Mn [2] VEDIC TONE RING ABOVE..VEDIC TONE ... | ||
1381 | | 0xE1 0xB4 0x80..0xAB #L& [44] LATIN LETTER SMALL CAPITAL A..CYRI... | ||
1382 | | 0xE1 0xB4 0xAC..0xFF #Lm [63] MODIFIER LETTER CAPITAL A..GREEK S... | ||
1383 | | 0xE1 0xB5 0x00..0xAA # | ||
1384 | | 0xE1 0xB5 0xAB..0xB7 #L& [13] LATIN SMALL LETTER UE..LATIN SMALL... | ||
1385 | | 0xE1 0xB5 0xB8 #Lm MODIFIER LETTER CYRILLIC EN | ||
1386 | | 0xE1 0xB5 0xB9..0xFF #L& [34] LATIN SMALL LETTER INSULAR G..LATI... | ||
1387 | | 0xE1 0xB6 0x00..0x9A # | ||
1388 | | 0xE1 0xB6 0x9B..0xBF #Lm [37] MODIFIER LETTER SMALL TURNED ALPHA... | ||
1389 | | 0xE1 0xB7 0x80..0xB5 #Mn [54] COMBINING DOTTED GRAVE ACCENT..COM... | ||
1390 | | 0xE1 0xB7 0xBB..0xBF #Mn [5] COMBINING DELETION MARK..COMBINING... | ||
1391 | | 0xE1 0xB8 0x80..0xFF #L& [278] LATIN CAPITAL LETTER A WITH RI... | ||
1392 | | 0xE1 0xB9..0xBB 0x00..0xFF # | ||
1393 | | 0xE1 0xBC 0x00..0x95 # | ||
1394 | | 0xE1 0xBC 0x98..0x9D #L& [6] GREEK CAPITAL LETTER EPSILON WITH ... | ||
1395 | | 0xE1 0xBC 0xA0..0xFF #L& [38] GREEK SMALL LETTER ETA WITH PSILI.... | ||
1396 | | 0xE1 0xBD 0x00..0x85 # | ||
1397 | | 0xE1 0xBD 0x88..0x8D #L& [6] GREEK CAPITAL LETTER OMICRON WITH ... | ||
1398 | | 0xE1 0xBD 0x90..0x97 #L& [8] GREEK SMALL LETTER UPSILON WITH PS... | ||
1399 | | 0xE1 0xBD 0x99 #L& GREEK CAPITAL LETTER UPSILON WITH ... | ||
1400 | | 0xE1 0xBD 0x9B #L& GREEK CAPITAL LETTER UPSILON WITH ... | ||
1401 | | 0xE1 0xBD 0x9D #L& GREEK CAPITAL LETTER UPSILON WITH ... | ||
1402 | | 0xE1 0xBD 0x9F..0xBD #L& [31] GREEK CAPITAL LETTER UPSILON WITH ... | ||
1403 | | 0xE1 0xBE 0x80..0xB4 #L& [53] GREEK SMALL LETTER ALPHA WITH PSIL... | ||
1404 | | 0xE1 0xBE 0xB6..0xBC #L& [7] GREEK SMALL LETTER ALPHA WITH PERI... | ||
1405 | | 0xE1 0xBE 0xBE #L& GREEK PROSGEGRAMMENI | ||
1406 | | 0xE1 0xBF 0x82..0x84 #L& [3] GREEK SMALL LETTER ETA WITH VARIA ... | ||
1407 | | 0xE1 0xBF 0x86..0x8C #L& [7] GREEK SMALL LETTER ETA WITH PERISP... | ||
1408 | | 0xE1 0xBF 0x90..0x93 #L& [4] GREEK SMALL LETTER IOTA WITH VRACH... | ||
1409 | | 0xE1 0xBF 0x96..0x9B #L& [6] GREEK SMALL LETTER IOTA WITH PERIS... | ||
1410 | | 0xE1 0xBF 0xA0..0xAC #L& [13] GREEK SMALL LETTER UPSILON WITH VR... | ||
1411 | | 0xE1 0xBF 0xB2..0xB4 #L& [3] GREEK SMALL LETTER OMEGA WITH VARI... | ||
1412 | | 0xE1 0xBF 0xB6..0xBC #L& [7] GREEK SMALL LETTER OMEGA WITH PERI... | ||
1413 | | 0xE2 0x80 0xBF..0xFF #Pc [2] UNDERTIE..CHARACTER TIE | ||
1414 | | 0xE2 0x81 0x00..0x80 # | ||
1415 | | 0xE2 0x81 0x94 #Pc INVERTED UNDERTIE | ||
1416 | | 0xE2 0x81 0xB1 #Lm SUPERSCRIPT LATIN SMALL LETTER I | ||
1417 | | 0xE2 0x81 0xBF #Lm SUPERSCRIPT LATIN SMALL LETTER N | ||
1418 | | 0xE2 0x82 0x90..0x9C #Lm [13] LATIN SUBSCRIPT SMALL LETTER A..LA... | ||
1419 | | 0xE2 0x83 0x90..0x9C #Mn [13] COMBINING LEFT HARPOON ABOVE..COMB... | ||
1420 | | 0xE2 0x83 0xA1 #Mn COMBINING LEFT RIGHT ARROW ABOVE | ||
1421 | | 0xE2 0x83 0xA5..0xB0 #Mn [12] COMBINING REVERSE SOLIDUS OVERLAY.... | ||
1422 | | 0xE2 0x84 0x82 #L& DOUBLE-STRUCK CAPITAL C | ||
1423 | | 0xE2 0x84 0x87 #L& EULER CONSTANT | ||
1424 | | 0xE2 0x84 0x8A..0x93 #L& [10] SCRIPT SMALL G..SCRIPT SMALL L | ||
1425 | | 0xE2 0x84 0x95 #L& DOUBLE-STRUCK CAPITAL N | ||
1426 | | 0xE2 0x84 0x98 #Sm SCRIPT CAPITAL P | ||
1427 | | 0xE2 0x84 0x99..0x9D #L& [5] DOUBLE-STRUCK CAPITAL P..DOUBLE-ST... | ||
1428 | | 0xE2 0x84 0xA4 #L& DOUBLE-STRUCK CAPITAL Z | ||
1429 | | 0xE2 0x84 0xA6 #L& OHM SIGN | ||
1430 | | 0xE2 0x84 0xA8 #L& BLACK-LETTER CAPITAL Z | ||
1431 | | 0xE2 0x84 0xAA..0xAD #L& [4] KELVIN SIGN..BLACK-LETTER CAPITAL C | ||
1432 | | 0xE2 0x84 0xAE #So ESTIMATED SYMBOL | ||
1433 | | 0xE2 0x84 0xAF..0xB4 #L& [6] SCRIPT SMALL E..SCRIPT SMALL O | ||
1434 | | 0xE2 0x84 0xB5..0xB8 #Lo [4] ALEF SYMBOL..DALET SYMBOL | ||
1435 | | 0xE2 0x84 0xB9 #L& INFORMATION SOURCE | ||
1436 | | 0xE2 0x84 0xBC..0xBF #L& [4] DOUBLE-STRUCK SMALL PI..DOUBLE-STR... | ||
1437 | | 0xE2 0x85 0x85..0x89 #L& [5] DOUBLE-STRUCK ITALIC CAPITAL D..DO... | ||
1438 | | 0xE2 0x85 0x8E #L& TURNED SMALL F | ||
1439 | | 0xE2 0x85 0xA0..0xFF #Nl [35] ROMAN NUMERAL ONE..ROMAN NUMERAL T... | ||
1440 | | 0xE2 0x86 0x00..0x82 # | ||
1441 | | 0xE2 0x86 0x83..0x84 #L& [2] ROMAN NUMERAL REVERSED ONE HUNDRED... | ||
1442 | | 0xE2 0x86 0x85..0x88 #Nl [4] ROMAN NUMERAL SIX LATE FORM..ROMAN... | ||
1443 | | 0xE2 0xB0 0x80..0xAE #L& [47] GLAGOLITIC CAPITAL LETTER AZU..GLA... | ||
1444 | | 0xE2 0xB0 0xB0..0xFF #L& [47] GLAGOLITIC SMALL LETTER AZU..GLAGO... | ||
1445 | | 0xE2 0xB1 0x00..0x9E # | ||
1446 | | 0xE2 0xB1 0xA0..0xBB #L& [28] LATIN CAPITAL LETTER L WITH DOUBLE... | ||
1447 | | 0xE2 0xB1 0xBC..0xBD #Lm [2] LATIN SUBSCRIPT SMALL LETTER J..MO... | ||
1448 | | 0xE2 0xB1 0xBE..0xFF #L& [103] LATIN CAPITAL LETTER S WITH SW... | ||
1449 | | 0xE2 0xB2..0xB2 0x00..0xFF # | ||
1450 | | 0xE2 0xB3 0x00..0xA4 # | ||
1451 | | 0xE2 0xB3 0xAB..0xAE #L& [4] COPTIC CAPITAL LETTER CRYPTOGRAMMI... | ||
1452 | | 0xE2 0xB3 0xAF..0xB1 #Mn [3] COPTIC COMBINING NI ABOVE..COPTIC ... | ||
1453 | | 0xE2 0xB3 0xB2..0xB3 #L& [2] COPTIC CAPITAL LETTER BOHAIRIC KHE... | ||
1454 | | 0xE2 0xB4 0x80..0xA5 #L& [38] GEORGIAN SMALL LETTER AN..GEORGIAN... | ||
1455 | | 0xE2 0xB4 0xA7 #L& GEORGIAN SMALL LETTER YN | ||
1456 | | 0xE2 0xB4 0xAD #L& GEORGIAN SMALL LETTER AEN | ||
1457 | | 0xE2 0xB4 0xB0..0xFF #Lo [56] TIFINAGH LETTER YA..TIFINAGH LETTE... | ||
1458 | | 0xE2 0xB5 0x00..0xA7 # | ||
1459 | | 0xE2 0xB5 0xAF #Lm TIFINAGH MODIFIER LETTER LABIALIZA... | ||
1460 | | 0xE2 0xB5 0xBF #Mn TIFINAGH CONSONANT JOINER | ||
1461 | | 0xE2 0xB6 0x80..0x96 #Lo [23] ETHIOPIC SYLLABLE LOA..ETHIOPIC SY... | ||
1462 | | 0xE2 0xB6 0xA0..0xA6 #Lo [7] ETHIOPIC SYLLABLE SSA..ETHIOPIC SY... | ||
1463 | | 0xE2 0xB6 0xA8..0xAE #Lo [7] ETHIOPIC SYLLABLE CCA..ETHIOPIC SY... | ||
1464 | | 0xE2 0xB6 0xB0..0xB6 #Lo [7] ETHIOPIC SYLLABLE ZZA..ETHIOPIC SY... | ||
1465 | | 0xE2 0xB6 0xB8..0xBE #Lo [7] ETHIOPIC SYLLABLE CCHA..ETHIOPIC S... | ||
1466 | | 0xE2 0xB7 0x80..0x86 #Lo [7] ETHIOPIC SYLLABLE QYA..ETHIOPIC SY... | ||
1467 | | 0xE2 0xB7 0x88..0x8E #Lo [7] ETHIOPIC SYLLABLE KYA..ETHIOPIC SY... | ||
1468 | | 0xE2 0xB7 0x90..0x96 #Lo [7] ETHIOPIC SYLLABLE XYA..ETHIOPIC SY... | ||
1469 | | 0xE2 0xB7 0x98..0x9E #Lo [7] ETHIOPIC SYLLABLE GYA..ETHIOPIC SY... | ||
1470 | | 0xE2 0xB7 0xA0..0xBF #Mn [32] COMBINING CYRILLIC LETTER BE..COMB... | ||
1471 | | 0xE3 0x80 0x85 #Lm IDEOGRAPHIC ITERATION MARK | ||
1472 | | 0xE3 0x80 0x86 #Lo IDEOGRAPHIC CLOSING MARK | ||
1473 | | 0xE3 0x80 0x87 #Nl IDEOGRAPHIC NUMBER ZERO | ||
1474 | | 0xE3 0x80 0xA1..0xA9 #Nl [9] HANGZHOU NUMERAL ONE..HANGZHOU NUM... | ||
1475 | | 0xE3 0x80 0xAA..0xAD #Mn [4] IDEOGRAPHIC LEVEL TONE MARK..IDEOG... | ||
1476 | | 0xE3 0x80 0xAE..0xAF #Mc [2] HANGUL SINGLE DOT TONE MARK..HANGU... | ||
1477 | | 0xE3 0x80 0xB1..0xB5 #Lm [5] VERTICAL KANA REPEAT MARK..VERTICA... | ||
1478 | | 0xE3 0x80 0xB8..0xBA #Nl [3] HANGZHOU NUMERAL TEN..HANGZHOU NUM... | ||
1479 | | 0xE3 0x80 0xBB #Lm VERTICAL IDEOGRAPHIC ITERATION MARK | ||
1480 | | 0xE3 0x80 0xBC #Lo MASU MARK | ||
1481 | | 0xE3 0x81 0x81..0xFF #Lo [86] HIRAGANA LETTER SMALL A..HIRAGANA ... | ||
1482 | | 0xE3 0x82 0x00..0x96 # | ||
1483 | | 0xE3 0x82 0x99..0x9A #Mn [2] COMBINING KATAKANA-HIRAGANA VOICED... | ||
1484 | | 0xE3 0x82 0x9B..0x9C #Sk [2] KATAKANA-HIRAGANA VOICED SOUND MAR... | ||
1485 | | 0xE3 0x82 0x9D..0x9E #Lm [2] HIRAGANA ITERATION MARK..HIRAGANA ... | ||
1486 | | 0xE3 0x82 0x9F #Lo HIRAGANA DIGRAPH YORI | ||
1487 | | 0xE3 0x82 0xA1..0xFF #Lo [90] KATAKANA LETTER SMALL A..KATAKANA ... | ||
1488 | | 0xE3 0x83 0x00..0xBA # | ||
1489 | | 0xE3 0x83 0xBC..0xBE #Lm [3] KATAKANA-HIRAGANA PROLONGED SOUND ... | ||
1490 | | 0xE3 0x83 0xBF #Lo KATAKANA DIGRAPH KOTO | ||
1491 | | 0xE3 0x84 0x85..0xAD #Lo [41] BOPOMOFO LETTER B..BOPOMOFO LETTER IH | ||
1492 | | 0xE3 0x84 0xB1..0xFF #Lo [94] HANGUL LETTER KIYEOK..HANGUL L... | ||
1493 | | 0xE3 0x85..0x85 0x00..0xFF # | ||
1494 | | 0xE3 0x86 0x00..0x8E # | ||
1495 | | 0xE3 0x86 0xA0..0xBA #Lo [27] BOPOMOFO LETTER BU..BOPOMOFO LETTE... | ||
1496 | | 0xE3 0x87 0xB0..0xBF #Lo [16] KATAKANA LETTER SMALL KU..KATAKANA... | ||
1497 | | 0xE3 0x90 0x80..0xFF #Lo [6582] CJK UNIFIED IDEOGRAPH-3400..C... | ||
1498 | | 0xE3 0x91..0xFF 0x00..0xFF # | ||
1499 | | 0xE4 0x00 0x00..0xFF # | ||
1500 | | 0xE4 0x01..0xB5 0x00..0xFF # | ||
1501 | | 0xE4 0xB6 0x00..0xB5 # | ||
1502 | | 0xE4 0xB8 0x80..0xFF #Lo [20950] CJK UNIFIED IDEOGRAPH-... | ||
1503 | | 0xE4 0xB9..0xFF 0x00..0xFF # | ||
1504 | | 0xE5..0xE8 0x00..0xFF 0x00..0xFF # | ||
1505 | | 0xE9 0x00 0x00..0xFF # | ||
1506 | | 0xE9 0x01..0xBE 0x00..0xFF # | ||
1507 | | 0xE9 0xBF 0x00..0x95 # | ||
1508 | | 0xEA 0x80 0x80..0x94 #Lo [21] YI SYLLABLE IT..YI SYLLABLE E | ||
1509 | | 0xEA 0x80 0x95 #Lm YI SYLLABLE WU | ||
1510 | | 0xEA 0x80 0x96..0xFF #Lo [1143] YI SYLLABLE BIT..YI SYLLABLE YYR | ||
1511 | | 0xEA 0x81..0x91 0x00..0xFF # | ||
1512 | | 0xEA 0x92 0x00..0x8C # | ||
1513 | | 0xEA 0x93 0x90..0xB7 #Lo [40] LISU LETTER BA..LISU LETTER OE | ||
1514 | | 0xEA 0x93 0xB8..0xBD #Lm [6] LISU LETTER TONE MYA TI..LISU LETT... | ||
1515 | | 0xEA 0x94 0x80..0xFF #Lo [268] VAI SYLLABLE EE..VAI SYLLABLE NG | ||
1516 | | 0xEA 0x95..0x97 0x00..0xFF # | ||
1517 | | 0xEA 0x98 0x00..0x8B # | ||
1518 | | 0xEA 0x98 0x8C #Lm VAI SYLLABLE LENGTHENER | ||
1519 | | 0xEA 0x98 0x90..0x9F #Lo [16] VAI SYLLABLE NDOLE FA..VAI SYMBOL ... | ||
1520 | | 0xEA 0x98 0xA0..0xA9 #Nd [10] VAI DIGIT ZERO..VAI DIGIT NINE | ||
1521 | | 0xEA 0x98 0xAA..0xAB #Lo [2] VAI SYLLABLE NDOLE MA..VAI SYLLABL... | ||
1522 | | 0xEA 0x99 0x80..0xAD #L& [46] CYRILLIC CAPITAL LETTER ZEMLYA..CY... | ||
1523 | | 0xEA 0x99 0xAE #Lo CYRILLIC LETTER MULTIOCULAR O | ||
1524 | | 0xEA 0x99 0xAF #Mn COMBINING CYRILLIC VZMET | ||
1525 | | 0xEA 0x99 0xB4..0xBD #Mn [10] COMBINING CYRILLIC LETTER UKRAINIA... | ||
1526 | | 0xEA 0x99 0xBF #Lm CYRILLIC PAYEROK | ||
1527 | | 0xEA 0x9A 0x80..0x9B #L& [28] CYRILLIC CAPITAL LETTER DWE..CYRIL... | ||
1528 | | 0xEA 0x9A 0x9C..0x9D #Lm [2] MODIFIER LETTER CYRILLIC HARD SIGN... | ||
1529 | | 0xEA 0x9A 0x9E..0x9F #Mn [2] COMBINING CYRILLIC LETTER EF..COMB... | ||
1530 | | 0xEA 0x9A 0xA0..0xFF #Lo [70] BAMUM LETTER A..BAMUM LETTER KI | ||
1531 | | 0xEA 0x9B 0x00..0xA5 # | ||
1532 | | 0xEA 0x9B 0xA6..0xAF #Nl [10] BAMUM LETTER MO..BAMUM LETTER KOGHOM | ||
1533 | | 0xEA 0x9B 0xB0..0xB1 #Mn [2] BAMUM COMBINING MARK KOQNDON..BAMU... | ||
1534 | | 0xEA 0x9C 0x97..0x9F #Lm [9] MODIFIER LETTER DOT VERTICAL BAR..... | ||
1535 | | 0xEA 0x9C 0xA2..0xFF #L& [78] LATIN CAPITAL LETTER EGYPTOLOGICAL... | ||
1536 | | 0xEA 0x9D 0x00..0xAF # | ||
1537 | | 0xEA 0x9D 0xB0 #Lm MODIFIER LETTER US | ||
1538 | | 0xEA 0x9D 0xB1..0xFF #L& [23] LATIN SMALL LETTER DUM..LATIN SMAL... | ||
1539 | | 0xEA 0x9E 0x00..0x87 # | ||
1540 | | 0xEA 0x9E 0x88 #Lm MODIFIER LETTER LOW CIRCUMFLEX ACCENT | ||
1541 | | 0xEA 0x9E 0x8B..0x8E #L& [4] LATIN CAPITAL LETTER SALTILLO..LAT... | ||
1542 | | 0xEA 0x9E 0x8F #Lo LATIN LETTER SINOLOGICAL DOT | ||
1543 | | 0xEA 0x9E 0x90..0xAE #L& [31] LATIN CAPITAL LETTER N WITH DESCEN... | ||
1544 | | 0xEA 0x9E 0xB0..0xB7 #L& [8] LATIN CAPITAL LETTER TURNED K..LAT... | ||
1545 | | 0xEA 0x9F 0xB7 #Lo LATIN EPIGRAPHIC LETTER SIDEWAYS I | ||
1546 | | 0xEA 0x9F 0xB8..0xB9 #Lm [2] MODIFIER LETTER CAPITAL H WITH STR... | ||
1547 | | 0xEA 0x9F 0xBA #L& LATIN LETTER SMALL CAPITAL TURNED M | ||
1548 | | 0xEA 0x9F 0xBB..0xFF #Lo [7] LATIN EPIGRAPHIC LETTER REVERSED F... | ||
1549 | | 0xEA 0xA0 0x00..0x81 # | ||
1550 | | 0xEA 0xA0 0x82 #Mn SYLOTI NAGRI SIGN DVISVARA | ||
1551 | | 0xEA 0xA0 0x83..0x85 #Lo [3] SYLOTI NAGRI LETTER U..SYLOTI NAGR... | ||
1552 | | 0xEA 0xA0 0x86 #Mn SYLOTI NAGRI SIGN HASANTA | ||
1553 | | 0xEA 0xA0 0x87..0x8A #Lo [4] SYLOTI NAGRI LETTER KO..SYLOTI NAG... | ||
1554 | | 0xEA 0xA0 0x8B #Mn SYLOTI NAGRI SIGN ANUSVARA | ||
1555 | | 0xEA 0xA0 0x8C..0xA2 #Lo [23] SYLOTI NAGRI LETTER CO..SYLOTI NAG... | ||
1556 | | 0xEA 0xA0 0xA3..0xA4 #Mc [2] SYLOTI NAGRI VOWEL SIGN A..SYLOTI ... | ||
1557 | | 0xEA 0xA0 0xA5..0xA6 #Mn [2] SYLOTI NAGRI VOWEL SIGN U..SYLOTI ... | ||
1558 | | 0xEA 0xA0 0xA7 #Mc SYLOTI NAGRI VOWEL SIGN OO | ||
1559 | | 0xEA 0xA1 0x80..0xB3 #Lo [52] PHAGS-PA LETTER KA..PHAGS-PA LETTE... | ||
1560 | | 0xEA 0xA2 0x80..0x81 #Mc [2] SAURASHTRA SIGN ANUSVARA..SAURASHT... | ||
1561 | | 0xEA 0xA2 0x82..0xB3 #Lo [50] SAURASHTRA LETTER A..SAURASHTRA LE... | ||
1562 | | 0xEA 0xA2 0xB4..0xFF #Mc [16] SAURASHTRA CONSONANT SIGN HAARU..S... | ||
1563 | | 0xEA 0xA3 0x00..0x83 # | ||
1564 | | 0xEA 0xA3 0x84..0x85 #Mn [2] SAURASHTRA SIGN VIRAMA..SAURASHTRA... | ||
1565 | | 0xEA 0xA3 0x90..0x99 #Nd [10] SAURASHTRA DIGIT ZERO..SAURASHTRA ... | ||
1566 | | 0xEA 0xA3 0xA0..0xB1 #Mn [18] COMBINING DEVANAGARI DIGIT ZERO..C... | ||
1567 | | 0xEA 0xA3 0xB2..0xB7 #Lo [6] DEVANAGARI SIGN SPACING CANDRABIND... | ||
1568 | | 0xEA 0xA3 0xBB #Lo DEVANAGARI HEADSTROKE | ||
1569 | | 0xEA 0xA3 0xBD #Lo DEVANAGARI JAIN OM | ||
1570 | | 0xEA 0xA4 0x80..0x89 #Nd [10] KAYAH LI DIGIT ZERO..KAYAH LI DIGI... | ||
1571 | | 0xEA 0xA4 0x8A..0xA5 #Lo [28] KAYAH LI LETTER KA..KAYAH LI LETTE... | ||
1572 | | 0xEA 0xA4 0xA6..0xAD #Mn [8] KAYAH LI VOWEL UE..KAYAH LI TONE C... | ||
1573 | | 0xEA 0xA4 0xB0..0xFF #Lo [23] REJANG LETTER KA..REJANG LETTER A | ||
1574 | | 0xEA 0xA5 0x00..0x86 # | ||
1575 | | 0xEA 0xA5 0x87..0x91 #Mn [11] REJANG VOWEL SIGN I..REJANG CONSON... | ||
1576 | | 0xEA 0xA5 0x92..0x93 #Mc [2] REJANG CONSONANT SIGN H..REJANG VI... | ||
1577 | | 0xEA 0xA5 0xA0..0xBC #Lo [29] HANGUL CHOSEONG TIKEUT-MIEUM..HANG... | ||
1578 | | 0xEA 0xA6 0x80..0x82 #Mn [3] JAVANESE SIGN PANYANGGA..JAVANESE ... | ||
1579 | | 0xEA 0xA6 0x83 #Mc JAVANESE SIGN WIGNYAN | ||
1580 | | 0xEA 0xA6 0x84..0xB2 #Lo [47] JAVANESE LETTER A..JAVANESE LETTER HA | ||
1581 | | 0xEA 0xA6 0xB3 #Mn JAVANESE SIGN CECAK TELU | ||
1582 | | 0xEA 0xA6 0xB4..0xB5 #Mc [2] JAVANESE VOWEL SIGN TARUNG..JAVANE... | ||
1583 | | 0xEA 0xA6 0xB6..0xB9 #Mn [4] JAVANESE VOWEL SIGN WULU..JAVANESE... | ||
1584 | | 0xEA 0xA6 0xBA..0xBB #Mc [2] JAVANESE VOWEL SIGN TALING..JAVANE... | ||
1585 | | 0xEA 0xA6 0xBC #Mn JAVANESE VOWEL SIGN PEPET | ||
1586 | | 0xEA 0xA6 0xBD..0xFF #Mc [4] JAVANESE CONSONANT SIGN KERET..JAV... | ||
1587 | | 0xEA 0xA7 0x00..0x80 # | ||
1588 | | 0xEA 0xA7 0x8F #Lm JAVANESE PANGRANGKEP | ||
1589 | | 0xEA 0xA7 0x90..0x99 #Nd [10] JAVANESE DIGIT ZERO..JAVANESE DIGI... | ||
1590 | | 0xEA 0xA7 0xA0..0xA4 #Lo [5] MYANMAR LETTER SHAN GHA..MYANMAR L... | ||
1591 | | 0xEA 0xA7 0xA5 #Mn MYANMAR SIGN SHAN SAW | ||
1592 | | 0xEA 0xA7 0xA6 #Lm MYANMAR MODIFIER LETTER SHAN REDUP... | ||
1593 | | 0xEA 0xA7 0xA7..0xAF #Lo [9] MYANMAR LETTER TAI LAING NYA..MYAN... | ||
1594 | | 0xEA 0xA7 0xB0..0xB9 #Nd [10] MYANMAR TAI LAING DIGIT ZERO..MYAN... | ||
1595 | | 0xEA 0xA7 0xBA..0xBE #Lo [5] MYANMAR LETTER TAI LAING LLA..MYAN... | ||
1596 | | 0xEA 0xA8 0x80..0xA8 #Lo [41] CHAM LETTER A..CHAM LETTER HA | ||
1597 | | 0xEA 0xA8 0xA9..0xAE #Mn [6] CHAM VOWEL SIGN AA..CHAM VOWEL SIG... | ||
1598 | | 0xEA 0xA8 0xAF..0xB0 #Mc [2] CHAM VOWEL SIGN O..CHAM VOWEL SIGN AI | ||
1599 | | 0xEA 0xA8 0xB1..0xB2 #Mn [2] CHAM VOWEL SIGN AU..CHAM VOWEL SIG... | ||
1600 | | 0xEA 0xA8 0xB3..0xB4 #Mc [2] CHAM CONSONANT SIGN YA..CHAM CONSO... | ||
1601 | | 0xEA 0xA8 0xB5..0xB6 #Mn [2] CHAM CONSONANT SIGN LA..CHAM CONSO... | ||
1602 | | 0xEA 0xA9 0x80..0x82 #Lo [3] CHAM LETTER FINAL K..CHAM LETTER F... | ||
1603 | | 0xEA 0xA9 0x83 #Mn CHAM CONSONANT SIGN FINAL NG | ||
1604 | | 0xEA 0xA9 0x84..0x8B #Lo [8] CHAM LETTER FINAL CH..CHAM LETTER ... | ||
1605 | | 0xEA 0xA9 0x8C #Mn CHAM CONSONANT SIGN FINAL M | ||
1606 | | 0xEA 0xA9 0x8D #Mc CHAM CONSONANT SIGN FINAL H | ||
1607 | | 0xEA 0xA9 0x90..0x99 #Nd [10] CHAM DIGIT ZERO..CHAM DIGIT NINE | ||
1608 | | 0xEA 0xA9 0xA0..0xAF #Lo [16] MYANMAR LETTER KHAMTI GA..MYANMAR ... | ||
1609 | | 0xEA 0xA9 0xB0 #Lm MYANMAR MODIFIER LETTER KHAMTI RED... | ||
1610 | | 0xEA 0xA9 0xB1..0xB6 #Lo [6] MYANMAR LETTER KHAMTI XA..MYANMAR ... | ||
1611 | | 0xEA 0xA9 0xBA #Lo MYANMAR LETTER AITON RA | ||
1612 | | 0xEA 0xA9 0xBB #Mc MYANMAR SIGN PAO KAREN TONE | ||
1613 | | 0xEA 0xA9 0xBC #Mn MYANMAR SIGN TAI LAING TONE-2 | ||
1614 | | 0xEA 0xA9 0xBD #Mc MYANMAR SIGN TAI LAING TONE-5 | ||
1615 | | 0xEA 0xA9 0xBE..0xFF #Lo [50] MYANMAR LETTER SHWE PALAUNG CHA..T... | ||
1616 | | 0xEA 0xAA 0x00..0xAF # | ||
1617 | | 0xEA 0xAA 0xB0 #Mn TAI VIET MAI KANG | ||
1618 | | 0xEA 0xAA 0xB1 #Lo TAI VIET VOWEL AA | ||
1619 | | 0xEA 0xAA 0xB2..0xB4 #Mn [3] TAI VIET VOWEL I..TAI VIET VOWEL U | ||
1620 | | 0xEA 0xAA 0xB5..0xB6 #Lo [2] TAI VIET VOWEL E..TAI VIET VOWEL O | ||
1621 | | 0xEA 0xAA 0xB7..0xB8 #Mn [2] TAI VIET MAI KHIT..TAI VIET VOWEL IA | ||
1622 | | 0xEA 0xAA 0xB9..0xBD #Lo [5] TAI VIET VOWEL UEA..TAI VIET VOWEL AN | ||
1623 | | 0xEA 0xAA 0xBE..0xBF #Mn [2] TAI VIET VOWEL AM..TAI VIET TONE M... | ||
1624 | | 0xEA 0xAB 0x80 #Lo TAI VIET TONE MAI NUENG | ||
1625 | | 0xEA 0xAB 0x81 #Mn TAI VIET TONE MAI THO | ||
1626 | | 0xEA 0xAB 0x82 #Lo TAI VIET TONE MAI SONG | ||
1627 | | 0xEA 0xAB 0x9B..0x9C #Lo [2] TAI VIET SYMBOL KON..TAI VIET SYMB... | ||
1628 | | 0xEA 0xAB 0x9D #Lm TAI VIET SYMBOL SAM | ||
1629 | | 0xEA 0xAB 0xA0..0xAA #Lo [11] MEETEI MAYEK LETTER E..MEETEI MAYE... | ||
1630 | | 0xEA 0xAB 0xAB #Mc MEETEI MAYEK VOWEL SIGN II | ||
1631 | | 0xEA 0xAB 0xAC..0xAD #Mn [2] MEETEI MAYEK VOWEL SIGN UU..MEETEI... | ||
1632 | | 0xEA 0xAB 0xAE..0xAF #Mc [2] MEETEI MAYEK VOWEL SIGN AU..MEETEI... | ||
1633 | | 0xEA 0xAB 0xB2 #Lo MEETEI MAYEK ANJI | ||
1634 | | 0xEA 0xAB 0xB3..0xB4 #Lm [2] MEETEI MAYEK SYLLABLE REPETITION M... | ||
1635 | | 0xEA 0xAB 0xB5 #Mc MEETEI MAYEK VOWEL SIGN VISARGA | ||
1636 | | 0xEA 0xAB 0xB6 #Mn MEETEI MAYEK VIRAMA | ||
1637 | | 0xEA 0xAC 0x81..0x86 #Lo [6] ETHIOPIC SYLLABLE TTHU..ETHIOPIC S... | ||
1638 | | 0xEA 0xAC 0x89..0x8E #Lo [6] ETHIOPIC SYLLABLE DDHU..ETHIOPIC S... | ||
1639 | | 0xEA 0xAC 0x91..0x96 #Lo [6] ETHIOPIC SYLLABLE DZU..ETHIOPIC SY... | ||
1640 | | 0xEA 0xAC 0xA0..0xA6 #Lo [7] ETHIOPIC SYLLABLE CCHHA..ETHIOPIC ... | ||
1641 | | 0xEA 0xAC 0xA8..0xAE #Lo [7] ETHIOPIC SYLLABLE BBA..ETHIOPIC SY... | ||
1642 | | 0xEA 0xAC 0xB0..0xFF #L& [43] LATIN SMALL LETTER BARRED ALPHA..L... | ||
1643 | | 0xEA 0xAD 0x00..0x9A # | ||
1644 | | 0xEA 0xAD 0x9C..0x9F #Lm [4] MODIFIER LETTER SMALL HENG..MODIFI... | ||
1645 | | 0xEA 0xAD 0xA0..0xA5 #L& [6] LATIN SMALL LETTER SAKHA YAT..GREE... | ||
1646 | | 0xEA 0xAD 0xB0..0xFF #L& [80] CHEROKEE SMALL LETTER A..CHEROKEE ... | ||
1647 | | 0xEA 0xAE 0x00..0xBF # | ||
1648 | | 0xEA 0xAF 0x80..0xA2 #Lo [35] MEETEI MAYEK LETTER KOK..MEETEI MA... | ||
1649 | | 0xEA 0xAF 0xA3..0xA4 #Mc [2] MEETEI MAYEK VOWEL SIGN ONAP..MEET... | ||
1650 | | 0xEA 0xAF 0xA5 #Mn MEETEI MAYEK VOWEL SIGN ANAP | ||
1651 | | 0xEA 0xAF 0xA6..0xA7 #Mc [2] MEETEI MAYEK VOWEL SIGN YENAP..MEE... | ||
1652 | | 0xEA 0xAF 0xA8 #Mn MEETEI MAYEK VOWEL SIGN UNAP | ||
1653 | | 0xEA 0xAF 0xA9..0xAA #Mc [2] MEETEI MAYEK VOWEL SIGN CHEINAP..M... | ||
1654 | | 0xEA 0xAF 0xAC #Mc MEETEI MAYEK LUM IYEK | ||
1655 | | 0xEA 0xAF 0xAD #Mn MEETEI MAYEK APUN IYEK | ||
1656 | | 0xEA 0xAF 0xB0..0xB9 #Nd [10] MEETEI MAYEK DIGIT ZERO..MEETEI MA... | ||
1657 | | 0xEA 0xB0 0x80..0xFF #Lo [11172] HANGUL SYLLABLE GA..HA... | ||
1658 | | 0xEA 0xB1..0xFF 0x00..0xFF # | ||
1659 | | 0xEB..0xEC 0x00..0xFF 0x00..0xFF # | ||
1660 | | 0xED 0x00 0x00..0xFF # | ||
1661 | | 0xED 0x01..0x9D 0x00..0xFF # | ||
1662 | | 0xED 0x9E 0x00..0xA3 # | ||
1663 | | 0xED 0x9E 0xB0..0xFF #Lo [23] HANGUL JUNGSEONG O-YEO..HANGUL JUN... | ||
1664 | | 0xED 0x9F 0x00..0x86 # | ||
1665 | | 0xED 0x9F 0x8B..0xBB #Lo [49] HANGUL JONGSEONG NIEUN-RIEUL..HANG... | ||
1666 | | 0xEF 0xA4 0x80..0xFF #Lo [366] CJK COMPATIBILITY IDEOGRAPH-F9... | ||
1667 | | 0xEF 0xA5..0xA8 0x00..0xFF # | ||
1668 | | 0xEF 0xA9 0x00..0xAD # | ||
1669 | | 0xEF 0xA9 0xB0..0xFF #Lo [106] CJK COMPATIBILITY IDEOGRAPH-FA... | ||
1670 | | 0xEF 0xAA..0xAA 0x00..0xFF # | ||
1671 | | 0xEF 0xAB 0x00..0x99 # | ||
1672 | | 0xEF 0xAC 0x80..0x86 #L& [7] LATIN SMALL LIGATURE FF..LATIN SMA... | ||
1673 | | 0xEF 0xAC 0x93..0x97 #L& [5] ARMENIAN SMALL LIGATURE MEN NOW..A... | ||
1674 | | 0xEF 0xAC 0x9D #Lo HEBREW LETTER YOD WITH HIRIQ | ||
1675 | | 0xEF 0xAC 0x9E #Mn HEBREW POINT JUDEO-SPANISH VARIKA | ||
1676 | | 0xEF 0xAC 0x9F..0xA8 #Lo [10] HEBREW LIGATURE YIDDISH YOD YOD PA... | ||
1677 | | 0xEF 0xAC 0xAA..0xB6 #Lo [13] HEBREW LETTER SHIN WITH SHIN DOT..... | ||
1678 | | 0xEF 0xAC 0xB8..0xBC #Lo [5] HEBREW LETTER TET WITH DAGESH..HEB... | ||
1679 | | 0xEF 0xAC 0xBE #Lo HEBREW LETTER MEM WITH DAGESH | ||
1680 | | 0xEF 0xAD 0x80..0x81 #Lo [2] HEBREW LETTER NUN WITH DAGESH..HEB... | ||
1681 | | 0xEF 0xAD 0x83..0x84 #Lo [2] HEBREW LETTER FINAL PE WITH DAGESH... | ||
1682 | | 0xEF 0xAD 0x86..0xFF #Lo [108] HEBREW LETTER TSADI WITH DAGESH..A... | ||
1683 | | 0xEF 0xAE 0x00..0xB1 # | ||
1684 | | 0xEF 0xAF 0x93..0xFF #Lo [363] ARABIC LETTER NG ISOLATED FORM... | ||
1685 | | 0xEF 0xB0..0xB3 0x00..0xFF # | ||
1686 | | 0xEF 0xB4 0x00..0xBD # | ||
1687 | | 0xEF 0xB5 0x90..0xFF #Lo [64] ARABIC LIGATURE TEH WITH JEEM WITH... | ||
1688 | | 0xEF 0xB6 0x00..0x8F # | ||
1689 | | 0xEF 0xB6 0x92..0xFF #Lo [54] ARABIC LIGATURE MEEM WITH JEEM WIT... | ||
1690 | | 0xEF 0xB7 0x00..0x87 # | ||
1691 | | 0xEF 0xB7 0xB0..0xBB #Lo [12] ARABIC LIGATURE SALLA USED AS KORA... | ||
1692 | | 0xEF 0xB8 0x80..0x8F #Mn [16] VARIATION SELECTOR-1..VARIATION SE... | ||
1693 | | 0xEF 0xB8 0xA0..0xAF #Mn [16] COMBINING LIGATURE LEFT HALF..COMB... | ||
1694 | | 0xEF 0xB8 0xB3..0xB4 #Pc [2] PRESENTATION FORM FOR VERTICAL LOW... | ||
1695 | | 0xEF 0xB9 0x8D..0x8F #Pc [3] DASHED LOW LINE..WAVY LOW LINE | ||
1696 | | 0xEF 0xB9 0xB0..0xB4 #Lo [5] ARABIC FATHATAN ISOLATED FORM..ARA... | ||
1697 | | 0xEF 0xB9 0xB6..0xFF #Lo [135] ARABIC FATHA ISOLATED FORM..AR... | ||
1698 | | 0xEF 0xBA..0xBA 0x00..0xFF # | ||
1699 | | 0xEF 0xBB 0x00..0xBC # | ||
1700 | | 0xEF 0xBC 0x90..0x99 #Nd [10] FULLWIDTH DIGIT ZERO..FULLWIDTH DI... | ||
1701 | | 0xEF 0xBC 0xA1..0xBA #L& [26] FULLWIDTH LATIN CAPITAL LETTER A..... | ||
1702 | | 0xEF 0xBC 0xBF #Pc FULLWIDTH LOW LINE | ||
1703 | | 0xEF 0xBD 0x81..0x9A #L& [26] FULLWIDTH LATIN SMALL LETTER A..FU... | ||
1704 | | 0xEF 0xBD 0xA6..0xAF #Lo [10] HALFWIDTH KATAKANA LETTER WO..HALF... | ||
1705 | | 0xEF 0xBD 0xB0 #Lm HALFWIDTH KATAKANA-HIRAGANA PROLON... | ||
1706 | | 0xEF 0xBD 0xB1..0xFF #Lo [45] HALFWIDTH KATAKANA LETTER A..HALFW... | ||
1707 | | 0xEF 0xBE 0x00..0x9D # | ||
1708 | | 0xEF 0xBE 0x9E..0x9F #Lm [2] HALFWIDTH KATAKANA VOICED SOUND MA... | ||
1709 | | 0xEF 0xBE 0xA0..0xBE #Lo [31] HALFWIDTH HANGUL FILLER..HALFWIDTH... | ||
1710 | | 0xEF 0xBF 0x82..0x87 #Lo [6] HALFWIDTH HANGUL LETTER A..HALFWID... | ||
1711 | | 0xEF 0xBF 0x8A..0x8F #Lo [6] HALFWIDTH HANGUL LETTER YEO..HALFW... | ||
1712 | | 0xEF 0xBF 0x92..0x97 #Lo [6] HALFWIDTH HANGUL LETTER YO..HALFWI... | ||
1713 | | 0xEF 0xBF 0x9A..0x9C #Lo [3] HALFWIDTH HANGUL LETTER EU..HALFWI... | ||
1714 | | 0xF0 0x90 0x80 0x80..0x8B #Lo [12] LINEAR B SYLLABLE B008 A..LINEA... | ||
1715 | | 0xF0 0x90 0x80 0x8D..0xA6 #Lo [26] LINEAR B SYLLABLE B036 JO..LINE... | ||
1716 | | 0xF0 0x90 0x80 0xA8..0xBA #Lo [19] LINEAR B SYLLABLE B060 RA..LINE... | ||
1717 | | 0xF0 0x90 0x80 0xBC..0xBD #Lo [2] LINEAR B SYLLABLE B017 ZA..LINE... | ||
1718 | | 0xF0 0x90 0x80 0xBF..0xFF #Lo [15] LINEAR B SYLLABLE B020 ZO..LINE... | ||
1719 | | 0xF0 0x90 0x81 0x00..0x8D # | ||
1720 | | 0xF0 0x90 0x81 0x90..0x9D #Lo [14] LINEAR B SYMBOL B018..LINEAR B ... | ||
1721 | | 0xF0 0x90 0x82 0x80..0xFF #Lo [123] LINEAR B IDEOGRAM B100 MAN..LIN... | ||
1722 | | 0xF0 0x90 0x83 0x00..0xBA # | ||
1723 | | 0xF0 0x90 0x85 0x80..0xB4 #Nl [53] GREEK ACROPHONIC ATTIC ONE QUAR... | ||
1724 | | 0xF0 0x90 0x87 0xBD #Mn PHAISTOS DISC SIGN COMBINING OBLIQ... | ||
1725 | | 0xF0 0x90 0x8A 0x80..0x9C #Lo [29] LYCIAN LETTER A..LYCIAN LETTER X | ||
1726 | | 0xF0 0x90 0x8A 0xA0..0xFF #Lo [49] CARIAN LETTER A..CARIAN LETTER ... | ||
1727 | | 0xF0 0x90 0x8B 0x00..0x90 # | ||
1728 | | 0xF0 0x90 0x8B 0xA0 #Mn COPTIC EPACT THOUSANDS MARK | ||
1729 | | 0xF0 0x90 0x8C 0x80..0x9F #Lo [32] OLD ITALIC LETTER A..OLD ITALIC... | ||
1730 | | 0xF0 0x90 0x8C 0xB0..0xFF #Lo [17] GOTHIC LETTER AHSA..GOTHIC LETT... | ||
1731 | | 0xF0 0x90 0x8D 0x00..0x80 # | ||
1732 | | 0xF0 0x90 0x8D 0x81 #Nl GOTHIC LETTER NINETY | ||
1733 | | 0xF0 0x90 0x8D 0x82..0x89 #Lo [8] GOTHIC LETTER RAIDA..GOTHIC LET... | ||
1734 | | 0xF0 0x90 0x8D 0x8A #Nl GOTHIC LETTER NINE HUNDRED | ||
1735 | | 0xF0 0x90 0x8D 0x90..0xB5 #Lo [38] OLD PERMIC LETTER AN..OLD PERMI... | ||
1736 | | 0xF0 0x90 0x8D 0xB6..0xBA #Mn [5] COMBINING OLD PERMIC LETTER AN.... | ||
1737 | | 0xF0 0x90 0x8E 0x80..0x9D #Lo [30] UGARITIC LETTER ALPA..UGARITIC ... | ||
1738 | | 0xF0 0x90 0x8E 0xA0..0xFF #Lo [36] OLD PERSIAN SIGN A..OLD PERSIAN... | ||
1739 | | 0xF0 0x90 0x8F 0x00..0x83 # | ||
1740 | | 0xF0 0x90 0x8F 0x88..0x8F #Lo [8] OLD PERSIAN SIGN AURAMAZDAA..OL... | ||
1741 | | 0xF0 0x90 0x8F 0x91..0x95 #Nl [5] OLD PERSIAN NUMBER ONE..OLD PER... | ||
1742 | | 0xF0 0x90 0x90 0x80..0xFF #L& [80] DESERET CAPITAL LETTER LONG I..... | ||
1743 | | 0xF0 0x90 0x91 0x00..0x8F # | ||
1744 | | 0xF0 0x90 0x91 0x90..0xFF #Lo [78] SHAVIAN LETTER PEEP..OSMANYA LE... | ||
1745 | | 0xF0 0x90 0x92 0x00..0x9D # | ||
1746 | | 0xF0 0x90 0x92 0xA0..0xA9 #Nd [10] OSMANYA DIGIT ZERO..OSMANYA DIG... | ||
1747 | | 0xF0 0x90 0x92 0xB0..0xFF #L& [36] OSAGE CAPITAL LETTER A..OSAGE C... | ||
1748 | | 0xF0 0x90 0x93 0x00..0x93 # | ||
1749 | | 0xF0 0x90 0x93 0x98..0xBB #L& [36] OSAGE SMALL LETTER A..OSAGE SMA... | ||
1750 | | 0xF0 0x90 0x94 0x80..0xA7 #Lo [40] ELBASAN LETTER A..ELBASAN LETTE... | ||
1751 | | 0xF0 0x90 0x94 0xB0..0xFF #Lo [52] CAUCASIAN ALBANIAN LETTER ALT..... | ||
1752 | | 0xF0 0x90 0x95 0x00..0xA3 # | ||
1753 | | 0xF0 0x90 0x98 0x80..0xFF #Lo [311] LINEAR A SIGN AB001..LINE... | ||
1754 | | 0xF0 0x90 0x99..0x9B 0x00..0xFF # | ||
1755 | | 0xF0 0x90 0x9C 0x00..0xB6 # | ||
1756 | | 0xF0 0x90 0x9D 0x80..0x95 #Lo [22] LINEAR A SIGN A701 A..LINEAR A ... | ||
1757 | | 0xF0 0x90 0x9D 0xA0..0xA7 #Lo [8] LINEAR A SIGN A800..LINEAR A SI... | ||
1758 | | 0xF0 0x90 0xA0 0x80..0x85 #Lo [6] CYPRIOT SYLLABLE A..CYPRIOT SYL... | ||
1759 | | 0xF0 0x90 0xA0 0x88 #Lo CYPRIOT SYLLABLE JO | ||
1760 | | 0xF0 0x90 0xA0 0x8A..0xB5 #Lo [44] CYPRIOT SYLLABLE KA..CYPRIOT SY... | ||
1761 | | 0xF0 0x90 0xA0 0xB7..0xB8 #Lo [2] CYPRIOT SYLLABLE XA..CYPRIOT SY... | ||
1762 | | 0xF0 0x90 0xA0 0xBC #Lo CYPRIOT SYLLABLE ZA | ||
1763 | | 0xF0 0x90 0xA0 0xBF..0xFF #Lo [23] CYPRIOT SYLLABLE ZO..IMPERIAL A... | ||
1764 | | 0xF0 0x90 0xA1 0x00..0x95 # | ||
1765 | | 0xF0 0x90 0xA1 0xA0..0xB6 #Lo [23] PALMYRENE LETTER ALEPH..PALMYRE... | ||
1766 | | 0xF0 0x90 0xA2 0x80..0x9E #Lo [31] NABATAEAN LETTER FINAL ALEPH..N... | ||
1767 | | 0xF0 0x90 0xA3 0xA0..0xB2 #Lo [19] HATRAN LETTER ALEPH..HATRAN LET... | ||
1768 | | 0xF0 0x90 0xA3 0xB4..0xB5 #Lo [2] HATRAN LETTER SHIN..HATRAN LETT... | ||
1769 | | 0xF0 0x90 0xA4 0x80..0x95 #Lo [22] PHOENICIAN LETTER ALF..PHOENICI... | ||
1770 | | 0xF0 0x90 0xA4 0xA0..0xB9 #Lo [26] LYDIAN LETTER A..LYDIAN LETTER C | ||
1771 | | 0xF0 0x90 0xA6 0x80..0xB7 #Lo [56] MEROITIC HIEROGLYPHIC LETTER A.... | ||
1772 | | 0xF0 0x90 0xA6 0xBE..0xBF #Lo [2] MEROITIC CURSIVE LOGOGRAM RMT..... | ||
1773 | | 0xF0 0x90 0xA8 0x80 #Lo KHAROSHTHI LETTER A | ||
1774 | | 0xF0 0x90 0xA8 0x81..0x83 #Mn [3] KHAROSHTHI VOWEL SIGN I..KHAROS... | ||
1775 | | 0xF0 0x90 0xA8 0x85..0x86 #Mn [2] KHAROSHTHI VOWEL SIGN E..KHAROS... | ||
1776 | | 0xF0 0x90 0xA8 0x8C..0x8F #Mn [4] KHAROSHTHI VOWEL LENGTH MARK..K... | ||
1777 | | 0xF0 0x90 0xA8 0x90..0x93 #Lo [4] KHAROSHTHI LETTER KA..KHAROSHTH... | ||
1778 | | 0xF0 0x90 0xA8 0x95..0x97 #Lo [3] KHAROSHTHI LETTER CA..KHAROSHTH... | ||
1779 | | 0xF0 0x90 0xA8 0x99..0xB3 #Lo [27] KHAROSHTHI LETTER NYA..KHAROSHT... | ||
1780 | | 0xF0 0x90 0xA8 0xB8..0xBA #Mn [3] KHAROSHTHI SIGN BAR ABOVE..KHAR... | ||
1781 | | 0xF0 0x90 0xA8 0xBF #Mn KHAROSHTHI VIRAMA | ||
1782 | | 0xF0 0x90 0xA9 0xA0..0xBC #Lo [29] OLD SOUTH ARABIAN LETTER HE..OL... | ||
1783 | | 0xF0 0x90 0xAA 0x80..0x9C #Lo [29] OLD NORTH ARABIAN LETTER HEH..O... | ||
1784 | | 0xF0 0x90 0xAB 0x80..0x87 #Lo [8] MANICHAEAN LETTER ALEPH..MANICH... | ||
1785 | | 0xF0 0x90 0xAB 0x89..0xA4 #Lo [28] MANICHAEAN LETTER ZAYIN..MANICH... | ||
1786 | | 0xF0 0x90 0xAB 0xA5..0xA6 #Mn [2] MANICHAEAN ABBREVIATION MARK AB... | ||
1787 | | 0xF0 0x90 0xAC 0x80..0xB5 #Lo [54] AVESTAN LETTER A..AVESTAN LETTE... | ||
1788 | | 0xF0 0x90 0xAD 0x80..0x95 #Lo [22] INSCRIPTIONAL PARTHIAN LETTER A... | ||
1789 | | 0xF0 0x90 0xAD 0xA0..0xB2 #Lo [19] INSCRIPTIONAL PAHLAVI LETTER AL... | ||
1790 | | 0xF0 0x90 0xAE 0x80..0x91 #Lo [18] PSALTER PAHLAVI LETTER ALEPH..P... | ||
1791 | | 0xF0 0x90 0xB0 0x80..0xFF #Lo [73] OLD TURKIC LETTER ORKHON A..OLD... | ||
1792 | | 0xF0 0x90 0xB1 0x00..0x88 # | ||
1793 | | 0xF0 0x90 0xB2 0x80..0xB2 #L& [51] OLD HUNGARIAN CAPITAL LETTER A.... | ||
1794 | | 0xF0 0x90 0xB3 0x80..0xB2 #L& [51] OLD HUNGARIAN SMALL LETTER A..O... | ||
1795 | | 0xF0 0x91 0x80 0x80 #Mc BRAHMI SIGN CANDRABINDU | ||
1796 | | 0xF0 0x91 0x80 0x81 #Mn BRAHMI SIGN ANUSVARA | ||
1797 | | 0xF0 0x91 0x80 0x82 #Mc BRAHMI SIGN VISARGA | ||
1798 | | 0xF0 0x91 0x80 0x83..0xB7 #Lo [53] BRAHMI SIGN JIHVAMULIYA..BRAHMI... | ||
1799 | | 0xF0 0x91 0x80 0xB8..0xFF #Mn [15] BRAHMI VOWEL SIGN AA..BRAHMI VI... | ||
1800 | | 0xF0 0x91 0x81 0x00..0x86 # | ||
1801 | | 0xF0 0x91 0x81 0xA6..0xAF #Nd [10] BRAHMI DIGIT ZERO..BRAHMI DIGIT... | ||
1802 | | 0xF0 0x91 0x81 0xBF..0xFF #Mn [3] BRAHMI NUMBER JOINER..KAITHI SI... | ||
1803 | | 0xF0 0x91 0x82 0x00..0x81 # | ||
1804 | | 0xF0 0x91 0x82 0x82 #Mc KAITHI SIGN VISARGA | ||
1805 | | 0xF0 0x91 0x82 0x83..0xAF #Lo [45] KAITHI LETTER A..KAITHI LETTER HA | ||
1806 | | 0xF0 0x91 0x82 0xB0..0xB2 #Mc [3] KAITHI VOWEL SIGN AA..KAITHI VO... | ||
1807 | | 0xF0 0x91 0x82 0xB3..0xB6 #Mn [4] KAITHI VOWEL SIGN U..KAITHI VOW... | ||
1808 | | 0xF0 0x91 0x82 0xB7..0xB8 #Mc [2] KAITHI VOWEL SIGN O..KAITHI VOW... | ||
1809 | | 0xF0 0x91 0x82 0xB9..0xBA #Mn [2] KAITHI SIGN VIRAMA..KAITHI SIGN... | ||
1810 | | 0xF0 0x91 0x83 0x90..0xA8 #Lo [25] SORA SOMPENG LETTER SAH..SORA S... | ||
1811 | | 0xF0 0x91 0x83 0xB0..0xB9 #Nd [10] SORA SOMPENG DIGIT ZERO..SORA S... | ||
1812 | | 0xF0 0x91 0x84 0x80..0x82 #Mn [3] CHAKMA SIGN CANDRABINDU..CHAKMA... | ||
1813 | | 0xF0 0x91 0x84 0x83..0xA6 #Lo [36] CHAKMA LETTER AA..CHAKMA LETTER... | ||
1814 | | 0xF0 0x91 0x84 0xA7..0xAB #Mn [5] CHAKMA VOWEL SIGN A..CHAKMA VOW... | ||
1815 | | 0xF0 0x91 0x84 0xAC #Mc CHAKMA VOWEL SIGN E | ||
1816 | | 0xF0 0x91 0x84 0xAD..0xB4 #Mn [8] CHAKMA VOWEL SIGN AI..CHAKMA MA... | ||
1817 | | 0xF0 0x91 0x84 0xB6..0xBF #Nd [10] CHAKMA DIGIT ZERO..CHAKMA DIGIT... | ||
1818 | | 0xF0 0x91 0x85 0x90..0xB2 #Lo [35] MAHAJANI LETTER A..MAHAJANI LET... | ||
1819 | | 0xF0 0x91 0x85 0xB3 #Mn MAHAJANI SIGN NUKTA | ||
1820 | | 0xF0 0x91 0x85 0xB6 #Lo MAHAJANI LIGATURE SHRI | ||
1821 | | 0xF0 0x91 0x86 0x80..0x81 #Mn [2] SHARADA SIGN CANDRABINDU..SHARA... | ||
1822 | | 0xF0 0x91 0x86 0x82 #Mc SHARADA SIGN VISARGA | ||
1823 | | 0xF0 0x91 0x86 0x83..0xB2 #Lo [48] SHARADA LETTER A..SHARADA LETTE... | ||
1824 | | 0xF0 0x91 0x86 0xB3..0xB5 #Mc [3] SHARADA VOWEL SIGN AA..SHARADA ... | ||
1825 | | 0xF0 0x91 0x86 0xB6..0xBE #Mn [9] SHARADA VOWEL SIGN U..SHARADA V... | ||
1826 | | 0xF0 0x91 0x86 0xBF..0xFF #Mc [2] SHARADA VOWEL SIGN AU..SHARADA ... | ||
1827 | | 0xF0 0x91 0x87 0x00..0x80 # | ||
1828 | | 0xF0 0x91 0x87 0x81..0x84 #Lo [4] SHARADA SIGN AVAGRAHA..SHARADA OM | ||
1829 | | 0xF0 0x91 0x87 0x8A..0x8C #Mn [3] SHARADA SIGN NUKTA..SHARADA EXT... | ||
1830 | | 0xF0 0x91 0x87 0x90..0x99 #Nd [10] SHARADA DIGIT ZERO..SHARADA DIG... | ||
1831 | | 0xF0 0x91 0x87 0x9A #Lo SHARADA EKAM | ||
1832 | | 0xF0 0x91 0x87 0x9C #Lo SHARADA HEADSTROKE | ||
1833 | | 0xF0 0x91 0x88 0x80..0x91 #Lo [18] KHOJKI LETTER A..KHOJKI LETTER JJA | ||
1834 | | 0xF0 0x91 0x88 0x93..0xAB #Lo [25] KHOJKI LETTER NYA..KHOJKI LETTE... | ||
1835 | | 0xF0 0x91 0x88 0xAC..0xAE #Mc [3] KHOJKI VOWEL SIGN AA..KHOJKI VO... | ||
1836 | | 0xF0 0x91 0x88 0xAF..0xB1 #Mn [3] KHOJKI VOWEL SIGN U..KHOJKI VOW... | ||
1837 | | 0xF0 0x91 0x88 0xB2..0xB3 #Mc [2] KHOJKI VOWEL SIGN O..KHOJKI VOW... | ||
1838 | | 0xF0 0x91 0x88 0xB4 #Mn KHOJKI SIGN ANUSVARA | ||
1839 | | 0xF0 0x91 0x88 0xB5 #Mc KHOJKI SIGN VIRAMA | ||
1840 | | 0xF0 0x91 0x88 0xB6..0xB7 #Mn [2] KHOJKI SIGN NUKTA..KHOJKI SIGN ... | ||
1841 | | 0xF0 0x91 0x88 0xBE #Mn KHOJKI SIGN SUKUN | ||
1842 | | 0xF0 0x91 0x8A 0x80..0x86 #Lo [7] MULTANI LETTER A..MULTANI LETTE... | ||
1843 | | 0xF0 0x91 0x8A 0x88 #Lo MULTANI LETTER GHA | ||
1844 | | 0xF0 0x91 0x8A 0x8A..0x8D #Lo [4] MULTANI LETTER CA..MULTANI LETT... | ||
1845 | | 0xF0 0x91 0x8A 0x8F..0x9D #Lo [15] MULTANI LETTER NYA..MULTANI LET... | ||
1846 | | 0xF0 0x91 0x8A 0x9F..0xA8 #Lo [10] MULTANI LETTER BHA..MULTANI LET... | ||
1847 | | 0xF0 0x91 0x8A 0xB0..0xFF #Lo [47] KHUDAWADI LETTER A..KHUDAWADI L... | ||
1848 | | 0xF0 0x91 0x8B 0x00..0x9E # | ||
1849 | | 0xF0 0x91 0x8B 0x9F #Mn KHUDAWADI SIGN ANUSVARA | ||
1850 | | 0xF0 0x91 0x8B 0xA0..0xA2 #Mc [3] KHUDAWADI VOWEL SIGN AA..KHUDAW... | ||
1851 | | 0xF0 0x91 0x8B 0xA3..0xAA #Mn [8] KHUDAWADI VOWEL SIGN U..KHUDAWA... | ||
1852 | | 0xF0 0x91 0x8B 0xB0..0xB9 #Nd [10] KHUDAWADI DIGIT ZERO..KHUDAWADI... | ||
1853 | | 0xF0 0x91 0x8C 0x80..0x81 #Mn [2] GRANTHA SIGN COMBINING ANUSVARA... | ||
1854 | | 0xF0 0x91 0x8C 0x82..0x83 #Mc [2] GRANTHA SIGN ANUSVARA..GRANTHA ... | ||
1855 | | 0xF0 0x91 0x8C 0x85..0x8C #Lo [8] GRANTHA LETTER A..GRANTHA LETTE... | ||
1856 | | 0xF0 0x91 0x8C 0x8F..0x90 #Lo [2] GRANTHA LETTER EE..GRANTHA LETT... | ||
1857 | | 0xF0 0x91 0x8C 0x93..0xA8 #Lo [22] GRANTHA LETTER OO..GRANTHA LETT... | ||
1858 | | 0xF0 0x91 0x8C 0xAA..0xB0 #Lo [7] GRANTHA LETTER PA..GRANTHA LETT... | ||
1859 | | 0xF0 0x91 0x8C 0xB2..0xB3 #Lo [2] GRANTHA LETTER LA..GRANTHA LETT... | ||
1860 | | 0xF0 0x91 0x8C 0xB5..0xB9 #Lo [5] GRANTHA LETTER VA..GRANTHA LETT... | ||
1861 | | 0xF0 0x91 0x8C 0xBC #Mn GRANTHA SIGN NUKTA | ||
1862 | | 0xF0 0x91 0x8C 0xBD #Lo GRANTHA SIGN AVAGRAHA | ||
1863 | | 0xF0 0x91 0x8C 0xBE..0xBF #Mc [2] GRANTHA VOWEL SIGN AA..GRANTHA ... | ||
1864 | | 0xF0 0x91 0x8D 0x80 #Mn GRANTHA VOWEL SIGN II | ||
1865 | | 0xF0 0x91 0x8D 0x81..0x84 #Mc [4] GRANTHA VOWEL SIGN U..GRANTHA V... | ||
1866 | | 0xF0 0x91 0x8D 0x87..0x88 #Mc [2] GRANTHA VOWEL SIGN EE..GRANTHA ... | ||
1867 | | 0xF0 0x91 0x8D 0x8B..0x8D #Mc [3] GRANTHA VOWEL SIGN OO..GRANTHA ... | ||
1868 | | 0xF0 0x91 0x8D 0x90 #Lo GRANTHA OM | ||
1869 | | 0xF0 0x91 0x8D 0x97 #Mc GRANTHA AU LENGTH MARK | ||
1870 | | 0xF0 0x91 0x8D 0x9D..0xA1 #Lo [5] GRANTHA SIGN PLUTA..GRANTHA LET... | ||
1871 | | 0xF0 0x91 0x8D 0xA2..0xA3 #Mc [2] GRANTHA VOWEL SIGN VOCALIC L..G... | ||
1872 | | 0xF0 0x91 0x8D 0xA6..0xAC #Mn [7] COMBINING GRANTHA DIGIT ZERO..C... | ||
1873 | | 0xF0 0x91 0x8D 0xB0..0xB4 #Mn [5] COMBINING GRANTHA LETTER A..COM... | ||
1874 | | 0xF0 0x91 0x90 0x80..0xB4 #Lo [53] NEWA LETTER A..NEWA LETTER HA | ||
1875 | | 0xF0 0x91 0x90 0xB5..0xB7 #Mc [3] NEWA VOWEL SIGN AA..NEWA VOWEL ... | ||
1876 | | 0xF0 0x91 0x90 0xB8..0xBF #Mn [8] NEWA VOWEL SIGN U..NEWA VOWEL S... | ||
1877 | | 0xF0 0x91 0x91 0x80..0x81 #Mc [2] NEWA VOWEL SIGN O..NEWA VOWEL S... | ||
1878 | | 0xF0 0x91 0x91 0x82..0x84 #Mn [3] NEWA SIGN VIRAMA..NEWA SIGN ANU... | ||
1879 | | 0xF0 0x91 0x91 0x85 #Mc NEWA SIGN VISARGA | ||
1880 | | 0xF0 0x91 0x91 0x86 #Mn NEWA SIGN NUKTA | ||
1881 | | 0xF0 0x91 0x91 0x87..0x8A #Lo [4] NEWA SIGN AVAGRAHA..NEWA SIDDHI | ||
1882 | | 0xF0 0x91 0x91 0x90..0x99 #Nd [10] NEWA DIGIT ZERO..NEWA DIGIT NINE | ||
1883 | | 0xF0 0x91 0x92 0x80..0xAF #Lo [48] TIRHUTA ANJI..TIRHUTA LETTER HA | ||
1884 | | 0xF0 0x91 0x92 0xB0..0xB2 #Mc [3] TIRHUTA VOWEL SIGN AA..TIRHUTA ... | ||
1885 | | 0xF0 0x91 0x92 0xB3..0xB8 #Mn [6] TIRHUTA VOWEL SIGN U..TIRHUTA V... | ||
1886 | | 0xF0 0x91 0x92 0xB9 #Mc TIRHUTA VOWEL SIGN E | ||
1887 | | 0xF0 0x91 0x92 0xBA #Mn TIRHUTA VOWEL SIGN SHORT E | ||
1888 | | 0xF0 0x91 0x92 0xBB..0xBE #Mc [4] TIRHUTA VOWEL SIGN AI..TIRHUTA ... | ||
1889 | | 0xF0 0x91 0x92 0xBF..0xFF #Mn [2] TIRHUTA SIGN CANDRABINDU..TIRHU... | ||
1890 | | 0xF0 0x91 0x93 0x00..0x80 # | ||
1891 | | 0xF0 0x91 0x93 0x81 #Mc TIRHUTA SIGN VISARGA | ||
1892 | | 0xF0 0x91 0x93 0x82..0x83 #Mn [2] TIRHUTA SIGN VIRAMA..TIRHUTA SI... | ||
1893 | | 0xF0 0x91 0x93 0x84..0x85 #Lo [2] TIRHUTA SIGN AVAGRAHA..TIRHUTA ... | ||
1894 | | 0xF0 0x91 0x93 0x87 #Lo TIRHUTA OM | ||
1895 | | 0xF0 0x91 0x93 0x90..0x99 #Nd [10] TIRHUTA DIGIT ZERO..TIRHUTA DIG... | ||
1896 | | 0xF0 0x91 0x96 0x80..0xAE #Lo [47] SIDDHAM LETTER A..SIDDHAM LETTE... | ||
1897 | | 0xF0 0x91 0x96 0xAF..0xB1 #Mc [3] SIDDHAM VOWEL SIGN AA..SIDDHAM ... | ||
1898 | | 0xF0 0x91 0x96 0xB2..0xB5 #Mn [4] SIDDHAM VOWEL SIGN U..SIDDHAM V... | ||
1899 | | 0xF0 0x91 0x96 0xB8..0xBB #Mc [4] SIDDHAM VOWEL SIGN E..SIDDHAM V... | ||
1900 | | 0xF0 0x91 0x96 0xBC..0xBD #Mn [2] SIDDHAM SIGN CANDRABINDU..SIDDH... | ||
1901 | | 0xF0 0x91 0x96 0xBE #Mc SIDDHAM SIGN VISARGA | ||
1902 | | 0xF0 0x91 0x96 0xBF..0xFF #Mn [2] SIDDHAM SIGN VIRAMA..SIDDHAM SI... | ||
1903 | | 0xF0 0x91 0x97 0x00..0x80 # | ||
1904 | | 0xF0 0x91 0x97 0x98..0x9B #Lo [4] SIDDHAM LETTER THREE-CIRCLE ALT... | ||
1905 | | 0xF0 0x91 0x97 0x9C..0x9D #Mn [2] SIDDHAM VOWEL SIGN ALTERNATE U.... | ||
1906 | | 0xF0 0x91 0x98 0x80..0xAF #Lo [48] MODI LETTER A..MODI LETTER LLA | ||
1907 | | 0xF0 0x91 0x98 0xB0..0xB2 #Mc [3] MODI VOWEL SIGN AA..MODI VOWEL ... | ||
1908 | | 0xF0 0x91 0x98 0xB3..0xBA #Mn [8] MODI VOWEL SIGN U..MODI VOWEL S... | ||
1909 | | 0xF0 0x91 0x98 0xBB..0xBC #Mc [2] MODI VOWEL SIGN O..MODI VOWEL S... | ||
1910 | | 0xF0 0x91 0x98 0xBD #Mn MODI SIGN ANUSVARA | ||
1911 | | 0xF0 0x91 0x98 0xBE #Mc MODI SIGN VISARGA | ||
1912 | | 0xF0 0x91 0x98 0xBF..0xFF #Mn [2] MODI SIGN VIRAMA..MODI SIGN ARD... | ||
1913 | | 0xF0 0x91 0x99 0x00..0x80 # | ||
1914 | | 0xF0 0x91 0x99 0x84 #Lo MODI SIGN HUVA | ||
1915 | | 0xF0 0x91 0x99 0x90..0x99 #Nd [10] MODI DIGIT ZERO..MODI DIGIT NINE | ||
1916 | | 0xF0 0x91 0x9A 0x80..0xAA #Lo [43] TAKRI LETTER A..TAKRI LETTER RRA | ||
1917 | | 0xF0 0x91 0x9A 0xAB #Mn TAKRI SIGN ANUSVARA | ||
1918 | | 0xF0 0x91 0x9A 0xAC #Mc TAKRI SIGN VISARGA | ||
1919 | | 0xF0 0x91 0x9A 0xAD #Mn TAKRI VOWEL SIGN AA | ||
1920 | | 0xF0 0x91 0x9A 0xAE..0xAF #Mc [2] TAKRI VOWEL SIGN I..TAKRI VOWEL... | ||
1921 | | 0xF0 0x91 0x9A 0xB0..0xB5 #Mn [6] TAKRI VOWEL SIGN U..TAKRI VOWEL... | ||
1922 | | 0xF0 0x91 0x9A 0xB6 #Mc TAKRI SIGN VIRAMA | ||
1923 | | 0xF0 0x91 0x9A 0xB7 #Mn TAKRI SIGN NUKTA | ||
1924 | | 0xF0 0x91 0x9B 0x80..0x89 #Nd [10] TAKRI DIGIT ZERO..TAKRI DIGIT NINE | ||
1925 | | 0xF0 0x91 0x9C 0x80..0x99 #Lo [26] AHOM LETTER KA..AHOM LETTER JHA | ||
1926 | | 0xF0 0x91 0x9C 0x9D..0x9F #Mn [3] AHOM CONSONANT SIGN MEDIAL LA..... | ||
1927 | | 0xF0 0x91 0x9C 0xA0..0xA1 #Mc [2] AHOM VOWEL SIGN A..AHOM VOWEL S... | ||
1928 | | 0xF0 0x91 0x9C 0xA2..0xA5 #Mn [4] AHOM VOWEL SIGN I..AHOM VOWEL S... | ||
1929 | | 0xF0 0x91 0x9C 0xA6 #Mc AHOM VOWEL SIGN E | ||
1930 | | 0xF0 0x91 0x9C 0xA7..0xAB #Mn [5] AHOM VOWEL SIGN AW..AHOM SIGN K... | ||
1931 | | 0xF0 0x91 0x9C 0xB0..0xB9 #Nd [10] AHOM DIGIT ZERO..AHOM DIGIT NINE | ||
1932 | | 0xF0 0x91 0xA2 0xA0..0xFF #L& [64] WARANG CITI CAPITAL LETTER NGAA... | ||
1933 | | 0xF0 0x91 0xA3 0x00..0x9F # | ||
1934 | | 0xF0 0x91 0xA3 0xA0..0xA9 #Nd [10] WARANG CITI DIGIT ZERO..WARANG ... | ||
1935 | | 0xF0 0x91 0xA3 0xBF #Lo WARANG CITI OM | ||
1936 | | 0xF0 0x91 0xAB 0x80..0xB8 #Lo [57] PAU CIN HAU LETTER PA..PAU CIN ... | ||
1937 | | 0xF0 0x91 0xB0 0x80..0x88 #Lo [9] BHAIKSUKI LETTER A..BHAIKSUKI L... | ||
1938 | | 0xF0 0x91 0xB0 0x8A..0xAE #Lo [37] BHAIKSUKI LETTER E..BHAIKSUKI L... | ||
1939 | | 0xF0 0x91 0xB0 0xAF #Mc BHAIKSUKI VOWEL SIGN AA | ||
1940 | | 0xF0 0x91 0xB0 0xB0..0xB6 #Mn [7] BHAIKSUKI VOWEL SIGN I..BHAIKSU... | ||
1941 | | 0xF0 0x91 0xB0 0xB8..0xBD #Mn [6] BHAIKSUKI VOWEL SIGN E..BHAIKSU... | ||
1942 | | 0xF0 0x91 0xB0 0xBE #Mc BHAIKSUKI SIGN VISARGA | ||
1943 | | 0xF0 0x91 0xB0 0xBF #Mn BHAIKSUKI SIGN VIRAMA | ||
1944 | | 0xF0 0x91 0xB1 0x80 #Lo BHAIKSUKI SIGN AVAGRAHA | ||
1945 | | 0xF0 0x91 0xB1 0x90..0x99 #Nd [10] BHAIKSUKI DIGIT ZERO..BHAIKSUKI... | ||
1946 | | 0xF0 0x91 0xB1 0xB2..0xFF #Lo [30] MARCHEN LETTER KA..MARCHEN LETT... | ||
1947 | | 0xF0 0x91 0xB2 0x00..0x8F # | ||
1948 | | 0xF0 0x91 0xB2 0x92..0xA7 #Mn [22] MARCHEN SUBJOINED LETTER KA..MA... | ||
1949 | | 0xF0 0x91 0xB2 0xA9 #Mc MARCHEN SUBJOINED LETTER YA | ||
1950 | | 0xF0 0x91 0xB2 0xAA..0xB0 #Mn [7] MARCHEN SUBJOINED LETTER RA..MA... | ||
1951 | | 0xF0 0x91 0xB2 0xB1 #Mc MARCHEN VOWEL SIGN I | ||
1952 | | 0xF0 0x91 0xB2 0xB2..0xB3 #Mn [2] MARCHEN VOWEL SIGN U..MARCHEN V... | ||
1953 | | 0xF0 0x91 0xB2 0xB4 #Mc MARCHEN VOWEL SIGN O | ||
1954 | | 0xF0 0x91 0xB2 0xB5..0xB6 #Mn [2] MARCHEN SIGN ANUSVARA..MARCHEN ... | ||
1955 | | 0xF0 0x92 0x80 0x80..0xFF #Lo [922] CUNEIFORM SIGN A..CUNEIFO... | ||
1956 | | 0xF0 0x92 0x81..0x8D 0x00..0xFF # | ||
1957 | | 0xF0 0x92 0x8E 0x00..0x99 # | ||
1958 | | 0xF0 0x92 0x90 0x80..0xFF #Nl [111] CUNEIFORM NUMERIC SIGN TWO ASH.... | ||
1959 | | 0xF0 0x92 0x91 0x00..0xAE # | ||
1960 | | 0xF0 0x92 0x92 0x80..0xFF #Lo [196] CUNEIFORM SIGN AB TIMES N... | ||
1961 | | 0xF0 0x92 0x93..0x94 0x00..0xFF # | ||
1962 | | 0xF0 0x92 0x95 0x00..0x83 # | ||
1963 | | 0xF0 0x93 0x80 0x80..0xFF #Lo [1071] EGYPTIAN HIEROGLYPH A001... | ||
1964 | | 0xF0 0x93 0x81..0x8F 0x00..0xFF # | ||
1965 | | 0xF0 0x93 0x90 0x00..0xAE # | ||
1966 | | 0xF0 0x94 0x90 0x80..0xFF #Lo [583] ANATOLIAN HIEROGLYPH A001... | ||
1967 | | 0xF0 0x94 0x91..0x98 0x00..0xFF # | ||
1968 | | 0xF0 0x94 0x99 0x00..0x86 # | ||
1969 | | 0xF0 0x96 0xA0 0x80..0xFF #Lo [569] BAMUM LETTER PHASE-A NGKU... | ||
1970 | | 0xF0 0x96 0xA1..0xA7 0x00..0xFF # | ||
1971 | | 0xF0 0x96 0xA8 0x00..0xB8 # | ||
1972 | | 0xF0 0x96 0xA9 0x80..0x9E #Lo [31] MRO LETTER TA..MRO LETTER TEK | ||
1973 | | 0xF0 0x96 0xA9 0xA0..0xA9 #Nd [10] MRO DIGIT ZERO..MRO DIGIT NINE | ||
1974 | | 0xF0 0x96 0xAB 0x90..0xAD #Lo [30] BASSA VAH LETTER ENNI..BASSA VA... | ||
1975 | | 0xF0 0x96 0xAB 0xB0..0xB4 #Mn [5] BASSA VAH COMBINING HIGH TONE..... | ||
1976 | | 0xF0 0x96 0xAC 0x80..0xAF #Lo [48] PAHAWH HMONG VOWEL KEEB..PAHAWH... | ||
1977 | | 0xF0 0x96 0xAC 0xB0..0xB6 #Mn [7] PAHAWH HMONG MARK CIM TUB..PAHA... | ||
1978 | | 0xF0 0x96 0xAD 0x80..0x83 #Lm [4] PAHAWH HMONG SIGN VOS SEEV..PAH... | ||
1979 | | 0xF0 0x96 0xAD 0x90..0x99 #Nd [10] PAHAWH HMONG DIGIT ZERO..PAHAWH... | ||
1980 | | 0xF0 0x96 0xAD 0xA3..0xB7 #Lo [21] PAHAWH HMONG SIGN VOS LUB..PAHA... | ||
1981 | | 0xF0 0x96 0xAD 0xBD..0xFF #Lo [19] PAHAWH HMONG CLAN SIGN TSHEEJ..... | ||
1982 | | 0xF0 0x96 0xAE 0x00..0x8F # | ||
1983 | | 0xF0 0x96 0xBC 0x80..0xFF #Lo [69] MIAO LETTER PA..MIAO LETTER HHA | ||
1984 | | 0xF0 0x96 0xBD 0x00..0x84 # | ||
1985 | | 0xF0 0x96 0xBD 0x90 #Lo MIAO LETTER NASALIZATION | ||
1986 | | 0xF0 0x96 0xBD 0x91..0xBE #Mc [46] MIAO SIGN ASPIRATION..MIAO VOWE... | ||
1987 | | 0xF0 0x96 0xBE 0x8F..0x92 #Mn [4] MIAO TONE RIGHT..MIAO TONE BELOW | ||
1988 | | 0xF0 0x96 0xBE 0x93..0x9F #Lm [13] MIAO LETTER TONE-2..MIAO LETTER... | ||
1989 | | 0xF0 0x96 0xBF 0xA0 #Lm TANGUT ITERATION MARK | ||
1990 | | 0xF0 0x97 0x80 0x80..0xFF #Lo [6125] TANGUT IDEOGRAPH-17000..... | ||
1991 | | 0xF0 0x97 0x81..0xFF 0x00..0xFF # | ||
1992 | | 0xF0 0x98 0x00 0x00..0xFF # | ||
1993 | | 0xF0 0x98 0x01..0x9E 0x00..0xFF # | ||
1994 | | 0xF0 0x98 0x9F 0x00..0xAC # | ||
1995 | | 0xF0 0x98 0xA0 0x80..0xFF #Lo [755] TANGUT COMPONENT-001..TAN... | ||
1996 | | 0xF0 0x98 0xA1..0xAA 0x00..0xFF # | ||
1997 | | 0xF0 0x98 0xAB 0x00..0xB2 # | ||
1998 | | 0xF0 0x9B 0x80 0x80..0x81 #Lo [2] KATAKANA LETTER ARCHAIC E..HIRA... | ||
1999 | | 0xF0 0x9B 0xB0 0x80..0xFF #Lo [107] DUPLOYAN LETTER H..DUPLOYAN LET... | ||
2000 | | 0xF0 0x9B 0xB1 0x00..0xAA # | ||
2001 | | 0xF0 0x9B 0xB1 0xB0..0xBC #Lo [13] DUPLOYAN AFFIX LEFT HORIZONTAL ... | ||
2002 | | 0xF0 0x9B 0xB2 0x80..0x88 #Lo [9] DUPLOYAN AFFIX HIGH ACUTE..DUPL... | ||
2003 | | 0xF0 0x9B 0xB2 0x90..0x99 #Lo [10] DUPLOYAN AFFIX LOW ACUTE..DUPLO... | ||
2004 | | 0xF0 0x9B 0xB2 0x9D..0x9E #Mn [2] DUPLOYAN THICK LETTER SELECTOR.... | ||
2005 | | 0xF0 0x9D 0x85 0xA5..0xA6 #Mc [2] MUSICAL SYMBOL COMBINING STEM..... | ||
2006 | | 0xF0 0x9D 0x85 0xA7..0xA9 #Mn [3] MUSICAL SYMBOL COMBINING TREMOL... | ||
2007 | | 0xF0 0x9D 0x85 0xAD..0xB2 #Mc [6] MUSICAL SYMBOL COMBINING AUGMEN... | ||
2008 | | 0xF0 0x9D 0x85 0xBB..0xFF #Mn [8] MUSICAL SYMBOL COMBINING ACCENT... | ||
2009 | | 0xF0 0x9D 0x86 0x00..0x82 # | ||
2010 | | 0xF0 0x9D 0x86 0x85..0x8B #Mn [7] MUSICAL SYMBOL COMBINING DOIT..... | ||
2011 | | 0xF0 0x9D 0x86 0xAA..0xAD #Mn [4] MUSICAL SYMBOL COMBINING DOWN B... | ||
2012 | | 0xF0 0x9D 0x89 0x82..0x84 #Mn [3] COMBINING GREEK MUSICAL TRISEME... | ||
2013 | | 0xF0 0x9D 0x90 0x80..0xFF #L& [85] MATHEMATICAL BOLD CAPITAL A..MA... | ||
2014 | | 0xF0 0x9D 0x91 0x00..0x94 # | ||
2015 | | 0xF0 0x9D 0x91 0x96..0xFF #L& [71] MATHEMATICAL ITALIC SMALL I..MA... | ||
2016 | | 0xF0 0x9D 0x92 0x00..0x9C # | ||
2017 | | 0xF0 0x9D 0x92 0x9E..0x9F #L& [2] MATHEMATICAL SCRIPT CAPITAL C..... | ||
2018 | | 0xF0 0x9D 0x92 0xA2 #L& MATHEMATICAL SCRIPT CAPITAL G | ||
2019 | | 0xF0 0x9D 0x92 0xA5..0xA6 #L& [2] MATHEMATICAL SCRIPT CAPITAL J..... | ||
2020 | | 0xF0 0x9D 0x92 0xA9..0xAC #L& [4] MATHEMATICAL SCRIPT CAPITAL N..... | ||
2021 | | 0xF0 0x9D 0x92 0xAE..0xB9 #L& [12] MATHEMATICAL SCRIPT CAPITAL S..... | ||
2022 | | 0xF0 0x9D 0x92 0xBB #L& MATHEMATICAL SCRIPT SMALL F | ||
2023 | | 0xF0 0x9D 0x92 0xBD..0xFF #L& [7] MATHEMATICAL SCRIPT SMALL H..MA... | ||
2024 | | 0xF0 0x9D 0x93 0x00..0x83 # | ||
2025 | | 0xF0 0x9D 0x93 0x85..0xFF #L& [65] MATHEMATICAL SCRIPT SMALL P..MA... | ||
2026 | | 0xF0 0x9D 0x94 0x00..0x85 # | ||
2027 | | 0xF0 0x9D 0x94 0x87..0x8A #L& [4] MATHEMATICAL FRAKTUR CAPITAL D.... | ||
2028 | | 0xF0 0x9D 0x94 0x8D..0x94 #L& [8] MATHEMATICAL FRAKTUR CAPITAL J.... | ||
2029 | | 0xF0 0x9D 0x94 0x96..0x9C #L& [7] MATHEMATICAL FRAKTUR CAPITAL S.... | ||
2030 | | 0xF0 0x9D 0x94 0x9E..0xB9 #L& [28] MATHEMATICAL FRAKTUR SMALL A..M... | ||
2031 | | 0xF0 0x9D 0x94 0xBB..0xBE #L& [4] MATHEMATICAL DOUBLE-STRUCK CAPI... | ||
2032 | | 0xF0 0x9D 0x95 0x80..0x84 #L& [5] MATHEMATICAL DOUBLE-STRUCK CAPI... | ||
2033 | | 0xF0 0x9D 0x95 0x86 #L& MATHEMATICAL DOUBLE-STRUCK CAPITAL O | ||
2034 | | 0xF0 0x9D 0x95 0x8A..0x90 #L& [7] MATHEMATICAL DOUBLE-STRUCK CAPI... | ||
2035 | | 0xF0 0x9D 0x95 0x92..0xFF #L& [340] MATHEMATICAL DOUBLE-STRUC... | ||
2036 | | 0xF0 0x9D 0x96..0x99 0x00..0xFF # | ||
2037 | | 0xF0 0x9D 0x9A 0x00..0xA5 # | ||
2038 | | 0xF0 0x9D 0x9A 0xA8..0xFF #L& [25] MATHEMATICAL BOLD CAPITAL ALPHA... | ||
2039 | | 0xF0 0x9D 0x9B 0x00..0x80 # | ||
2040 | | 0xF0 0x9D 0x9B 0x82..0x9A #L& [25] MATHEMATICAL BOLD SMALL ALPHA..... | ||
2041 | | 0xF0 0x9D 0x9B 0x9C..0xBA #L& [31] MATHEMATICAL BOLD EPSILON SYMBO... | ||
2042 | | 0xF0 0x9D 0x9B 0xBC..0xFF #L& [25] MATHEMATICAL ITALIC SMALL ALPHA... | ||
2043 | | 0xF0 0x9D 0x9C 0x00..0x94 # | ||
2044 | | 0xF0 0x9D 0x9C 0x96..0xB4 #L& [31] MATHEMATICAL ITALIC EPSILON SYM... | ||
2045 | | 0xF0 0x9D 0x9C 0xB6..0xFF #L& [25] MATHEMATICAL BOLD ITALIC SMALL ... | ||
2046 | | 0xF0 0x9D 0x9D 0x00..0x8E # | ||
2047 | | 0xF0 0x9D 0x9D 0x90..0xAE #L& [31] MATHEMATICAL BOLD ITALIC EPSILO... | ||
2048 | | 0xF0 0x9D 0x9D 0xB0..0xFF #L& [25] MATHEMATICAL SANS-SERIF BOLD SM... | ||
2049 | | 0xF0 0x9D 0x9E 0x00..0x88 # | ||
2050 | | 0xF0 0x9D 0x9E 0x8A..0xA8 #L& [31] MATHEMATICAL SANS-SERIF BOLD EP... | ||
2051 | | 0xF0 0x9D 0x9E 0xAA..0xFF #L& [25] MATHEMATICAL SANS-SERIF BOLD IT... | ||
2052 | | 0xF0 0x9D 0x9F 0x00..0x82 # | ||
2053 | | 0xF0 0x9D 0x9F 0x84..0x8B #L& [8] MATHEMATICAL SANS-SERIF BOLD IT... | ||
2054 | | 0xF0 0x9D 0x9F 0x8E..0xBF #Nd [50] MATHEMATICAL BOLD DIGIT ZERO..M... | ||
2055 | | 0xF0 0x9D 0xA8 0x80..0xB6 #Mn [55] SIGNWRITING HEAD RIM..SIGNWRITI... | ||
2056 | | 0xF0 0x9D 0xA8 0xBB..0xFF #Mn [50] SIGNWRITING MOUTH CLOSED NEUTRA... | ||
2057 | | 0xF0 0x9D 0xA9 0x00..0xAC # | ||
2058 | | 0xF0 0x9D 0xA9 0xB5 #Mn SIGNWRITING UPPER BODY TILTING FRO... | ||
2059 | | 0xF0 0x9D 0xAA 0x84 #Mn SIGNWRITING LOCATION HEAD NECK | ||
2060 | | 0xF0 0x9D 0xAA 0x9B..0x9F #Mn [5] SIGNWRITING FILL MODIFIER-2..SI... | ||
2061 | | 0xF0 0x9D 0xAA 0xA1..0xAF #Mn [15] SIGNWRITING ROTATION MODIFIER-2... | ||
2062 | | 0xF0 0x9E 0x80 0x80..0x86 #Mn [7] COMBINING GLAGOLITIC LETTER AZU... | ||
2063 | | 0xF0 0x9E 0x80 0x88..0x98 #Mn [17] COMBINING GLAGOLITIC LETTER ZEM... | ||
2064 | | 0xF0 0x9E 0x80 0x9B..0xA1 #Mn [7] COMBINING GLAGOLITIC LETTER SHT... | ||
2065 | | 0xF0 0x9E 0x80 0xA3..0xA4 #Mn [2] COMBINING GLAGOLITIC LETTER YU.... | ||
2066 | | 0xF0 0x9E 0x80 0xA6..0xAA #Mn [5] COMBINING GLAGOLITIC LETTER YO.... | ||
2067 | | 0xF0 0x9E 0xA0 0x80..0xFF #Lo [197] MENDE KIKAKUI SYLLABLE M0... | ||
2068 | | 0xF0 0x9E 0xA1..0xA2 0x00..0xFF # | ||
2069 | | 0xF0 0x9E 0xA3 0x00..0x84 # | ||
2070 | | 0xF0 0x9E 0xA3 0x90..0x96 #Mn [7] MENDE KIKAKUI COMBINING NUMBER ... | ||
2071 | | 0xF0 0x9E 0xA4 0x80..0xFF #L& [68] ADLAM CAPITAL LETTER ALIF..ADLA... | ||
2072 | | 0xF0 0x9E 0xA5 0x00..0x83 # | ||
2073 | | 0xF0 0x9E 0xA5 0x84..0x8A #Mn [7] ADLAM ALIF LENGTHENER..ADLAM NUKTA | ||
2074 | | 0xF0 0x9E 0xA5 0x90..0x99 #Nd [10] ADLAM DIGIT ZERO..ADLAM DIGIT NINE | ||
2075 | | 0xF0 0x9E 0xB8 0x80..0x83 #Lo [4] ARABIC MATHEMATICAL ALEF..ARABI... | ||
2076 | | 0xF0 0x9E 0xB8 0x85..0x9F #Lo [27] ARABIC MATHEMATICAL WAW..ARABIC... | ||
2077 | | 0xF0 0x9E 0xB8 0xA1..0xA2 #Lo [2] ARABIC MATHEMATICAL INITIAL BEH... | ||
2078 | | 0xF0 0x9E 0xB8 0xA4 #Lo ARABIC MATHEMATICAL INITIAL HEH | ||
2079 | | 0xF0 0x9E 0xB8 0xA7 #Lo ARABIC MATHEMATICAL INITIAL HAH | ||
2080 | | 0xF0 0x9E 0xB8 0xA9..0xB2 #Lo [10] ARABIC MATHEMATICAL INITIAL YEH... | ||
2081 | | 0xF0 0x9E 0xB8 0xB4..0xB7 #Lo [4] ARABIC MATHEMATICAL INITIAL SHE... | ||
2082 | | 0xF0 0x9E 0xB8 0xB9 #Lo ARABIC MATHEMATICAL INITIAL DAD | ||
2083 | | 0xF0 0x9E 0xB8 0xBB #Lo ARABIC MATHEMATICAL INITIAL GHAIN | ||
2084 | | 0xF0 0x9E 0xB9 0x82 #Lo ARABIC MATHEMATICAL TAILED JEEM | ||
2085 | | 0xF0 0x9E 0xB9 0x87 #Lo ARABIC MATHEMATICAL TAILED HAH | ||
2086 | | 0xF0 0x9E 0xB9 0x89 #Lo ARABIC MATHEMATICAL TAILED YEH | ||
2087 | | 0xF0 0x9E 0xB9 0x8B #Lo ARABIC MATHEMATICAL TAILED LAM | ||
2088 | | 0xF0 0x9E 0xB9 0x8D..0x8F #Lo [3] ARABIC MATHEMATICAL TAILED NOON... | ||
2089 | | 0xF0 0x9E 0xB9 0x91..0x92 #Lo [2] ARABIC MATHEMATICAL TAILED SAD.... | ||
2090 | | 0xF0 0x9E 0xB9 0x94 #Lo ARABIC MATHEMATICAL TAILED SHEEN | ||
2091 | | 0xF0 0x9E 0xB9 0x97 #Lo ARABIC MATHEMATICAL TAILED KHAH | ||
2092 | | 0xF0 0x9E 0xB9 0x99 #Lo ARABIC MATHEMATICAL TAILED DAD | ||
2093 | | 0xF0 0x9E 0xB9 0x9B #Lo ARABIC MATHEMATICAL TAILED GHAIN | ||
2094 | | 0xF0 0x9E 0xB9 0x9D #Lo ARABIC MATHEMATICAL TAILED DOTLESS... | ||
2095 | | 0xF0 0x9E 0xB9 0x9F #Lo ARABIC MATHEMATICAL TAILED DOTLESS... | ||
2096 | | 0xF0 0x9E 0xB9 0xA1..0xA2 #Lo [2] ARABIC MATHEMATICAL STRETCHED B... | ||
2097 | | 0xF0 0x9E 0xB9 0xA4 #Lo ARABIC MATHEMATICAL STRETCHED HEH | ||
2098 | | 0xF0 0x9E 0xB9 0xA7..0xAA #Lo [4] ARABIC MATHEMATICAL STRETCHED H... | ||
2099 | | 0xF0 0x9E 0xB9 0xAC..0xB2 #Lo [7] ARABIC MATHEMATICAL STRETCHED M... | ||
2100 | | 0xF0 0x9E 0xB9 0xB4..0xB7 #Lo [4] ARABIC MATHEMATICAL STRETCHED S... | ||
2101 | | 0xF0 0x9E 0xB9 0xB9..0xBC #Lo [4] ARABIC MATHEMATICAL STRETCHED D... | ||
2102 | | 0xF0 0x9E 0xB9 0xBE #Lo ARABIC MATHEMATICAL STRETCHED DOTL... | ||
2103 | | 0xF0 0x9E 0xBA 0x80..0x89 #Lo [10] ARABIC MATHEMATICAL LOOPED ALEF... | ||
2104 | | 0xF0 0x9E 0xBA 0x8B..0x9B #Lo [17] ARABIC MATHEMATICAL LOOPED LAM.... | ||
2105 | | 0xF0 0x9E 0xBA 0xA1..0xA3 #Lo [3] ARABIC MATHEMATICAL DOUBLE-STRU... | ||
2106 | | 0xF0 0x9E 0xBA 0xA5..0xA9 #Lo [5] ARABIC MATHEMATICAL DOUBLE-STRU... | ||
2107 | | 0xF0 0x9E 0xBA 0xAB..0xBB #Lo [17] ARABIC MATHEMATICAL DOUBLE-STRU... | ||
2108 | | 0xF0 0xA0 0x80 0x80..0xFF #Lo [42711] CJK UNIFIED IDEOG... | ||
2109 | | 0xF0 0xA0 0x81..0xFF 0x00..0xFF # | ||
2110 | | 0xF0 0xA1..0xA9 0x00..0xFF 0x00..0xFF # | ||
2111 | | 0xF0 0xAA 0x00 0x00..0xFF # | ||
2112 | | 0xF0 0xAA 0x01..0x9A 0x00..0xFF # | ||
2113 | | 0xF0 0xAA 0x9B 0x00..0x96 # | ||
2114 | | 0xF0 0xAA 0x9C 0x80..0xFF #Lo [4149] CJK UNIFIED IDEOGRAPH-2A... | ||
2115 | | 0xF0 0xAA 0x9D..0xFF 0x00..0xFF # | ||
2116 | | 0xF0 0xAB 0x00 0x00..0xFF # | ||
2117 | | 0xF0 0xAB 0x01..0x9B 0x00..0xFF # | ||
2118 | | 0xF0 0xAB 0x9C 0x00..0xB4 # | ||
2119 | | 0xF0 0xAB 0x9D 0x80..0xFF #Lo [222] CJK UNIFIED IDEOGRAPH-2B7... | ||
2120 | | 0xF0 0xAB 0x9E..0x9F 0x00..0xFF # | ||
2121 | | 0xF0 0xAB 0xA0 0x00..0x9D # | ||
2122 | | 0xF0 0xAB 0xA0 0xA0..0xFF #Lo [5762] CJK UNIFIED IDEOGRAPH-2B... | ||
2123 | | 0xF0 0xAB 0xA1..0xFF 0x00..0xFF # | ||
2124 | | 0xF0 0xAC 0x00 0x00..0xFF # | ||
2125 | | 0xF0 0xAC 0x01..0xB9 0x00..0xFF # | ||
2126 | | 0xF0 0xAC 0xBA 0x00..0xA1 # | ||
2127 | | 0xF0 0xAF 0xA0 0x80..0xFF #Lo [542] CJK COMPATIBILITY IDEOGRA... | ||
2128 | | 0xF0 0xAF 0xA1..0xA7 0x00..0xFF # | ||
2129 | | 0xF0 0xAF 0xA8 0x00..0x9D # | ||
2130 | | 0xF3 0xA0 0x84 0x80..0xFF #Mn [240] VARIATION SELECTOR-17..VA... | ||
2131 | | 0xF3 0xA0 0x85..0x86 0x00..0xFF # | ||
2132 | | 0xF3 0xA0 0x87 0x00..0xAF # | ||
2133 | ; | ||
2134 | |||
2135 | }%% | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/variables.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/variables.go new file mode 100644 index 0000000..eeee1a5 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/variables.go | |||
@@ -0,0 +1,86 @@ | |||
1 | package hclsyntax | ||
2 | |||
3 | import ( | ||
4 | "github.com/hashicorp/hcl2/hcl" | ||
5 | ) | ||
6 | |||
7 | // Variables returns all of the variables referenced within a given experssion. | ||
8 | // | ||
9 | // This is the implementation of the "Variables" method on every native | ||
10 | // expression. | ||
11 | func Variables(expr Expression) []hcl.Traversal { | ||
12 | var vars []hcl.Traversal | ||
13 | |||
14 | walker := &variablesWalker{ | ||
15 | Callback: func(t hcl.Traversal) { | ||
16 | vars = append(vars, t) | ||
17 | }, | ||
18 | } | ||
19 | |||
20 | Walk(expr, walker) | ||
21 | |||
22 | return vars | ||
23 | } | ||
24 | |||
// variablesWalker is a Walker implementation that calls its callback for any
// root scope traversal found while walking.
type variablesWalker struct {
	// Callback is invoked once for each traversal whose root name is not
	// masked by one of the active local scopes.
	Callback    func(hcl.Traversal)
	// localScopes is a stack of local-name sets, one per ChildScope node
	// currently being traversed; pushed in Enter and popped in Exit.
	localScopes []map[string]struct{}
}
31 | |||
32 | func (w *variablesWalker) Enter(n Node) hcl.Diagnostics { | ||
33 | switch tn := n.(type) { | ||
34 | case *ScopeTraversalExpr: | ||
35 | t := tn.Traversal | ||
36 | |||
37 | // Check if the given root name appears in any of the active | ||
38 | // local scopes. We don't want to return local variables here, since | ||
39 | // the goal of walking variables is to tell the calling application | ||
40 | // which names it needs to populate in the _root_ scope. | ||
41 | name := t.RootName() | ||
42 | for _, names := range w.localScopes { | ||
43 | if _, localized := names[name]; localized { | ||
44 | return nil | ||
45 | } | ||
46 | } | ||
47 | |||
48 | w.Callback(t) | ||
49 | case ChildScope: | ||
50 | w.localScopes = append(w.localScopes, tn.LocalNames) | ||
51 | } | ||
52 | return nil | ||
53 | } | ||
54 | |||
55 | func (w *variablesWalker) Exit(n Node) hcl.Diagnostics { | ||
56 | switch n.(type) { | ||
57 | case ChildScope: | ||
58 | // pop the latest local scope, assuming that the walker will | ||
59 | // behave symmetrically as promised. | ||
60 | w.localScopes = w.localScopes[:len(w.localScopes)-1] | ||
61 | } | ||
62 | return nil | ||
63 | } | ||
64 | |||
// ChildScope is a synthetic AST node that is visited during a walk to
// indicate that its descendent will be evaluated in a child scope, which
// may mask certain variables from the parent scope as locals.
//
// ChildScope nodes don't really exist in the AST, but are rather synthesized
// on the fly during walk. Therefore it doesn't do any good to transform them;
// instead, transform either parent node that created a scope or the expression
// that the child scope struct wraps.
type ChildScope struct {
	// LocalNames are the names defined locally by this scope, which mask
	// identically-named variables from the parent scope during a walk.
	LocalNames map[string]struct{}
	// Expr is the expression evaluated within this child scope.
	Expr *Expression // pointer because it can be replaced on walk
}
77 | |||
78 | func (e ChildScope) walkChildNodes(w internalWalkFunc) { | ||
79 | *(e.Expr) = w(*(e.Expr)).(Expression) | ||
80 | } | ||
81 | |||
82 | // Range returns the range of the expression that the ChildScope is | ||
83 | // encapsulating. It isn't really very useful to call Range on a ChildScope. | ||
84 | func (e ChildScope) Range() hcl.Range { | ||
85 | return (*e.Expr).Range() | ||
86 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/walk.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/walk.go new file mode 100644 index 0000000..3405d26 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/walk.go | |||
@@ -0,0 +1,77 @@ | |||
1 | package hclsyntax | ||
2 | |||
3 | import ( | ||
4 | "github.com/hashicorp/hcl2/hcl" | ||
5 | ) | ||
6 | |||
7 | // VisitFunc is the callback signature for VisitAll. | ||
8 | type VisitFunc func(node Node) hcl.Diagnostics | ||
9 | |||
10 | // VisitAll is a basic way to traverse the AST beginning with a particular | ||
11 | // node. The given function will be called once for each AST node in | ||
12 | // depth-first order, but no context is provided about the shape of the tree. | ||
13 | // | ||
14 | // The VisitFunc may return diagnostics, in which case they will be accumulated | ||
15 | // and returned as a single set. | ||
16 | func VisitAll(node Node, f VisitFunc) hcl.Diagnostics { | ||
17 | diags := f(node) | ||
18 | node.walkChildNodes(func(node Node) Node { | ||
19 | diags = append(diags, VisitAll(node, f)...) | ||
20 | return node | ||
21 | }) | ||
22 | return diags | ||
23 | } | ||
24 | |||
// Walker is an interface used with Walk.
type Walker interface {
	// Enter is called for each node before its children are visited.
	Enter(node Node) hcl.Diagnostics
	// Exit is called for each node after its children have been visited,
	// symmetrically with Enter (walkers such as variablesWalker rely on
	// this symmetry to maintain a scope stack).
	Exit(node Node) hcl.Diagnostics
}
30 | |||
31 | // Walk is a more complex way to traverse the AST starting with a particular | ||
32 | // node, which provides information about the tree structure via separate | ||
33 | // Enter and Exit functions. | ||
34 | func Walk(node Node, w Walker) hcl.Diagnostics { | ||
35 | diags := w.Enter(node) | ||
36 | node.walkChildNodes(func(node Node) Node { | ||
37 | diags = append(diags, Walk(node, w)...) | ||
38 | return node | ||
39 | }) | ||
40 | return diags | ||
41 | } | ||
42 | |||
// Transformer is an interface used with Transform, allowing a caller to
// rewrite an AST in place.
type Transformer interface {
	// Transform accepts a node and returns a replacement node along with
	// a flag for whether to also visit child nodes. If the flag is false,
	// none of the child nodes will be visited and the TransformExit method
	// will not be called for the node.
	//
	// It is acceptable and appropriate for Transform to return the same node
	// it was given, for situations where no transform is needed.
	Transform(node Node) (Node, bool, hcl.Diagnostics)

	// TransformExit signals the end of transformations of child nodes of the
	// given node. If Transform returned a new node, the given node is the
	// node that was returned, rather than the node that was originally
	// encountered.
	TransformExit(node Node) hcl.Diagnostics
}
60 | |||
61 | // Transform allows for in-place transformations of an AST starting with a | ||
62 | // particular node. The provider Transformer implementation drives the | ||
63 | // transformation process. The return value is the node that replaced the | ||
64 | // given top-level node. | ||
65 | func Transform(node Node, t Transformer) (Node, hcl.Diagnostics) { | ||
66 | newNode, descend, diags := t.Transform(node) | ||
67 | if !descend { | ||
68 | return newNode, diags | ||
69 | } | ||
70 | node.walkChildNodes(func(node Node) Node { | ||
71 | newNode, newDiags := Transform(node, t) | ||
72 | diags = append(diags, newDiags...) | ||
73 | return newNode | ||
74 | }) | ||
75 | diags = append(diags, t.TransformExit(newNode)...) | ||
76 | return newNode, diags | ||
77 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/ast.go b/vendor/github.com/hashicorp/hcl2/hcl/json/ast.go new file mode 100644 index 0000000..753bfa0 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/json/ast.go | |||
@@ -0,0 +1,121 @@ | |||
1 | package json | ||
2 | |||
3 | import ( | ||
4 | "math/big" | ||
5 | |||
6 | "github.com/hashicorp/hcl2/hcl" | ||
7 | ) | ||
8 | |||
// node is the interface implemented by every JSON parse tree node.
type node interface {
	// Range returns the full source range of the node.
	Range() hcl.Range
	// StartRange returns the range of just the token(s) that begin the
	// node, e.g. the opening brace of an object.
	StartRange() hcl.Range
}
13 | |||
14 | type objectVal struct { | ||
15 | Attrs []*objectAttr | ||
16 | SrcRange hcl.Range // range of the entire object, brace-to-brace | ||
17 | OpenRange hcl.Range // range of the opening brace | ||
18 | CloseRange hcl.Range // range of the closing brace | ||
19 | } | ||
20 | |||
21 | func (n *objectVal) Range() hcl.Range { | ||
22 | return n.SrcRange | ||
23 | } | ||
24 | |||
25 | func (n *objectVal) StartRange() hcl.Range { | ||
26 | return n.OpenRange | ||
27 | } | ||
28 | |||
29 | type objectAttr struct { | ||
30 | Name string | ||
31 | Value node | ||
32 | NameRange hcl.Range // range of the name string | ||
33 | } | ||
34 | |||
35 | func (n *objectAttr) Range() hcl.Range { | ||
36 | return n.NameRange | ||
37 | } | ||
38 | |||
39 | func (n *objectAttr) StartRange() hcl.Range { | ||
40 | return n.NameRange | ||
41 | } | ||
42 | |||
43 | type arrayVal struct { | ||
44 | Values []node | ||
45 | SrcRange hcl.Range // range of the entire object, bracket-to-bracket | ||
46 | OpenRange hcl.Range // range of the opening bracket | ||
47 | } | ||
48 | |||
49 | func (n *arrayVal) Range() hcl.Range { | ||
50 | return n.SrcRange | ||
51 | } | ||
52 | |||
53 | func (n *arrayVal) StartRange() hcl.Range { | ||
54 | return n.OpenRange | ||
55 | } | ||
56 | |||
57 | type booleanVal struct { | ||
58 | Value bool | ||
59 | SrcRange hcl.Range | ||
60 | } | ||
61 | |||
62 | func (n *booleanVal) Range() hcl.Range { | ||
63 | return n.SrcRange | ||
64 | } | ||
65 | |||
66 | func (n *booleanVal) StartRange() hcl.Range { | ||
67 | return n.SrcRange | ||
68 | } | ||
69 | |||
70 | type numberVal struct { | ||
71 | Value *big.Float | ||
72 | SrcRange hcl.Range | ||
73 | } | ||
74 | |||
75 | func (n *numberVal) Range() hcl.Range { | ||
76 | return n.SrcRange | ||
77 | } | ||
78 | |||
79 | func (n *numberVal) StartRange() hcl.Range { | ||
80 | return n.SrcRange | ||
81 | } | ||
82 | |||
83 | type stringVal struct { | ||
84 | Value string | ||
85 | SrcRange hcl.Range | ||
86 | } | ||
87 | |||
88 | func (n *stringVal) Range() hcl.Range { | ||
89 | return n.SrcRange | ||
90 | } | ||
91 | |||
92 | func (n *stringVal) StartRange() hcl.Range { | ||
93 | return n.SrcRange | ||
94 | } | ||
95 | |||
96 | type nullVal struct { | ||
97 | SrcRange hcl.Range | ||
98 | } | ||
99 | |||
100 | func (n *nullVal) Range() hcl.Range { | ||
101 | return n.SrcRange | ||
102 | } | ||
103 | |||
104 | func (n *nullVal) StartRange() hcl.Range { | ||
105 | return n.SrcRange | ||
106 | } | ||
107 | |||
108 | // invalidVal is used as a placeholder where a value is needed for a valid | ||
109 | // parse tree but the input was invalid enough to prevent one from being | ||
110 | // created. | ||
111 | type invalidVal struct { | ||
112 | SrcRange hcl.Range | ||
113 | } | ||
114 | |||
115 | func (n invalidVal) Range() hcl.Range { | ||
116 | return n.SrcRange | ||
117 | } | ||
118 | |||
119 | func (n invalidVal) StartRange() hcl.Range { | ||
120 | return n.SrcRange | ||
121 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/didyoumean.go b/vendor/github.com/hashicorp/hcl2/hcl/json/didyoumean.go new file mode 100644 index 0000000..fbdd8bf --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/json/didyoumean.go | |||
@@ -0,0 +1,33 @@ | |||
1 | package json | ||
2 | |||
3 | import ( | ||
4 | "github.com/agext/levenshtein" | ||
5 | ) | ||
6 | |||
// keywords are the only bare (unquoted) words that are valid in JSON.
var keywords = []string{"false", "true", "null"}

// keywordSuggestion tries to find a valid JSON keyword that is close to the
// given string and returns it if found. If no keyword is close enough, returns
// the empty string.
func keywordSuggestion(given string) string {
	return nameSuggestion(given, keywords)
}
15 | |||
16 | // nameSuggestion tries to find a name from the given slice of suggested names | ||
17 | // that is close to the given name and returns it if found. If no suggestion | ||
18 | // is close enough, returns the empty string. | ||
19 | // | ||
20 | // The suggestions are tried in order, so earlier suggestions take precedence | ||
21 | // if the given string is similar to two or more suggestions. | ||
22 | // | ||
23 | // This function is intended to be used with a relatively-small number of | ||
24 | // suggestions. It's not optimized for hundreds or thousands of them. | ||
25 | func nameSuggestion(given string, suggestions []string) string { | ||
26 | for _, suggestion := range suggestions { | ||
27 | dist := levenshtein.Distance(given, suggestion, nil) | ||
28 | if dist < 3 { // threshold determined experimentally | ||
29 | return suggestion | ||
30 | } | ||
31 | } | ||
32 | return "" | ||
33 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/doc.go b/vendor/github.com/hashicorp/hcl2/hcl/json/doc.go new file mode 100644 index 0000000..4943f9b --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/json/doc.go | |||
@@ -0,0 +1,8 @@ | |||
1 | // Package json is the JSON parser for HCL. It parses JSON files and returns | ||
2 | // implementations of the core HCL structural interfaces in terms of the | ||
3 | // JSON data inside. | ||
4 | // | ||
5 | // This is not a generic JSON parser. Instead, it deals with the mapping from | ||
6 | // the JSON information model to the HCL information model, using a number | ||
7 | // of hard-coded structural conventions. | ||
8 | package json | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/navigation.go b/vendor/github.com/hashicorp/hcl2/hcl/json/navigation.go new file mode 100644 index 0000000..bc8a97f --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/json/navigation.go | |||
@@ -0,0 +1,70 @@ | |||
1 | package json | ||
2 | |||
3 | import ( | ||
4 | "fmt" | ||
5 | "strings" | ||
6 | ) | ||
7 | |||
// navigation provides position-description services for a JSON parse tree,
// rooted at root. It is the receiver for the hcled.ContextString
// implementation below.
type navigation struct {
	root node
}
11 | |||
12 | // Implementation of hcled.ContextString | ||
13 | func (n navigation) ContextString(offset int) string { | ||
14 | steps := navigationStepsRev(n.root, offset) | ||
15 | if steps == nil { | ||
16 | return "" | ||
17 | } | ||
18 | |||
19 | // We built our slice backwards, so we'll reverse it in-place now. | ||
20 | half := len(steps) / 2 // integer division | ||
21 | for i := 0; i < half; i++ { | ||
22 | steps[i], steps[len(steps)-1-i] = steps[len(steps)-1-i], steps[i] | ||
23 | } | ||
24 | |||
25 | ret := strings.Join(steps, "") | ||
26 | if len(ret) > 0 && ret[0] == '.' { | ||
27 | ret = ret[1:] | ||
28 | } | ||
29 | return ret | ||
30 | } | ||
31 | |||
32 | func navigationStepsRev(v node, offset int) []string { | ||
33 | switch tv := v.(type) { | ||
34 | case *objectVal: | ||
35 | // Do any of our properties have an object that contains the target | ||
36 | // offset? | ||
37 | for _, attr := range tv.Attrs { | ||
38 | k := attr.Name | ||
39 | av := attr.Value | ||
40 | |||
41 | switch av.(type) { | ||
42 | case *objectVal, *arrayVal: | ||
43 | // okay | ||
44 | default: | ||
45 | continue | ||
46 | } | ||
47 | |||
48 | if av.Range().ContainsOffset(offset) { | ||
49 | return append(navigationStepsRev(av, offset), "."+k) | ||
50 | } | ||
51 | } | ||
52 | case *arrayVal: | ||
53 | // Do any of our elements contain the target offset? | ||
54 | for i, elem := range tv.Values { | ||
55 | |||
56 | switch elem.(type) { | ||
57 | case *objectVal, *arrayVal: | ||
58 | // okay | ||
59 | default: | ||
60 | continue | ||
61 | } | ||
62 | |||
63 | if elem.Range().ContainsOffset(offset) { | ||
64 | return append(navigationStepsRev(elem, offset), fmt.Sprintf("[%d]", i)) | ||
65 | } | ||
66 | } | ||
67 | } | ||
68 | |||
69 | return nil | ||
70 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/parser.go b/vendor/github.com/hashicorp/hcl2/hcl/json/parser.go new file mode 100644 index 0000000..246fd1c --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/json/parser.go | |||
@@ -0,0 +1,491 @@ | |||
1 | package json | ||
2 | |||
3 | import ( | ||
4 | "encoding/json" | ||
5 | "fmt" | ||
6 | "math/big" | ||
7 | |||
8 | "github.com/hashicorp/hcl2/hcl" | ||
9 | ) | ||
10 | |||
11 | func parseFileContent(buf []byte, filename string) (node, hcl.Diagnostics) { | ||
12 | tokens := scan(buf, pos{ | ||
13 | Filename: filename, | ||
14 | Pos: hcl.Pos{ | ||
15 | Byte: 0, | ||
16 | Line: 1, | ||
17 | Column: 1, | ||
18 | }, | ||
19 | }) | ||
20 | p := newPeeker(tokens) | ||
21 | node, diags := parseValue(p) | ||
22 | if len(diags) == 0 && p.Peek().Type != tokenEOF { | ||
23 | diags = diags.Append(&hcl.Diagnostic{ | ||
24 | Severity: hcl.DiagError, | ||
25 | Summary: "Extraneous data after value", | ||
26 | Detail: "Extra characters appear after the JSON value.", | ||
27 | Subject: p.Peek().Range.Ptr(), | ||
28 | }) | ||
29 | } | ||
30 | return node, diags | ||
31 | } | ||
32 | |||
33 | func parseValue(p *peeker) (node, hcl.Diagnostics) { | ||
34 | tok := p.Peek() | ||
35 | |||
36 | wrapInvalid := func(n node, diags hcl.Diagnostics) (node, hcl.Diagnostics) { | ||
37 | if n != nil { | ||
38 | return n, diags | ||
39 | } | ||
40 | return invalidVal{tok.Range}, diags | ||
41 | } | ||
42 | |||
43 | switch tok.Type { | ||
44 | case tokenBraceO: | ||
45 | return wrapInvalid(parseObject(p)) | ||
46 | case tokenBrackO: | ||
47 | return wrapInvalid(parseArray(p)) | ||
48 | case tokenNumber: | ||
49 | return wrapInvalid(parseNumber(p)) | ||
50 | case tokenString: | ||
51 | return wrapInvalid(parseString(p)) | ||
52 | case tokenKeyword: | ||
53 | return wrapInvalid(parseKeyword(p)) | ||
54 | case tokenBraceC: | ||
55 | return wrapInvalid(nil, hcl.Diagnostics{ | ||
56 | { | ||
57 | Severity: hcl.DiagError, | ||
58 | Summary: "Missing attribute value", | ||
59 | Detail: "A JSON value must start with a brace, a bracket, a number, a string, or a keyword.", | ||
60 | Subject: &tok.Range, | ||
61 | }, | ||
62 | }) | ||
63 | case tokenBrackC: | ||
64 | return wrapInvalid(nil, hcl.Diagnostics{ | ||
65 | { | ||
66 | Severity: hcl.DiagError, | ||
67 | Summary: "Missing array element value", | ||
68 | Detail: "A JSON value must start with a brace, a bracket, a number, a string, or a keyword.", | ||
69 | Subject: &tok.Range, | ||
70 | }, | ||
71 | }) | ||
72 | case tokenEOF: | ||
73 | return wrapInvalid(nil, hcl.Diagnostics{ | ||
74 | { | ||
75 | Severity: hcl.DiagError, | ||
76 | Summary: "Missing value", | ||
77 | Detail: "The JSON data ends prematurely.", | ||
78 | Subject: &tok.Range, | ||
79 | }, | ||
80 | }) | ||
81 | default: | ||
82 | return wrapInvalid(nil, hcl.Diagnostics{ | ||
83 | { | ||
84 | Severity: hcl.DiagError, | ||
85 | Summary: "Invalid start of value", | ||
86 | Detail: "A JSON value must start with a brace, a bracket, a number, a string, or a keyword.", | ||
87 | Subject: &tok.Range, | ||
88 | }, | ||
89 | }) | ||
90 | } | ||
91 | } | ||
92 | |||
93 | func tokenCanStartValue(tok token) bool { | ||
94 | switch tok.Type { | ||
95 | case tokenBraceO, tokenBrackO, tokenNumber, tokenString, tokenKeyword: | ||
96 | return true | ||
97 | default: | ||
98 | return false | ||
99 | } | ||
100 | } | ||
101 | |||
102 | func parseObject(p *peeker) (node, hcl.Diagnostics) { | ||
103 | var diags hcl.Diagnostics | ||
104 | |||
105 | open := p.Read() | ||
106 | attrs := []*objectAttr{} | ||
107 | |||
108 | // recover is used to shift the peeker to what seems to be the end of | ||
109 | // our object, so that when we encounter an error we leave the peeker | ||
110 | // at a reasonable point in the token stream to continue parsing. | ||
111 | recover := func(tok token) { | ||
112 | open := 1 | ||
113 | for { | ||
114 | switch tok.Type { | ||
115 | case tokenBraceO: | ||
116 | open++ | ||
117 | case tokenBraceC: | ||
118 | open-- | ||
119 | if open <= 1 { | ||
120 | return | ||
121 | } | ||
122 | case tokenEOF: | ||
123 | // Ran out of source before we were able to recover, | ||
124 | // so we'll bail here and let the caller deal with it. | ||
125 | return | ||
126 | } | ||
127 | tok = p.Read() | ||
128 | } | ||
129 | } | ||
130 | |||
131 | Token: | ||
132 | for { | ||
133 | if p.Peek().Type == tokenBraceC { | ||
134 | break Token | ||
135 | } | ||
136 | |||
137 | keyNode, keyDiags := parseValue(p) | ||
138 | diags = diags.Extend(keyDiags) | ||
139 | if keyNode == nil { | ||
140 | return nil, diags | ||
141 | } | ||
142 | |||
143 | keyStrNode, ok := keyNode.(*stringVal) | ||
144 | if !ok { | ||
145 | return nil, diags.Append(&hcl.Diagnostic{ | ||
146 | Severity: hcl.DiagError, | ||
147 | Summary: "Invalid object attribute name", | ||
148 | Detail: "A JSON object attribute name must be a string", | ||
149 | Subject: keyNode.StartRange().Ptr(), | ||
150 | }) | ||
151 | } | ||
152 | |||
153 | key := keyStrNode.Value | ||
154 | |||
155 | colon := p.Read() | ||
156 | if colon.Type != tokenColon { | ||
157 | recover(colon) | ||
158 | |||
159 | if colon.Type == tokenBraceC || colon.Type == tokenComma { | ||
160 | // Catch common mistake of using braces instead of brackets | ||
161 | // for an object. | ||
162 | return nil, diags.Append(&hcl.Diagnostic{ | ||
163 | Severity: hcl.DiagError, | ||
164 | Summary: "Missing object value", | ||
165 | Detail: "A JSON object attribute must have a value, introduced by a colon.", | ||
166 | Subject: &colon.Range, | ||
167 | }) | ||
168 | } | ||
169 | |||
170 | if colon.Type == tokenEquals { | ||
171 | // Possible confusion with native HCL syntax. | ||
172 | return nil, diags.Append(&hcl.Diagnostic{ | ||
173 | Severity: hcl.DiagError, | ||
174 | Summary: "Missing attribute value colon", | ||
175 | Detail: "JSON uses a colon as its name/value delimiter, not an equals sign.", | ||
176 | Subject: &colon.Range, | ||
177 | }) | ||
178 | } | ||
179 | |||
180 | return nil, diags.Append(&hcl.Diagnostic{ | ||
181 | Severity: hcl.DiagError, | ||
182 | Summary: "Missing attribute value colon", | ||
183 | Detail: "A colon must appear between an object attribute's name and its value.", | ||
184 | Subject: &colon.Range, | ||
185 | }) | ||
186 | } | ||
187 | |||
188 | valNode, valDiags := parseValue(p) | ||
189 | diags = diags.Extend(valDiags) | ||
190 | if valNode == nil { | ||
191 | return nil, diags | ||
192 | } | ||
193 | |||
194 | attrs = append(attrs, &objectAttr{ | ||
195 | Name: key, | ||
196 | Value: valNode, | ||
197 | NameRange: keyStrNode.SrcRange, | ||
198 | }) | ||
199 | |||
200 | switch p.Peek().Type { | ||
201 | case tokenComma: | ||
202 | comma := p.Read() | ||
203 | if p.Peek().Type == tokenBraceC { | ||
204 | // Special error message for this common mistake | ||
205 | return nil, diags.Append(&hcl.Diagnostic{ | ||
206 | Severity: hcl.DiagError, | ||
207 | Summary: "Trailing comma in object", | ||
208 | Detail: "JSON does not permit a trailing comma after the final attribute in an object.", | ||
209 | Subject: &comma.Range, | ||
210 | }) | ||
211 | } | ||
212 | continue Token | ||
213 | case tokenEOF: | ||
214 | return nil, diags.Append(&hcl.Diagnostic{ | ||
215 | Severity: hcl.DiagError, | ||
216 | Summary: "Unclosed object", | ||
217 | Detail: "No closing brace was found for this JSON object.", | ||
218 | Subject: &open.Range, | ||
219 | }) | ||
220 | case tokenBrackC: | ||
221 | // Consume the bracket anyway, so that we don't return with the peeker | ||
222 | // at a strange place. | ||
223 | p.Read() | ||
224 | return nil, diags.Append(&hcl.Diagnostic{ | ||
225 | Severity: hcl.DiagError, | ||
226 | Summary: "Mismatched braces", | ||
227 | Detail: "A JSON object must be closed with a brace, not a bracket.", | ||
228 | Subject: p.Peek().Range.Ptr(), | ||
229 | }) | ||
230 | case tokenBraceC: | ||
231 | break Token | ||
232 | default: | ||
233 | recover(p.Read()) | ||
234 | return nil, diags.Append(&hcl.Diagnostic{ | ||
235 | Severity: hcl.DiagError, | ||
236 | Summary: "Missing attribute seperator comma", | ||
237 | Detail: "A comma must appear between each attribute declaration in an object.", | ||
238 | Subject: p.Peek().Range.Ptr(), | ||
239 | }) | ||
240 | } | ||
241 | |||
242 | } | ||
243 | |||
244 | close := p.Read() | ||
245 | return &objectVal{ | ||
246 | Attrs: attrs, | ||
247 | SrcRange: hcl.RangeBetween(open.Range, close.Range), | ||
248 | OpenRange: open.Range, | ||
249 | CloseRange: close.Range, | ||
250 | }, diags | ||
251 | } | ||
252 | |||
253 | func parseArray(p *peeker) (node, hcl.Diagnostics) { | ||
254 | var diags hcl.Diagnostics | ||
255 | |||
256 | open := p.Read() | ||
257 | vals := []node{} | ||
258 | |||
259 | // recover is used to shift the peeker to what seems to be the end of | ||
260 | // our array, so that when we encounter an error we leave the peeker | ||
261 | // at a reasonable point in the token stream to continue parsing. | ||
262 | recover := func(tok token) { | ||
263 | open := 1 | ||
264 | for { | ||
265 | switch tok.Type { | ||
266 | case tokenBrackO: | ||
267 | open++ | ||
268 | case tokenBrackC: | ||
269 | open-- | ||
270 | if open <= 1 { | ||
271 | return | ||
272 | } | ||
273 | case tokenEOF: | ||
274 | // Ran out of source before we were able to recover, | ||
275 | // so we'll bail here and let the caller deal with it. | ||
276 | return | ||
277 | } | ||
278 | tok = p.Read() | ||
279 | } | ||
280 | } | ||
281 | |||
282 | Token: | ||
283 | for { | ||
284 | if p.Peek().Type == tokenBrackC { | ||
285 | break Token | ||
286 | } | ||
287 | |||
288 | valNode, valDiags := parseValue(p) | ||
289 | diags = diags.Extend(valDiags) | ||
290 | if valNode == nil { | ||
291 | return nil, diags | ||
292 | } | ||
293 | |||
294 | vals = append(vals, valNode) | ||
295 | |||
296 | switch p.Peek().Type { | ||
297 | case tokenComma: | ||
298 | comma := p.Read() | ||
299 | if p.Peek().Type == tokenBrackC { | ||
300 | // Special error message for this common mistake | ||
301 | return nil, diags.Append(&hcl.Diagnostic{ | ||
302 | Severity: hcl.DiagError, | ||
303 | Summary: "Trailing comma in array", | ||
304 | Detail: "JSON does not permit a trailing comma after the final attribute in an array.", | ||
305 | Subject: &comma.Range, | ||
306 | }) | ||
307 | } | ||
308 | continue Token | ||
309 | case tokenColon: | ||
310 | recover(p.Read()) | ||
311 | return nil, diags.Append(&hcl.Diagnostic{ | ||
312 | Severity: hcl.DiagError, | ||
313 | Summary: "Invalid array value", | ||
314 | Detail: "A colon is not used to introduce values in a JSON array.", | ||
315 | Subject: p.Peek().Range.Ptr(), | ||
316 | }) | ||
317 | case tokenEOF: | ||
318 | recover(p.Read()) | ||
319 | return nil, diags.Append(&hcl.Diagnostic{ | ||
320 | Severity: hcl.DiagError, | ||
321 | Summary: "Unclosed object", | ||
322 | Detail: "No closing bracket was found for this JSON array.", | ||
323 | Subject: &open.Range, | ||
324 | }) | ||
325 | case tokenBraceC: | ||
326 | recover(p.Read()) | ||
327 | return nil, diags.Append(&hcl.Diagnostic{ | ||
328 | Severity: hcl.DiagError, | ||
329 | Summary: "Mismatched brackets", | ||
330 | Detail: "A JSON array must be closed with a bracket, not a brace.", | ||
331 | Subject: p.Peek().Range.Ptr(), | ||
332 | }) | ||
333 | case tokenBrackC: | ||
334 | break Token | ||
335 | default: | ||
336 | recover(p.Read()) | ||
337 | return nil, diags.Append(&hcl.Diagnostic{ | ||
338 | Severity: hcl.DiagError, | ||
339 | Summary: "Missing attribute seperator comma", | ||
340 | Detail: "A comma must appear between each value in an array.", | ||
341 | Subject: p.Peek().Range.Ptr(), | ||
342 | }) | ||
343 | } | ||
344 | |||
345 | } | ||
346 | |||
347 | close := p.Read() | ||
348 | return &arrayVal{ | ||
349 | Values: vals, | ||
350 | SrcRange: hcl.RangeBetween(open.Range, close.Range), | ||
351 | OpenRange: open.Range, | ||
352 | }, diags | ||
353 | } | ||
354 | |||
355 | func parseNumber(p *peeker) (node, hcl.Diagnostics) { | ||
356 | tok := p.Read() | ||
357 | |||
358 | // Use encoding/json to validate the number syntax. | ||
359 | // TODO: Do this more directly to produce better diagnostics. | ||
360 | var num json.Number | ||
361 | err := json.Unmarshal(tok.Bytes, &num) | ||
362 | if err != nil { | ||
363 | return nil, hcl.Diagnostics{ | ||
364 | { | ||
365 | Severity: hcl.DiagError, | ||
366 | Summary: "Invalid JSON number", | ||
367 | Detail: fmt.Sprintf("There is a syntax error in the given JSON number."), | ||
368 | Subject: &tok.Range, | ||
369 | }, | ||
370 | } | ||
371 | } | ||
372 | |||
373 | f, _, err := big.ParseFloat(string(num), 10, 512, big.ToNearestEven) | ||
374 | if err != nil { | ||
375 | // Should never happen if above passed, since JSON numbers are a subset | ||
376 | // of what big.Float can parse... | ||
377 | return nil, hcl.Diagnostics{ | ||
378 | { | ||
379 | Severity: hcl.DiagError, | ||
380 | Summary: "Invalid JSON number", | ||
381 | Detail: fmt.Sprintf("There is a syntax error in the given JSON number."), | ||
382 | Subject: &tok.Range, | ||
383 | }, | ||
384 | } | ||
385 | } | ||
386 | |||
387 | return &numberVal{ | ||
388 | Value: f, | ||
389 | SrcRange: tok.Range, | ||
390 | }, nil | ||
391 | } | ||
392 | |||
393 | func parseString(p *peeker) (node, hcl.Diagnostics) { | ||
394 | tok := p.Read() | ||
395 | var str string | ||
396 | err := json.Unmarshal(tok.Bytes, &str) | ||
397 | |||
398 | if err != nil { | ||
399 | var errRange hcl.Range | ||
400 | if serr, ok := err.(*json.SyntaxError); ok { | ||
401 | errOfs := serr.Offset | ||
402 | errPos := tok.Range.Start | ||
403 | errPos.Byte += int(errOfs) | ||
404 | |||
405 | // TODO: Use the byte offset to properly count unicode | ||
406 | // characters for the column, and mark the whole of the | ||
407 | // character that was wrong as part of our range. | ||
408 | errPos.Column += int(errOfs) | ||
409 | |||
410 | errEndPos := errPos | ||
411 | errEndPos.Byte++ | ||
412 | errEndPos.Column++ | ||
413 | |||
414 | errRange = hcl.Range{ | ||
415 | Filename: tok.Range.Filename, | ||
416 | Start: errPos, | ||
417 | End: errEndPos, | ||
418 | } | ||
419 | } else { | ||
420 | errRange = tok.Range | ||
421 | } | ||
422 | |||
423 | var contextRange *hcl.Range | ||
424 | if errRange != tok.Range { | ||
425 | contextRange = &tok.Range | ||
426 | } | ||
427 | |||
428 | // FIXME: Eventually we should parse strings directly here so | ||
429 | // we can produce a more useful error message in the face fo things | ||
430 | // such as invalid escapes, etc. | ||
431 | return nil, hcl.Diagnostics{ | ||
432 | { | ||
433 | Severity: hcl.DiagError, | ||
434 | Summary: "Invalid JSON string", | ||
435 | Detail: fmt.Sprintf("There is a syntax error in the given JSON string."), | ||
436 | Subject: &errRange, | ||
437 | Context: contextRange, | ||
438 | }, | ||
439 | } | ||
440 | } | ||
441 | |||
442 | return &stringVal{ | ||
443 | Value: str, | ||
444 | SrcRange: tok.Range, | ||
445 | }, nil | ||
446 | } | ||
447 | |||
448 | func parseKeyword(p *peeker) (node, hcl.Diagnostics) { | ||
449 | tok := p.Read() | ||
450 | s := string(tok.Bytes) | ||
451 | |||
452 | switch s { | ||
453 | case "true": | ||
454 | return &booleanVal{ | ||
455 | Value: true, | ||
456 | SrcRange: tok.Range, | ||
457 | }, nil | ||
458 | case "false": | ||
459 | return &booleanVal{ | ||
460 | Value: false, | ||
461 | SrcRange: tok.Range, | ||
462 | }, nil | ||
463 | case "null": | ||
464 | return &nullVal{ | ||
465 | SrcRange: tok.Range, | ||
466 | }, nil | ||
467 | case "undefined", "NaN", "Infinity": | ||
468 | return nil, hcl.Diagnostics{ | ||
469 | { | ||
470 | Severity: hcl.DiagError, | ||
471 | Summary: "Invalid JSON keyword", | ||
472 | Detail: fmt.Sprintf("The JavaScript identifier %q cannot be used in JSON.", s), | ||
473 | Subject: &tok.Range, | ||
474 | }, | ||
475 | } | ||
476 | default: | ||
477 | var dym string | ||
478 | if suggest := keywordSuggestion(s); suggest != "" { | ||
479 | dym = fmt.Sprintf(" Did you mean %q?", suggest) | ||
480 | } | ||
481 | |||
482 | return nil, hcl.Diagnostics{ | ||
483 | { | ||
484 | Severity: hcl.DiagError, | ||
485 | Summary: "Invalid JSON keyword", | ||
486 | Detail: fmt.Sprintf("%q is not a valid JSON keyword.%s", s, dym), | ||
487 | Subject: &tok.Range, | ||
488 | }, | ||
489 | } | ||
490 | } | ||
491 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/peeker.go b/vendor/github.com/hashicorp/hcl2/hcl/json/peeker.go new file mode 100644 index 0000000..fc7bbf5 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/json/peeker.go | |||
@@ -0,0 +1,25 @@ | |||
1 | package json | ||
2 | |||
// peeker is a lightweight cursor over a pre-scanned token slice, letting
// the parser inspect the next token without consuming it.
type peeker struct {
	tokens []token // token stream produced by scan; ends with a tokenEOF token
	pos    int     // index of the next token to be returned
}
7 | |||
8 | func newPeeker(tokens []token) *peeker { | ||
9 | return &peeker{ | ||
10 | tokens: tokens, | ||
11 | pos: 0, | ||
12 | } | ||
13 | } | ||
14 | |||
// Peek returns the next token without advancing the cursor.
func (p *peeker) Peek() token {
	return p.tokens[p.pos]
}
18 | |||
19 | func (p *peeker) Read() token { | ||
20 | ret := p.tokens[p.pos] | ||
21 | if ret.Type != tokenEOF { | ||
22 | p.pos++ | ||
23 | } | ||
24 | return ret | ||
25 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/public.go b/vendor/github.com/hashicorp/hcl2/hcl/json/public.go new file mode 100644 index 0000000..2728aa1 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/json/public.go | |||
@@ -0,0 +1,94 @@ | |||
1 | package json | ||
2 | |||
3 | import ( | ||
4 | "fmt" | ||
5 | "io/ioutil" | ||
6 | "os" | ||
7 | |||
8 | "github.com/hashicorp/hcl2/hcl" | ||
9 | ) | ||
10 | |||
11 | // Parse attempts to parse the given buffer as JSON and, if successful, returns | ||
12 | // a hcl.File for the HCL configuration represented by it. | ||
13 | // | ||
14 | // This is not a generic JSON parser. Instead, it deals only with the profile | ||
15 | // of JSON used to express HCL configuration. | ||
16 | // | ||
17 | // The returned file is valid only if the returned diagnostics returns false | ||
18 | // from its HasErrors method. If HasErrors returns true, the file represents | ||
19 | // the subset of data that was able to be parsed, which may be none. | ||
20 | func Parse(src []byte, filename string) (*hcl.File, hcl.Diagnostics) { | ||
21 | rootNode, diags := parseFileContent(src, filename) | ||
22 | |||
23 | switch rootNode.(type) { | ||
24 | case *objectVal, *arrayVal: | ||
25 | // okay | ||
26 | default: | ||
27 | diags = diags.Append(&hcl.Diagnostic{ | ||
28 | Severity: hcl.DiagError, | ||
29 | Summary: "Root value must be object", | ||
30 | Detail: "The root value in a JSON-based configuration must be either a JSON object or a JSON array of objects.", | ||
31 | Subject: rootNode.StartRange().Ptr(), | ||
32 | }) | ||
33 | |||
34 | // Since we've already produced an error message for this being | ||
35 | // invalid, we'll return an empty placeholder here so that trying to | ||
36 | // extract content from our root body won't produce a redundant | ||
37 | // error saying the same thing again in more general terms. | ||
38 | fakePos := hcl.Pos{ | ||
39 | Byte: 0, | ||
40 | Line: 1, | ||
41 | Column: 1, | ||
42 | } | ||
43 | fakeRange := hcl.Range{ | ||
44 | Filename: filename, | ||
45 | Start: fakePos, | ||
46 | End: fakePos, | ||
47 | } | ||
48 | rootNode = &objectVal{ | ||
49 | Attrs: []*objectAttr{}, | ||
50 | SrcRange: fakeRange, | ||
51 | OpenRange: fakeRange, | ||
52 | } | ||
53 | } | ||
54 | |||
55 | file := &hcl.File{ | ||
56 | Body: &body{ | ||
57 | val: rootNode, | ||
58 | }, | ||
59 | Bytes: src, | ||
60 | Nav: navigation{rootNode}, | ||
61 | } | ||
62 | return file, diags | ||
63 | } | ||
64 | |||
65 | // ParseFile is a convenience wrapper around Parse that first attempts to load | ||
66 | // data from the given filename, passing the result to Parse if successful. | ||
67 | // | ||
68 | // If the file cannot be read, an error diagnostic with nil context is returned. | ||
69 | func ParseFile(filename string) (*hcl.File, hcl.Diagnostics) { | ||
70 | f, err := os.Open(filename) | ||
71 | if err != nil { | ||
72 | return nil, hcl.Diagnostics{ | ||
73 | { | ||
74 | Severity: hcl.DiagError, | ||
75 | Summary: "Failed to open file", | ||
76 | Detail: fmt.Sprintf("The file %q could not be opened.", filename), | ||
77 | }, | ||
78 | } | ||
79 | } | ||
80 | defer f.Close() | ||
81 | |||
82 | src, err := ioutil.ReadAll(f) | ||
83 | if err != nil { | ||
84 | return nil, hcl.Diagnostics{ | ||
85 | { | ||
86 | Severity: hcl.DiagError, | ||
87 | Summary: "Failed to read file", | ||
88 | Detail: fmt.Sprintf("The file %q was opened, but an error occured while reading it.", filename), | ||
89 | }, | ||
90 | } | ||
91 | } | ||
92 | |||
93 | return Parse(src, filename) | ||
94 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/scanner.go b/vendor/github.com/hashicorp/hcl2/hcl/json/scanner.go new file mode 100644 index 0000000..0a8378b --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/json/scanner.go | |||
@@ -0,0 +1,293 @@ | |||
1 | package json | ||
2 | |||
3 | import ( | ||
4 | "fmt" | ||
5 | |||
6 | "github.com/apparentlymart/go-textseg/textseg" | ||
7 | "github.com/hashicorp/hcl2/hcl" | ||
8 | ) | ||
9 | |||
//go:generate stringer -type tokenType scanner.go
type tokenType rune

// Single-character token types use the punctuation character itself as the
// type value, so the scanner can convert those bytes directly via
// tokenType(b). The multi-byte token types use mnemonic letter values.
const (
	tokenBraceO  tokenType = '{'
	tokenBraceC  tokenType = '}'
	tokenBrackO  tokenType = '['
	tokenBrackC  tokenType = ']'
	tokenComma   tokenType = ','
	tokenColon   tokenType = ':'
	tokenKeyword tokenType = 'K'
	tokenString  tokenType = 'S'
	tokenNumber  tokenType = 'N'
	tokenEOF     tokenType = '␄'
	tokenInvalid tokenType = 0
	tokenEquals  tokenType = '=' // used only for reminding the user of JSON syntax
)

// token is a single lexical element of the JSON input: its type, the raw
// bytes it covers, and its source location.
type token struct {
	Type  tokenType
	Bytes []byte
	Range hcl.Range
}
33 | |||
34 | // scan returns the primary tokens for the given JSON buffer in sequence. | ||
35 | // | ||
36 | // The responsibility of this pass is to just mark the slices of the buffer | ||
37 | // as being of various types. It is lax in how it interprets the multi-byte | ||
38 | // token types keyword, string and number, preferring to capture erroneous | ||
39 | // extra bytes that we presume the user intended to be part of the token | ||
40 | // so that we can generate more helpful diagnostics in the parser. | ||
41 | func scan(buf []byte, start pos) []token { | ||
42 | var tokens []token | ||
43 | p := start | ||
44 | for { | ||
45 | if len(buf) == 0 { | ||
46 | tokens = append(tokens, token{ | ||
47 | Type: tokenEOF, | ||
48 | Bytes: nil, | ||
49 | Range: posRange(p, p), | ||
50 | }) | ||
51 | return tokens | ||
52 | } | ||
53 | |||
54 | buf, p = skipWhitespace(buf, p) | ||
55 | |||
56 | if len(buf) == 0 { | ||
57 | tokens = append(tokens, token{ | ||
58 | Type: tokenEOF, | ||
59 | Bytes: nil, | ||
60 | Range: posRange(p, p), | ||
61 | }) | ||
62 | return tokens | ||
63 | } | ||
64 | |||
65 | start = p | ||
66 | |||
67 | first := buf[0] | ||
68 | switch { | ||
69 | case first == '{' || first == '}' || first == '[' || first == ']' || first == ',' || first == ':' || first == '=': | ||
70 | p.Pos.Column++ | ||
71 | p.Pos.Byte++ | ||
72 | tokens = append(tokens, token{ | ||
73 | Type: tokenType(first), | ||
74 | Bytes: buf[0:1], | ||
75 | Range: posRange(start, p), | ||
76 | }) | ||
77 | buf = buf[1:] | ||
78 | case first == '"': | ||
79 | var tokBuf []byte | ||
80 | tokBuf, buf, p = scanString(buf, p) | ||
81 | tokens = append(tokens, token{ | ||
82 | Type: tokenString, | ||
83 | Bytes: tokBuf, | ||
84 | Range: posRange(start, p), | ||
85 | }) | ||
86 | case byteCanStartNumber(first): | ||
87 | var tokBuf []byte | ||
88 | tokBuf, buf, p = scanNumber(buf, p) | ||
89 | tokens = append(tokens, token{ | ||
90 | Type: tokenNumber, | ||
91 | Bytes: tokBuf, | ||
92 | Range: posRange(start, p), | ||
93 | }) | ||
94 | case byteCanStartKeyword(first): | ||
95 | var tokBuf []byte | ||
96 | tokBuf, buf, p = scanKeyword(buf, p) | ||
97 | tokens = append(tokens, token{ | ||
98 | Type: tokenKeyword, | ||
99 | Bytes: tokBuf, | ||
100 | Range: posRange(start, p), | ||
101 | }) | ||
102 | default: | ||
103 | tokens = append(tokens, token{ | ||
104 | Type: tokenInvalid, | ||
105 | Bytes: buf[:1], | ||
106 | Range: start.Range(1, 1), | ||
107 | }) | ||
108 | // If we've encountered an invalid then we might as well stop | ||
109 | // scanning since the parser won't proceed beyond this point. | ||
110 | return tokens | ||
111 | } | ||
112 | } | ||
113 | } | ||
114 | |||
// byteCanStartNumber reports whether b may begin a number token.
//
// We are slightly more tolerant than JSON requires here since we
// expect the parser will make a stricter interpretation of the
// number bytes, but we specifically don't allow 'e' or 'E' here
// since we want the scanner to treat that as the start of an
// invalid keyword instead, to produce more intelligible error messages.
func byteCanStartNumber(b byte) bool {
	if b >= '0' && b <= '9' {
		return true
	}
	return b == '-' || b == '+' || b == '.'
}
128 | |||
129 | func scanNumber(buf []byte, start pos) ([]byte, []byte, pos) { | ||
130 | // The scanner doesn't check that the sequence of digit-ish bytes is | ||
131 | // in a valid order. The parser must do this when decoding a number | ||
132 | // token. | ||
133 | var i int | ||
134 | p := start | ||
135 | Byte: | ||
136 | for i = 0; i < len(buf); i++ { | ||
137 | switch buf[i] { | ||
138 | case '-', '+', '.', 'e', 'E', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': | ||
139 | p.Pos.Byte++ | ||
140 | p.Pos.Column++ | ||
141 | default: | ||
142 | break Byte | ||
143 | } | ||
144 | } | ||
145 | return buf[:i], buf[i:], p | ||
146 | } | ||
147 | |||
// byteCanStartKeyword reports whether b may begin a keyword token, i.e.
// whether it is an ASCII letter.
//
// We allow any sequence of alphabetical characters here, even though
// JSON is more constrained, so that we can collect what we presume
// the user intended to be a single keyword and then check its validity
// in the parser, where we can generate better diagnostics.
// So e.g. we want to be able to say:
//
//	unrecognized keyword "True". Did you mean "true"?
func byteCanStartKeyword(b byte) bool {
	switch {
	// BUG FIX: the original condition used || between all four comparisons
	// (b >= 'a' || b <= 'z' || b >= 'A' || b <= 'Z'), which is true for
	// every possible byte and made the scanner's tokenInvalid path
	// unreachable. The letter ranges must be AND-ed.
	case (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z'):
		return true
	default:
		return false
	}
}
162 | |||
163 | func scanKeyword(buf []byte, start pos) ([]byte, []byte, pos) { | ||
164 | var i int | ||
165 | p := start | ||
166 | Byte: | ||
167 | for i = 0; i < len(buf); i++ { | ||
168 | b := buf[i] | ||
169 | switch { | ||
170 | case (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || b == '_': | ||
171 | p.Pos.Byte++ | ||
172 | p.Pos.Column++ | ||
173 | default: | ||
174 | break Byte | ||
175 | } | ||
176 | } | ||
177 | return buf[:i], buf[i:], p | ||
178 | } | ||
179 | |||
// scanString consumes a string token from the front of buf, which must
// begin with a double-quote, returning the token bytes (including both
// quotes when present), the remaining buffer and the updated position.
func scanString(buf []byte, start pos) ([]byte, []byte, pos) {
	// The scanner doesn't validate correct use of escapes, etc. It pays
	// attention to escapes only for the purpose of identifying the closing
	// quote character. It's the parser's responsibility to do proper
	// validation.
	//
	// The scanner also doesn't specifically detect unterminated string
	// literals, though they can be identified in the parser by checking if
	// the final byte in a string token is the double-quote character.

	// Skip the opening quote symbol
	i := 1
	p := start
	p.Pos.Byte++
	p.Pos.Column++
	escaping := false
Byte:
	for i < len(buf) {
		b := buf[i]

		switch {
		case b == '\\':
			// Toggling (rather than setting) handles a literal backslash
			// escape `\\`, after which the next byte is NOT escaped.
			escaping = !escaping
			p.Pos.Byte++
			p.Pos.Column++
			i++
		case b == '"':
			p.Pos.Byte++
			p.Pos.Column++
			i++
			// An unescaped quote closes the string; an escaped one (\")
			// is just content.
			if !escaping {
				break Byte
			}
			escaping = false
		case b < 32:
			// A raw control character (including newline) ends the token
			// early, leaving it without a closing quote so the parser can
			// detect the unterminated string.
			break Byte
		default:
			// Advance by one grapheme cluster, so that we consider each
			// grapheme to be a "column".
			// Ignoring error because this scanner cannot produce errors.
			advance, _, _ := textseg.ScanGraphemeClusters(buf[i:], true)

			p.Pos.Byte += advance
			p.Pos.Column++
			i += advance

			escaping = false
		}
	}
	return buf[:i], buf[i:], p
}
231 | |||
232 | func skipWhitespace(buf []byte, start pos) ([]byte, pos) { | ||
233 | var i int | ||
234 | p := start | ||
235 | Byte: | ||
236 | for i = 0; i < len(buf); i++ { | ||
237 | switch buf[i] { | ||
238 | case ' ': | ||
239 | p.Pos.Byte++ | ||
240 | p.Pos.Column++ | ||
241 | case '\n': | ||
242 | p.Pos.Byte++ | ||
243 | p.Pos.Column = 1 | ||
244 | p.Pos.Line++ | ||
245 | case '\r': | ||
246 | // For the purpose of line/column counting we consider a | ||
247 | // carriage return to take up no space, assuming that it will | ||
248 | // be paired up with a newline (on Windows, for example) that | ||
249 | // will account for both of them. | ||
250 | p.Pos.Byte++ | ||
251 | case '\t': | ||
252 | // We arbitrarily count a tab as if it were two spaces, because | ||
253 | // we need to choose _some_ number here. This means any system | ||
254 | // that renders code on-screen with markers must itself treat | ||
255 | // tabs as a pair of spaces for rendering purposes, or instead | ||
256 | // use the byte offset and back into its own column position. | ||
257 | p.Pos.Byte++ | ||
258 | p.Pos.Column += 2 | ||
259 | default: | ||
260 | break Byte | ||
261 | } | ||
262 | } | ||
263 | return buf[i:], p | ||
264 | } | ||
265 | |||
// pos pairs a filename with an hcl.Pos so the scanner can track its
// source location while consuming a buffer.
type pos struct {
	Filename string  // name of the file being scanned, used in ranges
	Pos      hcl.Pos // current byte/line/column position within the file
}
270 | |||
271 | func (p *pos) Range(byteLen, charLen int) hcl.Range { | ||
272 | start := p.Pos | ||
273 | end := p.Pos | ||
274 | end.Byte += byteLen | ||
275 | end.Column += charLen | ||
276 | return hcl.Range{ | ||
277 | Filename: p.Filename, | ||
278 | Start: start, | ||
279 | End: end, | ||
280 | } | ||
281 | } | ||
282 | |||
283 | func posRange(start, end pos) hcl.Range { | ||
284 | return hcl.Range{ | ||
285 | Filename: start.Filename, | ||
286 | Start: start.Pos, | ||
287 | End: end.Pos, | ||
288 | } | ||
289 | } | ||
290 | |||
// GoString implements fmt.GoStringer, producing a readable representation
// of a token for %#v-style debugging output.
func (t token) GoString() string {
	return fmt.Sprintf("json.token{json.%s, []byte(%q), %#v}", t.Type, t.Bytes, t.Range)
}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/spec.md b/vendor/github.com/hashicorp/hcl2/hcl/json/spec.md new file mode 100644 index 0000000..9b33c7f --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/json/spec.md | |||
@@ -0,0 +1,405 @@ | |||
1 | # HCL JSON Syntax Specification | ||
2 | |||
3 | This is the specification for the JSON serialization for hcl. HCL is a system | ||
4 | for defining configuration languages for applications. The HCL information | ||
5 | model is designed to support multiple concrete syntaxes for configuration, | ||
6 | and this JSON-based format complements [the native syntax](../hclsyntax/spec.md) | ||
7 | by being easy to machine-generate, whereas the native syntax is oriented | ||
8 | towards human authoring and maintenance. | ||
9 | |||
10 | This syntax is defined in terms of JSON as defined in | ||
11 | [RFC7159](https://tools.ietf.org/html/rfc7159). As such it inherits the JSON | ||
12 | grammar as-is, and merely defines a specific methodology for interpreting | ||
13 | JSON constructs into HCL structural elements and expressions. | ||
14 | |||
15 | This mapping is defined such that valid JSON-serialized HCL input can be | ||
16 | _produced_ using standard JSON implementations in various programming languages. | ||
17 | _Parsing_ such JSON has some additional constraints not beyond what is normally | ||
18 | supported by JSON parsers, so a specialized parser may be required that | ||
19 | is able to: | ||
20 | |||
21 | * Preserve the relative ordering of properties defined in an object. | ||
22 | * Preserve multiple definitions of the same property name. | ||
23 | * Preserve numeric values to the precision required by the number type | ||
24 | in [the HCL syntax-agnostic information model](../spec.md). | ||
25 | * Retain source location information for parsed tokens/constructs in order | ||
26 | to produce good error messages. | ||
27 | |||
28 | ## Structural Elements | ||
29 | |||
30 | [The HCL syntax-agnostic information model](../spec.md) defines a _body_ as an | ||
31 | abstract container for attribute definitions and child blocks. A body is | ||
32 | represented in JSON as either a single JSON object or a JSON array of objects. | ||
33 | |||
34 | Body processing is in terms of JSON object properties, visited in the order | ||
35 | they appear in the input. Where a body is represented by a single JSON object, | ||
36 | the properties of that object are visited in order. Where a body is | ||
37 | represented by a JSON array, each of its elements are visited in order and | ||
38 | each element has its properties visited in order. If any element of the array | ||
39 | is not a JSON object then the input is erroneous. | ||
40 | |||
41 | When a body is being processed in the _dynamic attributes_ mode, the allowance | ||
42 | of a JSON array in the previous paragraph does not apply and instead a single | ||
43 | JSON object is always required. | ||
44 | |||
45 | As defined in the language-agnostic model, body processing is in terms | ||
46 | of a schema which provides context for interpreting the body's content. For | ||
47 | JSON bodies, the schema is crucial to allow differentiation of attribute | ||
48 | definitions and block definitions, both of which are represented via object | ||
49 | properties. | ||
50 | |||
51 | The special property name `"//"`, when used in an object representing a HCL | ||
52 | body, is parsed and ignored. A property with this name can be used to | ||
53 | include human-readable comments. (This special property name is _not_ | ||
54 | processed in this way for any _other_ HCL constructs that are represented as | ||
55 | JSON objects.) | ||
56 | |||
57 | ### Attributes | ||
58 | |||
59 | Where the given schema describes an attribute with a given name, the object | ||
60 | property with the matching name — if present — serves as the attribute's | ||
61 | definition. | ||
62 | |||
63 | When a body is being processed in the _dynamic attributes_ mode, each object | ||
64 | property serves as an attribute definition for the attribute whose name | ||
65 | matches the property name. | ||
66 | |||
67 | The value of an attribute definition property is interpreted as an _expression_, | ||
68 | as described in a later section. | ||
69 | |||
70 | Given a schema that calls for an attribute named "foo", a JSON object like | ||
71 | the following provides a definition for that attribute: | ||
72 | |||
73 | ```json | ||
74 | { | ||
75 | "foo": "bar baz" | ||
76 | } | ||
77 | ``` | ||
78 | |||
79 | ### Blocks | ||
80 | |||
81 | Where the given schema describes a block with a given type name, each object | ||
82 | property with the matching name serves as a definition of zero or more blocks | ||
83 | of that type. | ||
84 | |||
85 | Processing of child blocks is in terms of nested JSON objects and arrays. | ||
86 | If the schema defines one or more _labels_ for the block type, a nested JSON | ||
87 | object or JSON array of objects is required for each labelling level. These | ||
88 | are flattened to a single ordered sequence of object properties using the | ||
89 | same algorithm as for body content as defined above. Each object property | ||
90 | serves as a label value at the corresponding level. | ||
91 | |||
92 | After any labelling levels, the next nested value is either a JSON object | ||
93 | representing a single block body, or a JSON array of JSON objects that each | ||
94 | represent a single block body. Use of an array accommodates the definition | ||
95 | of multiple blocks that have identical type and labels. | ||
96 | |||
97 | Given a schema that calls for a block type named "foo" with no labels, the | ||
98 | following JSON objects are all valid definitions of zero or more blocks of this | ||
99 | type: | ||
100 | |||
101 | ```json | ||
102 | { | ||
103 | "foo": { | ||
104 | "child_attr": "baz" | ||
105 | } | ||
106 | } | ||
107 | ``` | ||
108 | |||
109 | ```json | ||
110 | { | ||
111 | "foo": [ | ||
112 | { | ||
113 | "child_attr": "baz" | ||
114 | }, | ||
115 | { | ||
116 | "child_attr": "boz" | ||
117 | } | ||
118 | ] | ||
119 | } | ||
120 | ``` | ||
121 | ```json | ||
122 | { | ||
123 | "foo": [] | ||
124 | } | ||
125 | ``` | ||
126 | |||
127 | The first of these defines a single child block of type "foo". The second | ||
128 | defines _two_ such blocks. The final example shows a degenerate definition | ||
129 | of zero blocks, though generators should prefer to omit the property entirely | ||
130 | in this scenario. | ||
131 | |||
132 | Given a schema that calls for a block type named "foo" with _two_ labels, the | ||
133 | extra label levels must be represented as objects or arrays of objects as in | ||
134 | the following examples: | ||
135 | |||
136 | ```json | ||
137 | { | ||
138 | "foo": { | ||
139 | "bar": { | ||
140 | "baz": { | ||
141 | "child_attr": "baz" | ||
142 | }, | ||
143 | "boz": { | ||
144 | "child_attr": "baz" | ||
145 | } | ||
146 | }, | ||
147 | "boz": { | ||
148 | "baz": { | ||
149 | "child_attr": "baz" | ||
150 | }, | ||
151 | } | ||
152 | } | ||
153 | } | ||
154 | ``` | ||
155 | |||
156 | ```json | ||
157 | { | ||
158 | "foo": { | ||
159 | "bar": { | ||
160 | "baz": { | ||
161 | "child_attr": "baz" | ||
162 | }, | ||
163 | "boz": { | ||
164 | "child_attr": "baz" | ||
165 | } | ||
166 | }, | ||
167 | "boz": { | ||
168 | "baz": [ | ||
169 | { | ||
170 | "child_attr": "baz" | ||
171 | }, | ||
172 | { | ||
173 | "child_attr": "boz" | ||
174 | } | ||
175 | ] | ||
176 | } | ||
177 | } | ||
178 | } | ||
179 | ``` | ||
180 | |||
181 | ```json | ||
182 | { | ||
183 | "foo": [ | ||
184 | { | ||
185 | "bar": { | ||
186 | "baz": { | ||
187 | "child_attr": "baz" | ||
188 | }, | ||
189 | "boz": { | ||
190 | "child_attr": "baz" | ||
191 | } | ||
192 | }, | ||
193 | }, | ||
194 | { | ||
195 | "bar": { | ||
196 | "baz": [ | ||
197 | { | ||
198 | "child_attr": "baz" | ||
199 | }, | ||
200 | { | ||
201 | "child_attr": "boz" | ||
202 | } | ||
203 | ] | ||
204 | } | ||
205 | } | ||
206 | ] | ||
207 | } | ||
208 | ``` | ||
209 | |||
210 | ```json | ||
211 | { | ||
212 | "foo": { | ||
213 | "bar": { | ||
214 | "baz": { | ||
215 | "child_attr": "baz" | ||
216 | }, | ||
217 | "boz": { | ||
218 | "child_attr": "baz" | ||
219 | } | ||
220 | }, | ||
221 | "bar": { | ||
222 | "baz": [ | ||
223 | { | ||
224 | "child_attr": "baz" | ||
225 | }, | ||
226 | { | ||
227 | "child_attr": "boz" | ||
228 | } | ||
229 | ] | ||
230 | } | ||
231 | } | ||
232 | } | ||
233 | ``` | ||
234 | |||
235 | Arrays can be introduced at either the label definition or block body | ||
236 | definition levels to define multiple definitions of the same block type | ||
237 | or labels while preserving order. | ||
238 | |||
239 | A JSON HCL parser _must_ support duplicate definitions of the same property | ||
240 | name within a single object, preserving all of them and the relative ordering | ||
241 | between them. The array-based forms are also required so that JSON HCL | ||
242 | configurations can be produced with JSON producing libraries that are not | ||
243 | able to preserve property definition order and multiple definitions of | ||
244 | the same property. | ||
245 | |||
246 | ## Expressions | ||
247 | |||
248 | JSON lacks a native expression syntax, so the HCL JSON syntax instead defines | ||
249 | a mapping for each of the JSON value types, including a special mapping for | ||
250 | strings that allows optional use of arbitrary expressions. | ||
251 | |||
252 | ### Objects | ||
253 | |||
254 | When interpreted as an expression, a JSON object represents a value of a HCL | ||
255 | object type. | ||
256 | |||
257 | Each property of the JSON object represents an attribute of the HCL object type. | ||
258 | The property name string given in the JSON input is interpreted as a string | ||
259 | expression as described below, and its result is converted to string as defined | ||
260 | by the syntax-agnostic information model. If such a conversion is not possible, | ||
261 | an error is produced and evaluation fails. | ||
262 | |||
263 | An instance of the constructed object type is then created, whose values | ||
264 | are interpreted by again recursively applying the mapping rules defined in | ||
265 | this section to each of the property values. | ||
266 | |||
267 | If any evaluated property name strings produce null values, an error is | ||
268 | produced and evaluation fails. If any produce _unknown_ values, the _entire | ||
269 | object's_ result is an unknown value of the dynamic pseudo-type, signalling | ||
270 | that the type of the object cannot be determined. | ||
271 | |||
272 | It is an error to define the same property name multiple times within a single | ||
273 | JSON object interpreted as an expression. In full expression mode, this | ||
274 | constraint applies to the name expression results after conversion to string, | ||
275 | rather than the raw string that may contain interpolation expressions. | ||
276 | |||
277 | ### Arrays | ||
278 | |||
279 | When interpreted as an expression, a JSON array represents a value of a HCL | ||
280 | tuple type. | ||
281 | |||
282 | Each element of the JSON array represents an element of the HCL tuple type. | ||
283 | The tuple type is constructed by enumerating the JSON array elements, creating | ||
284 | for each an element whose type is the result of recursively applying the | ||
285 | expression mapping rules. Correspondence is preserved between the array element | ||
286 | indices and the tuple element indices. | ||
287 | |||
288 | An instance of the constructed tuple type is then created, whose values are | ||
289 | interpreted by again recursively applying the mapping rules defined in this | ||
290 | section. | ||
291 | |||
292 | ### Numbers | ||
293 | |||
294 | When interpreted as an expression, a JSON number represents a HCL number value. | ||
295 | |||
296 | HCL numbers are arbitrary-precision decimal values, so a JSON HCL parser must | ||
297 | be able to translate exactly the value given to a number of corresponding | ||
298 | precision, within the constraints set by the HCL syntax-agnostic information | ||
299 | model. | ||
300 | |||
301 | In practice, off-the-shelf JSON serializers often do not support customizing the | ||
302 | processing of numbers, and instead force processing as 32-bit or 64-bit | ||
303 | floating point values. | ||
304 | |||
305 | A _producer_ of JSON HCL that uses such a serializer can provide numeric values | ||
306 | as JSON strings where they have precision too great for representation in the | ||
307 | serializer's chosen numeric type in situations where the result will be | ||
308 | converted to number (using the standard conversion rules) by a calling | ||
309 | application. | ||
310 | |||
311 | Alternatively, for expressions that are evaluated in full expression mode an | ||
312 | embedded template interpolation can be used to faithfully represent a number, | ||
313 | such as `"${1e150}"`, which will then be evaluated by the underlying HCL native | ||
314 | syntax expression evaluator. | ||
315 | |||
316 | ### Boolean Values | ||
317 | |||
318 | The JSON boolean values `true` and `false`, when interpreted as expressions, | ||
319 | represent the corresponding HCL boolean values. | ||
320 | |||
321 | ### The Null Value | ||
322 | |||
323 | The JSON value `null`, when interpreted as an expression, represents a | ||
324 | HCL null value of the dynamic pseudo-type. | ||
325 | |||
326 | ### Strings | ||
327 | |||
When interpreted as an expression, a JSON string may be interpreted in one of
329 | two ways depending on the evaluation mode. | ||
330 | |||
331 | If evaluating in literal-only mode (as defined by the syntax-agnostic | ||
information model) the literal string is interpreted directly as a HCL string
333 | value, by directly using the exact sequence of unicode characters represented. | ||
334 | Template interpolations and directives MUST NOT be processed in this mode, | ||
335 | allowing any characters that appear as introduction sequences to pass through | ||
336 | literally: | ||
337 | |||
338 | ```json | ||
"Hello world! Template sequences like ${ are not interpreted here."
340 | ``` | ||
341 | |||
342 | When evaluating in full expression mode (again, as defined by the syntax- | ||
343 | agnostic information model) the literal string is instead interpreted as a | ||
344 | _standalone template_ in the HCL Native Syntax. The expression evaluation | ||
345 | result is then the direct result of evaluating that template with the current | ||
346 | variable scope and function table. | ||
347 | |||
348 | ```json | ||
349 | "Hello, ${name}! Template sequences are interpreted in full expression mode." | ||
350 | ``` | ||
351 | |||
352 | In particular the _Template Interpolation Unwrapping_ requirement from the | ||
353 | HCL native syntax specification must be implemented, allowing the use of | ||
354 | single-interpolation templates to represent expressions that would not | ||
355 | otherwise be representable in JSON, such as the following example where | ||
356 | the result must be a number, rather than a string representation of a number: | ||
357 | |||
358 | ```json | ||
359 | "${ a + b }" | ||
360 | ``` | ||
361 | |||
362 | ## Static Analysis | ||
363 | |||
364 | The HCL static analysis operations are implemented for JSON values that | ||
365 | represent expressions, as described in the following sections. | ||
366 | |||
367 | Due to the limited expressive power of the JSON syntax alone, use of these | ||
368 | static analyses functions rather than normal expression evaluation is used | ||
369 | as additional context for how a JSON value is to be interpreted, which means | ||
370 | that static analyses can result in a different interpretation of a given | ||
371 | expression than normal evaluation. | ||
372 | |||
373 | ### Static List | ||
374 | |||
375 | An expression interpreted as a static list must be a JSON array. Each of the | ||
376 | values in the array is interpreted as an expression and returned. | ||
377 | |||
378 | ### Static Map | ||
379 | |||
380 | An expression interpreted as a static map must be a JSON object. Each of the | ||
381 | key/value pairs in the object is presented as a pair of expressions. Since | ||
382 | object property names are always strings, evaluating the key expression with | ||
383 | a non-`nil` evaluation context will evaluate any template sequences given | ||
384 | in the property name. | ||
385 | |||
386 | ### Static Call | ||
387 | |||
388 | An expression interpreted as a static call must be a string. The content of | ||
389 | the string is interpreted as a native syntax expression (not a _template_, | ||
390 | unlike normal evaluation) and then the static call analysis is delegated to | ||
391 | that expression. | ||
392 | |||
393 | If the original expression is not a string or its contents cannot be parsed | ||
394 | as a native syntax expression then static call analysis is not supported. | ||
395 | |||
396 | ### Static Traversal | ||
397 | |||
398 | An expression interpreted as a static traversal must be a string. The content | ||
399 | of the string is interpreted as a native syntax expression (not a _template_, | ||
400 | unlike normal evaluation) and then static traversal analysis is delegated | ||
401 | to that expression. | ||
402 | |||
403 | If the original expression is not a string or its contents cannot be parsed | ||
as a native syntax expression then static traversal analysis is not supported.
405 | |||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/structure.go b/vendor/github.com/hashicorp/hcl2/hcl/json/structure.go new file mode 100644 index 0000000..28dcf52 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/json/structure.go | |||
@@ -0,0 +1,616 @@ | |||
1 | package json | ||
2 | |||
3 | import ( | ||
4 | "fmt" | ||
5 | |||
6 | "github.com/hashicorp/hcl2/hcl" | ||
7 | "github.com/hashicorp/hcl2/hcl/hclsyntax" | ||
8 | "github.com/zclconf/go-cty/cty" | ||
9 | "github.com/zclconf/go-cty/cty/convert" | ||
10 | ) | ||
11 | |||
// body is the implementation of "Body" used for files processed with the JSON
// parser.
type body struct {
	// val is the JSON AST node (object or array) whose properties provide
	// this body's attributes and blocks.
	val node

	// If non-nil, the keys of this map cause the corresponding attributes to
	// be treated as non-existing. This is used when Body.PartialContent is
	// called, to produce the "remaining content" Body.
	hiddenAttrs map[string]struct{}
}
22 | |||
// expression is the implementation of "Expression" used for files processed
// with the JSON parser.
type expression struct {
	// src is the JSON AST node that this expression evaluates.
	src node
}
28 | |||
29 | func (b *body) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostics) { | ||
30 | content, newBody, diags := b.PartialContent(schema) | ||
31 | |||
32 | hiddenAttrs := newBody.(*body).hiddenAttrs | ||
33 | |||
34 | var nameSuggestions []string | ||
35 | for _, attrS := range schema.Attributes { | ||
36 | if _, ok := hiddenAttrs[attrS.Name]; !ok { | ||
37 | // Only suggest an attribute name if we didn't use it already. | ||
38 | nameSuggestions = append(nameSuggestions, attrS.Name) | ||
39 | } | ||
40 | } | ||
41 | for _, blockS := range schema.Blocks { | ||
42 | // Blocks can appear multiple times, so we'll suggest their type | ||
43 | // names regardless of whether they've already been used. | ||
44 | nameSuggestions = append(nameSuggestions, blockS.Type) | ||
45 | } | ||
46 | |||
47 | jsonAttrs, attrDiags := b.collectDeepAttrs(b.val, nil) | ||
48 | diags = append(diags, attrDiags...) | ||
49 | |||
50 | for _, attr := range jsonAttrs { | ||
51 | k := attr.Name | ||
52 | if k == "//" { | ||
53 | // Ignore "//" keys in objects representing bodies, to allow | ||
54 | // their use as comments. | ||
55 | continue | ||
56 | } | ||
57 | |||
58 | if _, ok := hiddenAttrs[k]; !ok { | ||
59 | suggestion := nameSuggestion(k, nameSuggestions) | ||
60 | if suggestion != "" { | ||
61 | suggestion = fmt.Sprintf(" Did you mean %q?", suggestion) | ||
62 | } | ||
63 | |||
64 | diags = append(diags, &hcl.Diagnostic{ | ||
65 | Severity: hcl.DiagError, | ||
66 | Summary: "Extraneous JSON object property", | ||
67 | Detail: fmt.Sprintf("No attribute or block type is named %q.%s", k, suggestion), | ||
68 | Subject: &attr.NameRange, | ||
69 | Context: attr.Range().Ptr(), | ||
70 | }) | ||
71 | } | ||
72 | } | ||
73 | |||
74 | return content, diags | ||
75 | } | ||
76 | |||
// PartialContent extracts the attributes and blocks described by schema,
// returning along with them a second body representing whatever was not
// consumed, so that a later call can process the remaining properties.
func (b *body) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) {
	var diags hcl.Diagnostics

	jsonAttrs, attrDiags := b.collectDeepAttrs(b.val, nil)
	diags = append(diags, attrDiags...)

	// usedNames accumulates every name consumed here plus any already hidden
	// by a previous PartialContent call; it becomes the hidden set of the
	// returned "remaining" body.
	usedNames := map[string]struct{}{}
	if b.hiddenAttrs != nil {
		for k := range b.hiddenAttrs {
			usedNames[k] = struct{}{}
		}
	}

	content := &hcl.BodyContent{
		Attributes: map[string]*hcl.Attribute{},
		Blocks:     nil,

		MissingItemRange: b.MissingItemRange(),
	}

	// Create some more convenient data structures for our work below.
	attrSchemas := map[string]hcl.AttributeSchema{}
	blockSchemas := map[string]hcl.BlockHeaderSchema{}
	for _, attrS := range schema.Attributes {
		attrSchemas[attrS.Name] = attrS
	}
	for _, blockS := range schema.Blocks {
		blockSchemas[blockS.Type] = blockS
	}

	for _, jsonAttr := range jsonAttrs {
		attrName := jsonAttr.Name
		// Skip properties hidden by an earlier PartialContent call.
		if _, used := b.hiddenAttrs[attrName]; used {
			continue
		}

		if attrS, defined := attrSchemas[attrName]; defined {
			if existing, exists := content.Attributes[attrName]; exists {
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Duplicate attribute definition",
					Detail:   fmt.Sprintf("The attribute %q was already defined at %s.", attrName, existing.Range),
					Subject:  &jsonAttr.NameRange,
					Context:  jsonAttr.Range().Ptr(),
				})
				continue
			}

			content.Attributes[attrS.Name] = &hcl.Attribute{
				Name:      attrS.Name,
				Expr:      &expression{src: jsonAttr.Value},
				Range:     hcl.RangeBetween(jsonAttr.NameRange, jsonAttr.Value.Range()),
				NameRange: jsonAttr.NameRange,
			}
			usedNames[attrName] = struct{}{}

		} else if blockS, defined := blockSchemas[attrName]; defined {
			// Block values may be a single object or an array of objects;
			// unpackBlock handles both plus any label-nesting levels.
			bv := jsonAttr.Value
			blockDiags := b.unpackBlock(bv, blockS.Type, &jsonAttr.NameRange, blockS.LabelNames, nil, nil, &content.Blocks)
			diags = append(diags, blockDiags...)
			usedNames[attrName] = struct{}{}
		}

		// We ignore anything that isn't defined because that's the
		// PartialContent contract. The Content method will catch leftovers.
	}

	// Make sure we got all the required attributes.
	for _, attrS := range schema.Attributes {
		if !attrS.Required {
			continue
		}
		if _, defined := content.Attributes[attrS.Name]; !defined {
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Missing required attribute",
				Detail:   fmt.Sprintf("The attribute %q is required, but no definition was found.", attrS.Name),
				Subject:  b.MissingItemRange().Ptr(),
			})
		}
	}

	// The remaining body wraps the same JSON value, with everything we
	// consumed hidden from future lookups.
	unusedBody := &body{
		val:         b.val,
		hiddenAttrs: usedNames,
	}

	return content, unusedBody, diags
}
166 | |||
167 | // JustAttributes for JSON bodies interprets all properties of the wrapped | ||
168 | // JSON object as attributes and returns them. | ||
169 | func (b *body) JustAttributes() (hcl.Attributes, hcl.Diagnostics) { | ||
170 | var diags hcl.Diagnostics | ||
171 | attrs := make(map[string]*hcl.Attribute) | ||
172 | |||
173 | obj, ok := b.val.(*objectVal) | ||
174 | if !ok { | ||
175 | diags = append(diags, &hcl.Diagnostic{ | ||
176 | Severity: hcl.DiagError, | ||
177 | Summary: "Incorrect JSON value type", | ||
178 | Detail: "A JSON object is required here, defining the attributes for this block.", | ||
179 | Subject: b.val.StartRange().Ptr(), | ||
180 | }) | ||
181 | return attrs, diags | ||
182 | } | ||
183 | |||
184 | for _, jsonAttr := range obj.Attrs { | ||
185 | name := jsonAttr.Name | ||
186 | if name == "//" { | ||
187 | // Ignore "//" keys in objects representing bodies, to allow | ||
188 | // their use as comments. | ||
189 | continue | ||
190 | } | ||
191 | |||
192 | if _, hidden := b.hiddenAttrs[name]; hidden { | ||
193 | continue | ||
194 | } | ||
195 | |||
196 | if existing, exists := attrs[name]; exists { | ||
197 | diags = append(diags, &hcl.Diagnostic{ | ||
198 | Severity: hcl.DiagError, | ||
199 | Summary: "Duplicate attribute definition", | ||
200 | Detail: fmt.Sprintf("The attribute %q was already defined at %s.", name, existing.Range), | ||
201 | Subject: &jsonAttr.NameRange, | ||
202 | }) | ||
203 | continue | ||
204 | } | ||
205 | |||
206 | attrs[name] = &hcl.Attribute{ | ||
207 | Name: name, | ||
208 | Expr: &expression{src: jsonAttr.Value}, | ||
209 | Range: hcl.RangeBetween(jsonAttr.NameRange, jsonAttr.Value.Range()), | ||
210 | NameRange: jsonAttr.NameRange, | ||
211 | } | ||
212 | } | ||
213 | |||
214 | // No diagnostics possible here, since the parser already took care of | ||
215 | // finding duplicates and every JSON value can be a valid attribute value. | ||
216 | return attrs, diags | ||
217 | } | ||
218 | |||
219 | func (b *body) MissingItemRange() hcl.Range { | ||
220 | switch tv := b.val.(type) { | ||
221 | case *objectVal: | ||
222 | return tv.CloseRange | ||
223 | case *arrayVal: | ||
224 | return tv.OpenRange | ||
225 | default: | ||
226 | // Should not happen in correct operation, but might show up if the | ||
227 | // input is invalid and we are producing partial results. | ||
228 | return tv.StartRange() | ||
229 | } | ||
230 | } | ||
231 | |||
// unpackBlock recursively translates the JSON value v into one or more
// hcl.Block values appended to *blocks.
//
// While labelsLeft is non-empty, each level of JSON object nesting supplies
// one block label; labelsUsed and labelRanges accumulate the labels gathered
// so far on the way down the recursion. Once all labels are consumed, v
// itself provides the block body: a single object for one block, or an array
// for repeated blocks of the same type and labels.
func (b *body) unpackBlock(v node, typeName string, typeRange *hcl.Range, labelsLeft []string, labelsUsed []string, labelRanges []hcl.Range, blocks *hcl.Blocks) (diags hcl.Diagnostics) {
	if len(labelsLeft) > 0 {
		labelName := labelsLeft[0]
		jsonAttrs, attrDiags := b.collectDeepAttrs(v, &labelName)
		diags = append(diags, attrDiags...)

		if len(jsonAttrs) == 0 {
			diags = diags.Append(&hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Missing block label",
				Detail:   fmt.Sprintf("At least one object property is required, whose name represents the %s block's %s.", typeName, labelName),
				Subject:  v.StartRange().Ptr(),
			})
			return
		}
		// Extend the label accumulators by one slot, then overwrite that
		// last slot for each property before recursing; the recursion's
		// copy step below makes this in-place reuse safe.
		labelsUsed := append(labelsUsed, "")
		labelRanges := append(labelRanges, hcl.Range{})
		for _, p := range jsonAttrs {
			pk := p.Name
			labelsUsed[len(labelsUsed)-1] = pk
			labelRanges[len(labelRanges)-1] = p.NameRange
			diags = append(diags, b.unpackBlock(p.Value, typeName, typeRange, labelsLeft[1:], labelsUsed, labelRanges, blocks)...)
		}
		return
	}

	// By the time we get here, we've peeled off all the labels and we're ready
	// to deal with the block's actual content.

	// need to copy the label slices because their underlying arrays will
	// continue to be mutated after we return.
	labels := make([]string, len(labelsUsed))
	copy(labels, labelsUsed)
	labelR := make([]hcl.Range, len(labelRanges))
	copy(labelR, labelRanges)

	switch tv := v.(type) {
	case *objectVal:
		// Single instance of the block
		*blocks = append(*blocks, &hcl.Block{
			Type:   typeName,
			Labels: labels,
			Body: &body{
				val: tv,
			},

			DefRange:    tv.OpenRange,
			TypeRange:   *typeRange,
			LabelRanges: labelR,
		})
	case *arrayVal:
		// Multiple instances of the block
		for _, av := range tv.Values {
			*blocks = append(*blocks, &hcl.Block{
				Type:   typeName,
				Labels: labels,
				Body: &body{
					val: av, // might be mistyped; we'll find out when content is requested for this body
				},

				DefRange:    tv.OpenRange,
				TypeRange:   *typeRange,
				LabelRanges: labelR,
			})
		}
	default:
		diags = diags.Append(&hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Incorrect JSON value type",
			Detail:   fmt.Sprintf("Either a JSON object or a JSON array is required, representing the contents of one or more %q blocks.", typeName),
			Subject:  v.StartRange().Ptr(),
		})
	}
	return
}
307 | |||
// collectDeepAttrs takes either a single object or an array of objects and
// flattens it into a list of object attributes, collecting attributes from
// all of the objects in a given array.
//
// Ordering is preserved, so a list of objects that each have one property
// will result in those properties being returned in the same order as the
// objects appeared in the array.
//
// This is appropriate for use only for objects representing bodies or labels
// within a block.
//
// The labelName argument, if non-nil, is used to tailor returned error
// messages to refer to block labels rather than attributes and child blocks.
// It has no other effect.
func (b *body) collectDeepAttrs(v node, labelName *string) ([]*objectAttr, hcl.Diagnostics) {
	var diags hcl.Diagnostics
	var attrs []*objectAttr

	switch tv := v.(type) {

	case *objectVal:
		attrs = append(attrs, tv.Attrs...)

	case *arrayVal:
		// Each array element must itself be an object; its attributes are
		// flattened into the single result list.
		for _, ev := range tv.Values {
			switch tev := ev.(type) {
			case *objectVal:
				attrs = append(attrs, tev.Attrs...)
			default:
				if labelName != nil {
					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  "Incorrect JSON value type",
						Detail:   fmt.Sprintf("A JSON object is required here, to specify %s labels for this block.", *labelName),
						Subject:  ev.StartRange().Ptr(),
					})
				} else {
					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  "Incorrect JSON value type",
						Detail:   "A JSON object is required here, to define attributes and child blocks.",
						Subject:  ev.StartRange().Ptr(),
					})
				}
			}
		}

	default:
		if labelName != nil {
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Incorrect JSON value type",
				Detail:   fmt.Sprintf("Either a JSON object or JSON array of objects is required here, to specify %s labels for this block.", *labelName),
				Subject:  v.StartRange().Ptr(),
			})
		} else {
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Incorrect JSON value type",
				Detail:   "Either a JSON object or JSON array of objects is required here, to define attributes and child blocks.",
				Subject:  v.StartRange().Ptr(),
			})
		}
	}

	return attrs, diags
}
375 | |||
376 | func (e *expression) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { | ||
377 | switch v := e.src.(type) { | ||
378 | case *stringVal: | ||
379 | if ctx != nil { | ||
380 | // Parse string contents as a HCL native language expression. | ||
381 | // We only do this if we have a context, so passing a nil context | ||
382 | // is how the caller specifies that interpolations are not allowed | ||
383 | // and that the string should just be returned verbatim. | ||
384 | templateSrc := v.Value | ||
385 | expr, diags := hclsyntax.ParseTemplate( | ||
386 | []byte(templateSrc), | ||
387 | v.SrcRange.Filename, | ||
388 | |||
389 | // This won't produce _exactly_ the right result, since | ||
390 | // the hclsyntax parser can't "see" any escapes we removed | ||
391 | // while parsing JSON, but it's better than nothing. | ||
392 | hcl.Pos{ | ||
393 | Line: v.SrcRange.Start.Line, | ||
394 | |||
395 | // skip over the opening quote mark | ||
396 | Byte: v.SrcRange.Start.Byte + 1, | ||
397 | Column: v.SrcRange.Start.Column + 1, | ||
398 | }, | ||
399 | ) | ||
400 | if diags.HasErrors() { | ||
401 | return cty.DynamicVal, diags | ||
402 | } | ||
403 | val, evalDiags := expr.Value(ctx) | ||
404 | diags = append(diags, evalDiags...) | ||
405 | return val, diags | ||
406 | } | ||
407 | |||
408 | return cty.StringVal(v.Value), nil | ||
409 | case *numberVal: | ||
410 | return cty.NumberVal(v.Value), nil | ||
411 | case *booleanVal: | ||
412 | return cty.BoolVal(v.Value), nil | ||
413 | case *arrayVal: | ||
414 | vals := []cty.Value{} | ||
415 | for _, jsonVal := range v.Values { | ||
416 | val, _ := (&expression{src: jsonVal}).Value(ctx) | ||
417 | vals = append(vals, val) | ||
418 | } | ||
419 | return cty.TupleVal(vals), nil | ||
420 | case *objectVal: | ||
421 | var diags hcl.Diagnostics | ||
422 | attrs := map[string]cty.Value{} | ||
423 | attrRanges := map[string]hcl.Range{} | ||
424 | known := true | ||
425 | for _, jsonAttr := range v.Attrs { | ||
426 | // In this one context we allow keys to contain interpolation | ||
427 | // experessions too, assuming we're evaluating in interpolation | ||
428 | // mode. This achieves parity with the native syntax where | ||
429 | // object expressions can have dynamic keys, while block contents | ||
430 | // may not. | ||
431 | name, nameDiags := (&expression{src: &stringVal{ | ||
432 | Value: jsonAttr.Name, | ||
433 | SrcRange: jsonAttr.NameRange, | ||
434 | }}).Value(ctx) | ||
435 | val, valDiags := (&expression{src: jsonAttr.Value}).Value(ctx) | ||
436 | diags = append(diags, nameDiags...) | ||
437 | diags = append(diags, valDiags...) | ||
438 | |||
439 | var err error | ||
440 | name, err = convert.Convert(name, cty.String) | ||
441 | if err != nil { | ||
442 | diags = append(diags, &hcl.Diagnostic{ | ||
443 | Severity: hcl.DiagError, | ||
444 | Summary: "Invalid object key expression", | ||
445 | Detail: fmt.Sprintf("Cannot use this expression as an object key: %s.", err), | ||
446 | Subject: &jsonAttr.NameRange, | ||
447 | }) | ||
448 | continue | ||
449 | } | ||
450 | if name.IsNull() { | ||
451 | diags = append(diags, &hcl.Diagnostic{ | ||
452 | Severity: hcl.DiagError, | ||
453 | Summary: "Invalid object key expression", | ||
454 | Detail: "Cannot use null value as an object key.", | ||
455 | Subject: &jsonAttr.NameRange, | ||
456 | }) | ||
457 | continue | ||
458 | } | ||
459 | if !name.IsKnown() { | ||
460 | // This is a bit of a weird case, since our usual rules require | ||
461 | // us to tolerate unknowns and just represent the result as | ||
462 | // best we can but if we don't know the key then we can't | ||
463 | // know the type of our object at all, and thus we must turn | ||
464 | // the whole thing into cty.DynamicVal. This is consistent with | ||
465 | // how this situation is handled in the native syntax. | ||
466 | // We'll keep iterating so we can collect other errors in | ||
467 | // subsequent attributes. | ||
468 | known = false | ||
469 | continue | ||
470 | } | ||
471 | nameStr := name.AsString() | ||
472 | if _, defined := attrs[nameStr]; defined { | ||
473 | diags = append(diags, &hcl.Diagnostic{ | ||
474 | Severity: hcl.DiagError, | ||
475 | Summary: "Duplicate object attribute", | ||
476 | Detail: fmt.Sprintf("An attribute named %q was already defined at %s.", nameStr, attrRanges[nameStr]), | ||
477 | Subject: &jsonAttr.NameRange, | ||
478 | }) | ||
479 | continue | ||
480 | } | ||
481 | attrs[nameStr] = val | ||
482 | attrRanges[nameStr] = jsonAttr.NameRange | ||
483 | } | ||
484 | if !known { | ||
485 | // We encountered an unknown key somewhere along the way, so | ||
486 | // we can't know what our type will eventually be. | ||
487 | return cty.DynamicVal, diags | ||
488 | } | ||
489 | return cty.ObjectVal(attrs), diags | ||
490 | default: | ||
491 | // Default to DynamicVal so that ASTs containing invalid nodes can | ||
492 | // still be partially-evaluated. | ||
493 | return cty.DynamicVal, nil | ||
494 | } | ||
495 | } | ||
496 | |||
497 | func (e *expression) Variables() []hcl.Traversal { | ||
498 | var vars []hcl.Traversal | ||
499 | |||
500 | switch v := e.src.(type) { | ||
501 | case *stringVal: | ||
502 | templateSrc := v.Value | ||
503 | expr, diags := hclsyntax.ParseTemplate( | ||
504 | []byte(templateSrc), | ||
505 | v.SrcRange.Filename, | ||
506 | |||
507 | // This won't produce _exactly_ the right result, since | ||
508 | // the hclsyntax parser can't "see" any escapes we removed | ||
509 | // while parsing JSON, but it's better than nothing. | ||
510 | hcl.Pos{ | ||
511 | Line: v.SrcRange.Start.Line, | ||
512 | |||
513 | // skip over the opening quote mark | ||
514 | Byte: v.SrcRange.Start.Byte + 1, | ||
515 | Column: v.SrcRange.Start.Column + 1, | ||
516 | }, | ||
517 | ) | ||
518 | if diags.HasErrors() { | ||
519 | return vars | ||
520 | } | ||
521 | return expr.Variables() | ||
522 | |||
523 | case *arrayVal: | ||
524 | for _, jsonVal := range v.Values { | ||
525 | vars = append(vars, (&expression{src: jsonVal}).Variables()...) | ||
526 | } | ||
527 | case *objectVal: | ||
528 | for _, jsonAttr := range v.Attrs { | ||
529 | vars = append(vars, (&expression{src: jsonAttr.Value}).Variables()...) | ||
530 | } | ||
531 | } | ||
532 | |||
533 | return vars | ||
534 | } | ||
535 | |||
// Range returns the full source range of the underlying JSON node.
func (e *expression) Range() hcl.Range {
	return e.src.Range()
}
539 | |||
// StartRange returns the range of the opening token of the underlying JSON
// node.
func (e *expression) StartRange() hcl.Range {
	return e.src.StartRange()
}
543 | |||
544 | // Implementation for hcl.AbsTraversalForExpr. | ||
545 | func (e *expression) AsTraversal() hcl.Traversal { | ||
546 | // In JSON-based syntax a traversal is given as a string containing | ||
547 | // traversal syntax as defined by hclsyntax.ParseTraversalAbs. | ||
548 | |||
549 | switch v := e.src.(type) { | ||
550 | case *stringVal: | ||
551 | traversal, diags := hclsyntax.ParseTraversalAbs([]byte(v.Value), v.SrcRange.Filename, v.SrcRange.Start) | ||
552 | if diags.HasErrors() { | ||
553 | return nil | ||
554 | } | ||
555 | return traversal | ||
556 | default: | ||
557 | return nil | ||
558 | } | ||
559 | } | ||
560 | |||
561 | // Implementation for hcl.ExprCall. | ||
562 | func (e *expression) ExprCall() *hcl.StaticCall { | ||
563 | // In JSON-based syntax a static call is given as a string containing | ||
564 | // an expression in the native syntax that also supports ExprCall. | ||
565 | |||
566 | switch v := e.src.(type) { | ||
567 | case *stringVal: | ||
568 | expr, diags := hclsyntax.ParseExpression([]byte(v.Value), v.SrcRange.Filename, v.SrcRange.Start) | ||
569 | if diags.HasErrors() { | ||
570 | return nil | ||
571 | } | ||
572 | |||
573 | call, diags := hcl.ExprCall(expr) | ||
574 | if diags.HasErrors() { | ||
575 | return nil | ||
576 | } | ||
577 | |||
578 | return call | ||
579 | default: | ||
580 | return nil | ||
581 | } | ||
582 | } | ||
583 | |||
584 | // Implementation for hcl.ExprList. | ||
585 | func (e *expression) ExprList() []hcl.Expression { | ||
586 | switch v := e.src.(type) { | ||
587 | case *arrayVal: | ||
588 | ret := make([]hcl.Expression, len(v.Values)) | ||
589 | for i, node := range v.Values { | ||
590 | ret[i] = &expression{src: node} | ||
591 | } | ||
592 | return ret | ||
593 | default: | ||
594 | return nil | ||
595 | } | ||
596 | } | ||
597 | |||
598 | // Implementation for hcl.ExprMap. | ||
599 | func (e *expression) ExprMap() []hcl.KeyValuePair { | ||
600 | switch v := e.src.(type) { | ||
601 | case *objectVal: | ||
602 | ret := make([]hcl.KeyValuePair, len(v.Attrs)) | ||
603 | for i, jsonAttr := range v.Attrs { | ||
604 | ret[i] = hcl.KeyValuePair{ | ||
605 | Key: &expression{src: &stringVal{ | ||
606 | Value: jsonAttr.Name, | ||
607 | SrcRange: jsonAttr.NameRange, | ||
608 | }}, | ||
609 | Value: &expression{src: jsonAttr.Value}, | ||
610 | } | ||
611 | } | ||
612 | return ret | ||
613 | default: | ||
614 | return nil | ||
615 | } | ||
616 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/tokentype_string.go b/vendor/github.com/hashicorp/hcl2/hcl/json/tokentype_string.go new file mode 100644 index 0000000..bbcce5b --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/json/tokentype_string.go | |||
@@ -0,0 +1,29 @@ | |||
1 | // Code generated by "stringer -type tokenType scanner.go"; DO NOT EDIT. | ||
2 | |||
3 | package json | ||
4 | |||
5 | import "strconv" | ||
6 | |||
// _tokenType_name concatenates all tokenType names; _tokenType_map slices
// into it, keyed by each token type's value. Generated by stringer — do not
// edit by hand; regenerate from scanner.go instead.
const _tokenType_name = "tokenInvalidtokenCommatokenColontokenEqualstokenKeywordtokenNumbertokenStringtokenBrackOtokenBrackCtokenBraceOtokenBraceCtokenEOF"

var _tokenType_map = map[tokenType]string{
	0: _tokenType_name[0:12],
	44: _tokenType_name[12:22],
	58: _tokenType_name[22:32],
	61: _tokenType_name[32:43],
	75: _tokenType_name[43:55],
	78: _tokenType_name[55:66],
	83: _tokenType_name[66:77],
	91: _tokenType_name[77:88],
	93: _tokenType_name[88:99],
	123: _tokenType_name[99:110],
	125: _tokenType_name[110:121],
	9220: _tokenType_name[121:129],
}
23 | |||
// String returns the name of the token type, or a "tokenType(N)" fallback
// for values not present in the generated map. Generated by stringer — do
// not edit by hand.
func (i tokenType) String() string {
	if str, ok := _tokenType_map[i]; ok {
		return str
	}
	return "tokenType(" + strconv.FormatInt(int64(i), 10) + ")"
}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/merged.go b/vendor/github.com/hashicorp/hcl2/hcl/merged.go new file mode 100644 index 0000000..ca2b728 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/merged.go | |||
@@ -0,0 +1,226 @@ | |||
1 | package hcl | ||
2 | |||
3 | import ( | ||
4 | "fmt" | ||
5 | ) | ||
6 | |||
7 | // MergeFiles combines the given files to produce a single body that contains | ||
8 | // configuration from all of the given files. | ||
9 | // | ||
10 | // The ordering of the given files decides the order in which contained | ||
11 | // elements will be returned. If any top-level attributes are defined with | ||
12 | // the same name across multiple files, a diagnostic will be produced from | ||
13 | // the Content and PartialContent methods describing this error in a | ||
14 | // user-friendly way. | ||
15 | func MergeFiles(files []*File) Body { | ||
16 | var bodies []Body | ||
17 | for _, file := range files { | ||
18 | bodies = append(bodies, file.Body) | ||
19 | } | ||
20 | return MergeBodies(bodies) | ||
21 | } | ||
22 | |||
23 | // MergeBodies is like MergeFiles except it deals directly with bodies, rather | ||
24 | // than with entire files. | ||
25 | func MergeBodies(bodies []Body) Body { | ||
26 | if len(bodies) == 0 { | ||
27 | // Swap out for our singleton empty body, to reduce the number of | ||
28 | // empty slices we have hanging around. | ||
29 | return emptyBody | ||
30 | } | ||
31 | |||
32 | // If any of the given bodies are already merged bodies, we'll unpack | ||
33 | // to flatten to a single mergedBodies, since that's conceptually simpler. | ||
34 | // This also, as a side-effect, eliminates any empty bodies, since | ||
35 | // empties are merged bodies with no inner bodies. | ||
36 | var newLen int | ||
37 | var flatten bool | ||
38 | for _, body := range bodies { | ||
39 | if children, merged := body.(mergedBodies); merged { | ||
40 | newLen += len(children) | ||
41 | flatten = true | ||
42 | } else { | ||
43 | newLen++ | ||
44 | } | ||
45 | } | ||
46 | |||
47 | if !flatten { // not just newLen == len, because we might have mergedBodies with single bodies inside | ||
48 | return mergedBodies(bodies) | ||
49 | } | ||
50 | |||
51 | if newLen == 0 { | ||
52 | // Don't allocate a new empty when we already have one | ||
53 | return emptyBody | ||
54 | } | ||
55 | |||
56 | new := make([]Body, 0, newLen) | ||
57 | for _, body := range bodies { | ||
58 | if children, merged := body.(mergedBodies); merged { | ||
59 | new = append(new, children...) | ||
60 | } else { | ||
61 | new = append(new, body) | ||
62 | } | ||
63 | } | ||
64 | return mergedBodies(new) | ||
65 | } | ||
66 | |||
// emptyBody is the shared singleton returned whenever an empty body is
// needed, so each caller doesn't allocate its own.
var emptyBody = mergedBodies([]Body{})

// EmptyBody returns a body with no content. This body can be used as a
// placeholder when a body is required but no body content is available.
func EmptyBody() Body {
	return emptyBody
}
74 | |||
// mergedBodies implements Body over a sequence of child bodies, combining
// their attributes and blocks as if they all belonged to a single body.
type mergedBodies []Body

// Content returns the content produced by applying the given schema to all
// of the merged bodies and merging the result.
//
// Although required attributes _are_ supported, they should be used sparingly
// with merged bodies since in this case there is no contextual information
// with which to return good diagnostics. Applications working with merged
// bodies may wish to mark all attributes as optional and then check for
// required attributes afterwards, to produce better diagnostics.
func (mb mergedBodies) Content(schema *BodySchema) (*BodyContent, Diagnostics) {
	// the returned body will always be empty in this case, because mergedContent
	// will only ever call Content on the child bodies.
	content, _, diags := mb.mergedContent(schema, false)
	return content, diags
}
91 | |||
// PartialContent is like Content, but additionally returns a leftover body
// combining whatever each child body left over after the schema's items
// were extracted from it.
func (mb mergedBodies) PartialContent(schema *BodySchema) (*BodyContent, Body, Diagnostics) {
	return mb.mergedContent(schema, true)
}
95 | |||
96 | func (mb mergedBodies) JustAttributes() (Attributes, Diagnostics) { | ||
97 | attrs := make(map[string]*Attribute) | ||
98 | var diags Diagnostics | ||
99 | |||
100 | for _, body := range mb { | ||
101 | thisAttrs, thisDiags := body.JustAttributes() | ||
102 | |||
103 | if len(thisDiags) != 0 { | ||
104 | diags = append(diags, thisDiags...) | ||
105 | } | ||
106 | |||
107 | if thisAttrs != nil { | ||
108 | for name, attr := range thisAttrs { | ||
109 | if existing := attrs[name]; existing != nil { | ||
110 | diags = diags.Append(&Diagnostic{ | ||
111 | Severity: DiagError, | ||
112 | Summary: "Duplicate attribute", | ||
113 | Detail: fmt.Sprintf( | ||
114 | "Attribute %q was already assigned at %s", | ||
115 | name, existing.NameRange.String(), | ||
116 | ), | ||
117 | Subject: &attr.NameRange, | ||
118 | }) | ||
119 | continue | ||
120 | } | ||
121 | |||
122 | attrs[name] = attr | ||
123 | } | ||
124 | } | ||
125 | } | ||
126 | |||
127 | return attrs, diags | ||
128 | } | ||
129 | |||
130 | func (mb mergedBodies) MissingItemRange() Range { | ||
131 | if len(mb) == 0 { | ||
132 | // Nothing useful to return here, so we'll return some garbage. | ||
133 | return Range{ | ||
134 | Filename: "<empty>", | ||
135 | } | ||
136 | } | ||
137 | |||
138 | // arbitrarily use the first body's missing item range | ||
139 | return mb[0].MissingItemRange() | ||
140 | } | ||
141 | |||
142 | func (mb mergedBodies) mergedContent(schema *BodySchema, partial bool) (*BodyContent, Body, Diagnostics) { | ||
143 | // We need to produce a new schema with none of the attributes marked as | ||
144 | // required, since _any one_ of our bodies can contribute an attribute value. | ||
145 | // We'll separately check that all required attributes are present at | ||
146 | // the end. | ||
147 | mergedSchema := &BodySchema{ | ||
148 | Blocks: schema.Blocks, | ||
149 | } | ||
150 | for _, attrS := range schema.Attributes { | ||
151 | mergedAttrS := attrS | ||
152 | mergedAttrS.Required = false | ||
153 | mergedSchema.Attributes = append(mergedSchema.Attributes, mergedAttrS) | ||
154 | } | ||
155 | |||
156 | var mergedLeftovers []Body | ||
157 | content := &BodyContent{ | ||
158 | Attributes: map[string]*Attribute{}, | ||
159 | } | ||
160 | |||
161 | var diags Diagnostics | ||
162 | for _, body := range mb { | ||
163 | var thisContent *BodyContent | ||
164 | var thisLeftovers Body | ||
165 | var thisDiags Diagnostics | ||
166 | |||
167 | if partial { | ||
168 | thisContent, thisLeftovers, thisDiags = body.PartialContent(mergedSchema) | ||
169 | } else { | ||
170 | thisContent, thisDiags = body.Content(mergedSchema) | ||
171 | } | ||
172 | |||
173 | if thisLeftovers != nil { | ||
174 | mergedLeftovers = append(mergedLeftovers) | ||
175 | } | ||
176 | if len(thisDiags) != 0 { | ||
177 | diags = append(diags, thisDiags...) | ||
178 | } | ||
179 | |||
180 | if thisContent.Attributes != nil { | ||
181 | for name, attr := range thisContent.Attributes { | ||
182 | if existing := content.Attributes[name]; existing != nil { | ||
183 | diags = diags.Append(&Diagnostic{ | ||
184 | Severity: DiagError, | ||
185 | Summary: "Duplicate attribute", | ||
186 | Detail: fmt.Sprintf( | ||
187 | "Attribute %q was already assigned at %s", | ||
188 | name, existing.NameRange.String(), | ||
189 | ), | ||
190 | Subject: &attr.NameRange, | ||
191 | }) | ||
192 | continue | ||
193 | } | ||
194 | content.Attributes[name] = attr | ||
195 | } | ||
196 | } | ||
197 | |||
198 | if len(thisContent.Blocks) != 0 { | ||
199 | content.Blocks = append(content.Blocks, thisContent.Blocks...) | ||
200 | } | ||
201 | } | ||
202 | |||
203 | // Finally, we check for required attributes. | ||
204 | for _, attrS := range schema.Attributes { | ||
205 | if !attrS.Required { | ||
206 | continue | ||
207 | } | ||
208 | |||
209 | if content.Attributes[attrS.Name] == nil { | ||
210 | // We don't have any context here to produce a good diagnostic, | ||
211 | // which is why we warn in the Content docstring to minimize the | ||
212 | // use of required attributes on merged bodies. | ||
213 | diags = diags.Append(&Diagnostic{ | ||
214 | Severity: DiagError, | ||
215 | Summary: "Missing required attribute", | ||
216 | Detail: fmt.Sprintf( | ||
217 | "The attribute %q is required, but was not assigned.", | ||
218 | attrS.Name, | ||
219 | ), | ||
220 | }) | ||
221 | } | ||
222 | } | ||
223 | |||
224 | leftoverBody := MergeBodies(mergedLeftovers) | ||
225 | return content, leftoverBody, diags | ||
226 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/ops.go b/vendor/github.com/hashicorp/hcl2/hcl/ops.go new file mode 100644 index 0000000..f4e30b0 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/ops.go | |||
@@ -0,0 +1,147 @@ | |||
1 | package hcl | ||
2 | |||
3 | import ( | ||
4 | "fmt" | ||
5 | |||
6 | "github.com/zclconf/go-cty/cty" | ||
7 | "github.com/zclconf/go-cty/cty/convert" | ||
8 | ) | ||
9 | |||
10 | // Index is a helper function that performs the same operation as the index | ||
11 | // operator in the HCL expression language. That is, the result is the | ||
12 | // same as it would be for collection[key] in a configuration expression. | ||
13 | // | ||
14 | // This is exported so that applications can perform indexing in a manner | ||
15 | // consistent with how the language does it, including handling of null and | ||
16 | // unknown values, etc. | ||
17 | // | ||
18 | // Diagnostics are produced if the given combination of values is not valid. | ||
19 | // Therefore a pointer to a source range must be provided to use in diagnostics, | ||
20 | // though nil can be provided if the calling application is going to | ||
21 | // ignore the subject of the returned diagnostics anyway. | ||
22 | func Index(collection, key cty.Value, srcRange *Range) (cty.Value, Diagnostics) { | ||
23 | if collection.IsNull() { | ||
24 | return cty.DynamicVal, Diagnostics{ | ||
25 | { | ||
26 | Severity: DiagError, | ||
27 | Summary: "Attempt to index null value", | ||
28 | Detail: "This value is null, so it does not have any indices.", | ||
29 | Subject: srcRange, | ||
30 | }, | ||
31 | } | ||
32 | } | ||
33 | if key.IsNull() { | ||
34 | return cty.DynamicVal, Diagnostics{ | ||
35 | { | ||
36 | Severity: DiagError, | ||
37 | Summary: "Invalid index", | ||
38 | Detail: "Can't use a null value as an indexing key.", | ||
39 | Subject: srcRange, | ||
40 | }, | ||
41 | } | ||
42 | } | ||
43 | ty := collection.Type() | ||
44 | kty := key.Type() | ||
45 | if kty == cty.DynamicPseudoType || ty == cty.DynamicPseudoType { | ||
46 | return cty.DynamicVal, nil | ||
47 | } | ||
48 | |||
49 | switch { | ||
50 | |||
51 | case ty.IsListType() || ty.IsTupleType() || ty.IsMapType(): | ||
52 | var wantType cty.Type | ||
53 | switch { | ||
54 | case ty.IsListType() || ty.IsTupleType(): | ||
55 | wantType = cty.Number | ||
56 | case ty.IsMapType(): | ||
57 | wantType = cty.String | ||
58 | default: | ||
59 | // should never happen | ||
60 | panic("don't know what key type we want") | ||
61 | } | ||
62 | |||
63 | key, keyErr := convert.Convert(key, wantType) | ||
64 | if keyErr != nil { | ||
65 | return cty.DynamicVal, Diagnostics{ | ||
66 | { | ||
67 | Severity: DiagError, | ||
68 | Summary: "Invalid index", | ||
69 | Detail: fmt.Sprintf( | ||
70 | "The given key does not identify an element in this collection value: %s.", | ||
71 | keyErr.Error(), | ||
72 | ), | ||
73 | Subject: srcRange, | ||
74 | }, | ||
75 | } | ||
76 | } | ||
77 | |||
78 | has := collection.HasIndex(key) | ||
79 | if !has.IsKnown() { | ||
80 | if ty.IsTupleType() { | ||
81 | return cty.DynamicVal, nil | ||
82 | } else { | ||
83 | return cty.UnknownVal(ty.ElementType()), nil | ||
84 | } | ||
85 | } | ||
86 | if has.False() { | ||
87 | return cty.DynamicVal, Diagnostics{ | ||
88 | { | ||
89 | Severity: DiagError, | ||
90 | Summary: "Invalid index", | ||
91 | Detail: "The given key does not identify an element in this collection value.", | ||
92 | Subject: srcRange, | ||
93 | }, | ||
94 | } | ||
95 | } | ||
96 | |||
97 | return collection.Index(key), nil | ||
98 | |||
99 | case ty.IsObjectType(): | ||
100 | key, keyErr := convert.Convert(key, cty.String) | ||
101 | if keyErr != nil { | ||
102 | return cty.DynamicVal, Diagnostics{ | ||
103 | { | ||
104 | Severity: DiagError, | ||
105 | Summary: "Invalid index", | ||
106 | Detail: fmt.Sprintf( | ||
107 | "The given key does not identify an element in this collection value: %s.", | ||
108 | keyErr.Error(), | ||
109 | ), | ||
110 | Subject: srcRange, | ||
111 | }, | ||
112 | } | ||
113 | } | ||
114 | if !collection.IsKnown() { | ||
115 | return cty.DynamicVal, nil | ||
116 | } | ||
117 | if !key.IsKnown() { | ||
118 | return cty.DynamicVal, nil | ||
119 | } | ||
120 | |||
121 | attrName := key.AsString() | ||
122 | |||
123 | if !ty.HasAttribute(attrName) { | ||
124 | return cty.DynamicVal, Diagnostics{ | ||
125 | { | ||
126 | Severity: DiagError, | ||
127 | Summary: "Invalid index", | ||
128 | Detail: "The given key does not identify an element in this collection value.", | ||
129 | Subject: srcRange, | ||
130 | }, | ||
131 | } | ||
132 | } | ||
133 | |||
134 | return collection.GetAttr(attrName), nil | ||
135 | |||
136 | default: | ||
137 | return cty.DynamicVal, Diagnostics{ | ||
138 | { | ||
139 | Severity: DiagError, | ||
140 | Summary: "Invalid index", | ||
141 | Detail: "This value does not have any indices.", | ||
142 | Subject: srcRange, | ||
143 | }, | ||
144 | } | ||
145 | } | ||
146 | |||
147 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/pos.go b/vendor/github.com/hashicorp/hcl2/hcl/pos.go new file mode 100644 index 0000000..1a4b329 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/pos.go | |||
@@ -0,0 +1,262 @@ | |||
1 | package hcl | ||
2 | |||
3 | import "fmt" | ||
4 | |||
// Pos represents a single position in a source file, by addressing the
// start byte of a unicode character encoded in UTF-8.
//
// Pos is generally used only in the context of a Range, which then defines
// which source file the position is within.
type Pos struct {
	// Line is the source code line where this position points. Lines are
	// counted starting at 1 and incremented for each newline character
	// encountered.
	Line int

	// Column is the source code column where this position points, in
	// unicode characters, with counting starting at 1.
	//
	// Column counts characters as they appear visually, so for example a
	// latin letter with a combining diacritic mark counts as one character.
	// This is intended for rendering visual markers against source code in
	// contexts where these diacritics would be rendered in a single character
	// cell. Technically speaking, Column is counting grapheme clusters as
	// used in unicode normalization.
	Column int

	// Byte is the byte offset into the file where the indicated character
	// begins. This is a zero-based offset to the first byte of the first
	// UTF-8 codepoint sequence in the character, and thus gives a position
	// that can be resolved _without_ awareness of Unicode characters.
	//
	// Functions in this package that compare or order positions (e.g.
	// RangeOver, Range.Overlap) do so using Byte.
	Byte int
}
33 | |||
// Range represents a span of characters between two positions in a source
// file.
//
// This struct is usually used by value in types that represent AST nodes,
// but by pointer in types that refer to the positions of other objects,
// such as in diagnostics.
type Range struct {
	// Filename is the name of the file into which this range's positions
	// point.
	Filename string

	// Start and End represent the bounds of this range. Start is inclusive
	// and End is exclusive. A range whose Start and End byte offsets
	// coincide is considered empty (see Empty).
	Start, End Pos
}
49 | |||
50 | // RangeBetween returns a new range that spans from the beginning of the | ||
51 | // start range to the end of the end range. | ||
52 | // | ||
53 | // The result is meaningless if the two ranges do not belong to the same | ||
54 | // source file or if the end range appears before the start range. | ||
55 | func RangeBetween(start, end Range) Range { | ||
56 | return Range{ | ||
57 | Filename: start.Filename, | ||
58 | Start: start.Start, | ||
59 | End: end.End, | ||
60 | } | ||
61 | } | ||
62 | |||
63 | // RangeOver returns a new range that covers both of the given ranges and | ||
64 | // possibly additional content between them if the two ranges do not overlap. | ||
65 | // | ||
66 | // If either range is empty then it is ignored. The result is empty if both | ||
67 | // given ranges are empty. | ||
68 | // | ||
69 | // The result is meaningless if the two ranges to not belong to the same | ||
70 | // source file. | ||
71 | func RangeOver(a, b Range) Range { | ||
72 | if a.Empty() { | ||
73 | return b | ||
74 | } | ||
75 | if b.Empty() { | ||
76 | return a | ||
77 | } | ||
78 | |||
79 | var start, end Pos | ||
80 | if a.Start.Byte < b.Start.Byte { | ||
81 | start = a.Start | ||
82 | } else { | ||
83 | start = b.Start | ||
84 | } | ||
85 | if a.End.Byte > b.End.Byte { | ||
86 | end = a.End | ||
87 | } else { | ||
88 | end = b.End | ||
89 | } | ||
90 | return Range{ | ||
91 | Filename: a.Filename, | ||
92 | Start: start, | ||
93 | End: end, | ||
94 | } | ||
95 | } | ||
96 | |||
97 | // ContainsOffset returns true if and only if the given byte offset is within | ||
98 | // the receiving Range. | ||
99 | func (r Range) ContainsOffset(offset int) bool { | ||
100 | return offset >= r.Start.Byte && offset < r.End.Byte | ||
101 | } | ||
102 | |||
// Ptr returns a pointer to a copy of the receiver. This is a convenience
// for when a range is needed in places where pointers are required, such as
// in Diagnostic, but the range in question is returned from a method. Go
// would otherwise not allow one to take the address of a function call.
func (r Range) Ptr() *Range {
	return &r
}
110 | |||
111 | // String returns a compact string representation of the receiver. | ||
112 | // Callers should generally prefer to present a range more visually, | ||
113 | // e.g. via markers directly on the relevant portion of source code. | ||
114 | func (r Range) String() string { | ||
115 | if r.Start.Line == r.End.Line { | ||
116 | return fmt.Sprintf( | ||
117 | "%s:%d,%d-%d", | ||
118 | r.Filename, | ||
119 | r.Start.Line, r.Start.Column, | ||
120 | r.End.Column, | ||
121 | ) | ||
122 | } else { | ||
123 | return fmt.Sprintf( | ||
124 | "%s:%d,%d-%d,%d", | ||
125 | r.Filename, | ||
126 | r.Start.Line, r.Start.Column, | ||
127 | r.End.Line, r.End.Column, | ||
128 | ) | ||
129 | } | ||
130 | } | ||
131 | |||
// Empty returns true if the receiver covers no characters, i.e. its start
// and end byte offsets coincide.
func (r Range) Empty() bool {
	return r.Start.Byte == r.End.Byte
}
135 | |||
136 | // CanSliceBytes returns true if SliceBytes could return an accurate | ||
137 | // sub-slice of the given slice. | ||
138 | // | ||
139 | // This effectively tests whether the start and end offsets of the range | ||
140 | // are within the bounds of the slice, and thus whether SliceBytes can be | ||
141 | // trusted to produce an accurate start and end position within that slice. | ||
142 | func (r Range) CanSliceBytes(b []byte) bool { | ||
143 | switch { | ||
144 | case r.Start.Byte < 0 || r.Start.Byte > len(b): | ||
145 | return false | ||
146 | case r.End.Byte < 0 || r.End.Byte > len(b): | ||
147 | return false | ||
148 | case r.End.Byte < r.Start.Byte: | ||
149 | return false | ||
150 | default: | ||
151 | return true | ||
152 | } | ||
153 | } | ||
154 | |||
155 | // SliceBytes returns a sub-slice of the given slice that is covered by the | ||
156 | // receiving range, assuming that the given slice is the source code of the | ||
157 | // file indicated by r.Filename. | ||
158 | // | ||
159 | // If the receiver refers to any byte offsets that are outside of the slice | ||
160 | // then the result is constrained to the overlapping portion only, to avoid | ||
161 | // a panic. Use CanSliceBytes to determine if the result is guaranteed to | ||
162 | // be an accurate span of the requested range. | ||
163 | func (r Range) SliceBytes(b []byte) []byte { | ||
164 | start := r.Start.Byte | ||
165 | end := r.End.Byte | ||
166 | if start < 0 { | ||
167 | start = 0 | ||
168 | } else if start > len(b) { | ||
169 | start = len(b) | ||
170 | } | ||
171 | if end < 0 { | ||
172 | end = 0 | ||
173 | } else if end > len(b) { | ||
174 | end = len(b) | ||
175 | } | ||
176 | if end < start { | ||
177 | end = start | ||
178 | } | ||
179 | return b[start:end] | ||
180 | } | ||
181 | |||
182 | // Overlaps returns true if the receiver and the other given range share any | ||
183 | // characters in common. | ||
184 | func (r Range) Overlaps(other Range) bool { | ||
185 | switch { | ||
186 | case r.Filename != other.Filename: | ||
187 | // If the ranges are in different files then they can't possibly overlap | ||
188 | return false | ||
189 | case r.Empty() || other.Empty(): | ||
190 | // Empty ranges can never overlap | ||
191 | return false | ||
192 | case r.ContainsOffset(other.Start.Byte) || r.ContainsOffset(other.End.Byte): | ||
193 | return true | ||
194 | case other.ContainsOffset(r.Start.Byte) || other.ContainsOffset(r.End.Byte): | ||
195 | return true | ||
196 | default: | ||
197 | return false | ||
198 | } | ||
199 | } | ||
200 | |||
201 | // Overlap finds a range that is either identical to or a sub-range of both | ||
202 | // the receiver and the other given range. It returns an empty range | ||
203 | // within the receiver if there is no overlap between the two ranges. | ||
204 | // | ||
205 | // A non-empty result is either identical to or a subset of the receiver. | ||
206 | func (r Range) Overlap(other Range) Range { | ||
207 | if !r.Overlaps(other) { | ||
208 | // Start == End indicates an empty range | ||
209 | return Range{ | ||
210 | Filename: r.Filename, | ||
211 | Start: r.Start, | ||
212 | End: r.Start, | ||
213 | } | ||
214 | } | ||
215 | |||
216 | var start, end Pos | ||
217 | if r.Start.Byte > other.Start.Byte { | ||
218 | start = r.Start | ||
219 | } else { | ||
220 | start = other.Start | ||
221 | } | ||
222 | if r.End.Byte < other.End.Byte { | ||
223 | end = r.End | ||
224 | } else { | ||
225 | end = other.End | ||
226 | } | ||
227 | |||
228 | return Range{ | ||
229 | Filename: r.Filename, | ||
230 | Start: start, | ||
231 | End: end, | ||
232 | } | ||
233 | } | ||
234 | |||
// PartitionAround finds the portion of the given range that overlaps with
// the receiver and returns three ranges: the portion of the receiver that
// precedes the overlap, the overlap itself, and then the portion of the
// receiver that comes after the overlap.
//
// If the two ranges do not overlap then all three returned ranges are empty.
//
// If the given range aligns with or extends beyond either extent of the
// receiver then the corresponding outer range will be empty.
func (r Range) PartitionAround(other Range) (before, overlap, after Range) {
	overlap = r.Overlap(other)
	if overlap.Empty() {
		return overlap, overlap, overlap
	}

	before = Range{
		Filename: r.Filename,
		Start:    r.Start,
		End:      overlap.Start,
	}
	after = Range{
		Filename: r.Filename,
		Start:    overlap.End,
		End:      r.End,
	}

	return before, overlap, after
}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/pos_scanner.go b/vendor/github.com/hashicorp/hcl2/hcl/pos_scanner.go new file mode 100644 index 0000000..7c8f2df --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/pos_scanner.go | |||
@@ -0,0 +1,148 @@ | |||
1 | package hcl | ||
2 | |||
3 | import ( | ||
4 | "bufio" | ||
5 | "bytes" | ||
6 | |||
7 | "github.com/apparentlymart/go-textseg/textseg" | ||
8 | ) | ||
9 | |||
// RangeScanner is a helper that will scan over a buffer using a bufio.SplitFunc
// and visit a source range for each token matched.
//
// For example, this can be used with bufio.ScanLines to find the source range
// for each line in the file, skipping over the actual newline characters, which
// may be useful when printing source code snippets as part of diagnostic
// messages.
//
// The line and column information in the returned ranges is produced by
// counting newline characters and grapheme clusters respectively, which
// mimics the behavior we expect from a parser when producing ranges.
type RangeScanner struct {
	filename string          // name reported in every produced Range
	b        []byte          // the complete buffer being scanned
	cb       bufio.SplitFunc // tokenizer that decides where each token ends

	pos Pos    // position of next byte to process in b
	cur Range  // latest range
	tok []byte // slice of b that is covered by cur
	err error  // error from last scan, if any
}
31 | |||
// NewRangeScanner creates a new RangeScanner for the given buffer, producing
// ranges for the given filename.
//
// Since ranges have grapheme-cluster granularity rather than byte granularity,
// the scanner will produce incorrect results if the given SplitFunc creates
// tokens between grapheme cluster boundaries. In particular, it is incorrect
// to use RangeScanner with bufio.ScanRunes because it will produce tokens
// around individual UTF-8 sequences, which will split any multi-sequence
// grapheme clusters.
func NewRangeScanner(b []byte, filename string, cb bufio.SplitFunc) *RangeScanner {
	return &RangeScanner{
		filename: filename,
		b:        b,
		cb:       cb,
		pos: Pos{
			Byte:   0,
			Line:   1,
			Column: 1,
		},
	}
}
53 | |||
54 | func (sc *RangeScanner) Scan() bool { | ||
55 | if sc.pos.Byte >= len(sc.b) || sc.err != nil { | ||
56 | // All done | ||
57 | return false | ||
58 | } | ||
59 | |||
60 | // Since we're operating on an in-memory buffer, we always pass the whole | ||
61 | // remainder of the buffer to our SplitFunc and set isEOF to let it know | ||
62 | // that it has the whole thing. | ||
63 | advance, token, err := sc.cb(sc.b[sc.pos.Byte:], true) | ||
64 | |||
65 | // Since we are setting isEOF to true this should never happen, but | ||
66 | // if it does we will just abort and assume the SplitFunc is misbehaving. | ||
67 | if advance == 0 && token == nil && err == nil { | ||
68 | return false | ||
69 | } | ||
70 | |||
71 | if err != nil { | ||
72 | sc.err = err | ||
73 | sc.cur = Range{ | ||
74 | Filename: sc.filename, | ||
75 | Start: sc.pos, | ||
76 | End: sc.pos, | ||
77 | } | ||
78 | sc.tok = nil | ||
79 | return false | ||
80 | } | ||
81 | |||
82 | sc.tok = token | ||
83 | start := sc.pos | ||
84 | end := sc.pos | ||
85 | new := sc.pos | ||
86 | |||
87 | // adv is similar to token but it also includes any subsequent characters | ||
88 | // we're being asked to skip over by the SplitFunc. | ||
89 | // adv is a slice covering any additional bytes we are skipping over, based | ||
90 | // on what the SplitFunc told us to do with advance. | ||
91 | adv := sc.b[sc.pos.Byte : sc.pos.Byte+advance] | ||
92 | |||
93 | // We now need to scan over our token to count the grapheme clusters | ||
94 | // so we can correctly advance Column, and count the newlines so we | ||
95 | // can correctly advance Line. | ||
96 | advR := bytes.NewReader(adv) | ||
97 | gsc := bufio.NewScanner(advR) | ||
98 | advanced := 0 | ||
99 | gsc.Split(textseg.ScanGraphemeClusters) | ||
100 | for gsc.Scan() { | ||
101 | gr := gsc.Bytes() | ||
102 | new.Byte += len(gr) | ||
103 | new.Column++ | ||
104 | |||
105 | // We rely here on the fact that \r\n is considered a grapheme cluster | ||
106 | // and so we don't need to worry about miscounting additional lines | ||
107 | // on files with Windows-style line endings. | ||
108 | if len(gr) != 0 && (gr[0] == '\r' || gr[0] == '\n') { | ||
109 | new.Column = 1 | ||
110 | new.Line++ | ||
111 | } | ||
112 | |||
113 | if advanced < len(token) { | ||
114 | // If we've not yet found the end of our token then we'll | ||
115 | // also push our "end" marker along. | ||
116 | // (if advance > len(token) then we'll stop moving "end" early | ||
117 | // so that the caller only sees the range covered by token.) | ||
118 | end = new | ||
119 | } | ||
120 | advanced += len(gr) | ||
121 | } | ||
122 | |||
123 | sc.cur = Range{ | ||
124 | Filename: sc.filename, | ||
125 | Start: start, | ||
126 | End: end, | ||
127 | } | ||
128 | sc.pos = new | ||
129 | return true | ||
130 | } | ||
131 | |||
// Range returns a range that covers the latest token obtained after a call
// to Scan returns true. Before the first successful Scan it returns the
// zero Range.
func (sc *RangeScanner) Range() Range {
	return sc.cur
}
137 | |||
// Bytes returns the slice of the input buffer that is covered by the range
// that would be returned by Range. The slice aliases the scanner's buffer;
// it is not a copy.
func (sc *RangeScanner) Bytes() []byte {
	return sc.tok
}
143 | |||
// Err can be called after Scan returns false to determine if the latest read
// resulted in an error, and obtain that error if so. It returns nil when
// scanning stopped simply because the input was exhausted.
func (sc *RangeScanner) Err() error {
	return sc.err
}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/schema.go b/vendor/github.com/hashicorp/hcl2/hcl/schema.go new file mode 100644 index 0000000..891257a --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/schema.go | |||
@@ -0,0 +1,21 @@ | |||
1 | package hcl | ||
2 | |||
// BlockHeaderSchema represents the shape of a block header, and is
// used for matching blocks within bodies.
type BlockHeaderSchema struct {
	// Type is the block type name to match.
	Type string
	// LabelNames gives a name for each label a matching block is expected
	// to carry.
	LabelNames []string
}

// AttributeSchema represents the requirements for an attribute, and is used
// for matching attributes within bodies.
type AttributeSchema struct {
	// Name is the attribute name to match.
	Name string
	// Required, if true, causes an error diagnostic when the attribute is
	// absent (see mergedBodies.mergedContent for an example consumer).
	Required bool
}

// BodySchema represents the desired shallow structure of a body.
type BodySchema struct {
	Attributes []AttributeSchema
	Blocks     []BlockHeaderSchema
}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/spec.md b/vendor/github.com/hashicorp/hcl2/hcl/spec.md new file mode 100644 index 0000000..58257bf --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/spec.md | |||
@@ -0,0 +1,691 @@ | |||
1 | # HCL Syntax-Agnostic Information Model | ||
2 | |||
3 | This is the specification for the general information model (abstract types and | ||
4 | semantics) for hcl. HCL is a system for defining configuration languages for | ||
5 | applications. The HCL information model is designed to support multiple | ||
6 | concrete syntaxes for configuration, each with a mapping to the model defined | ||
7 | in this specification. | ||
8 | |||
9 | The two primary syntaxes intended for use in conjunction with this model are | ||
10 | [the HCL native syntax](./hclsyntax/spec.md) and [the JSON syntax](./json/spec.md). | ||
11 | In principle other syntaxes are possible as long as either their language model | ||
12 | is sufficiently rich to express the concepts described in this specification | ||
13 | or the language targets a well-defined subset of the specification. | ||
14 | |||
15 | ## Structural Elements | ||
16 | |||
17 | The primary structural element is the _body_, which is a container representing | ||
18 | a set of zero or more _attributes_ and a set of zero or more _blocks_. | ||
19 | |||
20 | A _configuration file_ is the top-level object, and will usually be produced | ||
21 | by reading a file from disk and parsing it as a particular syntax. A | ||
22 | configuration file has its own _body_, representing the top-level attributes | ||
23 | and blocks. | ||
24 | |||
25 | An _attribute_ is a name and value pair associated with a body. Attribute names | ||
26 | are unique within a given body. Attribute values are provided as _expressions_, | ||
27 | which are discussed in detail in a later section. | ||
28 | |||
29 | A _block_ is a nested structure that has a _type name_, zero or more string | ||
30 | _labels_ (e.g. identifiers), and a nested body. | ||
31 | |||
Together the structural elements create a hierarchical data structure, with
33 | attributes intended to represent the direct properties of a particular object | ||
34 | in the calling application, and blocks intended to represent child objects | ||
35 | of a particular object. | ||
36 | |||
37 | ## Body Content | ||
38 | |||
39 | To support the expression of the HCL concepts in languages whose information | ||
40 | model is a subset of HCL's, such as JSON, a _body_ is an opaque container | ||
41 | whose content can only be accessed by providing information on the expected | ||
42 | structure of the content. | ||
43 | |||
44 | The specification for each syntax must describe how its physical constructs | ||
45 | are mapped on to body content given a schema. For syntaxes that have | ||
46 | first-class syntax distinguishing attributes and bodies this can be relatively | ||
47 | straightforward, while more detailed mapping rules may be required in syntaxes | ||
48 | where the representation of attributes vs. blocks is ambiguous. | ||
49 | |||
50 | ### Schema-driven Processing | ||
51 | |||
52 | Schema-driven processing is the primary way to access body content. | ||
53 | A _body schema_ is a description of what is expected within a particular body, | ||
54 | which can then be used to extract the _body content_, which then provides | ||
55 | access to the specific attributes and blocks requested. | ||
56 | |||
57 | A _body schema_ consists of a list of _attribute schemata_ and | ||
58 | _block header schemata_: | ||
59 | |||
60 | * An _attribute schema_ provides the name of an attribute and whether its | ||
61 | presence is required. | ||
62 | |||
63 | * A _block header schema_ provides a block type name and the semantic names | ||
64 | assigned to each of the labels of that block type, if any. | ||
65 | |||
66 | Within a schema, it is an error to request the same attribute name twice or | ||
67 | to request a block type whose name is also an attribute name. While this can | ||
68 | in principle be supported in some syntaxes, in other syntaxes the attribute | ||
and block namespaces are combined and so an attribute cannot coexist with
70 | a block whose type name is identical to the attribute name. | ||
71 | |||
72 | The result of applying a body schema to a body is _body content_, which | ||
73 | consists of an _attribute map_ and a _block sequence_: | ||
74 | |||
75 | * The _attribute map_ is a map data structure whose keys are attribute names | ||
76 | and whose values are _expressions_ that represent the corresponding attribute | ||
77 | values. | ||
78 | |||
79 | * The _block sequence_ is an ordered sequence of blocks, with each specifying | ||
80 | a block _type name_, the sequence of _labels_ specified for the block, | ||
81 | and the body object (not body _content_) representing the block's own body. | ||
82 | |||
83 | After obtaining _body content_, the calling application may continue processing | ||
84 | by evaluating attribute expressions and/or recursively applying further | ||
85 | schema-driven processing to the child block bodies. | ||
86 | |||
87 | **Note:** The _body schema_ is intentionally minimal, to reduce the set of | ||
88 | mapping rules that must be defined for each syntax. Higher-level utility | ||
89 | libraries may be provided to assist in the construction of a schema and | ||
90 | perform additional processing, such as automatically evaluating attribute | ||
91 | expressions and assigning their result values into a data structure, or | ||
92 | recursively applying a schema to child blocks. Such utilities are not part of | ||
93 | this core specification and will vary depending on the capabilities and idiom | ||
94 | of the implementation language. | ||
95 | |||
96 | ### _Dynamic Attributes_ Processing | ||
97 | |||
98 | The _schema-driven_ processing model is useful when the expected structure | ||
99 | of a body is known a priori by the calling application. Some blocks are | ||
100 | instead more free-form, such as a user-provided set of arbitrary key/value | ||
101 | pairs. | ||
102 | |||
103 | The alternative _dynamic attributes_ processing mode allows for this more | ||
104 | ad-hoc approach. Processing in this mode behaves as if a schema had been | ||
105 | constructed without any _block header schemata_ and with an attribute | ||
106 | schema for each distinct key provided within the physical representation | ||
107 | of the body. | ||
108 | |||
109 | The means by which _distinct keys_ are identified is dependent on the | ||
110 | physical syntax; this processing mode assumes that the syntax has a way | ||
111 | to enumerate keys provided by the author and identify expressions that | ||
112 | correspond with those keys, but does not define the means by which this is | ||
113 | done. | ||
114 | |||
115 | The result of _dynamic attributes_ processing is an _attribute map_ as | ||
116 | defined in the previous section. No _block sequence_ is produced in this | ||
117 | processing mode. | ||
118 | |||
119 | ### Partial Processing of Body Content | ||
120 | |||
121 | Under _schema-driven processing_, by default the given schema is assumed | ||
122 | to be exhaustive, such that any attribute or block not matched by schema | ||
123 | elements is considered an error. This allows feedback about unsupported | ||
124 | attributes and blocks (such as typos) to be provided. | ||
125 | |||
126 | An alternative is _partial processing_, where any additional elements within | ||
127 | the body are not considered an error. | ||
128 | |||
129 | Under partial processing, the result is both body content as described | ||
130 | above _and_ a new body that represents any body elements that remain after | ||
131 | the schema has been processed. | ||
132 | |||
133 | Specifically: | ||
134 | |||
135 | * Any attribute whose name is specified in the schema is returned in body | ||
136 | content and elided from the new body. | ||
137 | |||
138 | * Any block whose type is specified in the schema is returned in body content | ||
139 | and elided from the new body. | ||
140 | |||
141 | * Any attribute or block _not_ meeting the above conditions is placed into | ||
142 | the new body, unmodified. | ||
143 | |||
144 | The new body can then be recursively processed using any of the body | ||
145 | processing models. This facility allows different subsets of body content | ||
146 | to be processed by different parts of the calling application. | ||
147 | |||
148 | Processing a body in two steps — first partial processing of a source body, | ||
149 | then exhaustive processing of the returned body — is equivalent to single-step | ||
150 | processing with a schema that is the union of the schemata used | ||
151 | across the two steps. | ||
152 | |||
153 | ## Expressions | ||
154 | |||
155 | Attribute values are represented by _expressions_. Depending on the concrete | ||
156 | syntax in use, an expression may just be a literal value or it may describe | ||
157 | a computation in terms of literal values, variables, and functions. | ||
158 | |||
159 | Each syntax defines its own representation of expressions. For syntaxes based | ||
160 | in languages that do not have any non-literal expression syntax, it is | ||
161 | recommended to embed the template language from | ||
162 | [the native syntax](./hclsyntax/spec.md) e.g. as a post-processing step on | ||
163 | string literals. | ||
164 | |||
165 | ### Expression Evaluation | ||
166 | |||
167 | In order to obtain a concrete value, each expression must be _evaluated_. | ||
168 | Evaluation is performed in terms of an evaluation context, which | ||
169 | consists of the following: | ||
170 | |||
171 | * An _evaluation mode_, which is defined below. | ||
172 | * A _variable scope_, which provides a set of named variables for use in | ||
173 | expressions. | ||
174 | * A _function table_, which provides a set of named functions for use in | ||
175 | expressions. | ||
176 | |||
177 | The _evaluation mode_ allows for two different interpretations of an | ||
178 | expression: | ||
179 | |||
180 | * In _literal-only mode_, variables and functions are not available and it | ||
181 | is assumed that the calling application's intent is to treat the attribute | ||
182 | value as a literal. | ||
183 | |||
184 | * In _full expression mode_, variables and functions are defined and it is | ||
185 | assumed that the calling application wishes to provide a full expression | ||
186 | language for definition of the attribute value. | ||
187 | |||
188 | The actual behavior of these two modes depends on the syntax in use. For | ||
189 | languages with first-class expression syntax, these two modes may be considered | ||
190 | equivalent, with _literal-only mode_ simply not defining any variables or | ||
191 | functions. For languages that embed arbitrary expressions via string templates, | ||
192 | _literal-only mode_ may disable such processing, allowing literal strings to | ||
193 | pass through without interpretation as templates. | ||
194 | |||
195 | Since literal-only mode does not support variables and functions, it is an | ||
196 | error for the calling application to enable this mode and yet provide a | ||
197 | variable scope and/or function table. | ||
198 | |||
199 | ## Values and Value Types | ||
200 | |||
201 | The result of expression evaluation is a _value_. Each value has a _type_, | ||
202 | which is dynamically determined during evaluation. The _variable scope_ in | ||
203 | the evaluation context is a map from variable name to value, using the same | ||
204 | definition of value. | ||
205 | |||
The type system for HCL values is intended to be of a level of abstraction
207 | suitable for configuration of various applications. A well-defined, | ||
208 | implementation-language-agnostic type system is defined to allow for | ||
209 | consistent processing of configuration across many implementation languages. | ||
210 | Concrete implementations may provide additional functionality to lower | ||
211 | HCL values and types to corresponding native language types, which may then | ||
212 | impose additional constraints on the values outside of the scope of this | ||
213 | specification. | ||
214 | |||
215 | Two values are _equal_ if and only if they have identical types and their | ||
216 | values are equal according to the rules of their shared type. | ||
217 | |||
218 | ### Primitive Types | ||
219 | |||
220 | The primitive types are _string_, _bool_, and _number_. | ||
221 | |||
222 | A _string_ is a sequence of unicode characters. Two strings are equal if | ||
NFC normalization ([UAX#15](http://unicode.org/reports/tr15/))
224 | of each string produces two identical sequences of characters. | ||
225 | NFC normalization ensures that, for example, a precomposed combination of a | ||
226 | latin letter and a diacritic compares equal with the letter followed by | ||
227 | a combining diacritic. | ||
228 | |||
229 | The _bool_ type has only two non-null values: _true_ and _false_. Two bool | ||
230 | values are equal if and only if they are either both true or both false. | ||
231 | |||
232 | A _number_ is an arbitrary-precision floating point value. An implementation | ||
233 | _must_ make the full-precision values available to the calling application | ||
234 | for interpretation into any suitable number representation. An implementation | ||
235 | may in practice implement numbers with limited precision so long as the | ||
236 | following constraints are met: | ||
237 | |||
238 | * Integers are represented with at least 256 bits. | ||
239 | * Non-integer numbers are represented as floating point values with a | ||
240 | mantissa of at least 256 bits and a signed binary exponent of at least | ||
241 | 16 bits. | ||
242 | * An error is produced if an integer value given in source cannot be | ||
243 | represented precisely. | ||
244 | * An error is produced if a non-integer value cannot be represented due to | ||
245 | overflow. | ||
246 | * A non-integer number is rounded to the nearest possible value when a | ||
247 | value is of too high a precision to be represented. | ||
248 | |||
249 | The _number_ type also requires representation of both positive and negative | ||
250 | infinity. A "not a number" (NaN) value is _not_ provided nor used. | ||
251 | |||
252 | Two number values are equal if they are numerically equal to the precision | ||
253 | associated with the number. Positive infinity and negative infinity are | ||
254 | equal to themselves but not to each other. Positive infinity is greater than | ||
255 | any other number value, and negative infinity is less than any other number | ||
256 | value. | ||
257 | |||
258 | Some syntaxes may be unable to represent numeric literals of arbitrary | ||
259 | precision. This must be defined in the syntax specification as part of its | ||
260 | description of mapping numeric literals to HCL values. | ||
261 | |||
262 | ### Structural Types | ||
263 | |||
264 | _Structural types_ are types that are constructed by combining other types. | ||
265 | Each distinct combination of other types is itself a distinct type. There | ||
266 | are two structural type _kinds_: | ||
267 | |||
268 | * _Object types_ are constructed of a set of named attributes, each of which | ||
269 | has a type. Attribute names are always strings. (_Object_ attributes are a | ||
270 | distinct idea from _body_ attributes, though calling applications | ||
271 | may choose to blur the distinction by use of common naming schemes.) | ||
* _Tuple types_ are constructed of a sequence of elements, each of which
273 | has a type. | ||
274 | |||
275 | Values of structural types are compared for equality in terms of their | ||
276 | attributes or elements. A structural type value is equal to another if and | ||
277 | only if all of the corresponding attributes or elements are equal. | ||
278 | |||
279 | Two structural types are identical if they are of the same kind and | ||
280 | have attributes or elements with identical types. | ||
281 | |||
282 | ### Collection Types | ||
283 | |||
284 | _Collection types_ are types that combine together an arbitrary number of | ||
285 | values of some other single type. There are three collection type _kinds_: | ||
286 | |||
287 | * _List types_ represent ordered sequences of values of their element type. | ||
288 | * _Map types_ represent values of their element type accessed via string keys. | ||
289 | * _Set types_ represent unordered sets of distinct values of their element type. | ||
290 | |||
291 | For each of these kinds and each distinct element type there is a distinct | ||
292 | collection type. For example, "list of string" is a distinct type from | ||
293 | "set of string", and "list of number" is a distinct type from "list of string". | ||
294 | |||
295 | Values of collection types are compared for equality in terms of their | ||
296 | elements. A collection type value is equal to another if and only if both | ||
297 | have the same number of elements and their corresponding elements are equal. | ||
298 | |||
299 | Two collection types are identical if they are of the same kind and have | ||
300 | the same element type. | ||
301 | |||
302 | ### Null values | ||
303 | |||
Each type has a null value. The null value of a type represents the absence
305 | of a value, but with type information retained to allow for type checking. | ||
306 | |||
Null values are used primarily to represent the conditional absence of a
308 | body attribute. In a syntax with a conditional operator, one of the result | ||
309 | values of that conditional may be null to indicate that the attribute should be | ||
310 | considered not present in that case. | ||
311 | |||
312 | Calling applications _should_ consider an attribute with a null value as | ||
313 | equivalent to the value not being present at all. | ||
314 | |||
315 | A null value of a particular type is equal to itself. | ||
316 | |||
317 | ### Unknown Values and the Dynamic Pseudo-type | ||
318 | |||
319 | An _unknown value_ is a placeholder for a value that is not yet known. | ||
320 | Operations on unknown values themselves return unknown values that have a | ||
321 | type appropriate to the operation. For example, adding together two unknown | ||
322 | numbers yields an unknown number, while comparing two unknown values of any | ||
323 | type for equality yields an unknown bool. | ||
324 | |||
325 | Each type has a distinct unknown value. For example, an unknown _number_ is | ||
326 | a distinct value from an unknown _string_. | ||
327 | |||
328 | _The dynamic pseudo-type_ is a placeholder for a type that is not yet known. | ||
329 | The only values of this type are its null value and its unknown value. It is | ||
330 | referred to as a _pseudo-type_ because it should not be considered a type in | ||
331 | its own right, but rather as a placeholder for a type yet to be established. | ||
332 | The unknown value of the dynamic pseudo-type is referred to as _the dynamic | ||
333 | value_. | ||
334 | |||
335 | Operations on values of the dynamic pseudo-type behave as if it is a value | ||
336 | of the expected type, optimistically assuming that once the value and type | ||
337 | are known they will be valid for the operation. For example, adding together | ||
338 | a number and the dynamic value produces an unknown number. | ||
339 | |||
340 | Unknown values and the dynamic pseudo-type can be used as a mechanism for | ||
341 | partial type checking and semantic checking: by evaluating an expression with | ||
342 | all variables set to an unknown value, the expression can be evaluated to | ||
343 | produce an unknown value of a given type, or produce an error if any operation | ||
344 | is provably invalid with only type information. | ||
345 | |||
346 | Unknown values and the dynamic pseudo-type must never be returned from | ||
347 | operations unless at least one operand is unknown or dynamic. Calling | ||
348 | applications are guaranteed that unless the global scope includes unknown | ||
349 | values, or the function table includes functions that return unknown values, | ||
350 | no expression will evaluate to an unknown value. The calling application is | ||
351 | thus in total control over the use and meaning of unknown values. | ||
352 | |||
353 | The dynamic pseudo-type is identical only to itself. | ||
354 | |||
355 | ### Capsule Types | ||
356 | |||
357 | A _capsule type_ is a custom type defined by the calling application. A value | ||
358 | of a capsule type is considered opaque to HCL, but may be accepted | ||
359 | by functions provided by the calling application. | ||
360 | |||
361 | A particular capsule type is identical only to itself. The equality of two | ||
362 | values of the same capsule type is defined by the calling application. No | ||
363 | other operations are supported for values of capsule types. | ||
364 | |||
365 | Support for capsule types in a HCL implementation is optional. Capsule types | ||
366 | are intended to allow calling applications to pass through values that are | ||
367 | not part of the standard type system. For example, an application that | ||
368 | deals with raw binary data may define a capsule type representing a byte | ||
369 | array, and provide functions that produce or operate on byte arrays. | ||
370 | |||
371 | ### Type Specifications | ||
372 | |||
373 | In certain situations it is necessary to define expectations about the expected | ||
374 | type of a value. Whereas two _types_ have a commutative _identity_ relationship, | ||
375 | a type has a non-commutative _matches_ relationship with a _type specification_. | ||
376 | A type specification is, in practice, just a different interpretation of a | ||
377 | type such that: | ||
378 | |||
379 | * Any type _matches_ any type that it is identical to. | ||
380 | |||
381 | * Any type _matches_ the dynamic pseudo-type. | ||
382 | |||
383 | For example, given a type specification "list of dynamic pseudo-type", the | ||
384 | concrete types "list of string" and "list of map" match, but the | ||
385 | type "set of string" does not. | ||
386 | |||
387 | ## Functions and Function Calls | ||
388 | |||
389 | The evaluation context used to evaluate an expression includes a function | ||
390 | table, which represents an application-defined set of named functions | ||
391 | available for use in expressions. | ||
392 | |||
393 | Each syntax defines whether function calls are supported and how they are | ||
394 | physically represented in source code, but the semantics of function calls are | ||
395 | defined here to ensure consistent results across syntaxes and to allow | ||
396 | applications to provide functions that are interoperable with all syntaxes. | ||
397 | |||
398 | A _function_ is defined from the following elements: | ||
399 | |||
400 | * Zero or more _positional parameters_, each with a name used for documentation, | ||
401 | a type specification for expected argument values, and a flag for whether | ||
402 | each of null values, unknown values, and values of the dynamic pseudo-type | ||
403 | are accepted. | ||
404 | |||
405 | * Zero or one _variadic parameters_, with the same structure as the _positional_ | ||
406 | parameters, which if present collects any additional arguments provided at | ||
407 | the function call site. | ||
408 | |||
409 | * A _result type definition_, which specifies the value type returned for each | ||
410 | valid sequence of argument values. | ||
411 | |||
412 | * A _result value definition_, which specifies the value returned for each | ||
413 | valid sequence of argument values. | ||
414 | |||
415 | A _function call_, regardless of source syntax, consists of a sequence of | ||
416 | argument values. The argument values are each mapped to a corresponding | ||
417 | parameter as follows: | ||
418 | |||
419 | * For each of the function's positional parameters in sequence, take the next | ||
420 | argument. If there are no more arguments, the call is erroneous. | ||
421 | |||
422 | * If the function has a variadic parameter, take all remaining arguments that | ||
were not yet assigned to a positional parameter and collect them into
424 | a sequence of variadic arguments that each correspond to the variadic | ||
425 | parameter. | ||
426 | |||
427 | * If the function has _no_ variadic parameter, it is an error if any arguments | ||
428 | remain after taking one argument for each positional parameter. | ||
429 | |||
430 | After mapping each argument to a parameter, semantic checking proceeds | ||
431 | for each argument: | ||
432 | |||
433 | * If the argument value corresponding to a parameter does not match the | ||
434 | parameter's type specification, the call is erroneous. | ||
435 | |||
436 | * If the argument value corresponding to a parameter is null and the parameter | ||
437 | is not specified as accepting nulls, the call is erroneous. | ||
438 | |||
439 | * If the argument value corresponding to a parameter is the dynamic value | ||
440 | and the parameter is not specified as accepting values of the dynamic | ||
441 | pseudo-type, the call is valid but its _result type_ is forced to be the | ||
442 | dynamic pseudo type. | ||
443 | |||
444 | * If neither of the above conditions holds for any argument, the call is | ||
445 | valid and the function's value type definition is used to determine the | ||
446 | call's _result type_. A function _may_ vary its result type depending on | ||
447 | the argument _values_ as well as the argument _types_; for example, a | ||
448 | function that decodes a JSON value will return a different result type | ||
449 | depending on the data structure described by the given JSON source code. | ||
450 | |||
451 | If semantic checking succeeds without error, the call is _executed_: | ||
452 | |||
453 | * For each argument, if its value is unknown and its corresponding parameter | ||
454 | is not specified as accepting unknowns, the _result value_ is forced to be an | ||
455 | unknown value of the result type. | ||
456 | |||
457 | * If the previous condition does not apply, the function's result value | ||
458 | definition is used to determine the call's _result value_. | ||
459 | |||
460 | The result of a function call expression is either an error, if one of the | ||
erroneous conditions above applies, or the _result value_.
462 | |||
463 | ## Type Conversions and Unification | ||
464 | |||
465 | Values given in configuration may not always match the expectations of the | ||
466 | operations applied to them or to the calling application. In such situations, | ||
467 | automatic type conversion is attempted as a convenience to the user. | ||
468 | |||
469 | Along with conversions to a _specified_ type, it is sometimes necessary to | ||
470 | ensure that a selection of values are all of the _same_ type, without any | ||
471 | constraint on which type that is. This is the process of _type unification_, | ||
472 | which attempts to find the most general type that all of the given types can | ||
473 | be converted to. | ||
474 | |||
475 | Both type conversions and unification are defined in the syntax-agnostic | ||
476 | model to ensure consistency of behavior between syntaxes. | ||
477 | |||
478 | Type conversions are broadly characterized into two categories: _safe_ and | ||
479 | _unsafe_. A conversion is "safe" if any distinct value of the source type | ||
480 | has a corresponding distinct value in the target type. A conversion is | ||
481 | "unsafe" if either the target type values are _not_ distinct (information | ||
482 | may be lost in conversion) or if some values of the source type do not have | ||
483 | any corresponding value in the target type. An unsafe conversion may result | ||
484 | in an error. | ||
485 | |||
486 | A given type can always be converted to itself, which is a no-op. | ||
487 | |||
488 | ### Conversion of Null Values | ||
489 | |||
490 | All null values are safely convertable to a null value of any other type, | ||
491 | regardless of other type-specific rules specified in the sections below. | ||
492 | |||
493 | ### Conversion to and from the Dynamic Pseudo-type | ||
494 | |||
495 | Conversion _from_ the dynamic pseudo-type _to_ any other type always succeeds, | ||
496 | producing an unknown value of the target type. | ||
497 | |||
498 | Conversion of any value _to_ the dynamic pseudo-type is a no-op. The result | ||
499 | is the input value, verbatim. This is the only situation where the conversion | ||
result value is not of the given target type.
501 | |||
502 | ### Primitive Type Conversions | ||
503 | |||
504 | Bidirectional conversions are available between the string and number types, | ||
505 | and between the string and boolean types. | ||
506 | |||
507 | The bool value true corresponds to the string containing the characters "true", | ||
while the bool value false corresponds to the string containing the characters
509 | "false". Conversion from bool to string is safe, while the converse is | ||
510 | unsafe. The strings "1" and "0" are alternative string representations | ||
511 | of true and false respectively. It is an error to convert a string other than | ||
512 | the four in this paragraph to type bool. | ||
513 | |||
514 | A number value is converted to string by translating its integer portion | ||
515 | into a sequence of decimal digits (`0` through `9`), and then if it has a | ||
516 | non-zero fractional part, a period `.` followed by a sequence of decimal | ||
517 | digits representing its fractional part. No exponent portion is included. | ||
518 | The number is converted at its full precision. Conversion from number to | ||
519 | string is safe. | ||
520 | |||
521 | A string is converted to a number value by reversing the above mapping. | ||
522 | No exponent portion is allowed. Conversion from string to number is unsafe. | ||
523 | It is an error to convert a string that does not comply with the expected | ||
524 | syntax to type number. | ||
525 | |||
526 | No direct conversion is available between the bool and number types. | ||
527 | |||
528 | ### Collection and Structural Type Conversions | ||
529 | |||
530 | Conversion from set types to list types is _safe_, as long as their | ||
531 | element types are safely convertable. If the element types are _unsafely_ | ||
532 | convertable, then the collection conversion is also unsafe. Each set element | ||
533 | becomes a corresponding list element, in an undefined order. Although no | ||
534 | particular ordering is required, implementations _should_ produce list | ||
535 | elements in a consistent order for a given input set, as a convenience | ||
536 | to calling applications. | ||
537 | |||
538 | Conversion from list types to set types is _unsafe_, as long as their element | ||
539 | types are convertable. Each distinct list item becomes a distinct set item. | ||
540 | If two list items are equal, one of the two is lost in the conversion. | ||
541 | |||
Conversion from tuple types to list types is permitted if all of the
543 | tuple element types are convertable to the target list element type. | ||
544 | The safety of the conversion depends on the safety of each of the element | ||
545 | conversions. Each element in turn is converted to the list element type, | ||
546 | producing a list of identical length. | ||
547 | |||
548 | Conversion from tuple types to set types is permitted, behaving as if the | ||
549 | tuple type was first converted to a list of the same element type and then | ||
550 | that list converted to the target set type. | ||
551 | |||
552 | Conversion from object types to map types is permitted if all of the object | ||
553 | attribute types are convertable to the target map element type. The safety | ||
554 | of the conversion depends on the safety of each of the attribute conversions. | ||
555 | Each attribute in turn is converted to the map element type, and map element | ||
556 | keys are set to the name of each corresponding object attribute. | ||
557 | |||
558 | Conversion from list and set types to tuple types is permitted, following | ||
559 | the opposite steps as the converse conversions. Such conversions are _unsafe_. | ||
560 | It is an error to convert a list or set to a tuple type whose number of | ||
561 | elements does not match the list or set length. | ||
562 | |||
563 | Conversion from map types to object types is permitted if each map key | ||
564 | corresponds to an attribute in the target object type. It is an error to | ||
565 | convert from a map value whose set of keys does not exactly match the target | ||
566 | type's attributes. The conversion takes the opposite steps of the converse | ||
567 | conversion. | ||
568 | |||
569 | Conversion from one object type to another is permitted as long as the | ||
570 | common attribute names have convertable types. Any attribute present in the | ||
571 | target type but not in the source type is populated with a null value of | ||
572 | the appropriate type. | ||
573 | |||
574 | Conversion from one tuple type to another is permitted as long as the | ||
575 | tuples have the same length and the elements have convertable types. | ||
576 | |||
577 | ### Type Unification | ||
578 | |||
579 | Type unification is an operation that takes a list of types and attempts | ||
580 | to find a single type to which they can all be converted. Since some | ||
581 | type pairs have bidirectional conversions, preference is given to _safe_ | ||
582 | conversions. In technical terms, all possible types are arranged into | ||
583 | a lattice, from which a most general supertype is selected where possible. | ||
584 | |||
585 | The type resulting from type unification may be one of the input types, or | ||
586 | it may be an entirely new type produced by combination of two or more | ||
587 | input types. | ||
588 | |||
589 | The following rules do not guarantee a valid result. In addition to these | ||
590 | rules, unification fails if any of the given types are not convertable | ||
591 | (per the above rules) to the selected result type. | ||
592 | |||
593 | The following unification rules apply transitively. That is, if a rule is | ||
594 | defined from A to B, and one from B to C, then A can unify to C. | ||
595 | |||
596 | Number and bool types both unify with string by preferring string. | ||
597 | |||
598 | Two collection types of the same kind unify according to the unification | ||
599 | of their element types. | ||
600 | |||
601 | List and set types unify by preferring the list type. | ||
602 | |||
603 | Map and object types unify by preferring the object type. | ||
604 | |||
605 | List, set and tuple types unify by preferring the tuple type. | ||
606 | |||
607 | The dynamic pseudo-type unifies with any other type by selecting that other | ||
608 | type. The dynamic pseudo-type is the result type only if _all_ input types | ||
609 | are the dynamic pseudo-type. | ||
610 | |||
611 | Two object types unify by constructing a new type whose attributes are | ||
612 | the union of those of the two input types. Any common attributes themselves | ||
613 | have their types unified. | ||
614 | |||
615 | Two tuple types of the same length unify by constructing a new type of the | ||
616 | same length whose elements are the unification of the corresponding elements | ||
617 | in the two input types. | ||
618 | |||
619 | ## Static Analysis | ||
620 | |||
621 | In most applications, full expression evaluation is sufficient for understanding | ||
622 | the provided configuration. However, some specialized applications require more | ||
623 | direct access to the physical structures in the expressions, which can for | ||
624 | example allow the construction of new language constructs in terms of the | ||
625 | existing syntax elements. | ||
626 | |||
627 | Since static analysis analyses the physical structure of configuration, the | ||
628 | details will vary depending on syntax. Each syntax must decide which of its | ||
629 | physical structures corresponds to the following analyses, producing error | ||
630 | diagnostics if they are applied to inappropriate expressions. | ||
631 | |||
632 | The following are the required static analysis functions: | ||
633 | |||
634 | * **Static List**: Require list/tuple construction syntax to be used and | ||
635 | return a list of expressions for each of the elements given. | ||
636 | |||
637 | * **Static Map**: Require map/object construction syntax to be used and | ||
638 | return a list of key/value pairs -- both expressions -- for each of | ||
639 | the elements given. The usual constraint that a map key must be a string | ||
640 | must not apply to this analysis, thus allowing applications to interpret | ||
641 | arbitrary keys as they see fit. | ||
642 | |||
643 | * **Static Call**: Require function call syntax to be used and return an | ||
644 | object describing the called function name and a list of expressions | ||
645 | representing each of the call arguments. | ||
646 | |||
647 | * **Static Traversal**: Require a reference to a symbol in the variable | ||
648 | scope and return a description of the path from the root scope to the | ||
649 | accessed attribute or index. | ||
650 | |||
651 | The intent of a calling application using these features is to require a more | ||
652 | rigid interpretation of the configuration than in expression evaluation. | ||
653 | Syntax implementations should make use of the extra contextual information | ||
654 | provided in order to make an intuitive mapping onto the constructs of the | ||
655 | underlying syntax, possibly interpreting the expression slightly differently | ||
656 | than it would be interpreted in normal evaluation. | ||
657 | |||
658 | Each syntax must define which of its expression elements each of the analyses | ||
659 | above applies to, and how those analyses behave given those expression elements. | ||
660 | |||
661 | ## Implementation Considerations | ||
662 | |||
663 | Implementations of this specification are free to adopt any strategy that | ||
664 | produces behavior consistent with the specification. This non-normative | ||
665 | section describes some possible implementation strategies that are consistent | ||
666 | with the goals of this specification. | ||
667 | |||
668 | ### Language-agnosticism | ||
669 | |||
670 | The language-agnosticism of this specification assumes that certain behaviors | ||
671 | are implemented separately for each syntax: | ||
672 | |||
673 | * Matching of a body schema with the physical elements of a body in the | ||
674 | source language, to determine correspondence between physical constructs | ||
675 | and schema elements. | ||
676 | |||
677 | * Implementing the _dynamic attributes_ body processing mode by either | ||
678 | interpreting all physical constructs as attributes or producing an error | ||
679 | if non-attribute constructs are present. | ||
680 | |||
681 | * Providing an evaluation function for all possible expressions that produces | ||
682 | a value given an evaluation context. | ||
683 | |||
684 | * Providing the static analysis functionality described above in a manner that | ||
685 | makes sense within the convention of the syntax. | ||
686 | |||
687 | The suggested implementation strategy is to use an implementation language's | ||
688 | closest concept to an _abstract type_, _virtual type_ or _interface type_ | ||
689 | to represent both Body and Expression. Each language-specific implementation | ||
690 | can then provide an implementation of each of these types wrapping AST nodes | ||
691 | or other physical constructs from the language parser. | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/static_expr.go b/vendor/github.com/hashicorp/hcl2/hcl/static_expr.go new file mode 100644 index 0000000..98ada87 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/static_expr.go | |||
@@ -0,0 +1,40 @@ | |||
1 | package hcl | ||
2 | |||
3 | import ( | ||
4 | "github.com/zclconf/go-cty/cty" | ||
5 | ) | ||
6 | |||
// staticExpr is an Expression implementation that ignores its evaluation
// context and always returns a fixed value.
type staticExpr struct {
	val cty.Value // the value returned from Value, regardless of context
	rng Range     // the source range reported by both Range and StartRange
}
11 | |||
12 | // StaticExpr returns an Expression that always evaluates to the given value. | ||
13 | // | ||
14 | // This is useful to substitute default values for expressions that are | ||
15 | // not explicitly given in configuration and thus would otherwise have no | ||
16 | // Expression to return. | ||
17 | // | ||
18 | // Since expressions are expected to have a source range, the caller must | ||
19 | // provide one. Ideally this should be a real source range, but it can | ||
20 | // be a synthetic one (with an empty-string filename) if no suitable range | ||
21 | // is available. | ||
22 | func StaticExpr(val cty.Value, rng Range) Expression { | ||
23 | return staticExpr{val, rng} | ||
24 | } | ||
25 | |||
// Value implements Expression by returning the stored value unconditionally.
// The evaluation context is ignored and no diagnostics are ever produced.
func (e staticExpr) Value(ctx *EvalContext) (cty.Value, Diagnostics) {
	return e.val, nil
}
29 | |||
// Variables implements Expression. A static expression references no
// variables, so the result is always nil.
func (e staticExpr) Variables() []Traversal {
	return nil
}
33 | |||
// Range implements Expression by returning the range given to StaticExpr.
func (e staticExpr) Range() Range {
	return e.rng
}
37 | |||
// StartRange implements Expression; for a static expression it is the same
// as Range.
func (e staticExpr) StartRange() Range {
	return e.rng
}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/structure.go b/vendor/github.com/hashicorp/hcl2/hcl/structure.go new file mode 100644 index 0000000..b336f30 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/structure.go | |||
@@ -0,0 +1,151 @@ | |||
1 | package hcl | ||
2 | |||
3 | import ( | ||
4 | "github.com/zclconf/go-cty/cty" | ||
5 | ) | ||
6 | |||
// File is the top-level node that results from parsing a HCL file.
type File struct {
	Body  Body   // the file's content, to be interpreted against a schema
	Bytes []byte // the raw source bytes the file was parsed from

	// Nav is used to integrate with the "hcled" editor integration package,
	// and with diagnostic information formatters. It is not for direct use
	// by a calling application.
	Nav interface{}
}
17 | |||
// Block represents a nested block within a Body.
type Block struct {
	Type   string   // the block type name
	Labels []string // zero or more labels following the type name
	Body   Body     // the block's own nested content

	DefRange    Range   // Range that can be considered the "definition" for seeking in an editor
	TypeRange   Range   // Range for the block type declaration specifically.
	LabelRanges []Range // Ranges for the label values specifically.
}
28 | |||
// Blocks is a sequence of Block, as found for example in BodyContent.Blocks.
type Blocks []*Block
31 | |||
// Attributes is a set of attributes keyed by their names, as found for
// example in BodyContent.Attributes.
type Attributes map[string]*Attribute
34 | |||
// Body is a container for attributes and blocks. It serves as the primary
// unit of hierarchical structure within configuration.
//
// The content of a body cannot be meaningfully interpreted without a schema,
// so Body represents the raw body content and has methods that allow the
// content to be extracted in terms of a given schema.
type Body interface {
	// Content verifies that the entire body content conforms to the given
	// schema and then returns it, and/or returns diagnostics. The returned
	// body content is valid if non-nil, regardless of whether Diagnostics
	// are provided, but diagnostics should still be eventually shown to
	// the user.
	Content(schema *BodySchema) (*BodyContent, Diagnostics)

	// PartialContent is like Content except that it permits the configuration
	// to contain additional blocks or attributes not specified in the
	// schema. If any are present, the returned Body is non-nil and contains
	// the remaining items from the body that were not selected by the schema.
	PartialContent(schema *BodySchema) (*BodyContent, Body, Diagnostics)

	// JustAttributes attempts to interpret all of the contents of the body
	// as attributes, allowing for the contents to be accessed without a priori
	// knowledge of the structure.
	//
	// The behavior of this method depends on the body's source language.
	// Some languages, like JSON, can't distinguish between attributes and
	// blocks without schema hints, but for languages that _can_ error
	// diagnostics will be generated if any blocks are present in the body.
	//
	// Diagnostics may be produced for other reasons too, such as duplicate
	// declarations of the same attribute.
	JustAttributes() (Attributes, Diagnostics)

	// MissingItemRange returns a range that represents where a missing item
	// might hypothetically be inserted. This is used when producing
	// diagnostics about missing required attributes or blocks. Not all bodies
	// will have an obvious single insertion point, so the result here may
	// be rather arbitrary.
	MissingItemRange() Range
}
75 | |||
// BodyContent is the result of applying a BodySchema to a Body.
type BodyContent struct {
	Attributes Attributes // the attributes selected by the schema
	Blocks     Blocks     // the blocks selected by the schema

	// MissingItemRange is a range where a missing required item could
	// hypothetically be inserted, for use in diagnostics.
	MissingItemRange Range
}
83 | |||
// Attribute represents an attribute from within a body.
type Attribute struct {
	Name string     // the attribute's name
	Expr Expression // the expression assigned to the attribute

	Range     Range // source range of the whole attribute definition
	NameRange Range // source range of the name specifically
}
92 | |||
// Expression is a literal value or an expression provided in the
// configuration, which can be evaluated within a scope to produce a value.
type Expression interface {
	// Value returns the value resulting from evaluating the expression
	// in the given evaluation context.
	//
	// The context may be nil, in which case the expression may contain
	// only constants and diagnostics will be produced for any non-constant
	// sub-expressions. (The exact definition of this depends on the source
	// language.)
	//
	// The context may instead be set but have either its Variables or
	// Functions maps set to nil, in which case only use of these features
	// will return diagnostics.
	//
	// Different diagnostics are provided depending on whether the given
	// context maps are nil or empty. In the former case, the message
	// tells the user that variables/functions are not permitted at all,
	// while in the latter case usage will produce a "not found" error for
	// the specific symbol in question.
	Value(ctx *EvalContext) (cty.Value, Diagnostics)

	// Variables returns a list of variables referenced in the receiving
	// expression. These are expressed as absolute Traversals, so may include
	// additional information about how the variable is used, such as
	// attribute lookups, which the calling application can potentially use
	// to only selectively populate the scope.
	Variables() []Traversal

	// Range returns the source range of the entire expression.
	Range() Range

	// StartRange returns a source range for the beginning of the expression,
	// for use in diagnostics that should not span the whole expression.
	// Implementations may return the same range as Range.
	StartRange() Range
}
125 | |||
126 | // OfType filters the receiving block sequence by block type name, | ||
127 | // returning a new block sequence including only the blocks of the | ||
128 | // requested type. | ||
129 | func (els Blocks) OfType(typeName string) Blocks { | ||
130 | ret := make(Blocks, 0) | ||
131 | for _, el := range els { | ||
132 | if el.Type == typeName { | ||
133 | ret = append(ret, el) | ||
134 | } | ||
135 | } | ||
136 | return ret | ||
137 | } | ||
138 | |||
139 | // ByType transforms the receiving block sequence into a map from type | ||
140 | // name to block sequences of only that type. | ||
141 | func (els Blocks) ByType() map[string]Blocks { | ||
142 | ret := make(map[string]Blocks) | ||
143 | for _, el := range els { | ||
144 | ty := el.Type | ||
145 | if ret[ty] == nil { | ||
146 | ret[ty] = make(Blocks, 0, 1) | ||
147 | } | ||
148 | ret[ty] = append(ret[ty], el) | ||
149 | } | ||
150 | return ret | ||
151 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/traversal.go b/vendor/github.com/hashicorp/hcl2/hcl/traversal.go new file mode 100644 index 0000000..24f4c91 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/traversal.go | |||
@@ -0,0 +1,352 @@ | |||
1 | package hcl | ||
2 | |||
3 | import ( | ||
4 | "fmt" | ||
5 | |||
6 | "github.com/zclconf/go-cty/cty" | ||
7 | ) | ||
8 | |||
// A Traversal is a description of traversing through a value through a
// series of operations such as attribute lookup, index lookup, etc.
//
// It is used to look up values in scopes, for example.
//
// The traversal operations are implementations of interface Traverser.
// This is a closed set of implementations, so the interface cannot be
// implemented from outside this package.
//
// A traversal can be absolute (its first value is a symbol name) or relative
// (starts from an existing value). An empty traversal is considered
// relative, per IsRelative.
type Traversal []Traverser
21 | |||
22 | // TraversalJoin appends a relative traversal to an absolute traversal to | ||
23 | // produce a new absolute traversal. | ||
24 | func TraversalJoin(abs Traversal, rel Traversal) Traversal { | ||
25 | if abs.IsRelative() { | ||
26 | panic("first argument to TraversalJoin must be absolute") | ||
27 | } | ||
28 | if !rel.IsRelative() { | ||
29 | panic("second argument to TraversalJoin must be relative") | ||
30 | } | ||
31 | |||
32 | ret := make(Traversal, len(abs)+len(rel)) | ||
33 | copy(ret, abs) | ||
34 | copy(ret[len(abs):], rel) | ||
35 | return ret | ||
36 | } | ||
37 | |||
38 | // TraverseRel applies the receiving traversal to the given value, returning | ||
39 | // the resulting value. This is supported only for relative traversals, | ||
40 | // and will panic if applied to an absolute traversal. | ||
41 | func (t Traversal) TraverseRel(val cty.Value) (cty.Value, Diagnostics) { | ||
42 | if !t.IsRelative() { | ||
43 | panic("can't use TraverseRel on an absolute traversal") | ||
44 | } | ||
45 | |||
46 | current := val | ||
47 | var diags Diagnostics | ||
48 | for _, tr := range t { | ||
49 | var newDiags Diagnostics | ||
50 | current, newDiags = tr.TraversalStep(current) | ||
51 | diags = append(diags, newDiags...) | ||
52 | if newDiags.HasErrors() { | ||
53 | return cty.DynamicVal, diags | ||
54 | } | ||
55 | } | ||
56 | return current, diags | ||
57 | } | ||
58 | |||
59 | // TraverseAbs applies the receiving traversal to the given eval context, | ||
60 | // returning the resulting value. This is supported only for absolute | ||
61 | // traversals, and will panic if applied to a relative traversal. | ||
62 | func (t Traversal) TraverseAbs(ctx *EvalContext) (cty.Value, Diagnostics) { | ||
63 | if t.IsRelative() { | ||
64 | panic("can't use TraverseAbs on a relative traversal") | ||
65 | } | ||
66 | |||
67 | split := t.SimpleSplit() | ||
68 | root := split.Abs[0].(TraverseRoot) | ||
69 | name := root.Name | ||
70 | |||
71 | thisCtx := ctx | ||
72 | hasNonNil := false | ||
73 | for thisCtx != nil { | ||
74 | if thisCtx.Variables == nil { | ||
75 | thisCtx = thisCtx.parent | ||
76 | continue | ||
77 | } | ||
78 | hasNonNil = true | ||
79 | val, exists := thisCtx.Variables[name] | ||
80 | if exists { | ||
81 | return split.Rel.TraverseRel(val) | ||
82 | } | ||
83 | thisCtx = thisCtx.parent | ||
84 | } | ||
85 | |||
86 | if !hasNonNil { | ||
87 | return cty.DynamicVal, Diagnostics{ | ||
88 | { | ||
89 | Severity: DiagError, | ||
90 | Summary: "Variables not allowed", | ||
91 | Detail: "Variables may not be used here.", | ||
92 | Subject: &root.SrcRange, | ||
93 | }, | ||
94 | } | ||
95 | } | ||
96 | |||
97 | suggestions := make([]string, 0, len(ctx.Variables)) | ||
98 | thisCtx = ctx | ||
99 | for thisCtx != nil { | ||
100 | for k := range thisCtx.Variables { | ||
101 | suggestions = append(suggestions, k) | ||
102 | } | ||
103 | thisCtx = thisCtx.parent | ||
104 | } | ||
105 | suggestion := nameSuggestion(name, suggestions) | ||
106 | if suggestion != "" { | ||
107 | suggestion = fmt.Sprintf(" Did you mean %q?", suggestion) | ||
108 | } | ||
109 | |||
110 | return cty.DynamicVal, Diagnostics{ | ||
111 | { | ||
112 | Severity: DiagError, | ||
113 | Summary: "Unknown variable", | ||
114 | Detail: fmt.Sprintf("There is no variable named %q.%s", name, suggestion), | ||
115 | Subject: &root.SrcRange, | ||
116 | }, | ||
117 | } | ||
118 | } | ||
119 | |||
120 | // IsRelative returns true if the receiver is a relative traversal, or false | ||
121 | // otherwise. | ||
122 | func (t Traversal) IsRelative() bool { | ||
123 | if len(t) == 0 { | ||
124 | return true | ||
125 | } | ||
126 | if _, firstIsRoot := t[0].(TraverseRoot); firstIsRoot { | ||
127 | return false | ||
128 | } | ||
129 | return true | ||
130 | } | ||
131 | |||
132 | // SimpleSplit returns a TraversalSplit where the name lookup is the absolute | ||
133 | // part and the remainder is the relative part. Supported only for | ||
134 | // absolute traversals, and will panic if applied to a relative traversal. | ||
135 | // | ||
136 | // This can be used by applications that have a relatively-simple variable | ||
137 | // namespace where only the top-level is directly populated in the scope, with | ||
138 | // everything else handled by relative lookups from those initial values. | ||
139 | func (t Traversal) SimpleSplit() TraversalSplit { | ||
140 | if t.IsRelative() { | ||
141 | panic("can't use SimpleSplit on a relative traversal") | ||
142 | } | ||
143 | return TraversalSplit{ | ||
144 | Abs: t[0:1], | ||
145 | Rel: t[1:], | ||
146 | } | ||
147 | } | ||
148 | |||
149 | // RootName returns the root name for a absolute traversal. Will panic if | ||
150 | // called on a relative traversal. | ||
151 | func (t Traversal) RootName() string { | ||
152 | if t.IsRelative() { | ||
153 | panic("can't use RootName on a relative traversal") | ||
154 | |||
155 | } | ||
156 | return t[0].(TraverseRoot).Name | ||
157 | } | ||
158 | |||
159 | // SourceRange returns the source range for the traversal. | ||
160 | func (t Traversal) SourceRange() Range { | ||
161 | if len(t) == 0 { | ||
162 | // Nothing useful to return here, but we'll return something | ||
163 | // that's correctly-typed at least. | ||
164 | return Range{} | ||
165 | } | ||
166 | |||
167 | return RangeBetween(t[0].SourceRange(), t[len(t)-1].SourceRange()) | ||
168 | } | ||
169 | |||
// TraversalSplit represents a pair of traversals, the first of which is
// an absolute traversal and the second of which is relative to the first.
//
// This is used by calling applications that only populate prefixes of the
// traversals in the scope, with Abs representing the part coming from the
// scope and Rel representing the remaining steps once that part is
// retrieved.
type TraversalSplit struct {
	Abs Traversal // absolute prefix, resolved against a scope
	Rel Traversal // relative remainder, applied to the prefix's result
}
181 | |||
// TraverseAbs traverses from a scope to the value resulting from the
// absolute traversal.
func (t TraversalSplit) TraverseAbs(ctx *EvalContext) (cty.Value, Diagnostics) {
	return t.Abs.TraverseAbs(ctx)
}
187 | |||
// TraverseRel traverses from a given value, assumed to be the result of
// TraverseAbs on some scope, to a final result for the entire split traversal.
func (t TraversalSplit) TraverseRel(val cty.Value) (cty.Value, Diagnostics) {
	return t.Rel.TraverseRel(val)
}
193 | |||
194 | // Traverse is a convenience function to apply TraverseAbs followed by | ||
195 | // TraverseRel. | ||
196 | func (t TraversalSplit) Traverse(ctx *EvalContext) (cty.Value, Diagnostics) { | ||
197 | v1, diags := t.TraverseAbs(ctx) | ||
198 | if diags.HasErrors() { | ||
199 | return cty.DynamicVal, diags | ||
200 | } | ||
201 | v2, newDiags := t.TraverseRel(v1) | ||
202 | diags = append(diags, newDiags...) | ||
203 | return v2, diags | ||
204 | } | ||
205 | |||
// Join concatenates together the Abs and Rel parts to produce a single
// absolute traversal.
func (t TraversalSplit) Join() Traversal {
	return TraversalJoin(t.Abs, t.Rel)
}
211 | |||
// RootName returns the root name for the absolute part of the split.
func (t TraversalSplit) RootName() string {
	return t.Abs.RootName()
}
216 | |||
// A Traverser is a step within a Traversal.
type Traverser interface {
	// TraversalStep applies this step to the given value, producing either
	// the next value or error diagnostics.
	TraversalStep(cty.Value) (cty.Value, Diagnostics)

	// SourceRange returns the source range of this single step.
	SourceRange() Range

	// isTraverserSigil is unexported so that the set of Traverser
	// implementations is closed to this package.
	isTraverserSigil() isTraverser
}
223 | |||
// Embed this in a struct to declare it as a Traverser. The unexported
// method prevents implementations outside this package.
type isTraverser struct {
}

func (tr isTraverser) isTraverserSigil() isTraverser {
	return isTraverser{}
}
231 | |||
// TraverseRoot looks up a root name in a scope. It is used as the first step
// of an absolute Traversal, and cannot itself be traversed directly.
type TraverseRoot struct {
	isTraverser
	Name     string // the root symbol name to look up in the scope
	SrcRange Range  // the source range of this step
}
239 | |||
// TraversalStep on a TraverseRoot immediately panics, because absolute
// traversals cannot be directly traversed; use Traversal.TraverseAbs instead.
func (tn TraverseRoot) TraversalStep(cty.Value) (cty.Value, Diagnostics) {
	panic("Cannot traverse an absolute traversal")
}
245 | |||
// SourceRange implements Traverser.
func (tn TraverseRoot) SourceRange() Range {
	return tn.SrcRange
}
249 | |||
// TraverseAttr looks up an attribute in its initial value.
type TraverseAttr struct {
	isTraverser
	Name     string // the attribute (or map key) name to look up
	SrcRange Range  // the source range of this step
}
256 | |||
// TraversalStep returns the value of the attribute named tn.Name within the
// given value. Objects use true attribute lookup; maps treat the name as a
// string index key; any other type produces error diagnostics.
func (tn TraverseAttr) TraversalStep(val cty.Value) (cty.Value, Diagnostics) {
	if val.IsNull() {
		// Attribute access on a null value is always an error.
		return cty.DynamicVal, Diagnostics{
			{
				Severity: DiagError,
				Summary:  "Attempt to get attribute from null value",
				Detail:   "This value is null, so it does not have any attributes.",
				Subject:  &tn.SrcRange,
			},
		}
	}

	ty := val.Type()
	switch {
	case ty.IsObjectType():
		if !ty.HasAttribute(tn.Name) {
			return cty.DynamicVal, Diagnostics{
				{
					Severity: DiagError,
					Summary:  "Unsupported attribute",
					Detail:   fmt.Sprintf("This object does not have an attribute named %q.", tn.Name),
					Subject:  &tn.SrcRange,
				},
			}
		}

		// Even if the value is unknown, the attribute's type is known
		// from the object type, so return a typed unknown.
		if !val.IsKnown() {
			return cty.UnknownVal(ty.AttributeType(tn.Name)), nil
		}

		return val.GetAttr(tn.Name), nil
	case ty.IsMapType():
		// For maps, attribute syntax is sugar for indexing by string key.
		if !val.IsKnown() {
			return cty.UnknownVal(ty.ElementType()), nil
		}

		idx := cty.StringVal(tn.Name)
		if val.HasIndex(idx).False() {
			return cty.DynamicVal, Diagnostics{
				{
					Severity: DiagError,
					Summary:  "Missing map element",
					Detail:   fmt.Sprintf("This map does not have an element with the key %q.", tn.Name),
					Subject:  &tn.SrcRange,
				},
			}
		}

		return val.Index(idx), nil
	case ty == cty.DynamicPseudoType:
		// Type not yet known, so we can't check the attribute; defer the
		// decision by returning an unknown of unknown type.
		return cty.DynamicVal, nil
	default:
		// All other types (primitives, lists, sets, tuples) have no
		// attributes at all.
		return cty.DynamicVal, Diagnostics{
			{
				Severity: DiagError,
				Summary:  "Unsupported attribute",
				Detail:   "This value does not have any attributes.",
				Subject:  &tn.SrcRange,
			},
		}
	}
}
319 | |||
// SourceRange implements Traverser.
func (tn TraverseAttr) SourceRange() Range {
	return tn.SrcRange
}
323 | |||
// TraverseIndex applies the index operation to its initial value.
type TraverseIndex struct {
	isTraverser
	Key      cty.Value // the index key value
	SrcRange Range     // the source range of this step
}
330 | |||
// TraversalStep delegates to the package-level Index function, which
// handles all index-operation semantics and diagnostics.
func (tn TraverseIndex) TraversalStep(val cty.Value) (cty.Value, Diagnostics) {
	return Index(val, tn.Key, &tn.SrcRange)
}
334 | |||
// SourceRange implements Traverser.
func (tn TraverseIndex) SourceRange() Range {
	return tn.SrcRange
}
338 | |||
// TraverseSplat applies the splat operation to its initial value.
type TraverseSplat struct {
	isTraverser
	Each     Traversal // the traversal to apply to each element
	SrcRange Range     // the source range of this step
}
345 | |||
// TraversalStep is not yet implemented for splat steps and currently
// panics unconditionally.
func (tn TraverseSplat) TraversalStep(val cty.Value) (cty.Value, Diagnostics) {
	panic("TraverseSplat not yet implemented")
}
349 | |||
// SourceRange implements Traverser.
func (tn TraverseSplat) SourceRange() Range {
	return tn.SrcRange
}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/traversal_for_expr.go b/vendor/github.com/hashicorp/hcl2/hcl/traversal_for_expr.go new file mode 100644 index 0000000..5f52946 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcl/traversal_for_expr.go | |||
@@ -0,0 +1,121 @@ | |||
1 | package hcl | ||
2 | |||
3 | // AbsTraversalForExpr attempts to interpret the given expression as | ||
4 | // an absolute traversal, or returns error diagnostic(s) if that is | ||
5 | // not possible for the given expression. | ||
6 | // | ||
7 | // A particular Expression implementation can support this function by | ||
8 | // offering a method called AsTraversal that takes no arguments and | ||
9 | // returns either a valid absolute traversal or nil to indicate that | ||
10 | // no traversal is possible. Alternatively, an implementation can support | ||
11 | // UnwrapExpression to delegate handling of this function to a wrapped | ||
12 | // Expression object. | ||
13 | // | ||
14 | // In most cases the calling application is interested in the value | ||
15 | // that results from an expression, but in rarer cases the application | ||
16 | // needs to see the the name of the variable and subsequent | ||
17 | // attributes/indexes itself, for example to allow users to give references | ||
18 | // to the variables themselves rather than to their values. An implementer | ||
19 | // of this function should at least support attribute and index steps. | ||
20 | func AbsTraversalForExpr(expr Expression) (Traversal, Diagnostics) { | ||
21 | type asTraversal interface { | ||
22 | AsTraversal() Traversal | ||
23 | } | ||
24 | |||
25 | physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool { | ||
26 | _, supported := expr.(asTraversal) | ||
27 | return supported | ||
28 | }) | ||
29 | |||
30 | if asT, supported := physExpr.(asTraversal); supported { | ||
31 | if traversal := asT.AsTraversal(); traversal != nil { | ||
32 | return traversal, nil | ||
33 | } | ||
34 | } | ||
35 | return nil, Diagnostics{ | ||
36 | &Diagnostic{ | ||
37 | Severity: DiagError, | ||
38 | Summary: "Invalid expression", | ||
39 | Detail: "A static variable reference is required.", | ||
40 | Subject: expr.Range().Ptr(), | ||
41 | }, | ||
42 | } | ||
43 | } | ||
44 | |||
// RelTraversalForExpr is similar to AbsTraversalForExpr but it returns
// a relative traversal instead. Due to the nature of HCL expressions, the
// first element of the returned traversal is always a TraverseAttr, and
// then it will be followed by zero or more other steps.
//
// Any expression accepted by AbsTraversalForExpr is also accepted by
// RelTraversalForExpr.
func RelTraversalForExpr(expr Expression) (Traversal, Diagnostics) {
	traversal, diags := AbsTraversalForExpr(expr)
	if len(traversal) > 0 {
		// A non-empty absolute traversal always begins with a TraverseRoot
		// (so the type assertion below cannot fail); we rewrite that first
		// step into an equivalent TraverseAttr to make the result relative.
		root := traversal[0].(TraverseRoot)
		traversal[0] = TraverseAttr{
			Name:     root.Name,
			SrcRange: root.SrcRange,
		}
	}
	return traversal, diags
}
63 | |||
64 | // ExprAsKeyword attempts to interpret the given expression as a static keyword, | ||
65 | // returning the keyword string if possible, and the empty string if not. | ||
66 | // | ||
67 | // A static keyword, for the sake of this function, is a single identifier. | ||
68 | // For example, the following attribute has an expression that would produce | ||
69 | // the keyword "foo": | ||
70 | // | ||
71 | // example = foo | ||
72 | // | ||
73 | // This function is a variant of AbsTraversalForExpr, which uses the same | ||
74 | // interface on the given expression. This helper constrains the result | ||
75 | // further by requiring only a single root identifier. | ||
76 | // | ||
77 | // This function is intended to be used with the following idiom, to recognize | ||
78 | // situations where one of a fixed set of keywords is required and arbitrary | ||
79 | // expressions are not allowed: | ||
80 | // | ||
81 | // switch hcl.ExprAsKeyword(expr) { | ||
82 | // case "allow": | ||
83 | // // (take suitable action for keyword "allow") | ||
84 | // case "deny": | ||
85 | // // (take suitable action for keyword "deny") | ||
86 | // default: | ||
87 | // diags = append(diags, &hcl.Diagnostic{ | ||
88 | // // ... "invalid keyword" diagnostic message ... | ||
89 | // }) | ||
90 | // } | ||
91 | // | ||
92 | // The above approach will generate the same message for both the use of an | ||
93 | // unrecognized keyword and for not using a keyword at all, which is usually | ||
94 | // reasonable if the message specifies that the given value must be a keyword | ||
95 | // from that fixed list. | ||
96 | // | ||
97 | // Note that in the native syntax the keywords "true", "false", and "null" are | ||
98 | // recognized as literal values during parsing and so these reserved words | ||
99 | // cannot not be accepted as keywords by this function. | ||
100 | // | ||
101 | // Since interpreting an expression as a keyword bypasses usual expression | ||
102 | // evaluation, it should be used sparingly for situations where e.g. one of | ||
103 | // a fixed set of keywords is used in a structural way in a special attribute | ||
104 | // to affect the further processing of a block. | ||
105 | func ExprAsKeyword(expr Expression) string { | ||
106 | type asTraversal interface { | ||
107 | AsTraversal() Traversal | ||
108 | } | ||
109 | |||
110 | physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool { | ||
111 | _, supported := expr.(asTraversal) | ||
112 | return supported | ||
113 | }) | ||
114 | |||
115 | if asT, supported := physExpr.(asTraversal); supported { | ||
116 | if traversal := asT.AsTraversal(); len(traversal) == 1 { | ||
117 | return traversal.RootName() | ||
118 | } | ||
119 | } | ||
120 | return "" | ||
121 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcldec/block_labels.go b/vendor/github.com/hashicorp/hcl2/hcldec/block_labels.go new file mode 100644 index 0000000..7e652e9 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcldec/block_labels.go | |||
@@ -0,0 +1,21 @@ | |||
1 | package hcldec | ||
2 | |||
3 | import ( | ||
4 | "github.com/hashicorp/hcl2/hcl" | ||
5 | ) | ||
6 | |||
// blockLabel pairs a single label value from a block header with the
// source range where that label was written, for use in diagnostics.
type blockLabel struct {
	Value string
	Range hcl.Range
}
11 | |||
12 | func labelsForBlock(block *hcl.Block) []blockLabel { | ||
13 | ret := make([]blockLabel, len(block.Labels)) | ||
14 | for i := range block.Labels { | ||
15 | ret[i] = blockLabel{ | ||
16 | Value: block.Labels[i], | ||
17 | Range: block.LabelRanges[i], | ||
18 | } | ||
19 | } | ||
20 | return ret | ||
21 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcldec/decode.go b/vendor/github.com/hashicorp/hcl2/hcldec/decode.go new file mode 100644 index 0000000..6cf93fe --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcldec/decode.go | |||
@@ -0,0 +1,36 @@ | |||
1 | package hcldec | ||
2 | |||
3 | import ( | ||
4 | "github.com/hashicorp/hcl2/hcl" | ||
5 | "github.com/zclconf/go-cty/cty" | ||
6 | ) | ||
7 | |||
// decode is the shared implementation behind Decode and PartialDecode:
// it derives the schema implied by the given spec, extracts the body
// content against that schema (partially, if requested), and then asks
// the spec to decode the content into a value.
//
// The returned hcl.Body holds any "leftover" content when partial is
// true, and is nil otherwise.
func decode(body hcl.Body, blockLabels []blockLabel, ctx *hcl.EvalContext, spec Spec, partial bool) (cty.Value, hcl.Body, hcl.Diagnostics) {
	schema := ImpliedSchema(spec)

	var content *hcl.BodyContent
	var diags hcl.Diagnostics
	var leftovers hcl.Body

	if partial {
		content, leftovers, diags = body.PartialContent(schema)
	} else {
		content, diags = body.Content(schema)
	}

	val, valDiags := spec.decode(content, blockLabels, ctx)
	diags = append(diags, valDiags...)

	return val, leftovers, diags
}
26 | |||
// impliedType is the internal implementation of the public ImpliedType.
func impliedType(spec Spec) cty.Type {
	return spec.impliedType()
}

// sourceRange is the internal implementation of the public SourceRange.
// It uses PartialContent so that a body containing extraneous items can
// still yield a best-effort range; diagnostics are intentionally
// discarded here since this is a best-effort operation.
func sourceRange(body hcl.Body, blockLabels []blockLabel, spec Spec) hcl.Range {
	schema := ImpliedSchema(spec)
	content, _, _ := body.PartialContent(schema)

	return spec.sourceRange(content, blockLabels)
}
diff --git a/vendor/github.com/hashicorp/hcl2/hcldec/doc.go b/vendor/github.com/hashicorp/hcl2/hcldec/doc.go new file mode 100644 index 0000000..23bfe54 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcldec/doc.go | |||
@@ -0,0 +1,12 @@ | |||
1 | // Package hcldec provides a higher-level API for unpacking the content of | ||
2 | // HCL bodies, implemented in terms of the low-level "Content" API exposed | ||
3 | // by the bodies themselves. | ||
4 | // | ||
5 | // It allows decoding an entire nested configuration in a single operation | ||
6 | // by providing a description of the intended structure. | ||
7 | // | ||
8 | // For some applications it may be more convenient to use the "gohcl" | ||
9 | // package, which has a similar purpose but decodes directly into native | ||
10 | // Go data types. hcldec instead targets the cty type system, and thus allows | ||
11 | // a cty-driven application to remain within that type system. | ||
12 | package hcldec | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcldec/gob.go b/vendor/github.com/hashicorp/hcl2/hcldec/gob.go new file mode 100644 index 0000000..e2027cf --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcldec/gob.go | |||
@@ -0,0 +1,23 @@ | |||
1 | package hcldec | ||
2 | |||
3 | import ( | ||
4 | "encoding/gob" | ||
5 | ) | ||
6 | |||
// init registers every Spec implementation with encoding/gob, so that
// specs can be sent over gob channels, such as using
// github.com/hashicorp/go-plugin with plugins that need to describe
// what shape of configuration they are expecting.
func init() {
	gob.Register(ObjectSpec(nil))
	gob.Register(TupleSpec(nil))
	gob.Register((*AttrSpec)(nil))
	gob.Register((*LiteralSpec)(nil))
	gob.Register((*ExprSpec)(nil))
	gob.Register((*BlockSpec)(nil))
	gob.Register((*BlockListSpec)(nil))
	gob.Register((*BlockSetSpec)(nil))
	gob.Register((*BlockMapSpec)(nil))
	gob.Register((*BlockLabelSpec)(nil))
	gob.Register((*DefaultSpec)(nil))
}
diff --git a/vendor/github.com/hashicorp/hcl2/hcldec/public.go b/vendor/github.com/hashicorp/hcl2/hcldec/public.go new file mode 100644 index 0000000..5d1f10a --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcldec/public.go | |||
@@ -0,0 +1,78 @@ | |||
1 | package hcldec | ||
2 | |||
3 | import ( | ||
4 | "github.com/hashicorp/hcl2/hcl" | ||
5 | "github.com/zclconf/go-cty/cty" | ||
6 | ) | ||
7 | |||
// Decode interprets the given body using the given specification and returns
// the resulting value. If the given body is not valid per the spec, error
// diagnostics are returned and the returned value is likely to be incomplete.
//
// The ctx argument may be nil, in which case any references to variables or
// functions will produce error diagnostics.
func Decode(body hcl.Body, spec Spec, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
	val, _, diags := decode(body, nil, ctx, spec, false)
	return val, diags
}

// PartialDecode is like Decode except that it permits "leftover" items in
// the top-level body, which are returned as a new body to allow for
// further processing.
//
// Any descendant block bodies are _not_ decoded partially and thus must
// be fully described by the given specification.
func PartialDecode(body hcl.Body, spec Spec, ctx *hcl.EvalContext) (cty.Value, hcl.Body, hcl.Diagnostics) {
	return decode(body, nil, ctx, spec, true)
}

// ImpliedType returns the value type that should result from decoding the
// given spec.
func ImpliedType(spec Spec) cty.Type {
	return impliedType(spec)
}
34 | |||
// SourceRange interprets the given body using the given specification and
// then returns the source range of the value that would be used to
// fulfill the spec.
//
// This can be used if application-level validation detects value errors, to
// obtain a reasonable SourceRange to use for generated diagnostics. It works
// best when applied to specific body items (e.g. using AttrSpec, BlockSpec, ...)
// as opposed to entire bodies using ObjectSpec, TupleSpec. The result will
// be less useful the broader the specification, so e.g. a spec that returns
// the entirety of all of the blocks of a given type is likely to be
// _particularly_ arbitrary and useless.
//
// If the given body is not valid per the given spec, the result is best-effort
// and may not actually be something ideal. It's expected that an application
// will already have used Decode or PartialDecode earlier and thus had an
// opportunity to detect and report spec violations.
func SourceRange(body hcl.Body, spec Spec) hcl.Range {
	return sourceRange(body, nil, spec)
}
54 | |||
55 | // ChildBlockTypes returns a map of all of the child block types declared | ||
56 | // by the given spec, with block type names as keys and the associated | ||
57 | // nested body specs as values. | ||
58 | func ChildBlockTypes(spec Spec) map[string]Spec { | ||
59 | ret := map[string]Spec{} | ||
60 | |||
61 | // visitSameBodyChildren walks through the spec structure, calling | ||
62 | // the given callback for each descendent spec encountered. We are | ||
63 | // interested in the specs that reference attributes and blocks. | ||
64 | var visit visitFunc | ||
65 | visit = func(s Spec) { | ||
66 | if bs, ok := s.(blockSpec); ok { | ||
67 | for _, blockS := range bs.blockHeaderSchemata() { | ||
68 | ret[blockS.Type] = bs.nestedSpec() | ||
69 | } | ||
70 | } | ||
71 | |||
72 | s.visitSameBodyChildren(visit) | ||
73 | } | ||
74 | |||
75 | visit(spec) | ||
76 | |||
77 | return ret | ||
78 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcldec/schema.go b/vendor/github.com/hashicorp/hcl2/hcldec/schema.go new file mode 100644 index 0000000..b57bd96 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcldec/schema.go | |||
@@ -0,0 +1,36 @@ | |||
1 | package hcldec | ||
2 | |||
3 | import ( | ||
4 | "github.com/hashicorp/hcl2/hcl" | ||
5 | ) | ||
6 | |||
7 | // ImpliedSchema returns the *hcl.BodySchema implied by the given specification. | ||
8 | // This is the schema that the Decode function will use internally to | ||
9 | // access the content of a given body. | ||
10 | func ImpliedSchema(spec Spec) *hcl.BodySchema { | ||
11 | var attrs []hcl.AttributeSchema | ||
12 | var blocks []hcl.BlockHeaderSchema | ||
13 | |||
14 | // visitSameBodyChildren walks through the spec structure, calling | ||
15 | // the given callback for each descendent spec encountered. We are | ||
16 | // interested in the specs that reference attributes and blocks. | ||
17 | var visit visitFunc | ||
18 | visit = func(s Spec) { | ||
19 | if as, ok := s.(attrSpec); ok { | ||
20 | attrs = append(attrs, as.attrSchemata()...) | ||
21 | } | ||
22 | |||
23 | if bs, ok := s.(blockSpec); ok { | ||
24 | blocks = append(blocks, bs.blockHeaderSchemata()...) | ||
25 | } | ||
26 | |||
27 | s.visitSameBodyChildren(visit) | ||
28 | } | ||
29 | |||
30 | visit(spec) | ||
31 | |||
32 | return &hcl.BodySchema{ | ||
33 | Attributes: attrs, | ||
34 | Blocks: blocks, | ||
35 | } | ||
36 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hcldec/spec.go b/vendor/github.com/hashicorp/hcl2/hcldec/spec.go new file mode 100644 index 0000000..25cafcd --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcldec/spec.go | |||
@@ -0,0 +1,998 @@ | |||
1 | package hcldec | ||
2 | |||
3 | import ( | ||
4 | "bytes" | ||
5 | "fmt" | ||
6 | |||
7 | "github.com/hashicorp/hcl2/hcl" | ||
8 | "github.com/zclconf/go-cty/cty" | ||
9 | "github.com/zclconf/go-cty/cty/convert" | ||
10 | "github.com/zclconf/go-cty/cty/function" | ||
11 | ) | ||
12 | |||
// A Spec is a description of how to decode a hcl.Body to a cty.Value.
//
// The various other types in this package whose names end in "Spec" are
// the spec implementations. The most common top-level spec is ObjectSpec,
// which decodes body content into a cty.Value of an object type.
type Spec interface {
	// Perform the decode operation on the given body, in the context of
	// the given block (which might be null), using the given eval context.
	//
	// "block" is provided only by the nested calls performed by the spec
	// types that work on block bodies.
	decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics)

	// Return the cty.Type that should be returned when decoding a body with
	// this spec.
	impliedType() cty.Type

	// Call the given callback once for each of the nested specs that would
	// get decoded with the same body and block as the receiver. This should
	// not descend into the nested specs used when decoding blocks.
	visitSameBodyChildren(cb visitFunc)

	// Determine the source range of the value that would be returned for the
	// spec in the given content, in the context of the given block
	// (which might be null). If the corresponding item is missing, return
	// a place where it might be inserted.
	sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range
}
41 | |||
// visitFunc is the callback signature used by Spec.visitSameBodyChildren.
type visitFunc func(spec Spec)

// An ObjectSpec is a Spec that produces a cty.Value of an object type whose
// attributes correspond to the keys of the spec map.
type ObjectSpec map[string]Spec

// attrSpec is implemented by specs that require attributes from the body.
type attrSpec interface {
	attrSchemata() []hcl.AttributeSchema
}

// blockSpec is implemented by specs that require blocks from the body.
type blockSpec interface {
	blockHeaderSchemata() []hcl.BlockHeaderSchema
	nestedSpec() Spec
}

// specNeedingVariables is implemented by specs that can use variables
// from the EvalContext, to declare which variables they need.
type specNeedingVariables interface {
	variablesNeeded(content *hcl.BodyContent) []hcl.Traversal
}
64 | |||
// visitSameBodyChildren visits every nested spec; all of them decode
// from the same body as the object spec itself.
func (s ObjectSpec) visitSameBodyChildren(cb visitFunc) {
	for _, c := range s {
		cb(c)
	}
}

// decode produces an object value whose attributes are the results of
// decoding each nested spec against the same body content, accumulating
// any diagnostics they produce.
func (s ObjectSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
	vals := make(map[string]cty.Value, len(s))
	var diags hcl.Diagnostics

	for k, spec := range s {
		var kd hcl.Diagnostics
		vals[k], kd = spec.decode(content, blockLabels, ctx)
		diags = append(diags, kd...)
	}

	return cty.ObjectVal(vals), diags
}

// impliedType returns an object type whose attribute types are implied
// by the nested specs, or cty.EmptyObject for an empty spec map.
func (s ObjectSpec) impliedType() cty.Type {
	if len(s) == 0 {
		return cty.EmptyObject
	}

	attrTypes := make(map[string]cty.Type)
	for k, childSpec := range s {
		attrTypes[k] = childSpec.impliedType()
	}
	return cty.Object(attrTypes)
}

// sourceRange returns a best-effort range for the whole object result.
func (s ObjectSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range {
	// This is not great, but the best we can do. In practice, it's rather
	// strange to ask for the source range of an entire top-level body, since
	// that's already readily available to the caller.
	return content.MissingItemRange
}
102 | |||
// A TupleSpec is a Spec that produces a cty.Value of a tuple type whose
// elements correspond to the elements of the spec slice.
type TupleSpec []Spec

// visitSameBodyChildren visits every nested spec; all of them decode
// from the same body as the tuple spec itself.
func (s TupleSpec) visitSameBodyChildren(cb visitFunc) {
	for _, c := range s {
		cb(c)
	}
}

// decode produces a tuple value whose elements are the results of
// decoding each nested spec, in order, against the same body content.
func (s TupleSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
	vals := make([]cty.Value, len(s))
	var diags hcl.Diagnostics

	for i, spec := range s {
		var ed hcl.Diagnostics
		vals[i], ed = spec.decode(content, blockLabels, ctx)
		diags = append(diags, ed...)
	}

	return cty.TupleVal(vals), diags
}

// impliedType returns a tuple type whose element types are implied by
// the nested specs, or cty.EmptyTuple for an empty spec slice.
func (s TupleSpec) impliedType() cty.Type {
	if len(s) == 0 {
		return cty.EmptyTuple
	}

	attrTypes := make([]cty.Type, len(s))
	for i, childSpec := range s {
		attrTypes[i] = childSpec.impliedType()
	}
	return cty.Tuple(attrTypes)
}

// sourceRange returns a best-effort range for the whole tuple result.
func (s TupleSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range {
	// This is not great, but the best we can do. In practice, it's rather
	// strange to ask for the source range of an entire top-level body, since
	// that's already readily available to the caller.
	return content.MissingItemRange
}
144 | |||
// An AttrSpec is a Spec that evaluates a particular attribute expression in
// the body and returns its resulting value converted to the requested type,
// or produces a diagnostic if the type is incorrect.
type AttrSpec struct {
	Name     string   // attribute name to look up in the body
	Type     cty.Type // type the attribute value is converted to
	Required bool     // whether the attribute must be present
}

// visitSameBodyChildren is a no-op: AttrSpec is a leaf spec.
func (s *AttrSpec) visitSameBodyChildren(cb visitFunc) {
	// leaf node
}

// variablesNeeded returns the variables referenced by the attribute's
// expression, if the attribute is present.
// specNeedingVariables implementation
func (s *AttrSpec) variablesNeeded(content *hcl.BodyContent) []hcl.Traversal {
	attr, exists := content.Attributes[s.Name]
	if !exists {
		return nil
	}

	return attr.Expr.Variables()
}

// attrSchemata declares the single attribute this spec reads.
// attrSpec implementation
func (s *AttrSpec) attrSchemata() []hcl.AttributeSchema {
	return []hcl.AttributeSchema{
		{
			Name:     s.Name,
			Required: s.Required,
		},
	}
}

// sourceRange returns the range of the attribute's expression, or the
// body's missing-item range if the attribute is absent.
func (s *AttrSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range {
	attr, exists := content.Attributes[s.Name]
	if !exists {
		return content.MissingItemRange
	}

	return attr.Expr.Range()
}

// decode evaluates the attribute's expression and converts the result to
// the configured type, producing a diagnostic (and an unknown value of
// the correct type) if the conversion fails. An absent attribute yields
// a null value of the configured type.
func (s *AttrSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
	attr, exists := content.Attributes[s.Name]
	if !exists {
		// We don't need to check required and emit a diagnostic here, because
		// that would already have happened when building "content".
		return cty.NullVal(s.Type), nil
	}

	val, diags := attr.Expr.Value(ctx)

	convVal, err := convert.Convert(val, s.Type)
	if err != nil {
		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Incorrect attribute value type",
			Detail: fmt.Sprintf(
				"Inappropriate value for attribute %q: %s.",
				s.Name, err.Error(),
			),
			Subject: attr.Expr.StartRange().Ptr(),
			Context: hcl.RangeBetween(attr.NameRange, attr.Expr.StartRange()).Ptr(),
		})
		// We'll return an unknown value of the _correct_ type so that the
		// incomplete result can still be used for some analysis use-cases.
		val = cty.UnknownVal(s.Type)
	} else {
		val = convVal
	}

	return val, diags
}

// impliedType returns the configured conversion target type.
func (s *AttrSpec) impliedType() cty.Type {
	return s.Type
}
222 | |||
// A LiteralSpec is a Spec that produces the given literal value, ignoring
// the given body.
type LiteralSpec struct {
	Value cty.Value
}

// visitSameBodyChildren is a no-op: LiteralSpec is a leaf spec.
func (s *LiteralSpec) visitSameBodyChildren(cb visitFunc) {
	// leaf node
}

// decode returns the configured literal value; the body is ignored.
func (s *LiteralSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
	return s.Value, nil
}

// impliedType returns the type of the configured literal value.
func (s *LiteralSpec) impliedType() cty.Type {
	return s.Value.Type()
}

// sourceRange returns a placeholder range, since a literal has no
// corresponding location in the body.
func (s *LiteralSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range {
	// No sensible range to return for a literal, so the caller had better
	// ensure it doesn't cause any diagnostics.
	return hcl.Range{
		Filename: "<unknown>",
	}
}
248 | |||
// An ExprSpec is a Spec that evaluates the given expression, ignoring the
// given body.
type ExprSpec struct {
	Expr hcl.Expression
}

// visitSameBodyChildren is a no-op: ExprSpec is a leaf spec.
func (s *ExprSpec) visitSameBodyChildren(cb visitFunc) {
	// leaf node
}

// variablesNeeded returns the variables referenced by the expression.
// specNeedingVariables implementation
func (s *ExprSpec) variablesNeeded(content *hcl.BodyContent) []hcl.Traversal {
	return s.Expr.Variables()
}

// decode evaluates the configured expression; the body is ignored.
func (s *ExprSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
	return s.Expr.Value(ctx)
}

// impliedType returns cty.DynamicPseudoType, since the expression's
// result type is unknown until evaluation.
func (s *ExprSpec) impliedType() cty.Type {
	// We can't know the type of our expression until we evaluate it
	return cty.DynamicPseudoType
}

// sourceRange returns the range of the configured expression.
func (s *ExprSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range {
	return s.Expr.Range()
}
276 | |||
// A BlockSpec is a Spec that produces a cty.Value by decoding the contents
// of a single nested block of a given type, using a nested spec.
//
// If the Required flag is not set, the nested block may be omitted, in which
// case a null value is produced. If it _is_ set, an error diagnostic is
// produced if there are no nested blocks of the given type.
type BlockSpec struct {
	TypeName string // block type name to match in the body
	Nested   Spec   // spec used to decode the matched block's own body
	Required bool   // whether at least one block of this type must exist
}

// visitSameBodyChildren is a no-op: Nested decodes a different body.
func (s *BlockSpec) visitSameBodyChildren(cb visitFunc) {
	// leaf node ("Nested" does not use the same body)
}

// blockHeaderSchemata declares the single block type this spec reads,
// with label names derived from any BlockLabelSpecs in the nested spec.
// blockSpec implementation
func (s *BlockSpec) blockHeaderSchemata() []hcl.BlockHeaderSchema {
	return []hcl.BlockHeaderSchema{
		{
			Type:       s.TypeName,
			LabelNames: findLabelSpecs(s.Nested),
		},
	}
}

// nestedSpec returns the spec used to decode the nested block body.
// blockSpec implementation
func (s *BlockSpec) nestedSpec() Spec {
	return s.Nested
}

// variablesNeeded returns the variables needed by the nested spec within
// the first matching block's body, or nil if no block matches.
// specNeedingVariables implementation
func (s *BlockSpec) variablesNeeded(content *hcl.BodyContent) []hcl.Traversal {
	var childBlock *hcl.Block
	for _, candidate := range content.Blocks {
		if candidate.Type != s.TypeName {
			continue
		}

		childBlock = candidate
		break
	}

	if childBlock == nil {
		return nil
	}

	return Variables(childBlock.Body, s.Nested)
}
326 | |||
327 | func (s *BlockSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { | ||
328 | var diags hcl.Diagnostics | ||
329 | |||
330 | var childBlock *hcl.Block | ||
331 | for _, candidate := range content.Blocks { | ||
332 | if candidate.Type != s.TypeName { | ||
333 | continue | ||
334 | } | ||
335 | |||
336 | if childBlock != nil { | ||
337 | diags = append(diags, &hcl.Diagnostic{ | ||
338 | Severity: hcl.DiagError, | ||
339 | Summary: fmt.Sprintf("Duplicate %s block", s.TypeName), | ||
340 | Detail: fmt.Sprintf( | ||
341 | "Only one block of type %q is allowed. Previous definition was at %s.", | ||
342 | s.TypeName, childBlock.DefRange.String(), | ||
343 | ), | ||
344 | Subject: &candidate.DefRange, | ||
345 | }) | ||
346 | break | ||
347 | } | ||
348 | |||
349 | childBlock = candidate | ||
350 | } | ||
351 | |||
352 | if childBlock == nil { | ||
353 | if s.Required { | ||
354 | diags = append(diags, &hcl.Diagnostic{ | ||
355 | Severity: hcl.DiagError, | ||
356 | Summary: fmt.Sprintf("Missing %s block", s.TypeName), | ||
357 | Detail: fmt.Sprintf( | ||
358 | "A block of type %q is required here.", s.TypeName, | ||
359 | ), | ||
360 | Subject: &content.MissingItemRange, | ||
361 | }) | ||
362 | } | ||
363 | return cty.NullVal(s.Nested.impliedType()), diags | ||
364 | } | ||
365 | |||
366 | if s.Nested == nil { | ||
367 | panic("BlockSpec with no Nested Spec") | ||
368 | } | ||
369 | val, _, childDiags := decode(childBlock.Body, labelsForBlock(childBlock), ctx, s.Nested, false) | ||
370 | diags = append(diags, childDiags...) | ||
371 | return val, diags | ||
372 | } | ||
373 | |||
// impliedType returns the type implied by the nested spec, since the
// block's decoded body value is returned directly.
func (s *BlockSpec) impliedType() cty.Type {
	return s.Nested.impliedType()
}

// sourceRange returns the range of the nested spec's result within the
// first matching block, or the body's missing-item range if no block of
// the configured type is present.
func (s *BlockSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range {
	var childBlock *hcl.Block
	for _, candidate := range content.Blocks {
		if candidate.Type != s.TypeName {
			continue
		}

		childBlock = candidate
		break
	}

	if childBlock == nil {
		return content.MissingItemRange
	}

	return sourceRange(childBlock.Body, labelsForBlock(childBlock), s.Nested)
}
395 | |||
// A BlockListSpec is a Spec that produces a cty list of the results of
// decoding all of the nested blocks of a given type, using a nested spec.
type BlockListSpec struct {
	TypeName string // block type name to match in the body
	Nested   Spec   // spec used to decode each matched block's body
	MinItems int    // minimum number of blocks required (0 = no minimum)
	MaxItems int    // maximum number of blocks allowed (0 = no maximum)
}

// visitSameBodyChildren is a no-op: Nested decodes a different body.
func (s *BlockListSpec) visitSameBodyChildren(cb visitFunc) {
	// leaf node ("Nested" does not use the same body)
}

// blockHeaderSchemata declares the single block type this spec reads,
// with label names derived from any BlockLabelSpecs in the nested spec.
// blockSpec implementation
func (s *BlockListSpec) blockHeaderSchemata() []hcl.BlockHeaderSchema {
	return []hcl.BlockHeaderSchema{
		{
			Type:       s.TypeName,
			LabelNames: findLabelSpecs(s.Nested),
		},
	}
}

// nestedSpec returns the spec used to decode each nested block body.
// blockSpec implementation
func (s *BlockListSpec) nestedSpec() Spec {
	return s.Nested
}

// variablesNeeded returns the union of variables needed by the nested
// spec across every block of the configured type.
// specNeedingVariables implementation
func (s *BlockListSpec) variablesNeeded(content *hcl.BodyContent) []hcl.Traversal {
	var ret []hcl.Traversal

	for _, childBlock := range content.Blocks {
		if childBlock.Type != s.TypeName {
			continue
		}

		ret = append(ret, Variables(childBlock.Body, s.Nested)...)
	}

	return ret
}
438 | |||
// decode collects all blocks of the configured type, decodes each one
// with the nested spec, and returns them as a cty list, enforcing the
// MinItems/MaxItems constraints where set.
func (s *BlockListSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
	var diags hcl.Diagnostics

	if s.Nested == nil {
		panic("BlockListSpec with no Nested Spec")
	}

	var elems []cty.Value
	var sourceRanges []hcl.Range
	for _, childBlock := range content.Blocks {
		if childBlock.Type != s.TypeName {
			continue
		}

		val, _, childDiags := decode(childBlock.Body, labelsForBlock(childBlock), ctx, s.Nested, false)
		diags = append(diags, childDiags...)
		elems = append(elems, val)
		sourceRanges = append(sourceRanges, sourceRange(childBlock.Body, labelsForBlock(childBlock), s.Nested))
	}

	if len(elems) < s.MinItems {
		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  fmt.Sprintf("Insufficient %s blocks", s.TypeName),
			Detail:   fmt.Sprintf("At least %d %q blocks are required.", s.MinItems, s.TypeName),
			Subject:  &content.MissingItemRange,
		})
	} else if s.MaxItems > 0 && len(elems) > s.MaxItems {
		// Point at the first block beyond the limit; len(elems) > MaxItems
		// here, so indexing sourceRanges at MaxItems is in bounds.
		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  fmt.Sprintf("Too many %s blocks", s.TypeName),
			Detail:   fmt.Sprintf("No more than %d %q blocks are allowed", s.MaxItems, s.TypeName),
			Subject:  &sourceRanges[s.MaxItems],
		})
	}

	var ret cty.Value

	if len(elems) == 0 {
		// cty.ListVal panics on an empty slice, so an explicit empty list
		// of the nested element type is needed here.
		ret = cty.ListValEmpty(s.Nested.impliedType())
	} else {
		ret = cty.ListVal(elems)
	}

	return ret, diags
}

// impliedType returns a list type whose element type is implied by the
// nested spec.
func (s *BlockListSpec) impliedType() cty.Type {
	return cty.List(s.Nested.impliedType())
}
489 | |||
490 | func (s *BlockListSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range { | ||
491 | // We return the source range of the _first_ block of the given type, | ||
492 | // since they are not guaranteed to form a contiguous range. | ||
493 | |||
494 | var childBlock *hcl.Block | ||
495 | for _, candidate := range content.Blocks { | ||
496 | if candidate.Type != s.TypeName { | ||
497 | continue | ||
498 | } | ||
499 | |||
500 | childBlock = candidate | ||
501 | break | ||
502 | } | ||
503 | |||
504 | if childBlock == nil { | ||
505 | return content.MissingItemRange | ||
506 | } | ||
507 | |||
508 | return sourceRange(childBlock.Body, labelsForBlock(childBlock), s.Nested) | ||
509 | } | ||
510 | |||
// A BlockSetSpec is a Spec that produces a cty set of the results of
// decoding all of the nested blocks of a given type, using a nested spec.
type BlockSetSpec struct {
	TypeName string // type name of the blocks to decode
	Nested   Spec   // spec applied to the body of each matching block
	MinItems int    // minimum block count; fewer produces an error diagnostic
	MaxItems int    // maximum block count; zero or less means no limit
}
519 | |||
// visitSameBodyChildren is a no-op because s.Nested decodes the bodies of
// nested blocks, not the same body this spec is applied to.
func (s *BlockSetSpec) visitSameBodyChildren(cb visitFunc) {
	// leaf node ("Nested" does not use the same body)
}
523 | |||
524 | // blockSpec implementation | ||
525 | func (s *BlockSetSpec) blockHeaderSchemata() []hcl.BlockHeaderSchema { | ||
526 | return []hcl.BlockHeaderSchema{ | ||
527 | { | ||
528 | Type: s.TypeName, | ||
529 | LabelNames: findLabelSpecs(s.Nested), | ||
530 | }, | ||
531 | } | ||
532 | } | ||
533 | |||
// blockSpec implementation: the spec applied to each matched block's body.
func (s *BlockSetSpec) nestedSpec() Spec {
	return s.Nested
}
538 | |||
539 | // specNeedingVariables implementation | ||
540 | func (s *BlockSetSpec) variablesNeeded(content *hcl.BodyContent) []hcl.Traversal { | ||
541 | var ret []hcl.Traversal | ||
542 | |||
543 | for _, childBlock := range content.Blocks { | ||
544 | if childBlock.Type != s.TypeName { | ||
545 | continue | ||
546 | } | ||
547 | |||
548 | ret = append(ret, Variables(childBlock.Body, s.Nested)...) | ||
549 | } | ||
550 | |||
551 | return ret | ||
552 | } | ||
553 | |||
554 | func (s *BlockSetSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { | ||
555 | var diags hcl.Diagnostics | ||
556 | |||
557 | if s.Nested == nil { | ||
558 | panic("BlockSetSpec with no Nested Spec") | ||
559 | } | ||
560 | |||
561 | var elems []cty.Value | ||
562 | var sourceRanges []hcl.Range | ||
563 | for _, childBlock := range content.Blocks { | ||
564 | if childBlock.Type != s.TypeName { | ||
565 | continue | ||
566 | } | ||
567 | |||
568 | val, _, childDiags := decode(childBlock.Body, labelsForBlock(childBlock), ctx, s.Nested, false) | ||
569 | diags = append(diags, childDiags...) | ||
570 | elems = append(elems, val) | ||
571 | sourceRanges = append(sourceRanges, sourceRange(childBlock.Body, labelsForBlock(childBlock), s.Nested)) | ||
572 | } | ||
573 | |||
574 | if len(elems) < s.MinItems { | ||
575 | diags = append(diags, &hcl.Diagnostic{ | ||
576 | Severity: hcl.DiagError, | ||
577 | Summary: fmt.Sprintf("Insufficient %s blocks", s.TypeName), | ||
578 | Detail: fmt.Sprintf("At least %d %q blocks are required.", s.MinItems, s.TypeName), | ||
579 | Subject: &content.MissingItemRange, | ||
580 | }) | ||
581 | } else if s.MaxItems > 0 && len(elems) > s.MaxItems { | ||
582 | diags = append(diags, &hcl.Diagnostic{ | ||
583 | Severity: hcl.DiagError, | ||
584 | Summary: fmt.Sprintf("Too many %s blocks", s.TypeName), | ||
585 | Detail: fmt.Sprintf("No more than %d %q blocks are allowed", s.MaxItems, s.TypeName), | ||
586 | Subject: &sourceRanges[s.MaxItems], | ||
587 | }) | ||
588 | } | ||
589 | |||
590 | var ret cty.Value | ||
591 | |||
592 | if len(elems) == 0 { | ||
593 | ret = cty.SetValEmpty(s.Nested.impliedType()) | ||
594 | } else { | ||
595 | ret = cty.SetVal(elems) | ||
596 | } | ||
597 | |||
598 | return ret, diags | ||
599 | } | ||
600 | |||
601 | func (s *BlockSetSpec) impliedType() cty.Type { | ||
602 | return cty.Set(s.Nested.impliedType()) | ||
603 | } | ||
604 | |||
605 | func (s *BlockSetSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range { | ||
606 | // We return the source range of the _first_ block of the given type, | ||
607 | // since they are not guaranteed to form a contiguous range. | ||
608 | |||
609 | var childBlock *hcl.Block | ||
610 | for _, candidate := range content.Blocks { | ||
611 | if candidate.Type != s.TypeName { | ||
612 | continue | ||
613 | } | ||
614 | |||
615 | childBlock = candidate | ||
616 | break | ||
617 | } | ||
618 | |||
619 | if childBlock == nil { | ||
620 | return content.MissingItemRange | ||
621 | } | ||
622 | |||
623 | return sourceRange(childBlock.Body, labelsForBlock(childBlock), s.Nested) | ||
624 | } | ||
625 | |||
// A BlockMapSpec is a Spec that produces a cty map of the results of
// decoding all of the nested blocks of a given type, using a nested spec.
//
// One level of map structure is created for each of the given label names.
// There must be at least one given label name.
type BlockMapSpec struct {
	TypeName   string   // type name of the blocks to decode
	LabelNames []string // one map level per label name; must be non-empty
	Nested     Spec     // spec applied to the body of each matching block
}
636 | |||
// visitSameBodyChildren is a no-op because s.Nested decodes the bodies of
// nested blocks, not the same body this spec is applied to.
func (s *BlockMapSpec) visitSameBodyChildren(cb visitFunc) {
	// leaf node ("Nested" does not use the same body)
}
640 | |||
641 | // blockSpec implementation | ||
642 | func (s *BlockMapSpec) blockHeaderSchemata() []hcl.BlockHeaderSchema { | ||
643 | return []hcl.BlockHeaderSchema{ | ||
644 | { | ||
645 | Type: s.TypeName, | ||
646 | LabelNames: append(s.LabelNames, findLabelSpecs(s.Nested)...), | ||
647 | }, | ||
648 | } | ||
649 | } | ||
650 | |||
// blockSpec implementation: the spec applied to each matched block's body.
func (s *BlockMapSpec) nestedSpec() Spec {
	return s.Nested
}
655 | |||
656 | // specNeedingVariables implementation | ||
657 | func (s *BlockMapSpec) variablesNeeded(content *hcl.BodyContent) []hcl.Traversal { | ||
658 | var ret []hcl.Traversal | ||
659 | |||
660 | for _, childBlock := range content.Blocks { | ||
661 | if childBlock.Type != s.TypeName { | ||
662 | continue | ||
663 | } | ||
664 | |||
665 | ret = append(ret, Variables(childBlock.Body, s.Nested)...) | ||
666 | } | ||
667 | |||
668 | return ret | ||
669 | } | ||
670 | |||
671 | func (s *BlockMapSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { | ||
672 | var diags hcl.Diagnostics | ||
673 | |||
674 | if s.Nested == nil { | ||
675 | panic("BlockSetSpec with no Nested Spec") | ||
676 | } | ||
677 | |||
678 | elems := map[string]interface{}{} | ||
679 | for _, childBlock := range content.Blocks { | ||
680 | if childBlock.Type != s.TypeName { | ||
681 | continue | ||
682 | } | ||
683 | |||
684 | childLabels := labelsForBlock(childBlock) | ||
685 | val, _, childDiags := decode(childBlock.Body, childLabels[len(s.LabelNames):], ctx, s.Nested, false) | ||
686 | targetMap := elems | ||
687 | for _, key := range childBlock.Labels[:len(s.LabelNames)-1] { | ||
688 | if _, exists := targetMap[key]; !exists { | ||
689 | targetMap[key] = make(map[string]interface{}) | ||
690 | } | ||
691 | targetMap = targetMap[key].(map[string]interface{}) | ||
692 | } | ||
693 | |||
694 | diags = append(diags, childDiags...) | ||
695 | |||
696 | key := childBlock.Labels[len(s.LabelNames)-1] | ||
697 | if _, exists := targetMap[key]; exists { | ||
698 | labelsBuf := bytes.Buffer{} | ||
699 | for _, label := range childBlock.Labels { | ||
700 | fmt.Fprintf(&labelsBuf, " %q", label) | ||
701 | } | ||
702 | diags = append(diags, &hcl.Diagnostic{ | ||
703 | Severity: hcl.DiagError, | ||
704 | Summary: fmt.Sprintf("Duplicate %s block", s.TypeName), | ||
705 | Detail: fmt.Sprintf( | ||
706 | "A block for %s%s was already defined. The %s labels must be unique.", | ||
707 | s.TypeName, labelsBuf.String(), s.TypeName, | ||
708 | ), | ||
709 | Subject: &childBlock.DefRange, | ||
710 | }) | ||
711 | continue | ||
712 | } | ||
713 | |||
714 | targetMap[key] = val | ||
715 | } | ||
716 | |||
717 | if len(elems) == 0 { | ||
718 | return cty.MapValEmpty(s.Nested.impliedType()), diags | ||
719 | } | ||
720 | |||
721 | var ctyMap func(map[string]interface{}, int) cty.Value | ||
722 | ctyMap = func(raw map[string]interface{}, depth int) cty.Value { | ||
723 | vals := make(map[string]cty.Value, len(raw)) | ||
724 | if depth == 1 { | ||
725 | for k, v := range raw { | ||
726 | vals[k] = v.(cty.Value) | ||
727 | } | ||
728 | } else { | ||
729 | for k, v := range raw { | ||
730 | vals[k] = ctyMap(v.(map[string]interface{}), depth-1) | ||
731 | } | ||
732 | } | ||
733 | return cty.MapVal(vals) | ||
734 | } | ||
735 | |||
736 | return ctyMap(elems, len(s.LabelNames)), diags | ||
737 | } | ||
738 | |||
739 | func (s *BlockMapSpec) impliedType() cty.Type { | ||
740 | ret := s.Nested.impliedType() | ||
741 | for _ = range s.LabelNames { | ||
742 | ret = cty.Map(ret) | ||
743 | } | ||
744 | return ret | ||
745 | } | ||
746 | |||
747 | func (s *BlockMapSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range { | ||
748 | // We return the source range of the _first_ block of the given type, | ||
749 | // since they are not guaranteed to form a contiguous range. | ||
750 | |||
751 | var childBlock *hcl.Block | ||
752 | for _, candidate := range content.Blocks { | ||
753 | if candidate.Type != s.TypeName { | ||
754 | continue | ||
755 | } | ||
756 | |||
757 | childBlock = candidate | ||
758 | break | ||
759 | } | ||
760 | |||
761 | if childBlock == nil { | ||
762 | return content.MissingItemRange | ||
763 | } | ||
764 | |||
765 | return sourceRange(childBlock.Body, labelsForBlock(childBlock), s.Nested) | ||
766 | } | ||
767 | |||
// A BlockLabelSpec is a Spec that returns a cty.String representing the
// label of the block its given body belongs to, if indeed its given body
// belongs to a block. It is a programming error to use this in a non-block
// context, so this spec will panic in that case.
//
// This spec only works in the nested spec within a BlockSpec, BlockListSpec,
// BlockSetSpec or BlockMapSpec.
//
// The full set of label specs used against a particular block must have a
// consecutive set of indices starting at zero. The maximum index found
// defines how many labels the corresponding blocks must have in cty source.
type BlockLabelSpec struct {
	Index int    // zero-based position of the label in the block header
	Name  string // name used for this label in the block header schema
}
783 | |||
// visitSameBodyChildren is a no-op: this spec has no child specs.
func (s *BlockLabelSpec) visitSameBodyChildren(cb visitFunc) {
	// leaf node
}
787 | |||
788 | func (s *BlockLabelSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { | ||
789 | if s.Index >= len(blockLabels) { | ||
790 | panic("BlockListSpec used in non-block context") | ||
791 | } | ||
792 | |||
793 | return cty.StringVal(blockLabels[s.Index].Value), nil | ||
794 | } | ||
795 | |||
// impliedType is always cty.String for this spec.
func (s *BlockLabelSpec) impliedType() cty.Type {
	return cty.String // labels are always strings
}
799 | |||
800 | func (s *BlockLabelSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range { | ||
801 | if s.Index >= len(blockLabels) { | ||
802 | panic("BlockListSpec used in non-block context") | ||
803 | } | ||
804 | |||
805 | return blockLabels[s.Index].Range | ||
806 | } | ||
807 | |||
808 | func findLabelSpecs(spec Spec) []string { | ||
809 | maxIdx := -1 | ||
810 | var names map[int]string | ||
811 | |||
812 | var visit visitFunc | ||
813 | visit = func(s Spec) { | ||
814 | if ls, ok := s.(*BlockLabelSpec); ok { | ||
815 | if maxIdx < ls.Index { | ||
816 | maxIdx = ls.Index | ||
817 | } | ||
818 | if names == nil { | ||
819 | names = make(map[int]string) | ||
820 | } | ||
821 | names[ls.Index] = ls.Name | ||
822 | } | ||
823 | s.visitSameBodyChildren(visit) | ||
824 | } | ||
825 | |||
826 | visit(spec) | ||
827 | |||
828 | if maxIdx < 0 { | ||
829 | return nil // no labels at all | ||
830 | } | ||
831 | |||
832 | ret := make([]string, maxIdx+1) | ||
833 | for i := range ret { | ||
834 | name := names[i] | ||
835 | if name == "" { | ||
836 | // Should never happen if the spec is conformant, since we require | ||
837 | // consecutive indices starting at zero. | ||
838 | name = fmt.Sprintf("missing%02d", i) | ||
839 | } | ||
840 | ret[i] = name | ||
841 | } | ||
842 | |||
843 | return ret | ||
844 | } | ||
845 | |||
// DefaultSpec is a spec that wraps two specs, evaluating the primary first
// and then evaluating the default if the primary returns a null value.
//
// The two specifications must have the same implied result type for correct
// operation. If not, the result is undefined.
type DefaultSpec struct {
	Primary Spec // spec evaluated first
	Default Spec // spec evaluated when Primary produces a null value
}
855 | |||
// visitSameBodyChildren visits both wrapped specs, since both decode the
// same body as this spec.
func (s *DefaultSpec) visitSameBodyChildren(cb visitFunc) {
	cb(s.Primary)
	cb(s.Default)
}
860 | |||
861 | func (s *DefaultSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { | ||
862 | val, diags := s.Primary.decode(content, blockLabels, ctx) | ||
863 | if val.IsNull() { | ||
864 | var moreDiags hcl.Diagnostics | ||
865 | val, moreDiags = s.Default.decode(content, blockLabels, ctx) | ||
866 | diags = append(diags, moreDiags...) | ||
867 | } | ||
868 | return val, diags | ||
869 | } | ||
870 | |||
// impliedType returns the primary spec's implied type; per the DefaultSpec
// contract, Default is expected to share the same implied type.
func (s *DefaultSpec) impliedType() cty.Type {
	return s.Primary.impliedType()
}
874 | |||
// sourceRange reports a representative range for this spec's result.
func (s *DefaultSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range {
	// We can't tell from here which of the two specs will ultimately be used
	// in our result, so we'll just assume the first. This is usually the right
	// choice because the default is often a literal spec that doesn't have a
	// reasonable source range to return anyway.
	return s.Primary.sourceRange(content, blockLabels)
}
882 | |||
// TransformExprSpec is a spec that wraps another and then evaluates a given
// hcl.Expression on the result.
//
// The implied type of this spec is determined by evaluating the expression
// with an unknown value of the nested spec's implied type, which may cause
// the result to be imprecise. This spec should not be used in situations where
// precise result type information is needed.
type TransformExprSpec struct {
	Wrapped      Spec             // spec whose result is transformed
	Expr         hcl.Expression   // expression evaluated to produce the final result
	TransformCtx *hcl.EvalContext // parent context for evaluating Expr
	VarName      string           // variable name under which the wrapped result is exposed to Expr
}
896 | |||
// visitSameBodyChildren visits the wrapped spec, which decodes the same
// body as this spec.
func (s *TransformExprSpec) visitSameBodyChildren(cb visitFunc) {
	cb(s.Wrapped)
}
900 | |||
901 | func (s *TransformExprSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { | ||
902 | wrappedVal, diags := s.Wrapped.decode(content, blockLabels, ctx) | ||
903 | if diags.HasErrors() { | ||
904 | // We won't try to run our function in this case, because it'll probably | ||
905 | // generate confusing additional errors that will distract from the | ||
906 | // root cause. | ||
907 | return cty.UnknownVal(s.impliedType()), diags | ||
908 | } | ||
909 | |||
910 | chiCtx := s.TransformCtx.NewChild() | ||
911 | chiCtx.Variables = map[string]cty.Value{ | ||
912 | s.VarName: wrappedVal, | ||
913 | } | ||
914 | resultVal, resultDiags := s.Expr.Value(chiCtx) | ||
915 | diags = append(diags, resultDiags...) | ||
916 | return resultVal, diags | ||
917 | } | ||
918 | |||
919 | func (s *TransformExprSpec) impliedType() cty.Type { | ||
920 | wrappedTy := s.Wrapped.impliedType() | ||
921 | chiCtx := s.TransformCtx.NewChild() | ||
922 | chiCtx.Variables = map[string]cty.Value{ | ||
923 | s.VarName: cty.UnknownVal(wrappedTy), | ||
924 | } | ||
925 | resultVal, _ := s.Expr.Value(chiCtx) | ||
926 | return resultVal.Type() | ||
927 | } | ||
928 | |||
// sourceRange reports a representative range for this spec's result.
func (s *TransformExprSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range {
	// We'll just pass through our wrapped range here, even though that's
	// not super-accurate, because there's nothing better to return.
	return s.Wrapped.sourceRange(content, blockLabels)
}
934 | |||
// TransformFuncSpec is a spec that wraps another and then evaluates a given
// cty function with the result. The given function must expect exactly one
// argument, where the result of the wrapped spec will be passed.
//
// The implied type of this spec is determined by type-checking the function
// with an unknown value of the nested spec's implied type, which may cause
// the result to be imprecise. This spec should not be used in situations where
// precise result type information is needed.
//
// If the given function produces an error when run, this spec will produce
// a non-user-actionable diagnostic message. It's the caller's responsibility
// to ensure that the given function cannot fail for any non-error result
// of the wrapped spec.
type TransformFuncSpec struct {
	Wrapped Spec              // spec whose result is passed to Func
	Func    function.Function // single-argument function applied to the wrapped result
}
952 | |||
// visitSameBodyChildren visits the wrapped spec, which decodes the same
// body as this spec.
func (s *TransformFuncSpec) visitSameBodyChildren(cb visitFunc) {
	cb(s.Wrapped)
}
956 | |||
957 | func (s *TransformFuncSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { | ||
958 | wrappedVal, diags := s.Wrapped.decode(content, blockLabels, ctx) | ||
959 | if diags.HasErrors() { | ||
960 | // We won't try to run our function in this case, because it'll probably | ||
961 | // generate confusing additional errors that will distract from the | ||
962 | // root cause. | ||
963 | return cty.UnknownVal(s.impliedType()), diags | ||
964 | } | ||
965 | |||
966 | resultVal, err := s.Func.Call([]cty.Value{wrappedVal}) | ||
967 | if err != nil { | ||
968 | // This is not a good example of a diagnostic because it is reporting | ||
969 | // a programming error in the calling application, rather than something | ||
970 | // an end-user could act on. | ||
971 | diags = append(diags, &hcl.Diagnostic{ | ||
972 | Severity: hcl.DiagError, | ||
973 | Summary: "Transform function failed", | ||
974 | Detail: fmt.Sprintf("Decoder transform returned an error: %s", err), | ||
975 | Subject: s.sourceRange(content, blockLabels).Ptr(), | ||
976 | }) | ||
977 | return cty.UnknownVal(s.impliedType()), diags | ||
978 | } | ||
979 | |||
980 | return resultVal, diags | ||
981 | } | ||
982 | |||
983 | func (s *TransformFuncSpec) impliedType() cty.Type { | ||
984 | wrappedTy := s.Wrapped.impliedType() | ||
985 | resultTy, err := s.Func.ReturnType([]cty.Type{wrappedTy}) | ||
986 | if err != nil { | ||
987 | // Should never happen with a correctly-configured spec | ||
988 | return cty.DynamicPseudoType | ||
989 | } | ||
990 | |||
991 | return resultTy | ||
992 | } | ||
993 | |||
// sourceRange reports a representative range for this spec's result.
func (s *TransformFuncSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range {
	// We'll just pass through our wrapped range here, even though that's
	// not super-accurate, because there's nothing better to return.
	return s.Wrapped.sourceRange(content, blockLabels)
}
diff --git a/vendor/github.com/hashicorp/hcl2/hcldec/variables.go b/vendor/github.com/hashicorp/hcl2/hcldec/variables.go new file mode 100644 index 0000000..427b0d0 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hcldec/variables.go | |||
@@ -0,0 +1,34 @@ | |||
1 | package hcldec | ||
2 | |||
3 | import ( | ||
4 | "github.com/hashicorp/hcl2/hcl" | ||
5 | ) | ||
6 | |||
7 | // Variables processes the given body with the given spec and returns a | ||
8 | // list of the variable traversals that would be required to decode | ||
9 | // the same pairing of body and spec. | ||
10 | // | ||
11 | // This can be used to conditionally populate the variables in the EvalContext | ||
12 | // passed to Decode, for applications where a static scope is insufficient. | ||
13 | // | ||
14 | // If the given body is not compliant with the given schema, the result may | ||
15 | // be incomplete, but that's assumed to be okay because the eventual call | ||
16 | // to Decode will produce error diagnostics anyway. | ||
17 | func Variables(body hcl.Body, spec Spec) []hcl.Traversal { | ||
18 | schema := ImpliedSchema(spec) | ||
19 | |||
20 | content, _, _ := body.PartialContent(schema) | ||
21 | |||
22 | var vars []hcl.Traversal | ||
23 | |||
24 | if vs, ok := spec.(specNeedingVariables); ok { | ||
25 | vars = append(vars, vs.variablesNeeded(content)...) | ||
26 | } | ||
27 | spec.visitSameBodyChildren(func(s Spec) { | ||
28 | if vs, ok := s.(specNeedingVariables); ok { | ||
29 | vars = append(vars, vs.variablesNeeded(content)...) | ||
30 | } | ||
31 | }) | ||
32 | |||
33 | return vars | ||
34 | } | ||
diff --git a/vendor/github.com/hashicorp/hcl2/hclparse/parser.go b/vendor/github.com/hashicorp/hcl2/hclparse/parser.go new file mode 100644 index 0000000..6d47f12 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl2/hclparse/parser.go | |||
@@ -0,0 +1,123 @@ | |||
1 | package hclparse | ||
2 | |||
3 | import ( | ||
4 | "fmt" | ||
5 | "io/ioutil" | ||
6 | |||
7 | "github.com/hashicorp/hcl2/hcl" | ||
8 | "github.com/hashicorp/hcl2/hcl/hclsyntax" | ||
9 | "github.com/hashicorp/hcl2/hcl/json" | ||
10 | ) | ||
11 | |||
12 | // NOTE: This is the public interface for parsing. The actual parsers are | ||
13 | // in other packages alongside this one, with this package just wrapping them | ||
14 | // to provide a unified interface for the caller across all supported formats. | ||
15 | |||
// Parser is the main interface for parsing configuration files. As well as
// parsing files, a parser also retains a registry of all of the files it
// has parsed so that multiple attempts to parse the same file will return
// the same object and so the collected files can be used when printing
// diagnostics.
//
// Any diagnostics for parsing a file are only returned once on the first
// call to parse that file. Callers are expected to collect up diagnostics
// and present them together, so returning diagnostics for the same file
// multiple times would create a confusing result.
type Parser struct {
	// files caches each parsed file by filename so that repeat parses
	// return the same *hcl.File without re-reporting diagnostics.
	files map[string]*hcl.File
}
29 | |||
30 | // NewParser creates a new parser, ready to parse configuration files. | ||
31 | func NewParser() *Parser { | ||
32 | return &Parser{ | ||
33 | files: map[string]*hcl.File{}, | ||
34 | } | ||
35 | } | ||
36 | |||
37 | // ParseHCL parses the given buffer (which is assumed to have been loaded from | ||
38 | // the given filename) as a native-syntax configuration file and returns the | ||
39 | // hcl.File object representing it. | ||
40 | func (p *Parser) ParseHCL(src []byte, filename string) (*hcl.File, hcl.Diagnostics) { | ||
41 | if existing := p.files[filename]; existing != nil { | ||
42 | return existing, nil | ||
43 | } | ||
44 | |||
45 | file, diags := hclsyntax.ParseConfig(src, filename, hcl.Pos{Byte: 0, Line: 1, Column: 1}) | ||
46 | p.files[filename] = file | ||
47 | return file, diags | ||
48 | } | ||
49 | |||
50 | // ParseHCLFile reads the given filename and parses it as a native-syntax HCL | ||
51 | // configuration file. An error diagnostic is returned if the given file | ||
52 | // cannot be read. | ||
53 | func (p *Parser) ParseHCLFile(filename string) (*hcl.File, hcl.Diagnostics) { | ||
54 | if existing := p.files[filename]; existing != nil { | ||
55 | return existing, nil | ||
56 | } | ||
57 | |||
58 | src, err := ioutil.ReadFile(filename) | ||
59 | if err != nil { | ||
60 | return nil, hcl.Diagnostics{ | ||
61 | { | ||
62 | Severity: hcl.DiagError, | ||
63 | Summary: "Failed to read file", | ||
64 | Detail: fmt.Sprintf("The configuration file %q could not be read.", filename), | ||
65 | }, | ||
66 | } | ||
67 | } | ||
68 | |||
69 | return p.ParseHCL(src, filename) | ||
70 | } | ||
71 | |||
72 | // ParseJSON parses the given JSON buffer (which is assumed to have been loaded | ||
73 | // from the given filename) and returns the hcl.File object representing it. | ||
74 | func (p *Parser) ParseJSON(src []byte, filename string) (*hcl.File, hcl.Diagnostics) { | ||
75 | if existing := p.files[filename]; existing != nil { | ||
76 | return existing, nil | ||
77 | } | ||
78 | |||
79 | file, diags := json.Parse(src, filename) | ||
80 | p.files[filename] = file | ||
81 | return file, diags | ||
82 | } | ||
83 | |||
84 | // ParseJSONFile reads the given filename and parses it as JSON, similarly to | ||
85 | // ParseJSON. An error diagnostic is returned if the given file cannot be read. | ||
86 | func (p *Parser) ParseJSONFile(filename string) (*hcl.File, hcl.Diagnostics) { | ||
87 | if existing := p.files[filename]; existing != nil { | ||
88 | return existing, nil | ||
89 | } | ||
90 | |||
91 | file, diags := json.ParseFile(filename) | ||
92 | p.files[filename] = file | ||
93 | return file, diags | ||
94 | } | ||
95 | |||
// AddFile allows a caller to record in a parser a file that was parsed some
// other way, thus allowing it to be included in the registry of sources.
// Any file previously registered under the same filename is replaced.
func (p *Parser) AddFile(filename string, file *hcl.File) {
	p.files[filename] = file
}
101 | |||
102 | // Sources returns a map from filenames to the raw source code that was | ||
103 | // read from them. This is intended to be used, for example, to print | ||
104 | // diagnostics with contextual information. | ||
105 | // | ||
106 | // The arrays underlying the returned slices should not be modified. | ||
107 | func (p *Parser) Sources() map[string][]byte { | ||
108 | ret := make(map[string][]byte) | ||
109 | for fn, f := range p.files { | ||
110 | ret[fn] = f.Bytes | ||
111 | } | ||
112 | return ret | ||
113 | } | ||
114 | |||
// Files returns a map from filenames to the File objects produced from them.
// This is intended to be used, for example, to print diagnostics with
// contextual information.
//
// The returned map and all of the objects it refers to directly or indirectly
// must not be modified.
func (p *Parser) Files() map[string]*hcl.File {
	// The internal registry is returned directly, not copied.
	return p.files
}