diff options
Diffstat (limited to 'vendor/github.com/zclconf')
24 files changed, 9338 insertions, 0 deletions
diff --git a/vendor/github.com/zclconf/go-cty-yaml/.travis.yml b/vendor/github.com/zclconf/go-cty-yaml/.travis.yml new file mode 100644 index 0000000..13ff998 --- /dev/null +++ b/vendor/github.com/zclconf/go-cty-yaml/.travis.yml | |||
@@ -0,0 +1,5 @@ | |||
1 | language: go | ||
2 | |||
3 | go: | ||
4 | - 1.12 | ||
5 | |||
diff --git a/vendor/github.com/zclconf/go-cty-yaml/CHANGELOG.md b/vendor/github.com/zclconf/go-cty-yaml/CHANGELOG.md new file mode 100644 index 0000000..b3bc3b6 --- /dev/null +++ b/vendor/github.com/zclconf/go-cty-yaml/CHANGELOG.md | |||
@@ -0,0 +1,10 @@ | |||
1 | # 1.0.1 (July 30, 2019) | ||
2 | |||
3 | * The YAML decoder is now correctly treating quoted scalars as verbatim literal | ||
4 | strings rather than using the fuzzy type selection rules for them. Fuzzy | ||
5 | type selection rules still apply to unquoted scalars. | ||
6 | ([#4](https://github.com/zclconf/go-cty-yaml/pull/4)) | ||
7 | |||
8 | # 1.0.0 (May 26, 2019) | ||
9 | |||
10 | Initial release. | ||
diff --git a/vendor/github.com/zclconf/go-cty-yaml/LICENSE b/vendor/github.com/zclconf/go-cty-yaml/LICENSE new file mode 100644 index 0000000..8dada3e --- /dev/null +++ b/vendor/github.com/zclconf/go-cty-yaml/LICENSE | |||
@@ -0,0 +1,201 @@ | |||
1 | Apache License | ||
2 | Version 2.0, January 2004 | ||
3 | http://www.apache.org/licenses/ | ||
4 | |||
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION | ||
6 | |||
7 | 1. Definitions. | ||
8 | |||
9 | "License" shall mean the terms and conditions for use, reproduction, | ||
10 | and distribution as defined by Sections 1 through 9 of this document. | ||
11 | |||
12 | "Licensor" shall mean the copyright owner or entity authorized by | ||
13 | the copyright owner that is granting the License. | ||
14 | |||
15 | "Legal Entity" shall mean the union of the acting entity and all | ||
16 | other entities that control, are controlled by, or are under common | ||
17 | control with that entity. For the purposes of this definition, | ||
18 | "control" means (i) the power, direct or indirect, to cause the | ||
19 | direction or management of such entity, whether by contract or | ||
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the | ||
21 | outstanding shares, or (iii) beneficial ownership of such entity. | ||
22 | |||
23 | "You" (or "Your") shall mean an individual or Legal Entity | ||
24 | exercising permissions granted by this License. | ||
25 | |||
26 | "Source" form shall mean the preferred form for making modifications, | ||
27 | including but not limited to software source code, documentation | ||
28 | source, and configuration files. | ||
29 | |||
30 | "Object" form shall mean any form resulting from mechanical | ||
31 | transformation or translation of a Source form, including but | ||
32 | not limited to compiled object code, generated documentation, | ||
33 | and conversions to other media types. | ||
34 | |||
35 | "Work" shall mean the work of authorship, whether in Source or | ||
36 | Object form, made available under the License, as indicated by a | ||
37 | copyright notice that is included in or attached to the work | ||
38 | (an example is provided in the Appendix below). | ||
39 | |||
40 | "Derivative Works" shall mean any work, whether in Source or Object | ||
41 | form, that is based on (or derived from) the Work and for which the | ||
42 | editorial revisions, annotations, elaborations, or other modifications | ||
43 | represent, as a whole, an original work of authorship. For the purposes | ||
44 | of this License, Derivative Works shall not include works that remain | ||
45 | separable from, or merely link (or bind by name) to the interfaces of, | ||
46 | the Work and Derivative Works thereof. | ||
47 | |||
48 | "Contribution" shall mean any work of authorship, including | ||
49 | the original version of the Work and any modifications or additions | ||
50 | to that Work or Derivative Works thereof, that is intentionally | ||
51 | submitted to Licensor for inclusion in the Work by the copyright owner | ||
52 | or by an individual or Legal Entity authorized to submit on behalf of | ||
53 | the copyright owner. For the purposes of this definition, "submitted" | ||
54 | means any form of electronic, verbal, or written communication sent | ||
55 | to the Licensor or its representatives, including but not limited to | ||
56 | communication on electronic mailing lists, source code control systems, | ||
57 | and issue tracking systems that are managed by, or on behalf of, the | ||
58 | Licensor for the purpose of discussing and improving the Work, but | ||
59 | excluding communication that is conspicuously marked or otherwise | ||
60 | designated in writing by the copyright owner as "Not a Contribution." | ||
61 | |||
62 | "Contributor" shall mean Licensor and any individual or Legal Entity | ||
63 | on behalf of whom a Contribution has been received by Licensor and | ||
64 | subsequently incorporated within the Work. | ||
65 | |||
66 | 2. Grant of Copyright License. Subject to the terms and conditions of | ||
67 | this License, each Contributor hereby grants to You a perpetual, | ||
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable | ||
69 | copyright license to reproduce, prepare Derivative Works of, | ||
70 | publicly display, publicly perform, sublicense, and distribute the | ||
71 | Work and such Derivative Works in Source or Object form. | ||
72 | |||
73 | 3. Grant of Patent License. Subject to the terms and conditions of | ||
74 | this License, each Contributor hereby grants to You a perpetual, | ||
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable | ||
76 | (except as stated in this section) patent license to make, have made, | ||
77 | use, offer to sell, sell, import, and otherwise transfer the Work, | ||
78 | where such license applies only to those patent claims licensable | ||
79 | by such Contributor that are necessarily infringed by their | ||
80 | Contribution(s) alone or by combination of their Contribution(s) | ||
81 | with the Work to which such Contribution(s) was submitted. If You | ||
82 | institute patent litigation against any entity (including a | ||
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work | ||
84 | or a Contribution incorporated within the Work constitutes direct | ||
85 | or contributory patent infringement, then any patent licenses | ||
86 | granted to You under this License for that Work shall terminate | ||
87 | as of the date such litigation is filed. | ||
88 | |||
89 | 4. Redistribution. You may reproduce and distribute copies of the | ||
90 | Work or Derivative Works thereof in any medium, with or without | ||
91 | modifications, and in Source or Object form, provided that You | ||
92 | meet the following conditions: | ||
93 | |||
94 | (a) You must give any other recipients of the Work or | ||
95 | Derivative Works a copy of this License; and | ||
96 | |||
97 | (b) You must cause any modified files to carry prominent notices | ||
98 | stating that You changed the files; and | ||
99 | |||
100 | (c) You must retain, in the Source form of any Derivative Works | ||
101 | that You distribute, all copyright, patent, trademark, and | ||
102 | attribution notices from the Source form of the Work, | ||
103 | excluding those notices that do not pertain to any part of | ||
104 | the Derivative Works; and | ||
105 | |||
106 | (d) If the Work includes a "NOTICE" text file as part of its | ||
107 | distribution, then any Derivative Works that You distribute must | ||
108 | include a readable copy of the attribution notices contained | ||
109 | within such NOTICE file, excluding those notices that do not | ||
110 | pertain to any part of the Derivative Works, in at least one | ||
111 | of the following places: within a NOTICE text file distributed | ||
112 | as part of the Derivative Works; within the Source form or | ||
113 | documentation, if provided along with the Derivative Works; or, | ||
114 | within a display generated by the Derivative Works, if and | ||
115 | wherever such third-party notices normally appear. The contents | ||
116 | of the NOTICE file are for informational purposes only and | ||
117 | do not modify the License. You may add Your own attribution | ||
118 | notices within Derivative Works that You distribute, alongside | ||
119 | or as an addendum to the NOTICE text from the Work, provided | ||
120 | that such additional attribution notices cannot be construed | ||
121 | as modifying the License. | ||
122 | |||
123 | You may add Your own copyright statement to Your modifications and | ||
124 | may provide additional or different license terms and conditions | ||
125 | for use, reproduction, or distribution of Your modifications, or | ||
126 | for any such Derivative Works as a whole, provided Your use, | ||
127 | reproduction, and distribution of the Work otherwise complies with | ||
128 | the conditions stated in this License. | ||
129 | |||
130 | 5. Submission of Contributions. Unless You explicitly state otherwise, | ||
131 | any Contribution intentionally submitted for inclusion in the Work | ||
132 | by You to the Licensor shall be under the terms and conditions of | ||
133 | this License, without any additional terms or conditions. | ||
134 | Notwithstanding the above, nothing herein shall supersede or modify | ||
135 | the terms of any separate license agreement you may have executed | ||
136 | with Licensor regarding such Contributions. | ||
137 | |||
138 | 6. Trademarks. This License does not grant permission to use the trade | ||
139 | names, trademarks, service marks, or product names of the Licensor, | ||
140 | except as required for reasonable and customary use in describing the | ||
141 | origin of the Work and reproducing the content of the NOTICE file. | ||
142 | |||
143 | 7. Disclaimer of Warranty. Unless required by applicable law or | ||
144 | agreed to in writing, Licensor provides the Work (and each | ||
145 | Contributor provides its Contributions) on an "AS IS" BASIS, | ||
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or | ||
147 | implied, including, without limitation, any warranties or conditions | ||
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A | ||
149 | PARTICULAR PURPOSE. You are solely responsible for determining the | ||
150 | appropriateness of using or redistributing the Work and assume any | ||
151 | risks associated with Your exercise of permissions under this License. | ||
152 | |||
153 | 8. Limitation of Liability. In no event and under no legal theory, | ||
154 | whether in tort (including negligence), contract, or otherwise, | ||
155 | unless required by applicable law (such as deliberate and grossly | ||
156 | negligent acts) or agreed to in writing, shall any Contributor be | ||
157 | liable to You for damages, including any direct, indirect, special, | ||
158 | incidental, or consequential damages of any character arising as a | ||
159 | result of this License or out of the use or inability to use the | ||
160 | Work (including but not limited to damages for loss of goodwill, | ||
161 | work stoppage, computer failure or malfunction, or any and all | ||
162 | other commercial damages or losses), even if such Contributor | ||
163 | has been advised of the possibility of such damages. | ||
164 | |||
165 | 9. Accepting Warranty or Additional Liability. While redistributing | ||
166 | the Work or Derivative Works thereof, You may choose to offer, | ||
167 | and charge a fee for, acceptance of support, warranty, indemnity, | ||
168 | or other liability obligations and/or rights consistent with this | ||
169 | License. However, in accepting such obligations, You may act only | ||
170 | on Your own behalf and on Your sole responsibility, not on behalf | ||
171 | of any other Contributor, and only if You agree to indemnify, | ||
172 | defend, and hold each Contributor harmless for any liability | ||
173 | incurred by, or claims asserted against, such Contributor by reason | ||
174 | of your accepting any such warranty or additional liability. | ||
175 | |||
176 | END OF TERMS AND CONDITIONS | ||
177 | |||
178 | APPENDIX: How to apply the Apache License to your work. | ||
179 | |||
180 | To apply the Apache License to your work, attach the following | ||
181 | boilerplate notice, with the fields enclosed by brackets "{}" | ||
182 | replaced with your own identifying information. (Don't include | ||
183 | the brackets!) The text should be enclosed in the appropriate | ||
184 | comment syntax for the file format. We also recommend that a | ||
185 | file or class name and description of purpose be included on the | ||
186 | same "printed page" as the copyright notice for easier | ||
187 | identification within third-party archives. | ||
188 | |||
189 | Copyright {yyyy} {name of copyright owner} | ||
190 | |||
191 | Licensed under the Apache License, Version 2.0 (the "License"); | ||
192 | you may not use this file except in compliance with the License. | ||
193 | You may obtain a copy of the License at | ||
194 | |||
195 | http://www.apache.org/licenses/LICENSE-2.0 | ||
196 | |||
197 | Unless required by applicable law or agreed to in writing, software | ||
198 | distributed under the License is distributed on an "AS IS" BASIS, | ||
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
200 | See the License for the specific language governing permissions and | ||
201 | limitations under the License. | ||
diff --git a/vendor/github.com/zclconf/go-cty-yaml/LICENSE.libyaml b/vendor/github.com/zclconf/go-cty-yaml/LICENSE.libyaml new file mode 100644 index 0000000..8da58fb --- /dev/null +++ b/vendor/github.com/zclconf/go-cty-yaml/LICENSE.libyaml | |||
@@ -0,0 +1,31 @@ | |||
1 | The following files were ported to Go from C files of libyaml, and thus | ||
2 | are still covered by their original copyright and license: | ||
3 | |||
4 | apic.go | ||
5 | emitterc.go | ||
6 | parserc.go | ||
7 | readerc.go | ||
8 | scannerc.go | ||
9 | writerc.go | ||
10 | yamlh.go | ||
11 | yamlprivateh.go | ||
12 | |||
13 | Copyright (c) 2006 Kirill Simonov | ||
14 | |||
15 | Permission is hereby granted, free of charge, to any person obtaining a copy of | ||
16 | this software and associated documentation files (the "Software"), to deal in | ||
17 | the Software without restriction, including without limitation the rights to | ||
18 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies | ||
19 | of the Software, and to permit persons to whom the Software is furnished to do | ||
20 | so, subject to the following conditions: | ||
21 | |||
22 | The above copyright notice and this permission notice shall be included in all | ||
23 | copies or substantial portions of the Software. | ||
24 | |||
25 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||
26 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||
27 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||
28 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||
29 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||
30 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | ||
31 | SOFTWARE. | ||
diff --git a/vendor/github.com/zclconf/go-cty-yaml/NOTICE b/vendor/github.com/zclconf/go-cty-yaml/NOTICE new file mode 100644 index 0000000..4e6c00a --- /dev/null +++ b/vendor/github.com/zclconf/go-cty-yaml/NOTICE | |||
@@ -0,0 +1,20 @@ | |||
1 | This package is derived from gopkg.in/yaml.v2, which is copyright | ||
2 | 2011-2016 Canonical Ltd. | ||
3 | |||
4 | Licensed under the Apache License, Version 2.0 (the "License"); | ||
5 | you may not use this file except in compliance with the License. | ||
6 | You may obtain a copy of the License at | ||
7 | |||
8 | http://www.apache.org/licenses/LICENSE-2.0 | ||
9 | |||
10 | Unless required by applicable law or agreed to in writing, software | ||
11 | distributed under the License is distributed on an "AS IS" BASIS, | ||
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
13 | See the License for the specific language governing permissions and | ||
14 | limitations under the License. | ||
15 | |||
16 | Includes mechanical ports of code from libyaml, distributed under its original | ||
17 | license. See LICENSE.libyaml for more information. | ||
18 | |||
19 | Modifications for cty interfacing copyright 2019 Martin Atkins, and | ||
20 | distributed under the same license terms. | ||
diff --git a/vendor/github.com/zclconf/go-cty-yaml/apic.go b/vendor/github.com/zclconf/go-cty-yaml/apic.go new file mode 100644 index 0000000..1f7e87e --- /dev/null +++ b/vendor/github.com/zclconf/go-cty-yaml/apic.go | |||
@@ -0,0 +1,739 @@ | |||
1 | package yaml | ||
2 | |||
3 | import ( | ||
4 | "io" | ||
5 | ) | ||
6 | |||
7 | func yaml_insert_token(parser *yaml_parser_t, pos int, token *yaml_token_t) { | ||
8 | //fmt.Println("yaml_insert_token", "pos:", pos, "typ:", token.typ, "head:", parser.tokens_head, "len:", len(parser.tokens)) | ||
9 | |||
10 | // Check if we can move the queue at the beginning of the buffer. | ||
11 | if parser.tokens_head > 0 && len(parser.tokens) == cap(parser.tokens) { | ||
12 | if parser.tokens_head != len(parser.tokens) { | ||
13 | copy(parser.tokens, parser.tokens[parser.tokens_head:]) | ||
14 | } | ||
15 | parser.tokens = parser.tokens[:len(parser.tokens)-parser.tokens_head] | ||
16 | parser.tokens_head = 0 | ||
17 | } | ||
18 | parser.tokens = append(parser.tokens, *token) | ||
19 | if pos < 0 { | ||
20 | return | ||
21 | } | ||
22 | copy(parser.tokens[parser.tokens_head+pos+1:], parser.tokens[parser.tokens_head+pos:]) | ||
23 | parser.tokens[parser.tokens_head+pos] = *token | ||
24 | } | ||
25 | |||
26 | // Create a new parser object. | ||
27 | func yaml_parser_initialize(parser *yaml_parser_t) bool { | ||
28 | *parser = yaml_parser_t{ | ||
29 | raw_buffer: make([]byte, 0, input_raw_buffer_size), | ||
30 | buffer: make([]byte, 0, input_buffer_size), | ||
31 | } | ||
32 | return true | ||
33 | } | ||
34 | |||
35 | // Destroy a parser object. | ||
36 | func yaml_parser_delete(parser *yaml_parser_t) { | ||
37 | *parser = yaml_parser_t{} | ||
38 | } | ||
39 | |||
40 | // String read handler. | ||
41 | func yaml_string_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) { | ||
42 | if parser.input_pos == len(parser.input) { | ||
43 | return 0, io.EOF | ||
44 | } | ||
45 | n = copy(buffer, parser.input[parser.input_pos:]) | ||
46 | parser.input_pos += n | ||
47 | return n, nil | ||
48 | } | ||
49 | |||
50 | // Reader read handler. | ||
51 | func yaml_reader_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) { | ||
52 | return parser.input_reader.Read(buffer) | ||
53 | } | ||
54 | |||
55 | // Set a string input. | ||
56 | func yaml_parser_set_input_string(parser *yaml_parser_t, input []byte) { | ||
57 | if parser.read_handler != nil { | ||
58 | panic("must set the input source only once") | ||
59 | } | ||
60 | parser.read_handler = yaml_string_read_handler | ||
61 | parser.input = input | ||
62 | parser.input_pos = 0 | ||
63 | } | ||
64 | |||
65 | // Set a file input. | ||
66 | func yaml_parser_set_input_reader(parser *yaml_parser_t, r io.Reader) { | ||
67 | if parser.read_handler != nil { | ||
68 | panic("must set the input source only once") | ||
69 | } | ||
70 | parser.read_handler = yaml_reader_read_handler | ||
71 | parser.input_reader = r | ||
72 | } | ||
73 | |||
74 | // Set the source encoding. | ||
75 | func yaml_parser_set_encoding(parser *yaml_parser_t, encoding yaml_encoding_t) { | ||
76 | if parser.encoding != yaml_ANY_ENCODING { | ||
77 | panic("must set the encoding only once") | ||
78 | } | ||
79 | parser.encoding = encoding | ||
80 | } | ||
81 | |||
82 | // Create a new emitter object. | ||
83 | func yaml_emitter_initialize(emitter *yaml_emitter_t) { | ||
84 | *emitter = yaml_emitter_t{ | ||
85 | buffer: make([]byte, output_buffer_size), | ||
86 | raw_buffer: make([]byte, 0, output_raw_buffer_size), | ||
87 | states: make([]yaml_emitter_state_t, 0, initial_stack_size), | ||
88 | events: make([]yaml_event_t, 0, initial_queue_size), | ||
89 | } | ||
90 | } | ||
91 | |||
92 | // Destroy an emitter object. | ||
93 | func yaml_emitter_delete(emitter *yaml_emitter_t) { | ||
94 | *emitter = yaml_emitter_t{} | ||
95 | } | ||
96 | |||
97 | // String write handler. | ||
98 | func yaml_string_write_handler(emitter *yaml_emitter_t, buffer []byte) error { | ||
99 | *emitter.output_buffer = append(*emitter.output_buffer, buffer...) | ||
100 | return nil | ||
101 | } | ||
102 | |||
103 | // yaml_writer_write_handler uses emitter.output_writer to write the | ||
104 | // emitted text. | ||
105 | func yaml_writer_write_handler(emitter *yaml_emitter_t, buffer []byte) error { | ||
106 | _, err := emitter.output_writer.Write(buffer) | ||
107 | return err | ||
108 | } | ||
109 | |||
110 | // Set a string output. | ||
111 | func yaml_emitter_set_output_string(emitter *yaml_emitter_t, output_buffer *[]byte) { | ||
112 | if emitter.write_handler != nil { | ||
113 | panic("must set the output target only once") | ||
114 | } | ||
115 | emitter.write_handler = yaml_string_write_handler | ||
116 | emitter.output_buffer = output_buffer | ||
117 | } | ||
118 | |||
119 | // Set a file output. | ||
120 | func yaml_emitter_set_output_writer(emitter *yaml_emitter_t, w io.Writer) { | ||
121 | if emitter.write_handler != nil { | ||
122 | panic("must set the output target only once") | ||
123 | } | ||
124 | emitter.write_handler = yaml_writer_write_handler | ||
125 | emitter.output_writer = w | ||
126 | } | ||
127 | |||
128 | // Set the output encoding. | ||
129 | func yaml_emitter_set_encoding(emitter *yaml_emitter_t, encoding yaml_encoding_t) { | ||
130 | if emitter.encoding != yaml_ANY_ENCODING { | ||
131 | panic("must set the output encoding only once") | ||
132 | } | ||
133 | emitter.encoding = encoding | ||
134 | } | ||
135 | |||
136 | // Set the canonical output style. | ||
137 | func yaml_emitter_set_canonical(emitter *yaml_emitter_t, canonical bool) { | ||
138 | emitter.canonical = canonical | ||
139 | } | ||
140 | |||
141 | //// Set the indentation increment. | ||
142 | func yaml_emitter_set_indent(emitter *yaml_emitter_t, indent int) { | ||
143 | if indent < 2 || indent > 9 { | ||
144 | indent = 2 | ||
145 | } | ||
146 | emitter.best_indent = indent | ||
147 | } | ||
148 | |||
149 | // Set the preferred line width. | ||
150 | func yaml_emitter_set_width(emitter *yaml_emitter_t, width int) { | ||
151 | if width < 0 { | ||
152 | width = -1 | ||
153 | } | ||
154 | emitter.best_width = width | ||
155 | } | ||
156 | |||
157 | // Set if unescaped non-ASCII characters are allowed. | ||
158 | func yaml_emitter_set_unicode(emitter *yaml_emitter_t, unicode bool) { | ||
159 | emitter.unicode = unicode | ||
160 | } | ||
161 | |||
162 | // Set the preferred line break character. | ||
163 | func yaml_emitter_set_break(emitter *yaml_emitter_t, line_break yaml_break_t) { | ||
164 | emitter.line_break = line_break | ||
165 | } | ||
166 | |||
167 | ///* | ||
168 | // * Destroy a token object. | ||
169 | // */ | ||
170 | // | ||
171 | //YAML_DECLARE(void) | ||
172 | //yaml_token_delete(yaml_token_t *token) | ||
173 | //{ | ||
174 | // assert(token); // Non-NULL token object expected. | ||
175 | // | ||
176 | // switch (token.type) | ||
177 | // { | ||
178 | // case YAML_TAG_DIRECTIVE_TOKEN: | ||
179 | // yaml_free(token.data.tag_directive.handle); | ||
180 | // yaml_free(token.data.tag_directive.prefix); | ||
181 | // break; | ||
182 | // | ||
183 | // case YAML_ALIAS_TOKEN: | ||
184 | // yaml_free(token.data.alias.value); | ||
185 | // break; | ||
186 | // | ||
187 | // case YAML_ANCHOR_TOKEN: | ||
188 | // yaml_free(token.data.anchor.value); | ||
189 | // break; | ||
190 | // | ||
191 | // case YAML_TAG_TOKEN: | ||
192 | // yaml_free(token.data.tag.handle); | ||
193 | // yaml_free(token.data.tag.suffix); | ||
194 | // break; | ||
195 | // | ||
196 | // case YAML_SCALAR_TOKEN: | ||
197 | // yaml_free(token.data.scalar.value); | ||
198 | // break; | ||
199 | // | ||
200 | // default: | ||
201 | // break; | ||
202 | // } | ||
203 | // | ||
204 | // memset(token, 0, sizeof(yaml_token_t)); | ||
205 | //} | ||
206 | // | ||
207 | ///* | ||
208 | // * Check if a string is a valid UTF-8 sequence. | ||
209 | // * | ||
210 | // * Check 'reader.c' for more details on UTF-8 encoding. | ||
211 | // */ | ||
212 | // | ||
213 | //static int | ||
214 | //yaml_check_utf8(yaml_char_t *start, size_t length) | ||
215 | //{ | ||
216 | // yaml_char_t *end = start+length; | ||
217 | // yaml_char_t *pointer = start; | ||
218 | // | ||
219 | // while (pointer < end) { | ||
220 | // unsigned char octet; | ||
221 | // unsigned int width; | ||
222 | // unsigned int value; | ||
223 | // size_t k; | ||
224 | // | ||
225 | // octet = pointer[0]; | ||
226 | // width = (octet & 0x80) == 0x00 ? 1 : | ||
227 | // (octet & 0xE0) == 0xC0 ? 2 : | ||
228 | // (octet & 0xF0) == 0xE0 ? 3 : | ||
229 | // (octet & 0xF8) == 0xF0 ? 4 : 0; | ||
230 | // value = (octet & 0x80) == 0x00 ? octet & 0x7F : | ||
231 | // (octet & 0xE0) == 0xC0 ? octet & 0x1F : | ||
232 | // (octet & 0xF0) == 0xE0 ? octet & 0x0F : | ||
233 | // (octet & 0xF8) == 0xF0 ? octet & 0x07 : 0; | ||
234 | // if (!width) return 0; | ||
235 | // if (pointer+width > end) return 0; | ||
236 | // for (k = 1; k < width; k ++) { | ||
237 | // octet = pointer[k]; | ||
238 | // if ((octet & 0xC0) != 0x80) return 0; | ||
239 | // value = (value << 6) + (octet & 0x3F); | ||
240 | // } | ||
241 | // if (!((width == 1) || | ||
242 | // (width == 2 && value >= 0x80) || | ||
243 | // (width == 3 && value >= 0x800) || | ||
244 | // (width == 4 && value >= 0x10000))) return 0; | ||
245 | // | ||
246 | // pointer += width; | ||
247 | // } | ||
248 | // | ||
249 | // return 1; | ||
250 | //} | ||
251 | // | ||
252 | |||
253 | // Create STREAM-START. | ||
254 | func yaml_stream_start_event_initialize(event *yaml_event_t, encoding yaml_encoding_t) { | ||
255 | *event = yaml_event_t{ | ||
256 | typ: yaml_STREAM_START_EVENT, | ||
257 | encoding: encoding, | ||
258 | } | ||
259 | } | ||
260 | |||
261 | // Create STREAM-END. | ||
262 | func yaml_stream_end_event_initialize(event *yaml_event_t) { | ||
263 | *event = yaml_event_t{ | ||
264 | typ: yaml_STREAM_END_EVENT, | ||
265 | } | ||
266 | } | ||
267 | |||
268 | // Create DOCUMENT-START. | ||
269 | func yaml_document_start_event_initialize( | ||
270 | event *yaml_event_t, | ||
271 | version_directive *yaml_version_directive_t, | ||
272 | tag_directives []yaml_tag_directive_t, | ||
273 | implicit bool, | ||
274 | ) { | ||
275 | *event = yaml_event_t{ | ||
276 | typ: yaml_DOCUMENT_START_EVENT, | ||
277 | version_directive: version_directive, | ||
278 | tag_directives: tag_directives, | ||
279 | implicit: implicit, | ||
280 | } | ||
281 | } | ||
282 | |||
283 | // Create DOCUMENT-END. | ||
284 | func yaml_document_end_event_initialize(event *yaml_event_t, implicit bool) { | ||
285 | *event = yaml_event_t{ | ||
286 | typ: yaml_DOCUMENT_END_EVENT, | ||
287 | implicit: implicit, | ||
288 | } | ||
289 | } | ||
290 | |||
291 | ///* | ||
292 | // * Create ALIAS. | ||
293 | // */ | ||
294 | // | ||
295 | //YAML_DECLARE(int) | ||
296 | //yaml_alias_event_initialize(event *yaml_event_t, anchor *yaml_char_t) | ||
297 | //{ | ||
298 | // mark yaml_mark_t = { 0, 0, 0 } | ||
299 | // anchor_copy *yaml_char_t = NULL | ||
300 | // | ||
301 | // assert(event) // Non-NULL event object is expected. | ||
302 | // assert(anchor) // Non-NULL anchor is expected. | ||
303 | // | ||
304 | // if (!yaml_check_utf8(anchor, strlen((char *)anchor))) return 0 | ||
305 | // | ||
306 | // anchor_copy = yaml_strdup(anchor) | ||
307 | // if (!anchor_copy) | ||
308 | // return 0 | ||
309 | // | ||
310 | // ALIAS_EVENT_INIT(*event, anchor_copy, mark, mark) | ||
311 | // | ||
312 | // return 1 | ||
313 | //} | ||
314 | |||
315 | // Create SCALAR. | ||
316 | func yaml_scalar_event_initialize(event *yaml_event_t, anchor, tag, value []byte, plain_implicit, quoted_implicit bool, style yaml_scalar_style_t) bool { | ||
317 | *event = yaml_event_t{ | ||
318 | typ: yaml_SCALAR_EVENT, | ||
319 | anchor: anchor, | ||
320 | tag: tag, | ||
321 | value: value, | ||
322 | implicit: plain_implicit, | ||
323 | quoted_implicit: quoted_implicit, | ||
324 | style: yaml_style_t(style), | ||
325 | } | ||
326 | return true | ||
327 | } | ||
328 | |||
329 | // Create SEQUENCE-START. | ||
330 | func yaml_sequence_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_sequence_style_t) bool { | ||
331 | *event = yaml_event_t{ | ||
332 | typ: yaml_SEQUENCE_START_EVENT, | ||
333 | anchor: anchor, | ||
334 | tag: tag, | ||
335 | implicit: implicit, | ||
336 | style: yaml_style_t(style), | ||
337 | } | ||
338 | return true | ||
339 | } | ||
340 | |||
341 | // Create SEQUENCE-END. | ||
342 | func yaml_sequence_end_event_initialize(event *yaml_event_t) bool { | ||
343 | *event = yaml_event_t{ | ||
344 | typ: yaml_SEQUENCE_END_EVENT, | ||
345 | } | ||
346 | return true | ||
347 | } | ||
348 | |||
349 | // Create MAPPING-START. | ||
350 | func yaml_mapping_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_mapping_style_t) { | ||
351 | *event = yaml_event_t{ | ||
352 | typ: yaml_MAPPING_START_EVENT, | ||
353 | anchor: anchor, | ||
354 | tag: tag, | ||
355 | implicit: implicit, | ||
356 | style: yaml_style_t(style), | ||
357 | } | ||
358 | } | ||
359 | |||
360 | // Create MAPPING-END. | ||
361 | func yaml_mapping_end_event_initialize(event *yaml_event_t) { | ||
362 | *event = yaml_event_t{ | ||
363 | typ: yaml_MAPPING_END_EVENT, | ||
364 | } | ||
365 | } | ||
366 | |||
367 | // Destroy an event object. | ||
368 | func yaml_event_delete(event *yaml_event_t) { | ||
369 | *event = yaml_event_t{} | ||
370 | } | ||
371 | |||
372 | ///* | ||
373 | // * Create a document object. | ||
374 | // */ | ||
375 | // | ||
376 | //YAML_DECLARE(int) | ||
377 | //yaml_document_initialize(document *yaml_document_t, | ||
378 | // version_directive *yaml_version_directive_t, | ||
379 | // tag_directives_start *yaml_tag_directive_t, | ||
380 | // tag_directives_end *yaml_tag_directive_t, | ||
381 | // start_implicit int, end_implicit int) | ||
382 | //{ | ||
383 | // struct { | ||
384 | // error yaml_error_type_t | ||
385 | // } context | ||
386 | // struct { | ||
387 | // start *yaml_node_t | ||
388 | // end *yaml_node_t | ||
389 | // top *yaml_node_t | ||
390 | // } nodes = { NULL, NULL, NULL } | ||
391 | // version_directive_copy *yaml_version_directive_t = NULL | ||
392 | // struct { | ||
393 | // start *yaml_tag_directive_t | ||
394 | // end *yaml_tag_directive_t | ||
395 | // top *yaml_tag_directive_t | ||
396 | // } tag_directives_copy = { NULL, NULL, NULL } | ||
397 | // value yaml_tag_directive_t = { NULL, NULL } | ||
398 | // mark yaml_mark_t = { 0, 0, 0 } | ||
399 | // | ||
400 | // assert(document) // Non-NULL document object is expected. | ||
401 | // assert((tag_directives_start && tag_directives_end) || | ||
402 | // (tag_directives_start == tag_directives_end)) | ||
403 | // // Valid tag directives are expected. | ||
404 | // | ||
405 | // if (!STACK_INIT(&context, nodes, INITIAL_STACK_SIZE)) goto error | ||
406 | // | ||
407 | // if (version_directive) { | ||
408 | // version_directive_copy = yaml_malloc(sizeof(yaml_version_directive_t)) | ||
409 | // if (!version_directive_copy) goto error | ||
410 | // version_directive_copy.major = version_directive.major | ||
411 | // version_directive_copy.minor = version_directive.minor | ||
412 | // } | ||
413 | // | ||
414 | // if (tag_directives_start != tag_directives_end) { | ||
415 | // tag_directive *yaml_tag_directive_t | ||
416 | // if (!STACK_INIT(&context, tag_directives_copy, INITIAL_STACK_SIZE)) | ||
417 | // goto error | ||
418 | // for (tag_directive = tag_directives_start | ||
419 | // tag_directive != tag_directives_end; tag_directive ++) { | ||
420 | // assert(tag_directive.handle) | ||
421 | // assert(tag_directive.prefix) | ||
422 | // if (!yaml_check_utf8(tag_directive.handle, | ||
423 | // strlen((char *)tag_directive.handle))) | ||
424 | // goto error | ||
425 | // if (!yaml_check_utf8(tag_directive.prefix, | ||
426 | // strlen((char *)tag_directive.prefix))) | ||
427 | // goto error | ||
428 | // value.handle = yaml_strdup(tag_directive.handle) | ||
429 | // value.prefix = yaml_strdup(tag_directive.prefix) | ||
430 | // if (!value.handle || !value.prefix) goto error | ||
431 | // if (!PUSH(&context, tag_directives_copy, value)) | ||
432 | // goto error | ||
433 | // value.handle = NULL | ||
434 | // value.prefix = NULL | ||
435 | // } | ||
436 | // } | ||
437 | // | ||
438 | // DOCUMENT_INIT(*document, nodes.start, nodes.end, version_directive_copy, | ||
439 | // tag_directives_copy.start, tag_directives_copy.top, | ||
440 | // start_implicit, end_implicit, mark, mark) | ||
441 | // | ||
442 | // return 1 | ||
443 | // | ||
444 | //error: | ||
445 | // STACK_DEL(&context, nodes) | ||
446 | // yaml_free(version_directive_copy) | ||
447 | // while (!STACK_EMPTY(&context, tag_directives_copy)) { | ||
448 | // value yaml_tag_directive_t = POP(&context, tag_directives_copy) | ||
449 | // yaml_free(value.handle) | ||
450 | // yaml_free(value.prefix) | ||
451 | // } | ||
452 | // STACK_DEL(&context, tag_directives_copy) | ||
453 | // yaml_free(value.handle) | ||
454 | // yaml_free(value.prefix) | ||
455 | // | ||
456 | // return 0 | ||
457 | //} | ||
458 | // | ||
459 | ///* | ||
460 | // * Destroy a document object. | ||
461 | // */ | ||
462 | // | ||
463 | //YAML_DECLARE(void) | ||
464 | //yaml_document_delete(document *yaml_document_t) | ||
465 | //{ | ||
466 | // struct { | ||
467 | // error yaml_error_type_t | ||
468 | // } context | ||
469 | // tag_directive *yaml_tag_directive_t | ||
470 | // | ||
471 | // context.error = YAML_NO_ERROR // Eliminate a compiler warning. | ||
472 | // | ||
473 | // assert(document) // Non-NULL document object is expected. | ||
474 | // | ||
475 | // while (!STACK_EMPTY(&context, document.nodes)) { | ||
476 | // node yaml_node_t = POP(&context, document.nodes) | ||
477 | // yaml_free(node.tag) | ||
478 | // switch (node.type) { | ||
479 | // case YAML_SCALAR_NODE: | ||
480 | // yaml_free(node.data.scalar.value) | ||
481 | // break | ||
482 | // case YAML_SEQUENCE_NODE: | ||
483 | // STACK_DEL(&context, node.data.sequence.items) | ||
484 | // break | ||
485 | // case YAML_MAPPING_NODE: | ||
486 | // STACK_DEL(&context, node.data.mapping.pairs) | ||
487 | // break | ||
488 | // default: | ||
489 | // assert(0) // Should not happen. | ||
490 | // } | ||
491 | // } | ||
492 | // STACK_DEL(&context, document.nodes) | ||
493 | // | ||
494 | // yaml_free(document.version_directive) | ||
495 | // for (tag_directive = document.tag_directives.start | ||
496 | // tag_directive != document.tag_directives.end | ||
497 | // tag_directive++) { | ||
498 | // yaml_free(tag_directive.handle) | ||
499 | // yaml_free(tag_directive.prefix) | ||
500 | // } | ||
501 | // yaml_free(document.tag_directives.start) | ||
502 | // | ||
503 | // memset(document, 0, sizeof(yaml_document_t)) | ||
504 | //} | ||
505 | // | ||
506 | ///** | ||
507 | // * Get a document node. | ||
508 | // */ | ||
509 | // | ||
510 | //YAML_DECLARE(yaml_node_t *) | ||
511 | //yaml_document_get_node(document *yaml_document_t, index int) | ||
512 | //{ | ||
513 | // assert(document) // Non-NULL document object is expected. | ||
514 | // | ||
515 | // if (index > 0 && document.nodes.start + index <= document.nodes.top) { | ||
516 | // return document.nodes.start + index - 1 | ||
517 | // } | ||
518 | // return NULL | ||
519 | //} | ||
520 | // | ||
521 | ///** | ||
522 | // * Get the root object. | ||
523 | // */ | ||
524 | // | ||
525 | //YAML_DECLARE(yaml_node_t *) | ||
526 | //yaml_document_get_root_node(document *yaml_document_t) | ||
527 | //{ | ||
528 | // assert(document) // Non-NULL document object is expected. | ||
529 | // | ||
530 | // if (document.nodes.top != document.nodes.start) { | ||
531 | // return document.nodes.start | ||
532 | // } | ||
533 | // return NULL | ||
534 | //} | ||
535 | // | ||
536 | ///* | ||
537 | // * Add a scalar node to a document. | ||
538 | // */ | ||
539 | // | ||
540 | //YAML_DECLARE(int) | ||
541 | //yaml_document_add_scalar(document *yaml_document_t, | ||
542 | // tag *yaml_char_t, value *yaml_char_t, length int, | ||
543 | // style yaml_scalar_style_t) | ||
544 | //{ | ||
545 | // struct { | ||
546 | // error yaml_error_type_t | ||
547 | // } context | ||
548 | // mark yaml_mark_t = { 0, 0, 0 } | ||
549 | // tag_copy *yaml_char_t = NULL | ||
550 | // value_copy *yaml_char_t = NULL | ||
551 | // node yaml_node_t | ||
552 | // | ||
553 | // assert(document) // Non-NULL document object is expected. | ||
554 | // assert(value) // Non-NULL value is expected. | ||
555 | // | ||
556 | // if (!tag) { | ||
557 | // tag = (yaml_char_t *)YAML_DEFAULT_SCALAR_TAG | ||
558 | // } | ||
559 | // | ||
560 | // if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error | ||
561 | // tag_copy = yaml_strdup(tag) | ||
562 | // if (!tag_copy) goto error | ||
563 | // | ||
564 | // if (length < 0) { | ||
565 | // length = strlen((char *)value) | ||
566 | // } | ||
567 | // | ||
568 | // if (!yaml_check_utf8(value, length)) goto error | ||
569 | // value_copy = yaml_malloc(length+1) | ||
570 | // if (!value_copy) goto error | ||
571 | // memcpy(value_copy, value, length) | ||
572 | // value_copy[length] = '\0' | ||
573 | // | ||
574 | // SCALAR_NODE_INIT(node, tag_copy, value_copy, length, style, mark, mark) | ||
575 | // if (!PUSH(&context, document.nodes, node)) goto error | ||
576 | // | ||
577 | // return document.nodes.top - document.nodes.start | ||
578 | // | ||
579 | //error: | ||
580 | // yaml_free(tag_copy) | ||
581 | // yaml_free(value_copy) | ||
582 | // | ||
583 | // return 0 | ||
584 | //} | ||
585 | // | ||
586 | ///* | ||
587 | // * Add a sequence node to a document. | ||
588 | // */ | ||
589 | // | ||
590 | //YAML_DECLARE(int) | ||
591 | //yaml_document_add_sequence(document *yaml_document_t, | ||
592 | // tag *yaml_char_t, style yaml_sequence_style_t) | ||
593 | //{ | ||
594 | // struct { | ||
595 | // error yaml_error_type_t | ||
596 | // } context | ||
597 | // mark yaml_mark_t = { 0, 0, 0 } | ||
598 | // tag_copy *yaml_char_t = NULL | ||
599 | // struct { | ||
600 | // start *yaml_node_item_t | ||
601 | // end *yaml_node_item_t | ||
602 | // top *yaml_node_item_t | ||
603 | // } items = { NULL, NULL, NULL } | ||
604 | // node yaml_node_t | ||
605 | // | ||
606 | // assert(document) // Non-NULL document object is expected. | ||
607 | // | ||
608 | // if (!tag) { | ||
609 | // tag = (yaml_char_t *)YAML_DEFAULT_SEQUENCE_TAG | ||
610 | // } | ||
611 | // | ||
612 | // if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error | ||
613 | // tag_copy = yaml_strdup(tag) | ||
614 | // if (!tag_copy) goto error | ||
615 | // | ||
616 | // if (!STACK_INIT(&context, items, INITIAL_STACK_SIZE)) goto error | ||
617 | // | ||
618 | // SEQUENCE_NODE_INIT(node, tag_copy, items.start, items.end, | ||
619 | // style, mark, mark) | ||
620 | // if (!PUSH(&context, document.nodes, node)) goto error | ||
621 | // | ||
622 | // return document.nodes.top - document.nodes.start | ||
623 | // | ||
624 | //error: | ||
625 | // STACK_DEL(&context, items) | ||
626 | // yaml_free(tag_copy) | ||
627 | // | ||
628 | // return 0 | ||
629 | //} | ||
630 | // | ||
631 | ///* | ||
632 | // * Add a mapping node to a document. | ||
633 | // */ | ||
634 | // | ||
635 | //YAML_DECLARE(int) | ||
636 | //yaml_document_add_mapping(document *yaml_document_t, | ||
637 | // tag *yaml_char_t, style yaml_mapping_style_t) | ||
638 | //{ | ||
639 | // struct { | ||
640 | // error yaml_error_type_t | ||
641 | // } context | ||
642 | // mark yaml_mark_t = { 0, 0, 0 } | ||
643 | // tag_copy *yaml_char_t = NULL | ||
644 | // struct { | ||
645 | // start *yaml_node_pair_t | ||
646 | // end *yaml_node_pair_t | ||
647 | // top *yaml_node_pair_t | ||
648 | // } pairs = { NULL, NULL, NULL } | ||
649 | // node yaml_node_t | ||
650 | // | ||
651 | // assert(document) // Non-NULL document object is expected. | ||
652 | // | ||
653 | // if (!tag) { | ||
654 | // tag = (yaml_char_t *)YAML_DEFAULT_MAPPING_TAG | ||
655 | // } | ||
656 | // | ||
657 | // if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error | ||
658 | // tag_copy = yaml_strdup(tag) | ||
659 | // if (!tag_copy) goto error | ||
660 | // | ||
661 | // if (!STACK_INIT(&context, pairs, INITIAL_STACK_SIZE)) goto error | ||
662 | // | ||
663 | // MAPPING_NODE_INIT(node, tag_copy, pairs.start, pairs.end, | ||
664 | // style, mark, mark) | ||
665 | // if (!PUSH(&context, document.nodes, node)) goto error | ||
666 | // | ||
667 | // return document.nodes.top - document.nodes.start | ||
668 | // | ||
669 | //error: | ||
670 | // STACK_DEL(&context, pairs) | ||
671 | // yaml_free(tag_copy) | ||
672 | // | ||
673 | // return 0 | ||
674 | //} | ||
675 | // | ||
676 | ///* | ||
677 | // * Append an item to a sequence node. | ||
678 | // */ | ||
679 | // | ||
680 | //YAML_DECLARE(int) | ||
681 | //yaml_document_append_sequence_item(document *yaml_document_t, | ||
682 | // sequence int, item int) | ||
683 | //{ | ||
684 | // struct { | ||
685 | // error yaml_error_type_t | ||
686 | // } context | ||
687 | // | ||
688 | // assert(document) // Non-NULL document is required. | ||
689 | // assert(sequence > 0 | ||
690 | // && document.nodes.start + sequence <= document.nodes.top) | ||
691 | // // Valid sequence id is required. | ||
692 | // assert(document.nodes.start[sequence-1].type == YAML_SEQUENCE_NODE) | ||
693 | // // A sequence node is required. | ||
694 | // assert(item > 0 && document.nodes.start + item <= document.nodes.top) | ||
695 | // // Valid item id is required. | ||
696 | // | ||
697 | // if (!PUSH(&context, | ||
698 | // document.nodes.start[sequence-1].data.sequence.items, item)) | ||
699 | // return 0 | ||
700 | // | ||
701 | // return 1 | ||
702 | //} | ||
703 | // | ||
704 | ///* | ||
705 | // * Append a pair of a key and a value to a mapping node. | ||
706 | // */ | ||
707 | // | ||
708 | //YAML_DECLARE(int) | ||
709 | //yaml_document_append_mapping_pair(document *yaml_document_t, | ||
710 | // mapping int, key int, value int) | ||
711 | //{ | ||
712 | // struct { | ||
713 | // error yaml_error_type_t | ||
714 | // } context | ||
715 | // | ||
716 | // pair yaml_node_pair_t | ||
717 | // | ||
718 | // assert(document) // Non-NULL document is required. | ||
719 | // assert(mapping > 0 | ||
720 | // && document.nodes.start + mapping <= document.nodes.top) | ||
721 | // // Valid mapping id is required. | ||
722 | // assert(document.nodes.start[mapping-1].type == YAML_MAPPING_NODE) | ||
723 | // // A mapping node is required. | ||
724 | // assert(key > 0 && document.nodes.start + key <= document.nodes.top) | ||
725 | // // Valid key id is required. | ||
726 | // assert(value > 0 && document.nodes.start + value <= document.nodes.top) | ||
727 | // // Valid value id is required. | ||
728 | // | ||
729 | // pair.key = key | ||
730 | // pair.value = value | ||
731 | // | ||
732 | // if (!PUSH(&context, | ||
733 | // document.nodes.start[mapping-1].data.mapping.pairs, pair)) | ||
734 | // return 0 | ||
735 | // | ||
736 | // return 1 | ||
737 | //} | ||
738 | // | ||
739 | // | ||
diff --git a/vendor/github.com/zclconf/go-cty-yaml/converter.go b/vendor/github.com/zclconf/go-cty-yaml/converter.go new file mode 100644 index 0000000..a73b34a --- /dev/null +++ b/vendor/github.com/zclconf/go-cty-yaml/converter.go | |||
@@ -0,0 +1,69 @@ | |||
1 | package yaml | ||
2 | |||
3 | import ( | ||
4 | "github.com/zclconf/go-cty/cty" | ||
5 | ) | ||
6 | |||
// ConverterConfig is used to configure a new converter, using NewConverter.
type ConverterConfig struct {
	// EncodeAsFlow, when set to true, causes Marshal to produce flow-style
	// mapping and sequence serializations.
	//
	// When false (the default), block-style serializations are produced.
	EncodeAsFlow bool
}
13 | |||
// A Converter can marshal and unmarshal between cty values and YAML bytes.
//
// Because there are many different ways to map cty to YAML and vice-versa,
// a converter is configurable using the settings in ConverterConfig, which
// allow for a few different permutations of mapping to YAML.
//
// If you are just trying to work with generic, standard YAML, the predefined
// converter in Standard should be good enough.
type Converter struct {
	// encodeAsFlow mirrors ConverterConfig.EncodeAsFlow; it is fixed at
	// construction time by NewConverter.
	encodeAsFlow bool
}
25 | |||
26 | // NewConverter creates a new Converter with the given configuration. | ||
27 | func NewConverter(config *ConverterConfig) *Converter { | ||
28 | return &Converter{ | ||
29 | encodeAsFlow: config.EncodeAsFlow, | ||
30 | } | ||
31 | } | ||
32 | |||
// Standard is a predefined Converter that produces and consumes generic YAML
// using only built-in constructs that any other YAML implementation ought to
// understand.
//
// It is equivalent to the result of NewConverter(&ConverterConfig{}).
var Standard *Converter = NewConverter(&ConverterConfig{})
37 | |||
// ImpliedType analyzes the given source code and returns a suitable type that
// it could be decoded into.
//
// For a converter that is using standard YAML rather than cty-specific custom
// tags, only a subset of cty types can be produced: strings, numbers, bools,
// tuple types, and object types.
func (c *Converter) ImpliedType(src []byte) (cty.Type, error) {
	// Delegates to the unexported implementation in the decoder.
	return c.impliedType(src)
}
47 | |||
// Marshal serializes the given value into a YAML document, using a fixed
// mapping from cty types to YAML constructs.
//
// Whether flow or block style is produced depends on the converter's
// configuration (see ConverterConfig.EncodeAsFlow).
//
// Note that unlike the function of the same name in the cty JSON package,
// this does not take a type constraint and therefore the YAML serialization
// cannot preserve late-bound type information in the serialization to be
// recovered from Unmarshal. Instead, any cty.DynamicPseudoType in the type
// constraint given to Unmarshal will be decoded as if the corresponding portion
// of the input were processed with ImpliedType to find a target type.
func (c *Converter) Marshal(v cty.Value) ([]byte, error) {
	// Delegates to the unexported implementation in the encoder.
	return c.marshal(v)
}
60 | |||
// Unmarshal reads the document found within the given source buffer
// and attempts to convert it into a value conforming to the given type
// constraint.
//
// An error is returned if the given source contains any YAML document
// delimiters.
func (c *Converter) Unmarshal(src []byte, ty cty.Type) (cty.Value, error) {
	// Delegates to the unexported implementation, which decodes the
	// single document and then converts the result to ty.
	return c.unmarshal(src, ty)
}
diff --git a/vendor/github.com/zclconf/go-cty-yaml/cty_funcs.go b/vendor/github.com/zclconf/go-cty-yaml/cty_funcs.go new file mode 100644 index 0000000..b91141c --- /dev/null +++ b/vendor/github.com/zclconf/go-cty-yaml/cty_funcs.go | |||
@@ -0,0 +1,57 @@ | |||
1 | package yaml | ||
2 | |||
3 | import ( | ||
4 | "github.com/zclconf/go-cty/cty" | ||
5 | "github.com/zclconf/go-cty/cty/function" | ||
6 | ) | ||
7 | |||
// YAMLDecodeFunc is a cty function for decoding arbitrary YAML source code
// into a cty Value, using the ImpliedType and Unmarshal methods of the
// Standard pre-defined converter.
var YAMLDecodeFunc = function.New(&function.Spec{
	Params: []function.Parameter{
		{
			Name: "src",
			Type: cty.String,
		},
	},
	Type: func(args []cty.Value) (cty.Type, error) {
		if !args[0].IsKnown() {
			// Can't determine a result type until the source is known.
			return cty.DynamicPseudoType, nil
		}
		if args[0].IsNull() {
			return cty.NilType, function.NewArgErrorf(0, "YAML source code cannot be null")
		}
		// Derive the result type from the structure of the YAML itself.
		return Standard.ImpliedType([]byte(args[0].AsString()))
	},
	Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
		if retType == cty.DynamicPseudoType {
			// The source was unknown during type checking, so the
			// result must be wholly unknown too.
			return cty.DynamicVal, nil
		}
		return Standard.Unmarshal([]byte(args[0].AsString()), retType)
	},
})
34 | |||
// YAMLEncodeFunc is a cty function for encoding an arbitrary cty value
// into YAML, using the Marshal method of the Standard pre-defined converter.
var YAMLEncodeFunc = function.New(&function.Spec{
	Params: []function.Parameter{
		{
			Name:             "value",
			Type:             cty.DynamicPseudoType,
			AllowNull:        true,
			AllowDynamicType: true,
		},
	},
	Type: function.StaticReturnType(cty.String),
	Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
		if !args[0].IsWhollyKnown() {
			// Can't serialize unknown values, so the result string is
			// itself unknown.
			return cty.UnknownVal(retType), nil
		}
		raw, err := Standard.Marshal(args[0])
		if err != nil {
			return cty.NilVal, err
		}
		return cty.StringVal(string(raw)), nil
	},
})
diff --git a/vendor/github.com/zclconf/go-cty-yaml/decode.go b/vendor/github.com/zclconf/go-cty-yaml/decode.go new file mode 100644 index 0000000..e369ff2 --- /dev/null +++ b/vendor/github.com/zclconf/go-cty-yaml/decode.go | |||
@@ -0,0 +1,261 @@ | |||
1 | package yaml | ||
2 | |||
3 | import ( | ||
4 | "errors" | ||
5 | "fmt" | ||
6 | |||
7 | "github.com/zclconf/go-cty/cty" | ||
8 | "github.com/zclconf/go-cty/cty/convert" | ||
9 | ) | ||
10 | |||
// unmarshal decodes exactly one YAML document from src and converts the
// resulting value to the given type constraint.
//
// The event stream must be: stream start, document start, one value,
// document end, stream end. A second document (or any trailing content)
// is an error.
func (c *Converter) unmarshal(src []byte, ty cty.Type) (cty.Value, error) {
	p := &yaml_parser_t{}
	if !yaml_parser_initialize(p) {
		return cty.NilVal, errors.New("failed to initialize YAML parser")
	}
	if len(src) == 0 {
		// Substitute a single newline for an empty buffer, which parses as
		// an empty document; presumably the parser doesn't accept truly
		// empty input — TODO confirm against the parser implementation.
		src = []byte{'\n'}
	}

	// Anchor bookkeeping shared by the whole decode, so aliases anywhere in
	// the document can resolve anchors defined earlier.
	an := &valueAnalysis{
		anchorsPending: map[string]int{},
		anchorVals:     map[string]cty.Value{},
	}

	yaml_parser_set_input_string(p, src)

	var evt yaml_event_t
	if !yaml_parser_parse(p, &evt) {
		return cty.NilVal, parserError(p)
	}
	if evt.typ != yaml_STREAM_START_EVENT {
		return cty.NilVal, parseEventErrorf(&evt, "missing stream start token")
	}
	if !yaml_parser_parse(p, &evt) {
		return cty.NilVal, parserError(p)
	}
	if evt.typ != yaml_DOCUMENT_START_EVENT {
		return cty.NilVal, parseEventErrorf(&evt, "missing start of document")
	}

	// Decode the document's single root value.
	v, err := c.unmarshalParse(an, p)
	if err != nil {
		return cty.NilVal, err
	}

	// After the value we require document end and then stream end, so that
	// multi-document streams and trailing garbage are rejected.
	if !yaml_parser_parse(p, &evt) {
		return cty.NilVal, parserError(p)
	}
	if evt.typ == yaml_DOCUMENT_START_EVENT {
		return cty.NilVal, parseEventErrorf(&evt, "only a single document is allowed")
	}
	if evt.typ != yaml_DOCUMENT_END_EVENT {
		return cty.NilVal, parseEventErrorf(&evt, "unexpected extra content (%s) after value", evt.typ.String())
	}
	if !yaml_parser_parse(p, &evt) {
		return cty.NilVal, parserError(p)
	}
	if evt.typ != yaml_STREAM_END_EVENT {
		return cty.NilVal, parseEventErrorf(&evt, "unexpected extra content after value")
	}

	// Finally, conform the decoded value to the caller's type constraint.
	return convert.Convert(v, ty)
}
64 | |||
65 | func (c *Converter) unmarshalParse(an *valueAnalysis, p *yaml_parser_t) (cty.Value, error) { | ||
66 | var evt yaml_event_t | ||
67 | if !yaml_parser_parse(p, &evt) { | ||
68 | return cty.NilVal, parserError(p) | ||
69 | } | ||
70 | return c.unmarshalParseRemainder(an, &evt, p) | ||
71 | } | ||
72 | |||
73 | func (c *Converter) unmarshalParseRemainder(an *valueAnalysis, evt *yaml_event_t, p *yaml_parser_t) (cty.Value, error) { | ||
74 | switch evt.typ { | ||
75 | case yaml_SCALAR_EVENT: | ||
76 | return c.unmarshalScalar(an, evt, p) | ||
77 | case yaml_ALIAS_EVENT: | ||
78 | return c.unmarshalAlias(an, evt, p) | ||
79 | case yaml_MAPPING_START_EVENT: | ||
80 | return c.unmarshalMapping(an, evt, p) | ||
81 | case yaml_SEQUENCE_START_EVENT: | ||
82 | return c.unmarshalSequence(an, evt, p) | ||
83 | case yaml_DOCUMENT_START_EVENT: | ||
84 | return cty.NilVal, parseEventErrorf(evt, "only a single document is allowed") | ||
85 | case yaml_STREAM_END_EVENT: | ||
86 | // Decoding an empty buffer, probably | ||
87 | return cty.NilVal, parseEventErrorf(evt, "expecting value but found end of stream") | ||
88 | default: | ||
89 | // Should never happen; the above should be comprehensive | ||
90 | return cty.NilVal, parseEventErrorf(evt, "unexpected parser event %s", evt.typ.String()) | ||
91 | } | ||
92 | } | ||
93 | |||
94 | func (c *Converter) unmarshalScalar(an *valueAnalysis, evt *yaml_event_t, p *yaml_parser_t) (cty.Value, error) { | ||
95 | src := evt.value | ||
96 | tag := string(evt.tag) | ||
97 | anchor := string(evt.anchor) | ||
98 | |||
99 | if len(anchor) > 0 { | ||
100 | an.beginAnchor(anchor) | ||
101 | } | ||
102 | |||
103 | val, err := c.resolveScalar(tag, string(src), yaml_scalar_style_t(evt.style)) | ||
104 | if err != nil { | ||
105 | return cty.NilVal, parseEventErrorWrap(evt, err) | ||
106 | } | ||
107 | |||
108 | if val.RawEquals(mergeMappingVal) { | ||
109 | // In any context other than a mapping key, this is just a plain string | ||
110 | val = cty.StringVal("<<") | ||
111 | } | ||
112 | |||
113 | if len(anchor) > 0 { | ||
114 | an.completeAnchor(anchor, val) | ||
115 | } | ||
116 | return val, nil | ||
117 | } | ||
118 | |||
119 | func (c *Converter) unmarshalMapping(an *valueAnalysis, evt *yaml_event_t, p *yaml_parser_t) (cty.Value, error) { | ||
120 | tag := string(evt.tag) | ||
121 | anchor := string(evt.anchor) | ||
122 | |||
123 | if tag != "" && tag != yaml_MAP_TAG { | ||
124 | return cty.NilVal, parseEventErrorf(evt, "can't interpret mapping as %s", tag) | ||
125 | } | ||
126 | |||
127 | if anchor != "" { | ||
128 | an.beginAnchor(anchor) | ||
129 | } | ||
130 | |||
131 | vals := make(map[string]cty.Value) | ||
132 | for { | ||
133 | var nextEvt yaml_event_t | ||
134 | if !yaml_parser_parse(p, &nextEvt) { | ||
135 | return cty.NilVal, parserError(p) | ||
136 | } | ||
137 | if nextEvt.typ == yaml_MAPPING_END_EVENT { | ||
138 | v := cty.ObjectVal(vals) | ||
139 | if anchor != "" { | ||
140 | an.completeAnchor(anchor, v) | ||
141 | } | ||
142 | return v, nil | ||
143 | } | ||
144 | |||
145 | if nextEvt.typ != yaml_SCALAR_EVENT { | ||
146 | return cty.NilVal, parseEventErrorf(&nextEvt, "only strings are allowed as mapping keys") | ||
147 | } | ||
148 | keyVal, err := c.resolveScalar(string(nextEvt.tag), string(nextEvt.value), yaml_scalar_style_t(nextEvt.style)) | ||
149 | if err != nil { | ||
150 | return cty.NilVal, err | ||
151 | } | ||
152 | if keyVal.RawEquals(mergeMappingVal) { | ||
153 | // Merging the value (which must be a mapping) into our mapping, | ||
154 | // then. | ||
155 | val, err := c.unmarshalParse(an, p) | ||
156 | if err != nil { | ||
157 | return cty.NilVal, err | ||
158 | } | ||
159 | ty := val.Type() | ||
160 | if !(ty.IsObjectType() || ty.IsMapType()) { | ||
161 | return cty.NilVal, parseEventErrorf(&nextEvt, "cannot merge %s into mapping", ty.FriendlyName()) | ||
162 | } | ||
163 | for it := val.ElementIterator(); it.Next(); { | ||
164 | k, v := it.Element() | ||
165 | vals[k.AsString()] = v | ||
166 | } | ||
167 | continue | ||
168 | } | ||
169 | if keyValStr, err := convert.Convert(keyVal, cty.String); err == nil { | ||
170 | keyVal = keyValStr | ||
171 | } else { | ||
172 | return cty.NilVal, parseEventErrorf(&nextEvt, "only strings are allowed as mapping keys") | ||
173 | } | ||
174 | if keyVal.IsNull() { | ||
175 | return cty.NilVal, parseEventErrorf(&nextEvt, "mapping key cannot be null") | ||
176 | } | ||
177 | if !keyVal.IsKnown() { | ||
178 | return cty.NilVal, parseEventErrorf(&nextEvt, "mapping key must be known") | ||
179 | } | ||
180 | val, err := c.unmarshalParse(an, p) | ||
181 | if err != nil { | ||
182 | return cty.NilVal, err | ||
183 | } | ||
184 | |||
185 | vals[keyVal.AsString()] = val | ||
186 | } | ||
187 | } | ||
188 | |||
189 | func (c *Converter) unmarshalSequence(an *valueAnalysis, evt *yaml_event_t, p *yaml_parser_t) (cty.Value, error) { | ||
190 | tag := string(evt.tag) | ||
191 | anchor := string(evt.anchor) | ||
192 | |||
193 | if tag != "" && tag != yaml_SEQ_TAG { | ||
194 | return cty.NilVal, parseEventErrorf(evt, "can't interpret sequence as %s", tag) | ||
195 | } | ||
196 | |||
197 | if anchor != "" { | ||
198 | an.beginAnchor(anchor) | ||
199 | } | ||
200 | |||
201 | var vals []cty.Value | ||
202 | for { | ||
203 | var nextEvt yaml_event_t | ||
204 | if !yaml_parser_parse(p, &nextEvt) { | ||
205 | return cty.NilVal, parserError(p) | ||
206 | } | ||
207 | if nextEvt.typ == yaml_SEQUENCE_END_EVENT { | ||
208 | ty := cty.TupleVal(vals) | ||
209 | if anchor != "" { | ||
210 | an.completeAnchor(anchor, ty) | ||
211 | } | ||
212 | return ty, nil | ||
213 | } | ||
214 | |||
215 | val, err := c.unmarshalParseRemainder(an, &nextEvt, p) | ||
216 | if err != nil { | ||
217 | return cty.NilVal, err | ||
218 | } | ||
219 | |||
220 | vals = append(vals, val) | ||
221 | } | ||
222 | } | ||
223 | |||
224 | func (c *Converter) unmarshalAlias(an *valueAnalysis, evt *yaml_event_t, p *yaml_parser_t) (cty.Value, error) { | ||
225 | v, err := an.anchorVal(string(evt.anchor)) | ||
226 | if err != nil { | ||
227 | err = parseEventErrorWrap(evt, err) | ||
228 | } | ||
229 | return v, err | ||
230 | } | ||
231 | |||
// valueAnalysis tracks anchor definitions during a decode so that later
// alias nodes can be resolved, and so that self-referencing anchors can be
// detected and rejected.
type valueAnalysis struct {
	// anchorsPending counts anchors whose values are still being decoded;
	// a present entry means the anchor cannot yet be referenced.
	anchorsPending map[string]int
	// anchorVals holds the completed value for each anchor name.
	anchorVals map[string]cty.Value
}
236 | |||
// beginAnchor records that the value for the given anchor name is currently
// being decoded, so that an alias inside its own definition can be detected
// by anchorVal.
func (an *valueAnalysis) beginAnchor(name string) {
	an.anchorsPending[name]++
}
240 | |||
241 | func (an *valueAnalysis) completeAnchor(name string, v cty.Value) { | ||
242 | an.anchorsPending[name]-- | ||
243 | if an.anchorsPending[name] == 0 { | ||
244 | delete(an.anchorsPending, name) | ||
245 | } | ||
246 | an.anchorVals[name] = v | ||
247 | } | ||
248 | |||
249 | func (an *valueAnalysis) anchorVal(name string) (cty.Value, error) { | ||
250 | if _, pending := an.anchorsPending[name]; pending { | ||
251 | // YAML normally allows self-referencing structures, but cty cannot | ||
252 | // represent them (it requires all structures to be finite) so we | ||
253 | // must fail here. | ||
254 | return cty.NilVal, fmt.Errorf("cannot refer to anchor %q from inside its own definition", name) | ||
255 | } | ||
256 | ty, ok := an.anchorVals[name] | ||
257 | if !ok { | ||
258 | return cty.NilVal, fmt.Errorf("reference to undefined anchor %q", name) | ||
259 | } | ||
260 | return ty, nil | ||
261 | } | ||
diff --git a/vendor/github.com/zclconf/go-cty-yaml/emitterc.go b/vendor/github.com/zclconf/go-cty-yaml/emitterc.go new file mode 100644 index 0000000..a1c2cc5 --- /dev/null +++ b/vendor/github.com/zclconf/go-cty-yaml/emitterc.go | |||
@@ -0,0 +1,1685 @@ | |||
1 | package yaml | ||
2 | |||
3 | import ( | ||
4 | "bytes" | ||
5 | "fmt" | ||
6 | ) | ||
7 | |||
8 | // Flush the buffer if needed. | ||
9 | func flush(emitter *yaml_emitter_t) bool { | ||
10 | if emitter.buffer_pos+5 >= len(emitter.buffer) { | ||
11 | return yaml_emitter_flush(emitter) | ||
12 | } | ||
13 | return true | ||
14 | } | ||
15 | |||
16 | // Put a character to the output buffer. | ||
17 | func put(emitter *yaml_emitter_t, value byte) bool { | ||
18 | if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { | ||
19 | return false | ||
20 | } | ||
21 | emitter.buffer[emitter.buffer_pos] = value | ||
22 | emitter.buffer_pos++ | ||
23 | emitter.column++ | ||
24 | return true | ||
25 | } | ||
26 | |||
27 | // Put a line break to the output buffer. | ||
28 | func put_break(emitter *yaml_emitter_t) bool { | ||
29 | if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { | ||
30 | return false | ||
31 | } | ||
32 | switch emitter.line_break { | ||
33 | case yaml_CR_BREAK: | ||
34 | emitter.buffer[emitter.buffer_pos] = '\r' | ||
35 | emitter.buffer_pos += 1 | ||
36 | case yaml_LN_BREAK: | ||
37 | emitter.buffer[emitter.buffer_pos] = '\n' | ||
38 | emitter.buffer_pos += 1 | ||
39 | case yaml_CRLN_BREAK: | ||
40 | emitter.buffer[emitter.buffer_pos+0] = '\r' | ||
41 | emitter.buffer[emitter.buffer_pos+1] = '\n' | ||
42 | emitter.buffer_pos += 2 | ||
43 | default: | ||
44 | panic("unknown line break setting") | ||
45 | } | ||
46 | emitter.column = 0 | ||
47 | emitter.line++ | ||
48 | return true | ||
49 | } | ||
50 | |||
51 | // Copy a character from a string into buffer. | ||
52 | func write(emitter *yaml_emitter_t, s []byte, i *int) bool { | ||
53 | if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { | ||
54 | return false | ||
55 | } | ||
56 | p := emitter.buffer_pos | ||
57 | w := width(s[*i]) | ||
58 | switch w { | ||
59 | case 4: | ||
60 | emitter.buffer[p+3] = s[*i+3] | ||
61 | fallthrough | ||
62 | case 3: | ||
63 | emitter.buffer[p+2] = s[*i+2] | ||
64 | fallthrough | ||
65 | case 2: | ||
66 | emitter.buffer[p+1] = s[*i+1] | ||
67 | fallthrough | ||
68 | case 1: | ||
69 | emitter.buffer[p+0] = s[*i+0] | ||
70 | default: | ||
71 | panic("unknown character width") | ||
72 | } | ||
73 | emitter.column++ | ||
74 | emitter.buffer_pos += w | ||
75 | *i += w | ||
76 | return true | ||
77 | } | ||
78 | |||
79 | // Write a whole string into buffer. | ||
80 | func write_all(emitter *yaml_emitter_t, s []byte) bool { | ||
81 | for i := 0; i < len(s); { | ||
82 | if !write(emitter, s, &i) { | ||
83 | return false | ||
84 | } | ||
85 | } | ||
86 | return true | ||
87 | } | ||
88 | |||
89 | // Copy a line break character from a string into buffer. | ||
90 | func write_break(emitter *yaml_emitter_t, s []byte, i *int) bool { | ||
91 | if s[*i] == '\n' { | ||
92 | if !put_break(emitter) { | ||
93 | return false | ||
94 | } | ||
95 | *i++ | ||
96 | } else { | ||
97 | if !write(emitter, s, i) { | ||
98 | return false | ||
99 | } | ||
100 | emitter.column = 0 | ||
101 | emitter.line++ | ||
102 | } | ||
103 | return true | ||
104 | } | ||
105 | |||
106 | // Set an emitter error and return false. | ||
107 | func yaml_emitter_set_emitter_error(emitter *yaml_emitter_t, problem string) bool { | ||
108 | emitter.error = yaml_EMITTER_ERROR | ||
109 | emitter.problem = problem | ||
110 | return false | ||
111 | } | ||
112 | |||
// Emit an event.
//
// The event is appended to the emitter's queue, and queued events are then
// analyzed and dispatched for as long as enough lookahead has accumulated
// (see yaml_emitter_need_more_events).
func yaml_emitter_emit(emitter *yaml_emitter_t, event *yaml_event_t) bool {
	emitter.events = append(emitter.events, *event)
	for !yaml_emitter_need_more_events(emitter) {
		// NOTE: this inner "event" deliberately shadows the parameter;
		// what gets emitted is the head of the queue, not necessarily the
		// event that was just passed in.
		event := &emitter.events[emitter.events_head]
		if !yaml_emitter_analyze_event(emitter, event) {
			return false
		}
		if !yaml_emitter_state_machine(emitter, event) {
			return false
		}
		yaml_event_delete(event)
		emitter.events_head++
	}
	return true
}
129 | |||
130 | // Check if we need to accumulate more events before emitting. | ||
131 | // | ||
132 | // We accumulate extra | ||
133 | // - 1 event for DOCUMENT-START | ||
134 | // - 2 events for SEQUENCE-START | ||
135 | // - 3 events for MAPPING-START | ||
136 | // | ||
137 | func yaml_emitter_need_more_events(emitter *yaml_emitter_t) bool { | ||
138 | if emitter.events_head == len(emitter.events) { | ||
139 | return true | ||
140 | } | ||
141 | var accumulate int | ||
142 | switch emitter.events[emitter.events_head].typ { | ||
143 | case yaml_DOCUMENT_START_EVENT: | ||
144 | accumulate = 1 | ||
145 | break | ||
146 | case yaml_SEQUENCE_START_EVENT: | ||
147 | accumulate = 2 | ||
148 | break | ||
149 | case yaml_MAPPING_START_EVENT: | ||
150 | accumulate = 3 | ||
151 | break | ||
152 | default: | ||
153 | return false | ||
154 | } | ||
155 | if len(emitter.events)-emitter.events_head > accumulate { | ||
156 | return false | ||
157 | } | ||
158 | var level int | ||
159 | for i := emitter.events_head; i < len(emitter.events); i++ { | ||
160 | switch emitter.events[i].typ { | ||
161 | case yaml_STREAM_START_EVENT, yaml_DOCUMENT_START_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT: | ||
162 | level++ | ||
163 | case yaml_STREAM_END_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_END_EVENT, yaml_MAPPING_END_EVENT: | ||
164 | level-- | ||
165 | } | ||
166 | if level == 0 { | ||
167 | return false | ||
168 | } | ||
169 | } | ||
170 | return true | ||
171 | } | ||
172 | |||
173 | // Append a directive to the directives stack. | ||
174 | func yaml_emitter_append_tag_directive(emitter *yaml_emitter_t, value *yaml_tag_directive_t, allow_duplicates bool) bool { | ||
175 | for i := 0; i < len(emitter.tag_directives); i++ { | ||
176 | if bytes.Equal(value.handle, emitter.tag_directives[i].handle) { | ||
177 | if allow_duplicates { | ||
178 | return true | ||
179 | } | ||
180 | return yaml_emitter_set_emitter_error(emitter, "duplicate %TAG directive") | ||
181 | } | ||
182 | } | ||
183 | |||
184 | // [Go] Do we actually need to copy this given garbage collection | ||
185 | // and the lack of deallocating destructors? | ||
186 | tag_copy := yaml_tag_directive_t{ | ||
187 | handle: make([]byte, len(value.handle)), | ||
188 | prefix: make([]byte, len(value.prefix)), | ||
189 | } | ||
190 | copy(tag_copy.handle, value.handle) | ||
191 | copy(tag_copy.prefix, value.prefix) | ||
192 | emitter.tag_directives = append(emitter.tag_directives, tag_copy) | ||
193 | return true | ||
194 | } | ||
195 | |||
196 | // Increase the indentation level. | ||
197 | func yaml_emitter_increase_indent(emitter *yaml_emitter_t, flow, indentless bool) bool { | ||
198 | emitter.indents = append(emitter.indents, emitter.indent) | ||
199 | if emitter.indent < 0 { | ||
200 | if flow { | ||
201 | emitter.indent = emitter.best_indent | ||
202 | } else { | ||
203 | emitter.indent = 0 | ||
204 | } | ||
205 | } else if !indentless { | ||
206 | emitter.indent += emitter.best_indent | ||
207 | } | ||
208 | return true | ||
209 | } | ||
210 | |||
211 | // State dispatcher. | ||
212 | func yaml_emitter_state_machine(emitter *yaml_emitter_t, event *yaml_event_t) bool { | ||
213 | switch emitter.state { | ||
214 | default: | ||
215 | case yaml_EMIT_STREAM_START_STATE: | ||
216 | return yaml_emitter_emit_stream_start(emitter, event) | ||
217 | |||
218 | case yaml_EMIT_FIRST_DOCUMENT_START_STATE: | ||
219 | return yaml_emitter_emit_document_start(emitter, event, true) | ||
220 | |||
221 | case yaml_EMIT_DOCUMENT_START_STATE: | ||
222 | return yaml_emitter_emit_document_start(emitter, event, false) | ||
223 | |||
224 | case yaml_EMIT_DOCUMENT_CONTENT_STATE: | ||
225 | return yaml_emitter_emit_document_content(emitter, event) | ||
226 | |||
227 | case yaml_EMIT_DOCUMENT_END_STATE: | ||
228 | return yaml_emitter_emit_document_end(emitter, event) | ||
229 | |||
230 | case yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE: | ||
231 | return yaml_emitter_emit_flow_sequence_item(emitter, event, true) | ||
232 | |||
233 | case yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE: | ||
234 | return yaml_emitter_emit_flow_sequence_item(emitter, event, false) | ||
235 | |||
236 | case yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE: | ||
237 | return yaml_emitter_emit_flow_mapping_key(emitter, event, true) | ||
238 | |||
239 | case yaml_EMIT_FLOW_MAPPING_KEY_STATE: | ||
240 | return yaml_emitter_emit_flow_mapping_key(emitter, event, false) | ||
241 | |||
242 | case yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE: | ||
243 | return yaml_emitter_emit_flow_mapping_value(emitter, event, true) | ||
244 | |||
245 | case yaml_EMIT_FLOW_MAPPING_VALUE_STATE: | ||
246 | return yaml_emitter_emit_flow_mapping_value(emitter, event, false) | ||
247 | |||
248 | case yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE: | ||
249 | return yaml_emitter_emit_block_sequence_item(emitter, event, true) | ||
250 | |||
251 | case yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE: | ||
252 | return yaml_emitter_emit_block_sequence_item(emitter, event, false) | ||
253 | |||
254 | case yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE: | ||
255 | return yaml_emitter_emit_block_mapping_key(emitter, event, true) | ||
256 | |||
257 | case yaml_EMIT_BLOCK_MAPPING_KEY_STATE: | ||
258 | return yaml_emitter_emit_block_mapping_key(emitter, event, false) | ||
259 | |||
260 | case yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE: | ||
261 | return yaml_emitter_emit_block_mapping_value(emitter, event, true) | ||
262 | |||
263 | case yaml_EMIT_BLOCK_MAPPING_VALUE_STATE: | ||
264 | return yaml_emitter_emit_block_mapping_value(emitter, event, false) | ||
265 | |||
266 | case yaml_EMIT_END_STATE: | ||
267 | return yaml_emitter_set_emitter_error(emitter, "expected nothing after STREAM-END") | ||
268 | } | ||
269 | panic("invalid emitter state") | ||
270 | } | ||
271 | |||
// Expect STREAM-START.
//
// Applies stream-level defaults (encoding, indent, width, line-break
// style), resets the writer position counters, writes a BOM for non-UTF-8
// encodings, and transitions to the first-document state.
func yaml_emitter_emit_stream_start(emitter *yaml_emitter_t, event *yaml_event_t) bool {
	if event.typ != yaml_STREAM_START_EVENT {
		return yaml_emitter_set_emitter_error(emitter, "expected STREAM-START")
	}
	if emitter.encoding == yaml_ANY_ENCODING {
		emitter.encoding = event.encoding
		if emitter.encoding == yaml_ANY_ENCODING {
			// Neither the emitter nor the event specified one; default to UTF-8.
			emitter.encoding = yaml_UTF8_ENCODING
		}
	}
	// Clamp the indent step to the supported 2..9 range.
	if emitter.best_indent < 2 || emitter.best_indent > 9 {
		emitter.best_indent = 2
	}
	// A width that cannot fit two indent steps is unusable; fall back to 80.
	if emitter.best_width >= 0 && emitter.best_width <= emitter.best_indent*2 {
		emitter.best_width = 80
	}
	// A negative width means "unlimited".
	if emitter.best_width < 0 {
		emitter.best_width = 1<<31 - 1
	}
	if emitter.line_break == yaml_ANY_BREAK {
		emitter.line_break = yaml_LN_BREAK
	}

	// Reset output position tracking for the new stream.
	emitter.indent = -1
	emitter.line = 0
	emitter.column = 0
	emitter.whitespace = true
	emitter.indention = true

	if emitter.encoding != yaml_UTF8_ENCODING {
		if !yaml_emitter_write_bom(emitter) {
			return false
		}
	}
	emitter.state = yaml_EMIT_FIRST_DOCUMENT_START_STATE
	return true
}
310 | |||
// Expect DOCUMENT-START or STREAM-END.
//
// first reports whether this would be the first document of the stream,
// which affects whether the "---" marker may be omitted for an implicit
// document start.
func yaml_emitter_emit_document_start(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool {

	if event.typ == yaml_DOCUMENT_START_EVENT {

		if event.version_directive != nil {
			if !yaml_emitter_analyze_version_directive(emitter, event.version_directive) {
				return false
			}
		}

		// Validate and record the document's own %TAG directives,
		// rejecting duplicate handles.
		for i := 0; i < len(event.tag_directives); i++ {
			tag_directive := &event.tag_directives[i]
			if !yaml_emitter_analyze_tag_directive(emitter, tag_directive) {
				return false
			}
			if !yaml_emitter_append_tag_directive(emitter, tag_directive, false) {
				return false
			}
		}

		// The default directives are always registered; overlap with the
		// document's own directives is allowed here.
		for i := 0; i < len(default_tag_directives); i++ {
			tag_directive := &default_tag_directives[i]
			if !yaml_emitter_append_tag_directive(emitter, tag_directive, true) {
				return false
			}
		}

		implicit := event.implicit
		if !first || emitter.canonical {
			implicit = false
		}

		// If the previous document was left open-ended and this one emits
		// directives, terminate it explicitly with "..." first.
		if emitter.open_ended && (event.version_directive != nil || len(event.tag_directives) > 0) {
			if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) {
				return false
			}
			if !yaml_emitter_write_indent(emitter) {
				return false
			}
		}

		// A %YAML directive forces an explicit document start.
		if event.version_directive != nil {
			implicit = false
			if !yaml_emitter_write_indicator(emitter, []byte("%YAML"), true, false, false) {
				return false
			}
			if !yaml_emitter_write_indicator(emitter, []byte("1.1"), true, false, false) {
				return false
			}
			if !yaml_emitter_write_indent(emitter) {
				return false
			}
		}

		// %TAG directives likewise force an explicit document start.
		if len(event.tag_directives) > 0 {
			implicit = false
			for i := 0; i < len(event.tag_directives); i++ {
				tag_directive := &event.tag_directives[i]
				if !yaml_emitter_write_indicator(emitter, []byte("%TAG"), true, false, false) {
					return false
				}
				if !yaml_emitter_write_tag_handle(emitter, tag_directive.handle) {
					return false
				}
				if !yaml_emitter_write_tag_content(emitter, tag_directive.prefix, true) {
					return false
				}
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
			}
		}

		if yaml_emitter_check_empty_document(emitter) {
			implicit = false
		}
		// A non-implicit document start is marked with "---".
		if !implicit {
			if !yaml_emitter_write_indent(emitter) {
				return false
			}
			if !yaml_emitter_write_indicator(emitter, []byte("---"), true, false, false) {
				return false
			}
			if emitter.canonical {
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
			}
		}

		emitter.state = yaml_EMIT_DOCUMENT_CONTENT_STATE
		return true
	}

	if event.typ == yaml_STREAM_END_EVENT {
		// Close any open-ended document before ending the stream.
		if emitter.open_ended {
			if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) {
				return false
			}
			if !yaml_emitter_write_indent(emitter) {
				return false
			}
		}
		if !yaml_emitter_flush(emitter) {
			return false
		}
		emitter.state = yaml_EMIT_END_STATE
		return true
	}

	return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-START or STREAM-END")
}
424 | |||
// Expect the root node.
func yaml_emitter_emit_document_content(emitter *yaml_emitter_t, event *yaml_event_t) bool {
	// After the root node completes, the next expected event is DOCUMENT-END.
	emitter.states = append(emitter.states, yaml_EMIT_DOCUMENT_END_STATE)
	return yaml_emitter_emit_node(emitter, event, true, false, false, false)
}
430 | |||
// Expect DOCUMENT-END.
//
// Writes an explicit "..." terminator unless the end is implicit, flushes
// the output, and resets per-document state ready for a possible next
// document in the stream.
func yaml_emitter_emit_document_end(emitter *yaml_emitter_t, event *yaml_event_t) bool {
	if event.typ != yaml_DOCUMENT_END_EVENT {
		return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-END")
	}
	if !yaml_emitter_write_indent(emitter) {
		return false
	}
	if !event.implicit {
		// [Go] Allocate the slice elsewhere.
		if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) {
			return false
		}
		if !yaml_emitter_write_indent(emitter) {
			return false
		}
	}
	if !yaml_emitter_flush(emitter) {
		return false
	}
	emitter.state = yaml_EMIT_DOCUMENT_START_STATE
	// Tag directives are scoped to a single document.
	emitter.tag_directives = emitter.tag_directives[:0]
	return true
}
455 | |||
// Expect a flow item node.
//
// On the first call the opening '[' is written and a new flow level begun.
// A SEQUENCE-END event closes the level and writes ']'; any other event is
// emitted as the next item, preceded by ',' when it is not the first.
func yaml_emitter_emit_flow_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool {
	if first {
		if !yaml_emitter_write_indicator(emitter, []byte{'['}, true, true, false) {
			return false
		}
		if !yaml_emitter_increase_indent(emitter, true, false) {
			return false
		}
		emitter.flow_level++
	}

	if event.typ == yaml_SEQUENCE_END_EVENT {
		emitter.flow_level--
		// Pop the indentation level pushed when the sequence started.
		emitter.indent = emitter.indents[len(emitter.indents)-1]
		emitter.indents = emitter.indents[:len(emitter.indents)-1]
		// Canonical form writes a trailing comma and line break before ']'.
		if emitter.canonical && !first {
			if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) {
				return false
			}
			if !yaml_emitter_write_indent(emitter) {
				return false
			}
		}
		if !yaml_emitter_write_indicator(emitter, []byte{']'}, false, false, false) {
			return false
		}
		// Resume whatever state our parent pushed.
		emitter.state = emitter.states[len(emitter.states)-1]
		emitter.states = emitter.states[:len(emitter.states)-1]

		return true
	}

	if !first {
		if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) {
			return false
		}
	}

	// Wrap onto a new line in canonical mode or past the preferred width.
	if emitter.canonical || emitter.column > emitter.best_width {
		if !yaml_emitter_write_indent(emitter) {
			return false
		}
	}
	emitter.states = append(emitter.states, yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE)
	return yaml_emitter_emit_node(emitter, event, false, true, false, false)
}
503 | |||
// Expect a flow key node.
//
// On the first call the opening '{' is written and a new flow level begun.
// A MAPPING-END event closes the level and writes '}'. Keys that qualify
// as "simple" are written bare; otherwise the explicit '?' indicator is
// used.
func yaml_emitter_emit_flow_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool {
	if first {
		if !yaml_emitter_write_indicator(emitter, []byte{'{'}, true, true, false) {
			return false
		}
		if !yaml_emitter_increase_indent(emitter, true, false) {
			return false
		}
		emitter.flow_level++
	}

	if event.typ == yaml_MAPPING_END_EVENT {
		emitter.flow_level--
		// Pop the indentation level pushed when the mapping started.
		emitter.indent = emitter.indents[len(emitter.indents)-1]
		emitter.indents = emitter.indents[:len(emitter.indents)-1]
		// Canonical form writes a trailing comma and line break before '}'.
		if emitter.canonical && !first {
			if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) {
				return false
			}
			if !yaml_emitter_write_indent(emitter) {
				return false
			}
		}
		if !yaml_emitter_write_indicator(emitter, []byte{'}'}, false, false, false) {
			return false
		}
		// Resume whatever state our parent pushed.
		emitter.state = emitter.states[len(emitter.states)-1]
		emitter.states = emitter.states[:len(emitter.states)-1]
		return true
	}

	if !first {
		if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) {
			return false
		}
	}
	// Wrap onto a new line in canonical mode or past the preferred width.
	if emitter.canonical || emitter.column > emitter.best_width {
		if !yaml_emitter_write_indent(emitter) {
			return false
		}
	}

	if !emitter.canonical && yaml_emitter_check_simple_key(emitter) {
		emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE)
		return yaml_emitter_emit_node(emitter, event, false, false, true, true)
	}
	if !yaml_emitter_write_indicator(emitter, []byte{'?'}, true, false, false) {
		return false
	}
	emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_VALUE_STATE)
	return yaml_emitter_emit_node(emitter, event, false, false, true, false)
}
557 | |||
558 | // Expect a flow value node. | ||
559 | func yaml_emitter_emit_flow_mapping_value(emitter *yaml_emitter_t, event *yaml_event_t, simple bool) bool { | ||
560 | if simple { | ||
561 | if !yaml_emitter_write_indicator(emitter, []byte{':'}, false, false, false) { | ||
562 | return false | ||
563 | } | ||
564 | } else { | ||
565 | if emitter.canonical || emitter.column > emitter.best_width { | ||
566 | if !yaml_emitter_write_indent(emitter) { | ||
567 | return false | ||
568 | } | ||
569 | } | ||
570 | if !yaml_emitter_write_indicator(emitter, []byte{':'}, true, false, false) { | ||
571 | return false | ||
572 | } | ||
573 | } | ||
574 | emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_KEY_STATE) | ||
575 | return yaml_emitter_emit_node(emitter, event, false, false, true, false) | ||
576 | } | ||
577 | |||
// Expect a block item node.
//
// Each item is written as "- <node>" at the current indentation. A
// SEQUENCE-END event instead pops the indentation level and the parent
// state.
func yaml_emitter_emit_block_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool {
	if first {
		// Request an indentless level when the sequence is nested directly
		// inside a mapping entry and no line break has been written yet.
		if !yaml_emitter_increase_indent(emitter, false, emitter.mapping_context && !emitter.indention) {
			return false
		}
	}
	if event.typ == yaml_SEQUENCE_END_EVENT {
		emitter.indent = emitter.indents[len(emitter.indents)-1]
		emitter.indents = emitter.indents[:len(emitter.indents)-1]
		emitter.state = emitter.states[len(emitter.states)-1]
		emitter.states = emitter.states[:len(emitter.states)-1]
		return true
	}
	if !yaml_emitter_write_indent(emitter) {
		return false
	}
	if !yaml_emitter_write_indicator(emitter, []byte{'-'}, true, false, true) {
		return false
	}
	emitter.states = append(emitter.states, yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE)
	return yaml_emitter_emit_node(emitter, event, false, true, false, false)
}
601 | |||
// Expect a block key node.
//
// Keys that qualify as "simple" are written inline ("key: value"); complex
// keys use the explicit "? key" / ": value" form. A MAPPING-END event pops
// the indentation level and the parent state.
func yaml_emitter_emit_block_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool {
	if first {
		if !yaml_emitter_increase_indent(emitter, false, false) {
			return false
		}
	}
	if event.typ == yaml_MAPPING_END_EVENT {
		emitter.indent = emitter.indents[len(emitter.indents)-1]
		emitter.indents = emitter.indents[:len(emitter.indents)-1]
		emitter.state = emitter.states[len(emitter.states)-1]
		emitter.states = emitter.states[:len(emitter.states)-1]
		return true
	}
	if !yaml_emitter_write_indent(emitter) {
		return false
	}
	if yaml_emitter_check_simple_key(emitter) {
		emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE)
		return yaml_emitter_emit_node(emitter, event, false, false, true, true)
	}
	if !yaml_emitter_write_indicator(emitter, []byte{'?'}, true, false, true) {
		return false
	}
	emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_VALUE_STATE)
	return yaml_emitter_emit_node(emitter, event, false, false, true, false)
}
629 | |||
630 | // Expect a block value node. | ||
631 | func yaml_emitter_emit_block_mapping_value(emitter *yaml_emitter_t, event *yaml_event_t, simple bool) bool { | ||
632 | if simple { | ||
633 | if !yaml_emitter_write_indicator(emitter, []byte{':'}, false, false, false) { | ||
634 | return false | ||
635 | } | ||
636 | } else { | ||
637 | if !yaml_emitter_write_indent(emitter) { | ||
638 | return false | ||
639 | } | ||
640 | if !yaml_emitter_write_indicator(emitter, []byte{':'}, true, false, true) { | ||
641 | return false | ||
642 | } | ||
643 | } | ||
644 | emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_KEY_STATE) | ||
645 | return yaml_emitter_emit_node(emitter, event, false, false, true, false) | ||
646 | } | ||
647 | |||
// Expect a node.
//
// Records the context flags describing where the node appears (root node,
// sequence item, mapping key/value, simple key) and then dispatches on the
// event type. Any other event type at this point is an error.
func yaml_emitter_emit_node(emitter *yaml_emitter_t, event *yaml_event_t,
	root bool, sequence bool, mapping bool, simple_key bool) bool {

	emitter.root_context = root
	emitter.sequence_context = sequence
	emitter.mapping_context = mapping
	emitter.simple_key_context = simple_key

	switch event.typ {
	case yaml_ALIAS_EVENT:
		return yaml_emitter_emit_alias(emitter, event)
	case yaml_SCALAR_EVENT:
		return yaml_emitter_emit_scalar(emitter, event)
	case yaml_SEQUENCE_START_EVENT:
		return yaml_emitter_emit_sequence_start(emitter, event)
	case yaml_MAPPING_START_EVENT:
		return yaml_emitter_emit_mapping_start(emitter, event)
	default:
		return yaml_emitter_set_emitter_error(emitter,
			fmt.Sprintf("expected SCALAR, SEQUENCE-START, MAPPING-START, or ALIAS, but got %v", event.typ))
	}
}
671 | |||
672 | // Expect ALIAS. | ||
673 | func yaml_emitter_emit_alias(emitter *yaml_emitter_t, event *yaml_event_t) bool { | ||
674 | if !yaml_emitter_process_anchor(emitter) { | ||
675 | return false | ||
676 | } | ||
677 | emitter.state = emitter.states[len(emitter.states)-1] | ||
678 | emitter.states = emitter.states[:len(emitter.states)-1] | ||
679 | return true | ||
680 | } | ||
681 | |||
// Expect SCALAR.
//
// Selects an output style for the scalar, writes its anchor, tag and
// value, then pops the next state. A temporary indentation level is pushed
// around the scalar write and popped afterwards.
func yaml_emitter_emit_scalar(emitter *yaml_emitter_t, event *yaml_event_t) bool {
	if !yaml_emitter_select_scalar_style(emitter, event) {
		return false
	}
	if !yaml_emitter_process_anchor(emitter) {
		return false
	}
	if !yaml_emitter_process_tag(emitter) {
		return false
	}
	if !yaml_emitter_increase_indent(emitter, true, false) {
		return false
	}
	if !yaml_emitter_process_scalar(emitter) {
		return false
	}
	// Restore the previous indentation level and resume the state pushed
	// by the parent.
	emitter.indent = emitter.indents[len(emitter.indents)-1]
	emitter.indents = emitter.indents[:len(emitter.indents)-1]
	emitter.state = emitter.states[len(emitter.states)-1]
	emitter.states = emitter.states[:len(emitter.states)-1]
	return true
}
705 | |||
706 | // Expect SEQUENCE-START. | ||
707 | func yaml_emitter_emit_sequence_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { | ||
708 | if !yaml_emitter_process_anchor(emitter) { | ||
709 | return false | ||
710 | } | ||
711 | if !yaml_emitter_process_tag(emitter) { | ||
712 | return false | ||
713 | } | ||
714 | if emitter.flow_level > 0 || emitter.canonical || event.sequence_style() == yaml_FLOW_SEQUENCE_STYLE || | ||
715 | yaml_emitter_check_empty_sequence(emitter) { | ||
716 | emitter.state = yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE | ||
717 | } else { | ||
718 | emitter.state = yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE | ||
719 | } | ||
720 | return true | ||
721 | } | ||
722 | |||
723 | // Expect MAPPING-START. | ||
724 | func yaml_emitter_emit_mapping_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { | ||
725 | if !yaml_emitter_process_anchor(emitter) { | ||
726 | return false | ||
727 | } | ||
728 | if !yaml_emitter_process_tag(emitter) { | ||
729 | return false | ||
730 | } | ||
731 | if emitter.flow_level > 0 || emitter.canonical || event.mapping_style() == yaml_FLOW_MAPPING_STYLE || | ||
732 | yaml_emitter_check_empty_mapping(emitter) { | ||
733 | emitter.state = yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE | ||
734 | } else { | ||
735 | emitter.state = yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE | ||
736 | } | ||
737 | return true | ||
738 | } | ||
739 | |||
// Check if the document content is an empty scalar.
//
// This port always reports false (the original [Go] comment below flags
// this as a possible stub relative to the C original), so callers never
// take their empty-document branch.
func yaml_emitter_check_empty_document(emitter *yaml_emitter_t) bool {
	return false // [Go] Huh?
}
744 | |||
745 | // Check if the next events represent an empty sequence. | ||
746 | func yaml_emitter_check_empty_sequence(emitter *yaml_emitter_t) bool { | ||
747 | if len(emitter.events)-emitter.events_head < 2 { | ||
748 | return false | ||
749 | } | ||
750 | return emitter.events[emitter.events_head].typ == yaml_SEQUENCE_START_EVENT && | ||
751 | emitter.events[emitter.events_head+1].typ == yaml_SEQUENCE_END_EVENT | ||
752 | } | ||
753 | |||
754 | // Check if the next events represent an empty mapping. | ||
755 | func yaml_emitter_check_empty_mapping(emitter *yaml_emitter_t) bool { | ||
756 | if len(emitter.events)-emitter.events_head < 2 { | ||
757 | return false | ||
758 | } | ||
759 | return emitter.events[emitter.events_head].typ == yaml_MAPPING_START_EVENT && | ||
760 | emitter.events[emitter.events_head+1].typ == yaml_MAPPING_END_EVENT | ||
761 | } | ||
762 | |||
// Check if the next node can be expressed as a simple key.
//
// The decision is based on the analyzed data for the event at the head of
// the queue: the key must not be multiline and its total rendered length
// (anchor + tag + content) must not exceed 128.
func yaml_emitter_check_simple_key(emitter *yaml_emitter_t) bool {
	length := 0
	switch emitter.events[emitter.events_head].typ {
	case yaml_ALIAS_EVENT:
		length += len(emitter.anchor_data.anchor)
	case yaml_SCALAR_EVENT:
		// A multiline scalar can never be a simple key.
		if emitter.scalar_data.multiline {
			return false
		}
		length += len(emitter.anchor_data.anchor) +
			len(emitter.tag_data.handle) +
			len(emitter.tag_data.suffix) +
			len(emitter.scalar_data.value)
	case yaml_SEQUENCE_START_EVENT:
		// Only an empty sequence may act as a simple key.
		if !yaml_emitter_check_empty_sequence(emitter) {
			return false
		}
		length += len(emitter.anchor_data.anchor) +
			len(emitter.tag_data.handle) +
			len(emitter.tag_data.suffix)
	case yaml_MAPPING_START_EVENT:
		// Only an empty mapping may act as a simple key.
		if !yaml_emitter_check_empty_mapping(emitter) {
			return false
		}
		length += len(emitter.anchor_data.anchor) +
			len(emitter.tag_data.handle) +
			len(emitter.tag_data.suffix)
	default:
		return false
	}
	return length <= 128
}
796 | |||
// Determine an acceptable scalar style.
//
// Starts from the style requested by the event and downgrades it step by
// step (plain -> single-quoted -> double-quoted) until a style is found
// that the analyzed scalar data permits in the current context. The chosen
// style is recorded in emitter.scalar_data.style.
func yaml_emitter_select_scalar_style(emitter *yaml_emitter_t, event *yaml_event_t) bool {

	no_tag := len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0
	if no_tag && !event.implicit && !event.quoted_implicit {
		return yaml_emitter_set_emitter_error(emitter, "neither tag nor implicit flags are specified")
	}

	style := event.scalar_style()
	if style == yaml_ANY_SCALAR_STYLE {
		style = yaml_PLAIN_SCALAR_STYLE
	}
	// Canonical output always uses double-quoted scalars.
	if emitter.canonical {
		style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
	}
	// A multiline scalar in simple-key position must be double-quoted.
	if emitter.simple_key_context && emitter.scalar_data.multiline {
		style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
	}

	if style == yaml_PLAIN_SCALAR_STYLE {
		// Plain style is only usable where the content analysis permits it
		// for the current (flow or block) context.
		if emitter.flow_level > 0 && !emitter.scalar_data.flow_plain_allowed ||
			emitter.flow_level == 0 && !emitter.scalar_data.block_plain_allowed {
			style = yaml_SINGLE_QUOTED_SCALAR_STYLE
		}
		// An empty plain scalar is ambiguous in flow context or as a key.
		if len(emitter.scalar_data.value) == 0 && (emitter.flow_level > 0 || emitter.simple_key_context) {
			style = yaml_SINGLE_QUOTED_SCALAR_STYLE
		}
		// Plain style implies implicit typing; quote if the event forbids it.
		if no_tag && !event.implicit {
			style = yaml_SINGLE_QUOTED_SCALAR_STYLE
		}
	}
	if style == yaml_SINGLE_QUOTED_SCALAR_STYLE {
		if !emitter.scalar_data.single_quoted_allowed {
			style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
		}
	}
	// Block styles (literal/folded) are invalid in flow context or in
	// simple-key position.
	if style == yaml_LITERAL_SCALAR_STYLE || style == yaml_FOLDED_SCALAR_STYLE {
		if !emitter.scalar_data.block_allowed || emitter.flow_level > 0 || emitter.simple_key_context {
			style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
		}
	}

	// A quoted scalar with no explicit tag gets the "!" handle to keep its
	// non-specific tag.
	if no_tag && !event.quoted_implicit && style != yaml_PLAIN_SCALAR_STYLE {
		emitter.tag_data.handle = []byte{'!'}
	}
	emitter.scalar_data.style = style
	return true
}
845 | |||
846 | // Write an anchor. | ||
847 | func yaml_emitter_process_anchor(emitter *yaml_emitter_t) bool { | ||
848 | if emitter.anchor_data.anchor == nil { | ||
849 | return true | ||
850 | } | ||
851 | c := []byte{'&'} | ||
852 | if emitter.anchor_data.alias { | ||
853 | c[0] = '*' | ||
854 | } | ||
855 | if !yaml_emitter_write_indicator(emitter, c, true, false, false) { | ||
856 | return false | ||
857 | } | ||
858 | return yaml_emitter_write_anchor(emitter, emitter.anchor_data.anchor) | ||
859 | } | ||
860 | |||
861 | // Write a tag. | ||
862 | func yaml_emitter_process_tag(emitter *yaml_emitter_t) bool { | ||
863 | if len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0 { | ||
864 | return true | ||
865 | } | ||
866 | if len(emitter.tag_data.handle) > 0 { | ||
867 | if !yaml_emitter_write_tag_handle(emitter, emitter.tag_data.handle) { | ||
868 | return false | ||
869 | } | ||
870 | if len(emitter.tag_data.suffix) > 0 { | ||
871 | if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) { | ||
872 | return false | ||
873 | } | ||
874 | } | ||
875 | } else { | ||
876 | // [Go] Allocate these slices elsewhere. | ||
877 | if !yaml_emitter_write_indicator(emitter, []byte("!<"), true, false, false) { | ||
878 | return false | ||
879 | } | ||
880 | if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) { | ||
881 | return false | ||
882 | } | ||
883 | if !yaml_emitter_write_indicator(emitter, []byte{'>'}, false, false, false) { | ||
884 | return false | ||
885 | } | ||
886 | } | ||
887 | return true | ||
888 | } | ||
889 | |||
890 | // Write a scalar. | ||
891 | func yaml_emitter_process_scalar(emitter *yaml_emitter_t) bool { | ||
892 | switch emitter.scalar_data.style { | ||
893 | case yaml_PLAIN_SCALAR_STYLE: | ||
894 | return yaml_emitter_write_plain_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) | ||
895 | |||
896 | case yaml_SINGLE_QUOTED_SCALAR_STYLE: | ||
897 | return yaml_emitter_write_single_quoted_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) | ||
898 | |||
899 | case yaml_DOUBLE_QUOTED_SCALAR_STYLE: | ||
900 | return yaml_emitter_write_double_quoted_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) | ||
901 | |||
902 | case yaml_LITERAL_SCALAR_STYLE: | ||
903 | return yaml_emitter_write_literal_scalar(emitter, emitter.scalar_data.value) | ||
904 | |||
905 | case yaml_FOLDED_SCALAR_STYLE: | ||
906 | return yaml_emitter_write_folded_scalar(emitter, emitter.scalar_data.value) | ||
907 | } | ||
908 | panic("unknown scalar style") | ||
909 | } | ||
910 | |||
911 | // Check if a %YAML directive is valid. | ||
912 | func yaml_emitter_analyze_version_directive(emitter *yaml_emitter_t, version_directive *yaml_version_directive_t) bool { | ||
913 | if version_directive.major != 1 || version_directive.minor != 1 { | ||
914 | return yaml_emitter_set_emitter_error(emitter, "incompatible %YAML directive") | ||
915 | } | ||
916 | return true | ||
917 | } | ||
918 | |||
919 | // Check if a %TAG directive is valid. | ||
920 | func yaml_emitter_analyze_tag_directive(emitter *yaml_emitter_t, tag_directive *yaml_tag_directive_t) bool { | ||
921 | handle := tag_directive.handle | ||
922 | prefix := tag_directive.prefix | ||
923 | if len(handle) == 0 { | ||
924 | return yaml_emitter_set_emitter_error(emitter, "tag handle must not be empty") | ||
925 | } | ||
926 | if handle[0] != '!' { | ||
927 | return yaml_emitter_set_emitter_error(emitter, "tag handle must start with '!'") | ||
928 | } | ||
929 | if handle[len(handle)-1] != '!' { | ||
930 | return yaml_emitter_set_emitter_error(emitter, "tag handle must end with '!'") | ||
931 | } | ||
932 | for i := 1; i < len(handle)-1; i += width(handle[i]) { | ||
933 | if !is_alpha(handle, i) { | ||
934 | return yaml_emitter_set_emitter_error(emitter, "tag handle must contain alphanumerical characters only") | ||
935 | } | ||
936 | } | ||
937 | if len(prefix) == 0 { | ||
938 | return yaml_emitter_set_emitter_error(emitter, "tag prefix must not be empty") | ||
939 | } | ||
940 | return true | ||
941 | } | ||
942 | |||
943 | // Check if an anchor is valid. | ||
944 | func yaml_emitter_analyze_anchor(emitter *yaml_emitter_t, anchor []byte, alias bool) bool { | ||
945 | if len(anchor) == 0 { | ||
946 | problem := "anchor value must not be empty" | ||
947 | if alias { | ||
948 | problem = "alias value must not be empty" | ||
949 | } | ||
950 | return yaml_emitter_set_emitter_error(emitter, problem) | ||
951 | } | ||
952 | for i := 0; i < len(anchor); i += width(anchor[i]) { | ||
953 | if !is_alpha(anchor, i) { | ||
954 | problem := "anchor value must contain alphanumerical characters only" | ||
955 | if alias { | ||
956 | problem = "alias value must contain alphanumerical characters only" | ||
957 | } | ||
958 | return yaml_emitter_set_emitter_error(emitter, problem) | ||
959 | } | ||
960 | } | ||
961 | emitter.anchor_data.anchor = anchor | ||
962 | emitter.anchor_data.alias = alias | ||
963 | return true | ||
964 | } | ||
965 | |||
966 | // Check if a tag is valid. | ||
967 | func yaml_emitter_analyze_tag(emitter *yaml_emitter_t, tag []byte) bool { | ||
968 | if len(tag) == 0 { | ||
969 | return yaml_emitter_set_emitter_error(emitter, "tag value must not be empty") | ||
970 | } | ||
971 | for i := 0; i < len(emitter.tag_directives); i++ { | ||
972 | tag_directive := &emitter.tag_directives[i] | ||
973 | if bytes.HasPrefix(tag, tag_directive.prefix) { | ||
974 | emitter.tag_data.handle = tag_directive.handle | ||
975 | emitter.tag_data.suffix = tag[len(tag_directive.prefix):] | ||
976 | return true | ||
977 | } | ||
978 | } | ||
979 | emitter.tag_data.suffix = tag | ||
980 | return true | ||
981 | } | ||
982 | |||
// Check if a scalar is valid.
//
// Performs a single pass over the scalar value and records in
// emitter.scalar_data the properties that later constrain the style choice:
// whether the value is multiline, and whether plain, single-quoted or block
// styles may represent it.
func yaml_emitter_analyze_scalar(emitter *yaml_emitter_t, value []byte) bool {
	var (
		block_indicators = false
		flow_indicators = false
		line_breaks = false
		special_characters = false

		leading_space = false
		leading_break = false
		trailing_space = false
		trailing_break = false
		break_space = false
		space_break = false

		preceded_by_whitespace = false
		followed_by_whitespace = false
		previous_space = false
		previous_break = false
	)

	emitter.scalar_data.value = value

	// An empty scalar may be written plain only in block context; it may
	// always be single-quoted, but never as a block scalar.
	if len(value) == 0 {
		emitter.scalar_data.multiline = false
		emitter.scalar_data.flow_plain_allowed = false
		emitter.scalar_data.block_plain_allowed = true
		emitter.scalar_data.single_quoted_allowed = true
		emitter.scalar_data.block_allowed = false
		return true
	}

	// A value starting with "---" or "..." would look like a document
	// marker if written plain.
	if len(value) >= 3 && ((value[0] == '-' && value[1] == '-' && value[2] == '-') || (value[0] == '.' && value[1] == '.' && value[2] == '.')) {
		block_indicators = true
		flow_indicators = true
	}

	preceded_by_whitespace = true
	for i, w := 0, 0; i < len(value); i += w {
		w = width(value[i])
		followed_by_whitespace = i+w >= len(value) || is_blank(value, i+w)

		if i == 0 {
			// Characters that act as indicators at the start of a scalar.
			switch value[i] {
			case '#', ',', '[', ']', '{', '}', '&', '*', '!', '|', '>', '\'', '"', '%', '@', '`':
				flow_indicators = true
				block_indicators = true
			case '?', ':':
				flow_indicators = true
				if followed_by_whitespace {
					block_indicators = true
				}
			case '-':
				if followed_by_whitespace {
					flow_indicators = true
					block_indicators = true
				}
			}
		} else {
			// Characters that act as indicators in the middle of a scalar.
			switch value[i] {
			case ',', '?', '[', ']', '{', '}':
				flow_indicators = true
			case ':':
				flow_indicators = true
				if followed_by_whitespace {
					block_indicators = true
				}
			case '#':
				if preceded_by_whitespace {
					flow_indicators = true
					block_indicators = true
				}
			}
		}

		// Unprintable characters (and non-ASCII ones when unicode output is
		// disabled) can only appear escaped in double-quoted style.
		if !is_printable(value, i) || !is_ascii(value, i) && !emitter.unicode {
			special_characters = true
		}
		// Track leading/trailing blanks and space<->break adjacency, which
		// restrict the plain, single-quoted and block styles below.
		if is_space(value, i) {
			if i == 0 {
				leading_space = true
			}
			if i+width(value[i]) == len(value) {
				trailing_space = true
			}
			if previous_break {
				break_space = true
			}
			previous_space = true
			previous_break = false
		} else if is_break(value, i) {
			line_breaks = true
			if i == 0 {
				leading_break = true
			}
			if i+width(value[i]) == len(value) {
				trailing_break = true
			}
			if previous_space {
				space_break = true
			}
			previous_space = false
			previous_break = true
		} else {
			previous_space = false
			previous_break = false
		}

		// [Go]: Why 'z'? Couldn't be the end of the string as that's the loop condition.
		preceded_by_whitespace = is_blankz(value, i)
	}

	// Translate the gathered facts into per-style permissions, starting from
	// "everything allowed" and subtracting.
	emitter.scalar_data.multiline = line_breaks
	emitter.scalar_data.flow_plain_allowed = true
	emitter.scalar_data.block_plain_allowed = true
	emitter.scalar_data.single_quoted_allowed = true
	emitter.scalar_data.block_allowed = true

	if leading_space || leading_break || trailing_space || trailing_break {
		emitter.scalar_data.flow_plain_allowed = false
		emitter.scalar_data.block_plain_allowed = false
	}
	if trailing_space {
		emitter.scalar_data.block_allowed = false
	}
	if break_space {
		emitter.scalar_data.flow_plain_allowed = false
		emitter.scalar_data.block_plain_allowed = false
		emitter.scalar_data.single_quoted_allowed = false
	}
	if space_break || special_characters {
		emitter.scalar_data.flow_plain_allowed = false
		emitter.scalar_data.block_plain_allowed = false
		emitter.scalar_data.single_quoted_allowed = false
		emitter.scalar_data.block_allowed = false
	}
	if line_breaks {
		emitter.scalar_data.flow_plain_allowed = false
		emitter.scalar_data.block_plain_allowed = false
	}
	if flow_indicators {
		emitter.scalar_data.flow_plain_allowed = false
	}
	if block_indicators {
		emitter.scalar_data.block_plain_allowed = false
	}
	return true
}
1131 | |||
1132 | // Check if the event data is valid. | ||
1133 | func yaml_emitter_analyze_event(emitter *yaml_emitter_t, event *yaml_event_t) bool { | ||
1134 | |||
1135 | emitter.anchor_data.anchor = nil | ||
1136 | emitter.tag_data.handle = nil | ||
1137 | emitter.tag_data.suffix = nil | ||
1138 | emitter.scalar_data.value = nil | ||
1139 | |||
1140 | switch event.typ { | ||
1141 | case yaml_ALIAS_EVENT: | ||
1142 | if !yaml_emitter_analyze_anchor(emitter, event.anchor, true) { | ||
1143 | return false | ||
1144 | } | ||
1145 | |||
1146 | case yaml_SCALAR_EVENT: | ||
1147 | if len(event.anchor) > 0 { | ||
1148 | if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { | ||
1149 | return false | ||
1150 | } | ||
1151 | } | ||
1152 | if len(event.tag) > 0 && (emitter.canonical || (!event.implicit && !event.quoted_implicit)) { | ||
1153 | if !yaml_emitter_analyze_tag(emitter, event.tag) { | ||
1154 | return false | ||
1155 | } | ||
1156 | } | ||
1157 | if !yaml_emitter_analyze_scalar(emitter, event.value) { | ||
1158 | return false | ||
1159 | } | ||
1160 | |||
1161 | case yaml_SEQUENCE_START_EVENT: | ||
1162 | if len(event.anchor) > 0 { | ||
1163 | if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { | ||
1164 | return false | ||
1165 | } | ||
1166 | } | ||
1167 | if len(event.tag) > 0 && (emitter.canonical || !event.implicit) { | ||
1168 | if !yaml_emitter_analyze_tag(emitter, event.tag) { | ||
1169 | return false | ||
1170 | } | ||
1171 | } | ||
1172 | |||
1173 | case yaml_MAPPING_START_EVENT: | ||
1174 | if len(event.anchor) > 0 { | ||
1175 | if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { | ||
1176 | return false | ||
1177 | } | ||
1178 | } | ||
1179 | if len(event.tag) > 0 && (emitter.canonical || !event.implicit) { | ||
1180 | if !yaml_emitter_analyze_tag(emitter, event.tag) { | ||
1181 | return false | ||
1182 | } | ||
1183 | } | ||
1184 | } | ||
1185 | return true | ||
1186 | } | ||
1187 | |||
1188 | // Write the BOM character. | ||
1189 | func yaml_emitter_write_bom(emitter *yaml_emitter_t) bool { | ||
1190 | if !flush(emitter) { | ||
1191 | return false | ||
1192 | } | ||
1193 | pos := emitter.buffer_pos | ||
1194 | emitter.buffer[pos+0] = '\xEF' | ||
1195 | emitter.buffer[pos+1] = '\xBB' | ||
1196 | emitter.buffer[pos+2] = '\xBF' | ||
1197 | emitter.buffer_pos += 3 | ||
1198 | return true | ||
1199 | } | ||
1200 | |||
1201 | func yaml_emitter_write_indent(emitter *yaml_emitter_t) bool { | ||
1202 | indent := emitter.indent | ||
1203 | if indent < 0 { | ||
1204 | indent = 0 | ||
1205 | } | ||
1206 | if !emitter.indention || emitter.column > indent || (emitter.column == indent && !emitter.whitespace) { | ||
1207 | if !put_break(emitter) { | ||
1208 | return false | ||
1209 | } | ||
1210 | } | ||
1211 | for emitter.column < indent { | ||
1212 | if !put(emitter, ' ') { | ||
1213 | return false | ||
1214 | } | ||
1215 | } | ||
1216 | emitter.whitespace = true | ||
1217 | emitter.indention = true | ||
1218 | return true | ||
1219 | } | ||
1220 | |||
1221 | func yaml_emitter_write_indicator(emitter *yaml_emitter_t, indicator []byte, need_whitespace, is_whitespace, is_indention bool) bool { | ||
1222 | if need_whitespace && !emitter.whitespace { | ||
1223 | if !put(emitter, ' ') { | ||
1224 | return false | ||
1225 | } | ||
1226 | } | ||
1227 | if !write_all(emitter, indicator) { | ||
1228 | return false | ||
1229 | } | ||
1230 | emitter.whitespace = is_whitespace | ||
1231 | emitter.indention = (emitter.indention && is_indention) | ||
1232 | emitter.open_ended = false | ||
1233 | return true | ||
1234 | } | ||
1235 | |||
1236 | func yaml_emitter_write_anchor(emitter *yaml_emitter_t, value []byte) bool { | ||
1237 | if !write_all(emitter, value) { | ||
1238 | return false | ||
1239 | } | ||
1240 | emitter.whitespace = false | ||
1241 | emitter.indention = false | ||
1242 | return true | ||
1243 | } | ||
1244 | |||
1245 | func yaml_emitter_write_tag_handle(emitter *yaml_emitter_t, value []byte) bool { | ||
1246 | if !emitter.whitespace { | ||
1247 | if !put(emitter, ' ') { | ||
1248 | return false | ||
1249 | } | ||
1250 | } | ||
1251 | if !write_all(emitter, value) { | ||
1252 | return false | ||
1253 | } | ||
1254 | emitter.whitespace = false | ||
1255 | emitter.indention = false | ||
1256 | return true | ||
1257 | } | ||
1258 | |||
// yaml_emitter_write_tag_content writes a tag suffix, copying alphanumerical
// characters and a fixed set of URI punctuation through unchanged and
// percent-encoding every other byte.
func yaml_emitter_write_tag_content(emitter *yaml_emitter_t, value []byte, need_whitespace bool) bool {
	if need_whitespace && !emitter.whitespace {
		if !put(emitter, ' ') {
			return false
		}
	}
	for i := 0; i < len(value); {
		var must_write bool
		switch value[i] {
		case ';', '/', '?', ':', '@', '&', '=', '+', '$', ',', '_', '.', '~', '*', '\'', '(', ')', '[', ']':
			must_write = true
		default:
			must_write = is_alpha(value, i)
		}
		if must_write {
			// Allowed character: copy it through (write advances i).
			if !write(emitter, value, &i) {
				return false
			}
		} else {
			// Anything else is emitted as %XX for each byte of its UTF-8
			// encoding.
			w := width(value[i])
			for k := 0; k < w; k++ {
				octet := value[i]
				i++
				if !put(emitter, '%') {
					return false
				}

				// High nibble as an uppercase hex digit.
				c := octet >> 4
				if c < 10 {
					c += '0'
				} else {
					c += 'A' - 10
				}
				if !put(emitter, c) {
					return false
				}

				// Low nibble as an uppercase hex digit.
				c = octet & 0x0f
				if c < 10 {
					c += '0'
				} else {
					c += 'A' - 10
				}
				if !put(emitter, c) {
					return false
				}
			}
		}
	}
	emitter.whitespace = false
	emitter.indention = false
	return true
}
1312 | |||
// yaml_emitter_write_plain_scalar writes a scalar without quoting. When
// allow_breaks is true, long lines may be wrapped at a space once the column
// passes emitter.best_width.
func yaml_emitter_write_plain_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool {
	if !emitter.whitespace {
		if !put(emitter, ' ') {
			return false
		}
	}

	spaces := false
	breaks := false
	for i := 0; i < len(value); {
		if is_space(value, i) {
			// Wrap at a single interior space once past the preferred width,
			// replacing the space with a line break; otherwise copy it.
			if allow_breaks && !spaces && emitter.column > emitter.best_width && !is_space(value, i+1) {
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
				i += width(value[i])
			} else {
				if !write(emitter, value, &i) {
					return false
				}
			}
			spaces = true
		} else if is_break(value, i) {
			// Double the first '\n' of a run so that YAML line folding
			// preserves the break on re-parse.
			if !breaks && value[i] == '\n' {
				if !put_break(emitter) {
					return false
				}
			}
			if !write_break(emitter, value, &i) {
				return false
			}
			emitter.indention = true
			breaks = true
		} else {
			// Ordinary character; re-indent first if a break run just ended.
			if breaks {
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
			}
			if !write(emitter, value, &i) {
				return false
			}
			emitter.indention = false
			spaces = false
			breaks = false
		}
	}

	emitter.whitespace = false
	emitter.indention = false
	// At the root level the output is left open-ended after a plain scalar.
	if emitter.root_context {
		emitter.open_ended = true
	}

	return true
}
1369 | |||
// yaml_emitter_write_single_quoted_scalar writes a scalar wrapped in single
// quotes, doubling any embedded ' characters. When allow_breaks is true,
// long lines may be wrapped at interior spaces.
func yaml_emitter_write_single_quoted_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool {

	if !yaml_emitter_write_indicator(emitter, []byte{'\''}, true, false, false) {
		return false
	}

	spaces := false
	breaks := false
	for i := 0; i < len(value); {
		if is_space(value, i) {
			// Wrap at an interior single space once past the preferred
			// width; spaces at either end of the value are copied literally.
			if allow_breaks && !spaces && emitter.column > emitter.best_width && i > 0 && i < len(value)-1 && !is_space(value, i+1) {
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
				i += width(value[i])
			} else {
				if !write(emitter, value, &i) {
					return false
				}
			}
			spaces = true
		} else if is_break(value, i) {
			// Double the first '\n' of a run so that YAML line folding
			// preserves the break on re-parse.
			if !breaks && value[i] == '\n' {
				if !put_break(emitter) {
					return false
				}
			}
			if !write_break(emitter, value, &i) {
				return false
			}
			emitter.indention = true
			breaks = true
		} else {
			if breaks {
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
			}
			// A single quote is escaped by writing it twice.
			if value[i] == '\'' {
				if !put(emitter, '\'') {
					return false
				}
			}
			if !write(emitter, value, &i) {
				return false
			}
			emitter.indention = false
			spaces = false
			breaks = false
		}
	}
	if !yaml_emitter_write_indicator(emitter, []byte{'\''}, false, false, false) {
		return false
	}
	emitter.whitespace = false
	emitter.indention = false
	return true
}
1428 | |||
// yaml_emitter_write_double_quoted_scalar writes a scalar wrapped in double
// quotes, backslash-escaping breaks, BOMs, quotes, backslashes, and any
// character that is unprintable (or non-ASCII when unicode output is off).
func yaml_emitter_write_double_quoted_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool {
	spaces := false
	if !yaml_emitter_write_indicator(emitter, []byte{'"'}, true, false, false) {
		return false
	}

	for i := 0; i < len(value); {
		if !is_printable(value, i) || (!emitter.unicode && !is_ascii(value, i)) ||
			is_bom(value, i) || is_break(value, i) ||
			value[i] == '"' || value[i] == '\\' {

			octet := value[i]

			// Decode the UTF-8 sequence starting at i into the rune v of
			// byte-width w.
			var w int
			var v rune
			switch {
			case octet&0x80 == 0x00:
				w, v = 1, rune(octet&0x7F)
			case octet&0xE0 == 0xC0:
				w, v = 2, rune(octet&0x1F)
			case octet&0xF0 == 0xE0:
				w, v = 3, rune(octet&0x0F)
			case octet&0xF8 == 0xF0:
				w, v = 4, rune(octet&0x07)
			}
			for k := 1; k < w; k++ {
				octet = value[i+k]
				v = (v << 6) + (rune(octet) & 0x3F)
			}
			i += w

			if !put(emitter, '\\') {
				return false
			}

			// Use the named YAML escape when one exists; otherwise fall back
			// to \xXX, \uXXXX or \UXXXXXXXX depending on the code point.
			var ok bool
			switch v {
			case 0x00:
				ok = put(emitter, '0')
			case 0x07:
				ok = put(emitter, 'a')
			case 0x08:
				ok = put(emitter, 'b')
			case 0x09:
				ok = put(emitter, 't')
			case 0x0A:
				ok = put(emitter, 'n')
			case 0x0b:
				ok = put(emitter, 'v')
			case 0x0c:
				ok = put(emitter, 'f')
			case 0x0d:
				ok = put(emitter, 'r')
			case 0x1b:
				ok = put(emitter, 'e')
			case 0x22:
				ok = put(emitter, '"')
			case 0x5c:
				ok = put(emitter, '\\')
			case 0x85:
				ok = put(emitter, 'N')
			case 0xA0:
				ok = put(emitter, '_')
			case 0x2028:
				ok = put(emitter, 'L')
			case 0x2029:
				ok = put(emitter, 'P')
			default:
				if v <= 0xFF {
					ok = put(emitter, 'x')
					w = 2
				} else if v <= 0xFFFF {
					ok = put(emitter, 'u')
					w = 4
				} else {
					ok = put(emitter, 'U')
					w = 8
				}
				// Write w uppercase hex digits, most significant first.
				for k := (w - 1) * 4; ok && k >= 0; k -= 4 {
					digit := byte((v >> uint(k)) & 0x0F)
					if digit < 10 {
						ok = put(emitter, digit+'0')
					} else {
						ok = put(emitter, digit+'A'-10)
					}
				}
			}
			if !ok {
				return false
			}
			spaces = false
		} else if is_space(value, i) {
			// Wrap at an interior space once past the preferred width; an
			// immediately following space is protected with '\'.
			if allow_breaks && !spaces && emitter.column > emitter.best_width && i > 0 && i < len(value)-1 {
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
				if is_space(value, i+1) {
					if !put(emitter, '\\') {
						return false
					}
				}
				i += width(value[i])
			} else if !write(emitter, value, &i) {
				return false
			}
			spaces = true
		} else {
			// Ordinary printable character: copy it through.
			if !write(emitter, value, &i) {
				return false
			}
			spaces = false
		}
	}
	if !yaml_emitter_write_indicator(emitter, []byte{'"'}, false, false, false) {
		return false
	}
	emitter.whitespace = false
	emitter.indention = false
	return true
}
1549 | |||
// yaml_emitter_write_block_scalar_hints writes the optional indentation and
// chomping indicators that follow a '|' or '>' block scalar introducer.
func yaml_emitter_write_block_scalar_hints(emitter *yaml_emitter_t, value []byte) bool {
	// A value starting with a blank needs an explicit indentation hint,
	// since the content indentation could not be detected otherwise.
	if is_space(value, 0) || is_break(value, 0) {
		indent_hint := []byte{'0' + byte(emitter.best_indent)}
		if !yaml_emitter_write_indicator(emitter, indent_hint, false, false, false) {
			return false
		}
	}

	emitter.open_ended = false

	// Choose the chomping hint: '-' (strip) when the value is empty or does
	// not end in a break; '+' (keep) when the trailing break is the entire
	// value or is preceded by another break; no hint otherwise.
	var chomp_hint [1]byte
	if len(value) == 0 {
		chomp_hint[0] = '-'
	} else {
		// Step back to the first byte of the final UTF-8 character.
		i := len(value) - 1
		for value[i]&0xC0 == 0x80 {
			i--
		}
		if !is_break(value, i) {
			chomp_hint[0] = '-'
		} else if i == 0 {
			chomp_hint[0] = '+'
			emitter.open_ended = true
		} else {
			// Step back one more character to see if the break is doubled.
			i--
			for value[i]&0xC0 == 0x80 {
				i--
			}
			if is_break(value, i) {
				chomp_hint[0] = '+'
				emitter.open_ended = true
			}
		}
	}
	if chomp_hint[0] != 0 {
		if !yaml_emitter_write_indicator(emitter, chomp_hint[:], false, false, false) {
			return false
		}
	}
	return true
}
1591 | |||
1592 | func yaml_emitter_write_literal_scalar(emitter *yaml_emitter_t, value []byte) bool { | ||
1593 | if !yaml_emitter_write_indicator(emitter, []byte{'|'}, true, false, false) { | ||
1594 | return false | ||
1595 | } | ||
1596 | if !yaml_emitter_write_block_scalar_hints(emitter, value) { | ||
1597 | return false | ||
1598 | } | ||
1599 | if !put_break(emitter) { | ||
1600 | return false | ||
1601 | } | ||
1602 | emitter.indention = true | ||
1603 | emitter.whitespace = true | ||
1604 | breaks := true | ||
1605 | for i := 0; i < len(value); { | ||
1606 | if is_break(value, i) { | ||
1607 | if !write_break(emitter, value, &i) { | ||
1608 | return false | ||
1609 | } | ||
1610 | emitter.indention = true | ||
1611 | breaks = true | ||
1612 | } else { | ||
1613 | if breaks { | ||
1614 | if !yaml_emitter_write_indent(emitter) { | ||
1615 | return false | ||
1616 | } | ||
1617 | } | ||
1618 | if !write(emitter, value, &i) { | ||
1619 | return false | ||
1620 | } | ||
1621 | emitter.indention = false | ||
1622 | breaks = false | ||
1623 | } | ||
1624 | } | ||
1625 | |||
1626 | return true | ||
1627 | } | ||
1628 | |||
// yaml_emitter_write_folded_scalar writes a scalar in folded block style
// (">"), wrapping long content lines at spaces once the column passes
// emitter.best_width.
func yaml_emitter_write_folded_scalar(emitter *yaml_emitter_t, value []byte) bool {
	if !yaml_emitter_write_indicator(emitter, []byte{'>'}, true, false, false) {
		return false
	}
	if !yaml_emitter_write_block_scalar_hints(emitter, value) {
		return false
	}

	if !put_break(emitter) {
		return false
	}
	emitter.indention = true
	emitter.whitespace = true

	breaks := true
	leading_spaces := true
	for i := 0; i < len(value); {
		if is_break(value, i) {
			// A '\n' ending a foldable (non-blank-leading) line may need an
			// extra break so folding does not join the lines.
			if !breaks && !leading_spaces && value[i] == '\n' {
				// NOTE(review): k scans breaks from the start of value, not
				// from i; this matches upstream go-yaml, but confirm against
				// libyaml before relying on or changing it.
				k := 0
				for is_break(value, k) {
					k += width(value[k])
				}
				if !is_blankz(value, k) {
					if !put_break(emitter) {
						return false
					}
				}
			}
			if !write_break(emitter, value, &i) {
				return false
			}
			emitter.indention = true
			breaks = true
		} else {
			if breaks {
				// First character of a new line: indent, and remember
				// whether the line starts with a blank (such lines are not
				// folded).
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
				leading_spaces = is_blank(value, i)
			}
			// Wrap at a single space once past the preferred width.
			if !breaks && is_space(value, i) && !is_space(value, i+1) && emitter.column > emitter.best_width {
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
				i += width(value[i])
			} else {
				if !write(emitter, value, &i) {
					return false
				}
			}
			emitter.indention = false
			breaks = false
		}
	}
	return true
}
diff --git a/vendor/github.com/zclconf/go-cty-yaml/encode.go b/vendor/github.com/zclconf/go-cty-yaml/encode.go new file mode 100644 index 0000000..daa1478 --- /dev/null +++ b/vendor/github.com/zclconf/go-cty-yaml/encode.go | |||
@@ -0,0 +1,189 @@ | |||
1 | package yaml | ||
2 | |||
3 | import ( | ||
4 | "bytes" | ||
5 | "fmt" | ||
6 | "strings" | ||
7 | |||
8 | "github.com/zclconf/go-cty/cty" | ||
9 | ) | ||
10 | |||
11 | func (c *Converter) marshal(v cty.Value) ([]byte, error) { | ||
12 | var buf bytes.Buffer | ||
13 | |||
14 | e := &yaml_emitter_t{} | ||
15 | yaml_emitter_initialize(e) | ||
16 | yaml_emitter_set_output_writer(e, &buf) | ||
17 | yaml_emitter_set_unicode(e, true) | ||
18 | |||
19 | var evt yaml_event_t | ||
20 | yaml_stream_start_event_initialize(&evt, yaml_UTF8_ENCODING) | ||
21 | if !yaml_emitter_emit(e, &evt) { | ||
22 | return nil, emitterError(e) | ||
23 | } | ||
24 | yaml_document_start_event_initialize(&evt, nil, nil, true) | ||
25 | if !yaml_emitter_emit(e, &evt) { | ||
26 | return nil, emitterError(e) | ||
27 | } | ||
28 | |||
29 | if err := c.marshalEmit(v, e); err != nil { | ||
30 | return nil, err | ||
31 | } | ||
32 | |||
33 | yaml_document_end_event_initialize(&evt, true) | ||
34 | if !yaml_emitter_emit(e, &evt) { | ||
35 | return nil, emitterError(e) | ||
36 | } | ||
37 | yaml_stream_end_event_initialize(&evt) | ||
38 | if !yaml_emitter_emit(e, &evt) { | ||
39 | return nil, emitterError(e) | ||
40 | } | ||
41 | |||
42 | return buf.Bytes(), nil | ||
43 | } | ||
44 | |||
45 | func (c *Converter) marshalEmit(v cty.Value, e *yaml_emitter_t) error { | ||
46 | ty := v.Type() | ||
47 | switch { | ||
48 | case v.IsNull(): | ||
49 | return c.marshalPrimitive(v, e) | ||
50 | case !v.IsKnown(): | ||
51 | return fmt.Errorf("cannot serialize unknown value as YAML") | ||
52 | case ty.IsPrimitiveType(): | ||
53 | return c.marshalPrimitive(v, e) | ||
54 | case ty.IsTupleType(), ty.IsListType(), ty.IsSetType(): | ||
55 | return c.marshalSequence(v, e) | ||
56 | case ty.IsObjectType(), ty.IsMapType(): | ||
57 | return c.marshalMapping(v, e) | ||
58 | default: | ||
59 | return fmt.Errorf("can't marshal %s as YAML", ty.FriendlyName()) | ||
60 | } | ||
61 | } | ||
62 | |||
63 | func (c *Converter) marshalPrimitive(v cty.Value, e *yaml_emitter_t) error { | ||
64 | var evt yaml_event_t | ||
65 | |||
66 | if v.IsNull() { | ||
67 | yaml_scalar_event_initialize( | ||
68 | &evt, | ||
69 | nil, | ||
70 | nil, | ||
71 | []byte("null"), | ||
72 | true, | ||
73 | true, | ||
74 | yaml_PLAIN_SCALAR_STYLE, | ||
75 | ) | ||
76 | if !yaml_emitter_emit(e, &evt) { | ||
77 | return emitterError(e) | ||
78 | } | ||
79 | return nil | ||
80 | } | ||
81 | |||
82 | switch v.Type() { | ||
83 | case cty.String: | ||
84 | str := v.AsString() | ||
85 | style := yaml_DOUBLE_QUOTED_SCALAR_STYLE | ||
86 | if strings.Contains(str, "\n") { | ||
87 | style = yaml_LITERAL_SCALAR_STYLE | ||
88 | } | ||
89 | yaml_scalar_event_initialize( | ||
90 | &evt, | ||
91 | nil, | ||
92 | nil, | ||
93 | []byte(str), | ||
94 | true, | ||
95 | true, | ||
96 | style, | ||
97 | ) | ||
98 | case cty.Number: | ||
99 | str := v.AsBigFloat().Text('f', -1) | ||
100 | yaml_scalar_event_initialize( | ||
101 | &evt, | ||
102 | nil, | ||
103 | nil, | ||
104 | []byte(str), | ||
105 | true, | ||
106 | true, | ||
107 | yaml_PLAIN_SCALAR_STYLE, | ||
108 | ) | ||
109 | case cty.Bool: | ||
110 | var str string | ||
111 | switch v { | ||
112 | case cty.True: | ||
113 | str = "true" | ||
114 | case cty.False: | ||
115 | str = "false" | ||
116 | } | ||
117 | yaml_scalar_event_initialize( | ||
118 | &evt, | ||
119 | nil, | ||
120 | nil, | ||
121 | []byte(str), | ||
122 | true, | ||
123 | true, | ||
124 | yaml_PLAIN_SCALAR_STYLE, | ||
125 | ) | ||
126 | } | ||
127 | if !yaml_emitter_emit(e, &evt) { | ||
128 | return emitterError(e) | ||
129 | } | ||
130 | return nil | ||
131 | } | ||
132 | |||
133 | func (c *Converter) marshalSequence(v cty.Value, e *yaml_emitter_t) error { | ||
134 | style := yaml_BLOCK_SEQUENCE_STYLE | ||
135 | if c.encodeAsFlow { | ||
136 | style = yaml_FLOW_SEQUENCE_STYLE | ||
137 | } | ||
138 | |||
139 | var evt yaml_event_t | ||
140 | yaml_sequence_start_event_initialize(&evt, nil, nil, true, style) | ||
141 | if !yaml_emitter_emit(e, &evt) { | ||
142 | return emitterError(e) | ||
143 | } | ||
144 | |||
145 | for it := v.ElementIterator(); it.Next(); { | ||
146 | _, v := it.Element() | ||
147 | err := c.marshalEmit(v, e) | ||
148 | if err != nil { | ||
149 | return err | ||
150 | } | ||
151 | } | ||
152 | |||
153 | yaml_sequence_end_event_initialize(&evt) | ||
154 | if !yaml_emitter_emit(e, &evt) { | ||
155 | return emitterError(e) | ||
156 | } | ||
157 | return nil | ||
158 | } | ||
159 | |||
160 | func (c *Converter) marshalMapping(v cty.Value, e *yaml_emitter_t) error { | ||
161 | style := yaml_BLOCK_MAPPING_STYLE | ||
162 | if c.encodeAsFlow { | ||
163 | style = yaml_FLOW_MAPPING_STYLE | ||
164 | } | ||
165 | |||
166 | var evt yaml_event_t | ||
167 | yaml_mapping_start_event_initialize(&evt, nil, nil, true, style) | ||
168 | if !yaml_emitter_emit(e, &evt) { | ||
169 | return emitterError(e) | ||
170 | } | ||
171 | |||
172 | for it := v.ElementIterator(); it.Next(); { | ||
173 | k, v := it.Element() | ||
174 | err := c.marshalEmit(k, e) | ||
175 | if err != nil { | ||
176 | return err | ||
177 | } | ||
178 | err = c.marshalEmit(v, e) | ||
179 | if err != nil { | ||
180 | return err | ||
181 | } | ||
182 | } | ||
183 | |||
184 | yaml_mapping_end_event_initialize(&evt) | ||
185 | if !yaml_emitter_emit(e, &evt) { | ||
186 | return emitterError(e) | ||
187 | } | ||
188 | return nil | ||
189 | } | ||
diff --git a/vendor/github.com/zclconf/go-cty-yaml/error.go b/vendor/github.com/zclconf/go-cty-yaml/error.go new file mode 100644 index 0000000..ae41c48 --- /dev/null +++ b/vendor/github.com/zclconf/go-cty-yaml/error.go | |||
@@ -0,0 +1,97 @@ | |||
1 | package yaml | ||
2 | |||
3 | import ( | ||
4 | "errors" | ||
5 | "fmt" | ||
6 | ) | ||
7 | |||
// Error is an error implementation used to report errors that correspond to
// a particular position in an input buffer.
type Error struct {
	cause        error
	Line, Column int
}

// Error implements the error interface, prefixing the underlying message
// with the 1-based source position.
func (e Error) Error() string {
	detail := e.cause.Error()
	return fmt.Sprintf("on line %d, column %d: %s", e.Line, e.Column, detail)
}

// Cause is an implementation of the interface used by
// github.com/pkg/errors.Cause, returning the underlying error without the
// position information.
func (e Error) Cause() error {
	return e.cause
}

// WrappedErrors is an implementation of github.com/hashicorp/errwrap.Wrapper
// returning the underlying error without the position information.
func (e Error) WrappedErrors() []error {
	return []error{e.cause}
}
31 | |||
32 | func parserError(p *yaml_parser_t) error { | ||
33 | var cause error | ||
34 | if len(p.problem) > 0 { | ||
35 | cause = errors.New(p.problem) | ||
36 | } else { | ||
37 | cause = errors.New("invalid YAML syntax") // useless generic error, then | ||
38 | } | ||
39 | |||
40 | return parserErrorWrap(p, cause) | ||
41 | } | ||
42 | |||
43 | func parserErrorWrap(p *yaml_parser_t, cause error) error { | ||
44 | switch { | ||
45 | case p.problem_mark.line != 0: | ||
46 | line := p.problem_mark.line | ||
47 | column := p.problem_mark.column | ||
48 | // Scanner errors don't iterate line before returning error | ||
49 | if p.error == yaml_SCANNER_ERROR { | ||
50 | line++ | ||
51 | column = 0 | ||
52 | } | ||
53 | return Error{ | ||
54 | cause: cause, | ||
55 | Line: line, | ||
56 | Column: column + 1, | ||
57 | } | ||
58 | case p.context_mark.line != 0: | ||
59 | return Error{ | ||
60 | cause: cause, | ||
61 | Line: p.context_mark.line, | ||
62 | Column: p.context_mark.column + 1, | ||
63 | } | ||
64 | default: | ||
65 | return cause | ||
66 | } | ||
67 | } | ||
68 | |||
69 | func parserErrorf(p *yaml_parser_t, f string, vals ...interface{}) error { | ||
70 | return parserErrorWrap(p, fmt.Errorf(f, vals...)) | ||
71 | } | ||
72 | |||
73 | func parseEventErrorWrap(evt *yaml_event_t, cause error) error { | ||
74 | if evt.start_mark.line == 0 { | ||
75 | // Event does not have a start mark, so we won't wrap the error at all | ||
76 | return cause | ||
77 | } | ||
78 | return Error{ | ||
79 | cause: cause, | ||
80 | Line: evt.start_mark.line, | ||
81 | Column: evt.start_mark.column + 1, | ||
82 | } | ||
83 | } | ||
84 | |||
85 | func parseEventErrorf(evt *yaml_event_t, f string, vals ...interface{}) error { | ||
86 | return parseEventErrorWrap(evt, fmt.Errorf(f, vals...)) | ||
87 | } | ||
88 | |||
89 | func emitterError(e *yaml_emitter_t) error { | ||
90 | var cause error | ||
91 | if len(e.problem) > 0 { | ||
92 | cause = errors.New(e.problem) | ||
93 | } else { | ||
94 | cause = errors.New("failed to write YAML token") // useless generic error, then | ||
95 | } | ||
96 | return cause | ||
97 | } | ||
diff --git a/vendor/github.com/zclconf/go-cty-yaml/go.mod b/vendor/github.com/zclconf/go-cty-yaml/go.mod new file mode 100644 index 0000000..3d52268 --- /dev/null +++ b/vendor/github.com/zclconf/go-cty-yaml/go.mod | |||
@@ -0,0 +1,3 @@ | |||
1 | module github.com/zclconf/go-cty-yaml | ||
2 | |||
3 | require github.com/zclconf/go-cty v1.0.0 | ||
diff --git a/vendor/github.com/zclconf/go-cty-yaml/go.sum b/vendor/github.com/zclconf/go-cty-yaml/go.sum new file mode 100644 index 0000000..841f7fc --- /dev/null +++ b/vendor/github.com/zclconf/go-cty-yaml/go.sum | |||
@@ -0,0 +1,18 @@ | |||
1 | github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/Nj9VFpLOpjS5yuumk= | ||
2 | github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= | ||
3 | github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= | ||
4 | github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= | ||
5 | github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= | ||
6 | github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= | ||
7 | github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= | ||
8 | github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= | ||
9 | github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= | ||
10 | github.com/zclconf/go-cty v1.0.0 h1:EWtv3gKe2wPLIB9hQRQJa7k/059oIfAqcEkCNnaVckk= | ||
11 | github.com/zclconf/go-cty v1.0.0/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s= | ||
12 | golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= | ||
13 | golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||
14 | golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= | ||
15 | golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= | ||
16 | google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= | ||
17 | gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= | ||
18 | gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= | ||
diff --git a/vendor/github.com/zclconf/go-cty-yaml/implied_type.go b/vendor/github.com/zclconf/go-cty-yaml/implied_type.go new file mode 100644 index 0000000..5b7b068 --- /dev/null +++ b/vendor/github.com/zclconf/go-cty-yaml/implied_type.go | |||
@@ -0,0 +1,268 @@ | |||
1 | package yaml | ||
2 | |||
3 | import ( | ||
4 | "errors" | ||
5 | "fmt" | ||
6 | |||
7 | "github.com/zclconf/go-cty/cty" | ||
8 | "github.com/zclconf/go-cty/cty/convert" | ||
9 | ) | ||
10 | |||
11 | func (c *Converter) impliedType(src []byte) (cty.Type, error) { | ||
12 | p := &yaml_parser_t{} | ||
13 | if !yaml_parser_initialize(p) { | ||
14 | return cty.NilType, errors.New("failed to initialize YAML parser") | ||
15 | } | ||
16 | if len(src) == 0 { | ||
17 | src = []byte{'\n'} | ||
18 | } | ||
19 | |||
20 | an := &typeAnalysis{ | ||
21 | anchorsPending: map[string]int{}, | ||
22 | anchorTypes: map[string]cty.Type{}, | ||
23 | } | ||
24 | |||
25 | yaml_parser_set_input_string(p, src) | ||
26 | |||
27 | var evt yaml_event_t | ||
28 | if !yaml_parser_parse(p, &evt) { | ||
29 | return cty.NilType, parserError(p) | ||
30 | } | ||
31 | if evt.typ != yaml_STREAM_START_EVENT { | ||
32 | return cty.NilType, parseEventErrorf(&evt, "missing stream start token") | ||
33 | } | ||
34 | if !yaml_parser_parse(p, &evt) { | ||
35 | return cty.NilType, parserError(p) | ||
36 | } | ||
37 | if evt.typ != yaml_DOCUMENT_START_EVENT { | ||
38 | return cty.NilType, parseEventErrorf(&evt, "missing start of document") | ||
39 | } | ||
40 | |||
41 | ty, err := c.impliedTypeParse(an, p) | ||
42 | if err != nil { | ||
43 | return cty.NilType, err | ||
44 | } | ||
45 | |||
46 | if !yaml_parser_parse(p, &evt) { | ||
47 | return cty.NilType, parserError(p) | ||
48 | } | ||
49 | if evt.typ == yaml_DOCUMENT_START_EVENT { | ||
50 | return cty.NilType, parseEventErrorf(&evt, "only a single document is allowed") | ||
51 | } | ||
52 | if evt.typ != yaml_DOCUMENT_END_EVENT { | ||
53 | return cty.NilType, parseEventErrorf(&evt, "unexpected extra content (%s) after value", evt.typ.String()) | ||
54 | } | ||
55 | if !yaml_parser_parse(p, &evt) { | ||
56 | return cty.NilType, parserError(p) | ||
57 | } | ||
58 | if evt.typ != yaml_STREAM_END_EVENT { | ||
59 | return cty.NilType, parseEventErrorf(&evt, "unexpected extra content after value") | ||
60 | } | ||
61 | |||
62 | return ty, err | ||
63 | } | ||
64 | |||
65 | func (c *Converter) impliedTypeParse(an *typeAnalysis, p *yaml_parser_t) (cty.Type, error) { | ||
66 | var evt yaml_event_t | ||
67 | if !yaml_parser_parse(p, &evt) { | ||
68 | return cty.NilType, parserError(p) | ||
69 | } | ||
70 | return c.impliedTypeParseRemainder(an, &evt, p) | ||
71 | } | ||
72 | |||
73 | func (c *Converter) impliedTypeParseRemainder(an *typeAnalysis, evt *yaml_event_t, p *yaml_parser_t) (cty.Type, error) { | ||
74 | switch evt.typ { | ||
75 | case yaml_SCALAR_EVENT: | ||
76 | return c.impliedTypeScalar(an, evt, p) | ||
77 | case yaml_ALIAS_EVENT: | ||
78 | return c.impliedTypeAlias(an, evt, p) | ||
79 | case yaml_MAPPING_START_EVENT: | ||
80 | return c.impliedTypeMapping(an, evt, p) | ||
81 | case yaml_SEQUENCE_START_EVENT: | ||
82 | return c.impliedTypeSequence(an, evt, p) | ||
83 | case yaml_DOCUMENT_START_EVENT: | ||
84 | return cty.NilType, parseEventErrorf(evt, "only a single document is allowed") | ||
85 | case yaml_STREAM_END_EVENT: | ||
86 | // Decoding an empty buffer, probably | ||
87 | return cty.NilType, parseEventErrorf(evt, "expecting value but found end of stream") | ||
88 | default: | ||
89 | // Should never happen; the above should be comprehensive | ||
90 | return cty.NilType, parseEventErrorf(evt, "unexpected parser event %s", evt.typ.String()) | ||
91 | } | ||
92 | } | ||
93 | |||
94 | func (c *Converter) impliedTypeScalar(an *typeAnalysis, evt *yaml_event_t, p *yaml_parser_t) (cty.Type, error) { | ||
95 | src := evt.value | ||
96 | tag := string(evt.tag) | ||
97 | anchor := string(evt.anchor) | ||
98 | implicit := evt.implicit | ||
99 | |||
100 | if len(anchor) > 0 { | ||
101 | an.beginAnchor(anchor) | ||
102 | } | ||
103 | |||
104 | var ty cty.Type | ||
105 | switch { | ||
106 | case tag == "" && !implicit: | ||
107 | // Untagged explicit string | ||
108 | ty = cty.String | ||
109 | default: | ||
110 | v, err := c.resolveScalar(tag, string(src), yaml_scalar_style_t(evt.style)) | ||
111 | if err != nil { | ||
112 | return cty.NilType, parseEventErrorWrap(evt, err) | ||
113 | } | ||
114 | if v.RawEquals(mergeMappingVal) { | ||
115 | // In any context other than a mapping key, this is just a plain string | ||
116 | ty = cty.String | ||
117 | } else { | ||
118 | ty = v.Type() | ||
119 | } | ||
120 | } | ||
121 | |||
122 | if len(anchor) > 0 { | ||
123 | an.completeAnchor(anchor, ty) | ||
124 | } | ||
125 | return ty, nil | ||
126 | } | ||
127 | |||
// impliedTypeMapping consumes the parser events for a YAML mapping (whose
// start event has already been read into evt) and returns the cty object
// type that the mapping implies. Keys must be strings (or convertible to
// string); the special merge key folds another mapping's attributes in.
func (c *Converter) impliedTypeMapping(an *typeAnalysis, evt *yaml_event_t, p *yaml_parser_t) (cty.Type, error) {
	tag := string(evt.tag)
	anchor := string(evt.anchor)

	// Only the generic mapping tag (or no tag) is acceptable here.
	if tag != "" && tag != yaml_MAP_TAG {
		return cty.NilType, parseEventErrorf(evt, "can't interpret mapping as %s", tag)
	}

	if anchor != "" {
		// Mark the anchor as in-progress so that aliases appearing inside
		// the mapping's own definition are rejected by anchorType.
		an.beginAnchor(anchor)
	}

	atys := make(map[string]cty.Type)
	for {
		var nextEvt yaml_event_t
		if !yaml_parser_parse(p, &nextEvt) {
			return cty.NilType, parserError(p)
		}
		if nextEvt.typ == yaml_MAPPING_END_EVENT {
			// End of the mapping: build the object type from the collected
			// attribute types and finalize the anchor, if any.
			ty := cty.Object(atys)
			if anchor != "" {
				an.completeAnchor(anchor, ty)
			}
			return ty, nil
		}

		if nextEvt.typ != yaml_SCALAR_EVENT {
			return cty.NilType, parseEventErrorf(&nextEvt, "only strings are allowed as mapping keys")
		}
		keyVal, err := c.resolveScalar(string(nextEvt.tag), string(nextEvt.value), yaml_scalar_style_t(nextEvt.style))
		if err != nil {
			return cty.NilType, err
		}
		if keyVal.RawEquals(mergeMappingVal) {
			// Merging the value (which must be a mapping) into our mapping,
			// then.
			ty, err := c.impliedTypeParse(an, p)
			if err != nil {
				return cty.NilType, err
			}
			if !ty.IsObjectType() {
				return cty.NilType, parseEventErrorf(&nextEvt, "cannot merge %s into mapping", ty.FriendlyName())
			}
			// Merged attributes overwrite any previously-seen keys of the
			// same name.
			for name, aty := range ty.AttributeTypes() {
				atys[name] = aty
			}
			continue
		}
		// Coerce the key to a string; keys of any other type are an error.
		if keyValStr, err := convert.Convert(keyVal, cty.String); err == nil {
			keyVal = keyValStr
		} else {
			return cty.NilType, parseEventErrorf(&nextEvt, "only strings are allowed as mapping keys")
		}
		if keyVal.IsNull() {
			return cty.NilType, parseEventErrorf(&nextEvt, "mapping key cannot be null")
		}
		if !keyVal.IsKnown() {
			return cty.NilType, parseEventErrorf(&nextEvt, "mapping key must be known")
		}
		// Recursively infer the value's type and record it under this key.
		valTy, err := c.impliedTypeParse(an, p)
		if err != nil {
			return cty.NilType, err
		}

		atys[keyVal.AsString()] = valTy
	}
}
195 | |||
196 | func (c *Converter) impliedTypeSequence(an *typeAnalysis, evt *yaml_event_t, p *yaml_parser_t) (cty.Type, error) { | ||
197 | tag := string(evt.tag) | ||
198 | anchor := string(evt.anchor) | ||
199 | |||
200 | if tag != "" && tag != yaml_SEQ_TAG { | ||
201 | return cty.NilType, parseEventErrorf(evt, "can't interpret sequence as %s", tag) | ||
202 | } | ||
203 | |||
204 | if anchor != "" { | ||
205 | an.beginAnchor(anchor) | ||
206 | } | ||
207 | |||
208 | var atys []cty.Type | ||
209 | for { | ||
210 | var nextEvt yaml_event_t | ||
211 | if !yaml_parser_parse(p, &nextEvt) { | ||
212 | return cty.NilType, parserError(p) | ||
213 | } | ||
214 | if nextEvt.typ == yaml_SEQUENCE_END_EVENT { | ||
215 | ty := cty.Tuple(atys) | ||
216 | if anchor != "" { | ||
217 | an.completeAnchor(anchor, ty) | ||
218 | } | ||
219 | return ty, nil | ||
220 | } | ||
221 | |||
222 | valTy, err := c.impliedTypeParseRemainder(an, &nextEvt, p) | ||
223 | if err != nil { | ||
224 | return cty.NilType, err | ||
225 | } | ||
226 | |||
227 | atys = append(atys, valTy) | ||
228 | } | ||
229 | } | ||
230 | |||
231 | func (c *Converter) impliedTypeAlias(an *typeAnalysis, evt *yaml_event_t, p *yaml_parser_t) (cty.Type, error) { | ||
232 | ty, err := an.anchorType(string(evt.anchor)) | ||
233 | if err != nil { | ||
234 | err = parseEventErrorWrap(evt, err) | ||
235 | } | ||
236 | return ty, err | ||
237 | } | ||
238 | |||
239 | type typeAnalysis struct { | ||
240 | anchorsPending map[string]int | ||
241 | anchorTypes map[string]cty.Type | ||
242 | } | ||
243 | |||
244 | func (an *typeAnalysis) beginAnchor(name string) { | ||
245 | an.anchorsPending[name]++ | ||
246 | } | ||
247 | |||
248 | func (an *typeAnalysis) completeAnchor(name string, ty cty.Type) { | ||
249 | an.anchorsPending[name]-- | ||
250 | if an.anchorsPending[name] == 0 { | ||
251 | delete(an.anchorsPending, name) | ||
252 | } | ||
253 | an.anchorTypes[name] = ty | ||
254 | } | ||
255 | |||
256 | func (an *typeAnalysis) anchorType(name string) (cty.Type, error) { | ||
257 | if _, pending := an.anchorsPending[name]; pending { | ||
258 | // YAML normally allows self-referencing structures, but cty cannot | ||
259 | // represent them (it requires all structures to be finite) so we | ||
260 | // must fail here. | ||
261 | return cty.NilType, fmt.Errorf("cannot refer to anchor %q from inside its own definition", name) | ||
262 | } | ||
263 | ty, ok := an.anchorTypes[name] | ||
264 | if !ok { | ||
265 | return cty.NilType, fmt.Errorf("reference to undefined anchor %q", name) | ||
266 | } | ||
267 | return ty, nil | ||
268 | } | ||
diff --git a/vendor/github.com/zclconf/go-cty-yaml/parserc.go b/vendor/github.com/zclconf/go-cty-yaml/parserc.go new file mode 100644 index 0000000..81d05df --- /dev/null +++ b/vendor/github.com/zclconf/go-cty-yaml/parserc.go | |||
@@ -0,0 +1,1095 @@ | |||
1 | package yaml | ||
2 | |||
3 | import ( | ||
4 | "bytes" | ||
5 | ) | ||
6 | |||
7 | // The parser implements the following grammar: | ||
8 | // | ||
9 | // stream ::= STREAM-START implicit_document? explicit_document* STREAM-END | ||
10 | // implicit_document ::= block_node DOCUMENT-END* | ||
11 | // explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* | ||
12 | // block_node_or_indentless_sequence ::= | ||
13 | // ALIAS | ||
14 | // | properties (block_content | indentless_block_sequence)? | ||
15 | // | block_content | ||
16 | // | indentless_block_sequence | ||
17 | // block_node ::= ALIAS | ||
18 | // | properties block_content? | ||
19 | // | block_content | ||
20 | // flow_node ::= ALIAS | ||
21 | // | properties flow_content? | ||
22 | // | flow_content | ||
23 | // properties ::= TAG ANCHOR? | ANCHOR TAG? | ||
24 | // block_content ::= block_collection | flow_collection | SCALAR | ||
25 | // flow_content ::= flow_collection | SCALAR | ||
26 | // block_collection ::= block_sequence | block_mapping | ||
27 | // flow_collection ::= flow_sequence | flow_mapping | ||
28 | // block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END | ||
29 | // indentless_sequence ::= (BLOCK-ENTRY block_node?)+ | ||
30 | // block_mapping ::= BLOCK-MAPPING_START | ||
31 | // ((KEY block_node_or_indentless_sequence?)? | ||
32 | // (VALUE block_node_or_indentless_sequence?)?)* | ||
33 | // BLOCK-END | ||
34 | // flow_sequence ::= FLOW-SEQUENCE-START | ||
35 | // (flow_sequence_entry FLOW-ENTRY)* | ||
36 | // flow_sequence_entry? | ||
37 | // FLOW-SEQUENCE-END | ||
38 | // flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? | ||
39 | // flow_mapping ::= FLOW-MAPPING-START | ||
40 | // (flow_mapping_entry FLOW-ENTRY)* | ||
41 | // flow_mapping_entry? | ||
42 | // FLOW-MAPPING-END | ||
43 | // flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? | ||
44 | |||
45 | // Peek the next token in the token queue. | ||
46 | func peek_token(parser *yaml_parser_t) *yaml_token_t { | ||
47 | if parser.token_available || yaml_parser_fetch_more_tokens(parser) { | ||
48 | return &parser.tokens[parser.tokens_head] | ||
49 | } | ||
50 | return nil | ||
51 | } | ||
52 | |||
// Remove the next token from the queue (must be called after peek_token).
func skip_token(parser *yaml_parser_t) {
	parser.token_available = false
	parser.tokens_parsed++
	// Remember whether the consumed token was STREAM-END so that
	// yaml_parser_parse stops producing events afterwards.
	parser.stream_end_produced = parser.tokens[parser.tokens_head].typ == yaml_STREAM_END_TOKEN
	parser.tokens_head++
}
60 | |||
// Get the next event.
// yaml_parser_parse writes the next parsing event into *event, returning
// false only when the state machine reports a failure. Once the stream has
// ended (or an error has been recorded) it returns true with a zeroed event.
func yaml_parser_parse(parser *yaml_parser_t, event *yaml_event_t) bool {
	// Erase the event object.
	*event = yaml_event_t{}

	// No events after the end of the stream or error.
	if parser.stream_end_produced || parser.error != yaml_NO_ERROR || parser.state == yaml_PARSE_END_STATE {
		return true
	}

	// Generate the next event.
	return yaml_parser_state_machine(parser, event)
}
74 | |||
// Set parser error.
// yaml_parser_set_parser_error records a parse failure with its position and
// always returns false, so callers can `return yaml_parser_set_parser_error(...)`.
func yaml_parser_set_parser_error(parser *yaml_parser_t, problem string, problem_mark yaml_mark_t) bool {
	parser.error = yaml_PARSER_ERROR
	parser.problem = problem
	parser.problem_mark = problem_mark
	return false
}
82 | |||
// yaml_parser_set_parser_error_context records a parse failure along with a
// secondary "context" position describing the enclosing construct. Always
// returns false for use as a direct return value.
func yaml_parser_set_parser_error_context(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string, problem_mark yaml_mark_t) bool {
	parser.error = yaml_PARSER_ERROR
	parser.context = context
	parser.context_mark = context_mark
	parser.problem = problem
	parser.problem_mark = problem_mark
	return false
}
91 | |||
// State dispatcher.
// yaml_parser_state_machine invokes the parse function corresponding to the
// parser's current state; each of those functions produces one event and
// advances parser.state (or pushes/pops parser.states).
func yaml_parser_state_machine(parser *yaml_parser_t, event *yaml_event_t) bool {
	//trace("yaml_parser_state_machine", "state:", parser.state.String())

	switch parser.state {
	case yaml_PARSE_STREAM_START_STATE:
		return yaml_parser_parse_stream_start(parser, event)

	case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE:
		return yaml_parser_parse_document_start(parser, event, true)

	case yaml_PARSE_DOCUMENT_START_STATE:
		return yaml_parser_parse_document_start(parser, event, false)

	case yaml_PARSE_DOCUMENT_CONTENT_STATE:
		return yaml_parser_parse_document_content(parser, event)

	case yaml_PARSE_DOCUMENT_END_STATE:
		return yaml_parser_parse_document_end(parser, event)

	case yaml_PARSE_BLOCK_NODE_STATE:
		return yaml_parser_parse_node(parser, event, true, false)

	case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE:
		return yaml_parser_parse_node(parser, event, true, true)

	case yaml_PARSE_FLOW_NODE_STATE:
		return yaml_parser_parse_node(parser, event, false, false)

	case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE:
		return yaml_parser_parse_block_sequence_entry(parser, event, true)

	case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE:
		return yaml_parser_parse_block_sequence_entry(parser, event, false)

	case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE:
		return yaml_parser_parse_indentless_sequence_entry(parser, event)

	case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE:
		return yaml_parser_parse_block_mapping_key(parser, event, true)

	case yaml_PARSE_BLOCK_MAPPING_KEY_STATE:
		return yaml_parser_parse_block_mapping_key(parser, event, false)

	case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE:
		return yaml_parser_parse_block_mapping_value(parser, event)

	case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE:
		return yaml_parser_parse_flow_sequence_entry(parser, event, true)

	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE:
		return yaml_parser_parse_flow_sequence_entry(parser, event, false)

	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE:
		return yaml_parser_parse_flow_sequence_entry_mapping_key(parser, event)

	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE:
		return yaml_parser_parse_flow_sequence_entry_mapping_value(parser, event)

	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE:
		return yaml_parser_parse_flow_sequence_entry_mapping_end(parser, event)

	case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE:
		return yaml_parser_parse_flow_mapping_key(parser, event, true)

	case yaml_PARSE_FLOW_MAPPING_KEY_STATE:
		return yaml_parser_parse_flow_mapping_key(parser, event, false)

	case yaml_PARSE_FLOW_MAPPING_VALUE_STATE:
		return yaml_parser_parse_flow_mapping_value(parser, event, false)

	case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE:
		return yaml_parser_parse_flow_mapping_value(parser, event, true)

	default:
		panic("invalid parser state")
	}
}
170 | |||
171 | // Parse the production: | ||
172 | // stream ::= STREAM-START implicit_document? explicit_document* STREAM-END | ||
173 | // ************ | ||
174 | func yaml_parser_parse_stream_start(parser *yaml_parser_t, event *yaml_event_t) bool { | ||
175 | token := peek_token(parser) | ||
176 | if token == nil { | ||
177 | return false | ||
178 | } | ||
179 | if token.typ != yaml_STREAM_START_TOKEN { | ||
180 | return yaml_parser_set_parser_error(parser, "did not find expected <stream-start>", token.start_mark) | ||
181 | } | ||
182 | parser.state = yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE | ||
183 | *event = yaml_event_t{ | ||
184 | typ: yaml_STREAM_START_EVENT, | ||
185 | start_mark: token.start_mark, | ||
186 | end_mark: token.end_mark, | ||
187 | encoding: token.encoding, | ||
188 | } | ||
189 | skip_token(parser) | ||
190 | return true | ||
191 | } | ||
192 | |||
// Parse the productions:
// implicit_document    ::= block_node DOCUMENT-END*
//                          *
// explicit_document    ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
//                          *************************
// Produces a DOCUMENT-START event (implicit or explicit) or, at the end of
// the input, the STREAM-END event.
func yaml_parser_parse_document_start(parser *yaml_parser_t, event *yaml_event_t, implicit bool) bool {

	token := peek_token(parser)
	if token == nil {
		return false
	}

	// Parse extra document end indicators.
	if !implicit {
		for token.typ == yaml_DOCUMENT_END_TOKEN {
			skip_token(parser)
			token = peek_token(parser)
			if token == nil {
				return false
			}
		}
	}

	if implicit && token.typ != yaml_VERSION_DIRECTIVE_TOKEN &&
		token.typ != yaml_TAG_DIRECTIVE_TOKEN &&
		token.typ != yaml_DOCUMENT_START_TOKEN &&
		token.typ != yaml_STREAM_END_TOKEN {
		// Parse an implicit document.
		if !yaml_parser_process_directives(parser, nil, nil) {
			return false
		}
		parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE)
		parser.state = yaml_PARSE_BLOCK_NODE_STATE

		*event = yaml_event_t{
			typ:        yaml_DOCUMENT_START_EVENT,
			start_mark: token.start_mark,
			end_mark:   token.end_mark,
		}
		// Note: no skip_token here — the peeked token is the document's
		// content and is left for the block-node state to consume.

	} else if token.typ != yaml_STREAM_END_TOKEN {
		// Parse an explicit document.
		var version_directive *yaml_version_directive_t
		var tag_directives []yaml_tag_directive_t
		start_mark := token.start_mark
		if !yaml_parser_process_directives(parser, &version_directive, &tag_directives) {
			return false
		}
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_DOCUMENT_START_TOKEN {
			yaml_parser_set_parser_error(parser,
				"did not find expected <document start>", token.start_mark)
			return false
		}
		parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE)
		parser.state = yaml_PARSE_DOCUMENT_CONTENT_STATE
		end_mark := token.end_mark

		*event = yaml_event_t{
			typ:               yaml_DOCUMENT_START_EVENT,
			start_mark:        start_mark,
			end_mark:          end_mark,
			version_directive: version_directive,
			tag_directives:    tag_directives,
			implicit:          false,
		}
		skip_token(parser)

	} else {
		// Parse the stream end.
		parser.state = yaml_PARSE_END_STATE
		*event = yaml_event_t{
			typ:        yaml_STREAM_END_EVENT,
			start_mark: token.start_mark,
			end_mark:   token.end_mark,
		}
		skip_token(parser)
	}

	return true
}
277 | |||
278 | // Parse the productions: | ||
279 | // explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* | ||
280 | // *********** | ||
281 | // | ||
282 | func yaml_parser_parse_document_content(parser *yaml_parser_t, event *yaml_event_t) bool { | ||
283 | token := peek_token(parser) | ||
284 | if token == nil { | ||
285 | return false | ||
286 | } | ||
287 | if token.typ == yaml_VERSION_DIRECTIVE_TOKEN || | ||
288 | token.typ == yaml_TAG_DIRECTIVE_TOKEN || | ||
289 | token.typ == yaml_DOCUMENT_START_TOKEN || | ||
290 | token.typ == yaml_DOCUMENT_END_TOKEN || | ||
291 | token.typ == yaml_STREAM_END_TOKEN { | ||
292 | parser.state = parser.states[len(parser.states)-1] | ||
293 | parser.states = parser.states[:len(parser.states)-1] | ||
294 | return yaml_parser_process_empty_scalar(parser, event, | ||
295 | token.start_mark) | ||
296 | } | ||
297 | return yaml_parser_parse_node(parser, event, true, false) | ||
298 | } | ||
299 | |||
300 | // Parse the productions: | ||
301 | // implicit_document ::= block_node DOCUMENT-END* | ||
302 | // ************* | ||
303 | // explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* | ||
304 | // | ||
305 | func yaml_parser_parse_document_end(parser *yaml_parser_t, event *yaml_event_t) bool { | ||
306 | token := peek_token(parser) | ||
307 | if token == nil { | ||
308 | return false | ||
309 | } | ||
310 | |||
311 | start_mark := token.start_mark | ||
312 | end_mark := token.start_mark | ||
313 | |||
314 | implicit := true | ||
315 | if token.typ == yaml_DOCUMENT_END_TOKEN { | ||
316 | end_mark = token.end_mark | ||
317 | skip_token(parser) | ||
318 | implicit = false | ||
319 | } | ||
320 | |||
321 | parser.tag_directives = parser.tag_directives[:0] | ||
322 | |||
323 | parser.state = yaml_PARSE_DOCUMENT_START_STATE | ||
324 | *event = yaml_event_t{ | ||
325 | typ: yaml_DOCUMENT_END_EVENT, | ||
326 | start_mark: start_mark, | ||
327 | end_mark: end_mark, | ||
328 | implicit: implicit, | ||
329 | } | ||
330 | return true | ||
331 | } | ||
332 | |||
// Parse the productions:
// block_node_or_indentless_sequence ::=
//     ALIAS
//     | properties (block_content | indentless_block_sequence)?
//     | block_content | indentless_block_sequence
// block_node ::= ALIAS
//     | properties block_content?
//     | block_content
// flow_node ::= ALIAS
//     | properties flow_content?
//     | flow_content
// properties ::= TAG ANCHOR? | ANCHOR TAG?
// block_content ::= block_collection | flow_collection | SCALAR
// flow_content ::= flow_collection | SCALAR
//
// block selects the block productions; indentless_sequence additionally
// permits an indentless "-" sequence (used for block mapping values).
func yaml_parser_parse_node(parser *yaml_parser_t, event *yaml_event_t, block, indentless_sequence bool) bool {
	//defer trace("yaml_parser_parse_node", "block:", block, "indentless_sequence:", indentless_sequence)()

	token := peek_token(parser)
	if token == nil {
		return false
	}

	// An alias is a complete node by itself: emit it and pop the state stack.
	if token.typ == yaml_ALIAS_TOKEN {
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]
		*event = yaml_event_t{
			typ:        yaml_ALIAS_EVENT,
			start_mark: token.start_mark,
			end_mark:   token.end_mark,
			anchor:     token.value,
		}
		skip_token(parser)
		return true
	}

	start_mark := token.start_mark
	end_mark := token.start_mark

	// Collect the optional node properties, in either order:
	// ANCHOR TAG? or TAG ANCHOR?.
	var tag_token bool
	var tag_handle, tag_suffix, anchor []byte
	var tag_mark yaml_mark_t
	if token.typ == yaml_ANCHOR_TOKEN {
		anchor = token.value
		start_mark = token.start_mark
		end_mark = token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ == yaml_TAG_TOKEN {
			tag_token = true
			tag_handle = token.value
			tag_suffix = token.suffix
			tag_mark = token.start_mark
			end_mark = token.end_mark
			skip_token(parser)
			token = peek_token(parser)
			if token == nil {
				return false
			}
		}
	} else if token.typ == yaml_TAG_TOKEN {
		tag_token = true
		tag_handle = token.value
		tag_suffix = token.suffix
		start_mark = token.start_mark
		tag_mark = token.start_mark
		end_mark = token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ == yaml_ANCHOR_TOKEN {
			anchor = token.value
			end_mark = token.end_mark
			skip_token(parser)
			token = peek_token(parser)
			if token == nil {
				return false
			}
		}
	}

	// Resolve the tag handle against the registered %TAG directives. An
	// empty handle means the suffix is already a verbatim tag.
	var tag []byte
	if tag_token {
		if len(tag_handle) == 0 {
			tag = tag_suffix
			tag_suffix = nil
		} else {
			for i := range parser.tag_directives {
				if bytes.Equal(parser.tag_directives[i].handle, tag_handle) {
					tag = append([]byte(nil), parser.tag_directives[i].prefix...)
					tag = append(tag, tag_suffix...)
					break
				}
			}
			if len(tag) == 0 {
				yaml_parser_set_parser_error_context(parser,
					"while parsing a node", start_mark,
					"found undefined tag handle", tag_mark)
				return false
			}
		}
	}

	// With no explicit tag, the node's type is implicit (left to resolution).
	implicit := len(tag) == 0
	if indentless_sequence && token.typ == yaml_BLOCK_ENTRY_TOKEN {
		end_mark = token.end_mark
		parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE
		*event = yaml_event_t{
			typ:        yaml_SEQUENCE_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE),
		}
		return true
	}
	if token.typ == yaml_SCALAR_TOKEN {
		// plain_implicit / quoted_implicit record which implicit-resolution
		// rule applies ("!" alone forces plain resolution).
		var plain_implicit, quoted_implicit bool
		end_mark = token.end_mark
		if (len(tag) == 0 && token.style == yaml_PLAIN_SCALAR_STYLE) || (len(tag) == 1 && tag[0] == '!') {
			plain_implicit = true
		} else if len(tag) == 0 {
			quoted_implicit = true
		}
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]

		*event = yaml_event_t{
			typ:             yaml_SCALAR_EVENT,
			start_mark:      start_mark,
			end_mark:        end_mark,
			anchor:          anchor,
			tag:             tag,
			value:           token.value,
			implicit:        plain_implicit,
			quoted_implicit: quoted_implicit,
			style:           yaml_style_t(token.style),
		}
		skip_token(parser)
		return true
	}
	if token.typ == yaml_FLOW_SEQUENCE_START_TOKEN {
		// [Go] Some of the events below can be merged as they differ only on style.
		end_mark = token.end_mark
		parser.state = yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE
		*event = yaml_event_t{
			typ:        yaml_SEQUENCE_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_FLOW_SEQUENCE_STYLE),
		}
		return true
	}
	if token.typ == yaml_FLOW_MAPPING_START_TOKEN {
		end_mark = token.end_mark
		parser.state = yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE
		*event = yaml_event_t{
			typ:        yaml_MAPPING_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_FLOW_MAPPING_STYLE),
		}
		return true
	}
	if block && token.typ == yaml_BLOCK_SEQUENCE_START_TOKEN {
		end_mark = token.end_mark
		parser.state = yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE
		*event = yaml_event_t{
			typ:        yaml_SEQUENCE_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE),
		}
		return true
	}
	if block && token.typ == yaml_BLOCK_MAPPING_START_TOKEN {
		end_mark = token.end_mark
		parser.state = yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE
		*event = yaml_event_t{
			typ:        yaml_MAPPING_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_BLOCK_MAPPING_STYLE),
		}
		return true
	}
	if len(anchor) > 0 || len(tag) > 0 {
		// Properties with no following content: an empty scalar carries them.
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]

		*event = yaml_event_t{
			typ:             yaml_SCALAR_EVENT,
			start_mark:      start_mark,
			end_mark:        end_mark,
			anchor:          anchor,
			tag:             tag,
			implicit:        implicit,
			quoted_implicit: false,
			style:           yaml_style_t(yaml_PLAIN_SCALAR_STYLE),
		}
		return true
	}

	context := "while parsing a flow node"
	if block {
		context = "while parsing a block node"
	}
	yaml_parser_set_parser_error_context(parser, context, start_mark,
		"did not find expected node content", token.start_mark)
	return false
}
574 | |||
// Parse the productions:
// block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
//                    ********************  ***********  *            *********
//
// On the first call the BLOCK-SEQUENCE-START token is consumed and its mark
// is pushed so later errors can point at where the collection began.
func yaml_parser_parse_block_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
	if first {
		// NOTE(review): unlike the other call sites in this file, this
		// peek_token result is used without a nil check; presumably the
		// token is already cached by the state that selected this
		// production — TODO confirm.
		token := peek_token(parser)
		parser.marks = append(parser.marks, token.start_mark)
		skip_token(parser)
	}

	token := peek_token(parser)
	if token == nil {
		return false
	}

	if token.typ == yaml_BLOCK_ENTRY_TOKEN {
		mark := token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_BLOCK_ENTRY_TOKEN && token.typ != yaml_BLOCK_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE)
			return yaml_parser_parse_node(parser, event, true, false)
		} else {
			// "-" with no content: the entry is an empty scalar.
			parser.state = yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE
			return yaml_parser_process_empty_scalar(parser, event, mark)
		}
	}
	if token.typ == yaml_BLOCK_END_TOKEN {
		// End of the sequence: pop the state and the start mark.
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]
		parser.marks = parser.marks[:len(parser.marks)-1]

		*event = yaml_event_t{
			typ:        yaml_SEQUENCE_END_EVENT,
			start_mark: token.start_mark,
			end_mark:   token.end_mark,
		}

		skip_token(parser)
		return true
	}

	context_mark := parser.marks[len(parser.marks)-1]
	parser.marks = parser.marks[:len(parser.marks)-1]
	return yaml_parser_set_parser_error_context(parser,
		"while parsing a block collection", context_mark,
		"did not find expected '-' indicator", token.start_mark)
}
627 | |||
// Parse the productions:
// indentless_sequence ::= (BLOCK-ENTRY block_node?)+
//                          ***********  *
//
// An indentless sequence has no BLOCK-SEQUENCE-START/BLOCK-END pair, so the
// sequence ends as soon as a token other than BLOCK-ENTRY appears.
func yaml_parser_parse_indentless_sequence_entry(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}

	if token.typ == yaml_BLOCK_ENTRY_TOKEN {
		mark := token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_BLOCK_ENTRY_TOKEN &&
			token.typ != yaml_KEY_TOKEN &&
			token.typ != yaml_VALUE_TOKEN &&
			token.typ != yaml_BLOCK_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE)
			return yaml_parser_parse_node(parser, event, true, false)
		}
		// "-" with no content: the entry is an empty scalar.
		parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE
		return yaml_parser_process_empty_scalar(parser, event, mark)
	}
	// Any other token ends the sequence; it is left for the popped state.
	parser.state = parser.states[len(parser.states)-1]
	parser.states = parser.states[:len(parser.states)-1]

	*event = yaml_event_t{
		typ:        yaml_SEQUENCE_END_EVENT,
		start_mark: token.start_mark,
		end_mark:   token.start_mark, // [Go] Shouldn't this be token.end_mark?
	}
	return true
}
664 | |||
// Parse the productions:
// block_mapping ::= BLOCK-MAPPING_START
//                   *******************
//                   ((KEY block_node_or_indentless_sequence?)?
//                     ***  *
//                   (VALUE block_node_or_indentless_sequence?)?)*
//                   BLOCK-END
//                   *********
//
// On the first call the BLOCK-MAPPING-START token is consumed and its mark
// is pushed for use in error messages about this collection.
func yaml_parser_parse_block_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
	if first {
		// NOTE(review): peek_token result used without a nil check here,
		// unlike elsewhere in this file — presumably already cached.
		token := peek_token(parser)
		parser.marks = append(parser.marks, token.start_mark)
		skip_token(parser)
	}

	token := peek_token(parser)
	if token == nil {
		return false
	}

	if token.typ == yaml_KEY_TOKEN {
		mark := token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_KEY_TOKEN &&
			token.typ != yaml_VALUE_TOKEN &&
			token.typ != yaml_BLOCK_END_TOKEN {
			// Keys may be indentless sequences, hence the final true.
			parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_VALUE_STATE)
			return yaml_parser_parse_node(parser, event, true, true)
		} else {
			// "?" with no content: the key is an empty scalar.
			parser.state = yaml_PARSE_BLOCK_MAPPING_VALUE_STATE
			return yaml_parser_process_empty_scalar(parser, event, mark)
		}
	} else if token.typ == yaml_BLOCK_END_TOKEN {
		// End of the mapping: pop the state and the start mark.
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]
		parser.marks = parser.marks[:len(parser.marks)-1]
		*event = yaml_event_t{
			typ:        yaml_MAPPING_END_EVENT,
			start_mark: token.start_mark,
			end_mark:   token.end_mark,
		}
		skip_token(parser)
		return true
	}

	context_mark := parser.marks[len(parser.marks)-1]
	parser.marks = parser.marks[:len(parser.marks)-1]
	return yaml_parser_set_parser_error_context(parser,
		"while parsing a block mapping", context_mark,
		"did not find expected key", token.start_mark)
}
722 | |||
723 | // Parse the productions: | ||
724 | // block_mapping ::= BLOCK-MAPPING_START | ||
725 | // | ||
726 | // ((KEY block_node_or_indentless_sequence?)? | ||
727 | // | ||
728 | // (VALUE block_node_or_indentless_sequence?)?)* | ||
729 | // ***** * | ||
730 | // BLOCK-END | ||
731 | // | ||
732 | // | ||
733 | func yaml_parser_parse_block_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool { | ||
734 | token := peek_token(parser) | ||
735 | if token == nil { | ||
736 | return false | ||
737 | } | ||
738 | if token.typ == yaml_VALUE_TOKEN { | ||
739 | mark := token.end_mark | ||
740 | skip_token(parser) | ||
741 | token = peek_token(parser) | ||
742 | if token == nil { | ||
743 | return false | ||
744 | } | ||
745 | if token.typ != yaml_KEY_TOKEN && | ||
746 | token.typ != yaml_VALUE_TOKEN && | ||
747 | token.typ != yaml_BLOCK_END_TOKEN { | ||
748 | parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_KEY_STATE) | ||
749 | return yaml_parser_parse_node(parser, event, true, true) | ||
750 | } | ||
751 | parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE | ||
752 | return yaml_parser_process_empty_scalar(parser, event, mark) | ||
753 | } | ||
754 | parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE | ||
755 | return yaml_parser_process_empty_scalar(parser, event, token.start_mark) | ||
756 | } | ||
757 | |||
// Parse the productions:
// flow_sequence ::= FLOW-SEQUENCE-START
//                   *******************
//                   (flow_sequence_entry FLOW-ENTRY)*
//                    *                   **********
//                   flow_sequence_entry?
//                    *
//                   FLOW-SEQUENCE-END
//                   *****************
// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//                         *
//
// A KEY token inside a flow sequence starts a single-pair implicit mapping.
func yaml_parser_parse_flow_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
	if first {
		// NOTE(review): peek_token result used without a nil check here,
		// unlike elsewhere in this file — presumably already cached.
		token := peek_token(parser)
		parser.marks = append(parser.marks, token.start_mark)
		skip_token(parser)
	}
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
		// Entries after the first must be separated by a ',' token.
		if !first {
			if token.typ == yaml_FLOW_ENTRY_TOKEN {
				skip_token(parser)
				token = peek_token(parser)
				if token == nil {
					return false
				}
			} else {
				context_mark := parser.marks[len(parser.marks)-1]
				parser.marks = parser.marks[:len(parser.marks)-1]
				return yaml_parser_set_parser_error_context(parser,
					"while parsing a flow sequence", context_mark,
					"did not find expected ',' or ']'", token.start_mark)
			}
		}

		if token.typ == yaml_KEY_TOKEN {
			// "?" inside a flow sequence: emit an implicit single-pair
			// mapping around the upcoming key/value.
			parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE
			*event = yaml_event_t{
				typ:        yaml_MAPPING_START_EVENT,
				start_mark: token.start_mark,
				end_mark:   token.end_mark,
				implicit:   true,
				style:      yaml_style_t(yaml_FLOW_MAPPING_STYLE),
			}
			skip_token(parser)
			return true
		} else if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE)
			return yaml_parser_parse_node(parser, event, false, false)
		}
	}

	// ']' reached: pop the state and the start mark, end the sequence.
	parser.state = parser.states[len(parser.states)-1]
	parser.states = parser.states[:len(parser.states)-1]
	parser.marks = parser.marks[:len(parser.marks)-1]

	*event = yaml_event_t{
		typ:        yaml_SEQUENCE_END_EVENT,
		start_mark: token.start_mark,
		end_mark:   token.end_mark,
	}

	skip_token(parser)
	return true
}
827 | |||
828 | // | ||
829 | // Parse the productions: | ||
830 | // flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? | ||
831 | // *** * | ||
832 | // | ||
833 | func yaml_parser_parse_flow_sequence_entry_mapping_key(parser *yaml_parser_t, event *yaml_event_t) bool { | ||
834 | token := peek_token(parser) | ||
835 | if token == nil { | ||
836 | return false | ||
837 | } | ||
838 | if token.typ != yaml_VALUE_TOKEN && | ||
839 | token.typ != yaml_FLOW_ENTRY_TOKEN && | ||
840 | token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { | ||
841 | parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE) | ||
842 | return yaml_parser_parse_node(parser, event, false, false) | ||
843 | } | ||
844 | mark := token.end_mark | ||
845 | skip_token(parser) | ||
846 | parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE | ||
847 | return yaml_parser_process_empty_scalar(parser, event, mark) | ||
848 | } | ||
849 | |||
// Parse the productions:
// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//                                                     *****  *
//
// Parses the value of a single-pair mapping inside a flow sequence. If the
// value is absent an empty scalar stands in for it.
func yaml_parser_parse_flow_sequence_entry_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if token.typ == yaml_VALUE_TOKEN {
		skip_token(parser)
		// Note: this ':=' deliberately shadows token within the if block,
		// so the empty-scalar fallthrough below still uses the VALUE
		// token's start_mark.
		token := peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE)
			return yaml_parser_parse_node(parser, event, false, false)
		}
	}
	parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE
	return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
}
873 | |||
874 | // Parse the productions: | ||
875 | // flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? | ||
876 | // * | ||
877 | // | ||
878 | func yaml_parser_parse_flow_sequence_entry_mapping_end(parser *yaml_parser_t, event *yaml_event_t) bool { | ||
879 | token := peek_token(parser) | ||
880 | if token == nil { | ||
881 | return false | ||
882 | } | ||
883 | parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE | ||
884 | *event = yaml_event_t{ | ||
885 | typ: yaml_MAPPING_END_EVENT, | ||
886 | start_mark: token.start_mark, | ||
887 | end_mark: token.start_mark, // [Go] Shouldn't this be end_mark? | ||
888 | } | ||
889 | return true | ||
890 | } | ||
891 | |||
// Parse the productions:
// flow_mapping ::= FLOW-MAPPING-START
//                  ******************
//                  (flow_mapping_entry FLOW-ENTRY)*
//                   *                  **********
//                  flow_mapping_entry?
//                  ******************
//                  FLOW-MAPPING-END
//                  ****************
// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//                        *           *** *
//
// On the first call the FLOW-MAPPING-START token is consumed and its mark
// is pushed for use in error messages about this collection.
func yaml_parser_parse_flow_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
	if first {
		// NOTE(review): peek_token result used without a nil check here,
		// unlike elsewhere in this file — presumably already cached.
		token := peek_token(parser)
		parser.marks = append(parser.marks, token.start_mark)
		skip_token(parser)
	}

	token := peek_token(parser)
	if token == nil {
		return false
	}

	if token.typ != yaml_FLOW_MAPPING_END_TOKEN {
		// Entries after the first must be separated by a ',' token.
		if !first {
			if token.typ == yaml_FLOW_ENTRY_TOKEN {
				skip_token(parser)
				token = peek_token(parser)
				if token == nil {
					return false
				}
			} else {
				context_mark := parser.marks[len(parser.marks)-1]
				parser.marks = parser.marks[:len(parser.marks)-1]
				return yaml_parser_set_parser_error_context(parser,
					"while parsing a flow mapping", context_mark,
					"did not find expected ',' or '}'", token.start_mark)
			}
		}

		if token.typ == yaml_KEY_TOKEN {
			skip_token(parser)
			token = peek_token(parser)
			if token == nil {
				return false
			}
			if token.typ != yaml_VALUE_TOKEN &&
				token.typ != yaml_FLOW_ENTRY_TOKEN &&
				token.typ != yaml_FLOW_MAPPING_END_TOKEN {
				parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_VALUE_STATE)
				return yaml_parser_parse_node(parser, event, false, false)
			} else {
				// "?" with no key content: the key is an empty scalar.
				parser.state = yaml_PARSE_FLOW_MAPPING_VALUE_STATE
				return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
			}
		} else if token.typ != yaml_FLOW_MAPPING_END_TOKEN {
			// A bare node is a key whose value is empty (EMPTY_VALUE state).
			parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE)
			return yaml_parser_parse_node(parser, event, false, false)
		}
	}

	// '}' reached: pop the state and the start mark, end the mapping.
	parser.state = parser.states[len(parser.states)-1]
	parser.states = parser.states[:len(parser.states)-1]
	parser.marks = parser.marks[:len(parser.marks)-1]
	*event = yaml_event_t{
		typ:        yaml_MAPPING_END_EVENT,
		start_mark: token.start_mark,
		end_mark:   token.end_mark,
	}
	skip_token(parser)
	return true
}
965 | |||
// Parse the productions:
// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//                        *                           ***** *
//
// Parses a flow-mapping value. When empty is true the caller already knows
// the key had no value (EMPTY_VALUE state); an empty scalar is emitted.
func yaml_parser_parse_flow_mapping_value(parser *yaml_parser_t, event *yaml_event_t, empty bool) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if empty {
		parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE
		return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
	}
	if token.typ == yaml_VALUE_TOKEN {
		skip_token(parser)
		// Note: token is reassigned here, so the fallthrough below marks
		// the empty scalar at the token after ':'.
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_MAPPING_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_KEY_STATE)
			return yaml_parser_parse_node(parser, event, false, false)
		}
	}
	parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE
	return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
}
993 | |||
994 | // Generate an empty scalar event. | ||
995 | func yaml_parser_process_empty_scalar(parser *yaml_parser_t, event *yaml_event_t, mark yaml_mark_t) bool { | ||
996 | *event = yaml_event_t{ | ||
997 | typ: yaml_SCALAR_EVENT, | ||
998 | start_mark: mark, | ||
999 | end_mark: mark, | ||
1000 | value: nil, // Empty | ||
1001 | implicit: true, | ||
1002 | style: yaml_style_t(yaml_PLAIN_SCALAR_STYLE), | ||
1003 | } | ||
1004 | return true | ||
1005 | } | ||
1006 | |||
// default_tag_directives are the tag handles installed for every document:
// "!" for local tags and "!!" for the "tag:yaml.org,2002:" namespace.
var default_tag_directives = []yaml_tag_directive_t{
	{[]byte("!"), []byte("!")},
	{[]byte("!!"), []byte("tag:yaml.org,2002:")},
}
1011 | |||
// Parse directives.
//
// Consumes any leading %YAML and %TAG directive tokens, registering each
// %TAG on parser.tag_directives and rejecting duplicates. Only YAML version
// 1.1 is accepted. The default "!" and "!!" handles are then appended
// (duplicates allowed, so explicit declarations win). The collected
// directives are written through the optional out-params; pass nil for
// either to discard it.
func yaml_parser_process_directives(parser *yaml_parser_t,
	version_directive_ref **yaml_version_directive_t,
	tag_directives_ref *[]yaml_tag_directive_t) bool {

	var version_directive *yaml_version_directive_t
	var tag_directives []yaml_tag_directive_t

	token := peek_token(parser)
	if token == nil {
		return false
	}

	for token.typ == yaml_VERSION_DIRECTIVE_TOKEN || token.typ == yaml_TAG_DIRECTIVE_TOKEN {
		if token.typ == yaml_VERSION_DIRECTIVE_TOKEN {
			// At most one %YAML directive per document.
			if version_directive != nil {
				yaml_parser_set_parser_error(parser,
					"found duplicate %YAML directive", token.start_mark)
				return false
			}
			// Only YAML 1.1 is supported.
			if token.major != 1 || token.minor != 1 {
				yaml_parser_set_parser_error(parser,
					"found incompatible YAML document", token.start_mark)
				return false
			}
			version_directive = &yaml_version_directive_t{
				major: token.major,
				minor: token.minor,
			}
		} else if token.typ == yaml_TAG_DIRECTIVE_TOKEN {
			value := yaml_tag_directive_t{
				handle: token.value,
				prefix: token.prefix,
			}
			// allow_duplicates=false: a repeated handle is an error here.
			if !yaml_parser_append_tag_directive(parser, value, false, token.start_mark) {
				return false
			}
			tag_directives = append(tag_directives, value)
		}

		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
	}

	// Install the default handles; allow_duplicates=true so explicit
	// declarations above take precedence.
	for i := range default_tag_directives {
		if !yaml_parser_append_tag_directive(parser, default_tag_directives[i], true, token.start_mark) {
			return false
		}
	}

	if version_directive_ref != nil {
		*version_directive_ref = version_directive
	}
	if tag_directives_ref != nil {
		*tag_directives_ref = tag_directives
	}
	return true
}
1073 | |||
1074 | // Append a tag directive to the directives stack. | ||
1075 | func yaml_parser_append_tag_directive(parser *yaml_parser_t, value yaml_tag_directive_t, allow_duplicates bool, mark yaml_mark_t) bool { | ||
1076 | for i := range parser.tag_directives { | ||
1077 | if bytes.Equal(value.handle, parser.tag_directives[i].handle) { | ||
1078 | if allow_duplicates { | ||
1079 | return true | ||
1080 | } | ||
1081 | return yaml_parser_set_parser_error(parser, "found duplicate %TAG directive", mark) | ||
1082 | } | ||
1083 | } | ||
1084 | |||
1085 | // [Go] I suspect the copy is unnecessary. This was likely done | ||
1086 | // because there was no way to track ownership of the data. | ||
1087 | value_copy := yaml_tag_directive_t{ | ||
1088 | handle: make([]byte, len(value.handle)), | ||
1089 | prefix: make([]byte, len(value.prefix)), | ||
1090 | } | ||
1091 | copy(value_copy.handle, value.handle) | ||
1092 | copy(value_copy.prefix, value.prefix) | ||
1093 | parser.tag_directives = append(parser.tag_directives, value_copy) | ||
1094 | return true | ||
1095 | } | ||
diff --git a/vendor/github.com/zclconf/go-cty-yaml/readerc.go b/vendor/github.com/zclconf/go-cty-yaml/readerc.go new file mode 100644 index 0000000..7c1f5fa --- /dev/null +++ b/vendor/github.com/zclconf/go-cty-yaml/readerc.go | |||
@@ -0,0 +1,412 @@ | |||
1 | package yaml | ||
2 | |||
3 | import ( | ||
4 | "io" | ||
5 | ) | ||
6 | |||
// Set the reader error and return false.
//
// Records a reader-level problem on the parser state; it always returns
// false so call sites can `return` its result directly from their own
// boolean-returning functions.
func yaml_parser_set_reader_error(parser *yaml_parser_t, problem string, offset int, value int) bool {
	parser.error = yaml_READER_ERROR
	parser.problem = problem
	parser.problem_offset = offset // byte offset in the input where the problem occurred
	parser.problem_value = value   // offending byte/character value, or -1 when not applicable
	return false
}
15 | |||
// Byte order marks.
const (
	bom_UTF8    = "\xef\xbb\xbf" // UTF-8 BOM
	bom_UTF16LE = "\xff\xfe"     // UTF-16 little-endian BOM
	bom_UTF16BE = "\xfe\xff"     // UTF-16 big-endian BOM
)
22 | |||
23 | // Determine the input stream encoding by checking the BOM symbol. If no BOM is | ||
24 | // found, the UTF-8 encoding is assumed. Return 1 on success, 0 on failure. | ||
25 | func yaml_parser_determine_encoding(parser *yaml_parser_t) bool { | ||
26 | // Ensure that we had enough bytes in the raw buffer. | ||
27 | for !parser.eof && len(parser.raw_buffer)-parser.raw_buffer_pos < 3 { | ||
28 | if !yaml_parser_update_raw_buffer(parser) { | ||
29 | return false | ||
30 | } | ||
31 | } | ||
32 | |||
33 | // Determine the encoding. | ||
34 | buf := parser.raw_buffer | ||
35 | pos := parser.raw_buffer_pos | ||
36 | avail := len(buf) - pos | ||
37 | if avail >= 2 && buf[pos] == bom_UTF16LE[0] && buf[pos+1] == bom_UTF16LE[1] { | ||
38 | parser.encoding = yaml_UTF16LE_ENCODING | ||
39 | parser.raw_buffer_pos += 2 | ||
40 | parser.offset += 2 | ||
41 | } else if avail >= 2 && buf[pos] == bom_UTF16BE[0] && buf[pos+1] == bom_UTF16BE[1] { | ||
42 | parser.encoding = yaml_UTF16BE_ENCODING | ||
43 | parser.raw_buffer_pos += 2 | ||
44 | parser.offset += 2 | ||
45 | } else if avail >= 3 && buf[pos] == bom_UTF8[0] && buf[pos+1] == bom_UTF8[1] && buf[pos+2] == bom_UTF8[2] { | ||
46 | parser.encoding = yaml_UTF8_ENCODING | ||
47 | parser.raw_buffer_pos += 3 | ||
48 | parser.offset += 3 | ||
49 | } else { | ||
50 | parser.encoding = yaml_UTF8_ENCODING | ||
51 | } | ||
52 | return true | ||
53 | } | ||
54 | |||
// Update the raw buffer.
//
// Compacts any unread raw bytes to the front of parser.raw_buffer and asks
// the read handler for more data to fill the remaining capacity. Reaching
// io.EOF is recorded on the parser but still treated as success here; any
// other read error becomes a reader error and returns false.
func yaml_parser_update_raw_buffer(parser *yaml_parser_t) bool {
	size_read := 0

	// Return if the raw buffer is full.
	if parser.raw_buffer_pos == 0 && len(parser.raw_buffer) == cap(parser.raw_buffer) {
		return true
	}

	// Return on EOF.
	if parser.eof {
		return true
	}

	// Move the remaining bytes in the raw buffer to the beginning.
	if parser.raw_buffer_pos > 0 && parser.raw_buffer_pos < len(parser.raw_buffer) {
		copy(parser.raw_buffer, parser.raw_buffer[parser.raw_buffer_pos:])
	}
	parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)-parser.raw_buffer_pos]
	parser.raw_buffer_pos = 0

	// Call the read handler to fill the buffer.
	// (The := reuses size_read declared above and introduces only err.)
	size_read, err := parser.read_handler(parser, parser.raw_buffer[len(parser.raw_buffer):cap(parser.raw_buffer)])
	parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)+size_read]
	if err == io.EOF {
		parser.eof = true
	} else if err != nil {
		return yaml_parser_set_reader_error(parser, "input error: "+err.Error(), parser.offset, -1)
	}
	return true
}
86 | |||
// Ensure that the buffer contains at least `length` characters.
// Return true on success, false on failure.
//
// The length is supposed to be significantly less than the buffer size.
//
// Decodes raw input (UTF-8 or UTF-16, per parser.encoding) into
// parser.buffer as UTF-8, validating each character. Past EOF the buffer is
// padded with NUL bytes so callers may index up to `length` without further
// bounds checks.
func yaml_parser_update_buffer(parser *yaml_parser_t, length int) bool {
	if parser.read_handler == nil {
		panic("read handler must be set")
	}

	// [Go] This function was changed to guarantee the requested length size at EOF.
	// The fact we need to do this is pretty awful, but the description above implies
	// for that to be the case, and there are tests

	// If the EOF flag is set and the raw buffer is empty, do nothing.
	if parser.eof && parser.raw_buffer_pos == len(parser.raw_buffer) {
		// [Go] ACTUALLY! Read the documentation of this function above.
		// This is just broken. To return true, we need to have the
		// given length in the buffer. Not doing that means every single
		// check that calls this function to make sure the buffer has a
		// given length is Go) panicking; or C) accessing invalid memory.
		//return true
	}

	// Return if the buffer contains enough characters.
	if parser.unread >= length {
		return true
	}

	// Determine the input encoding if it is not known yet.
	if parser.encoding == yaml_ANY_ENCODING {
		if !yaml_parser_determine_encoding(parser) {
			return false
		}
	}

	// Move the unread characters to the beginning of the buffer.
	// buffer_len tracks the end of valid decoded data from here on.
	buffer_len := len(parser.buffer)
	if parser.buffer_pos > 0 && parser.buffer_pos < buffer_len {
		copy(parser.buffer, parser.buffer[parser.buffer_pos:])
		buffer_len -= parser.buffer_pos
		parser.buffer_pos = 0
	} else if parser.buffer_pos == buffer_len {
		buffer_len = 0
		parser.buffer_pos = 0
	}

	// Open the whole buffer for writing, and cut it before returning.
	parser.buffer = parser.buffer[:cap(parser.buffer)]

	// Fill the buffer until it has enough characters.
	// `first` skips the initial raw-buffer refill so that bytes already
	// present in the raw buffer are decoded before reading more input.
	first := true
	for parser.unread < length {

		// Fill the raw buffer if necessary.
		if !first || parser.raw_buffer_pos == len(parser.raw_buffer) {
			if !yaml_parser_update_raw_buffer(parser) {
				parser.buffer = parser.buffer[:buffer_len]
				return false
			}
		}
		first = false

		// Decode the raw buffer.
	inner:
		for parser.raw_buffer_pos != len(parser.raw_buffer) {
			var value rune
			var width int

			raw_unread := len(parser.raw_buffer) - parser.raw_buffer_pos

			// Decode the next character.
			switch parser.encoding {
			case yaml_UTF8_ENCODING:
				// Decode a UTF-8 character. Check RFC 3629
				// (http://www.ietf.org/rfc/rfc3629.txt) for more details.
				//
				// The following table (taken from the RFC) is used for
				// decoding.
				//
				//    Char. number range |        UTF-8 octet sequence
				//      (hexadecimal)    |              (binary)
				//   --------------------+------------------------------------
				//   0000 0000-0000 007F | 0xxxxxxx
				//   0000 0080-0000 07FF | 110xxxxx 10xxxxxx
				//   0000 0800-0000 FFFF | 1110xxxx 10xxxxxx 10xxxxxx
				//   0001 0000-0010 FFFF | 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
				//
				// Additionally, the characters in the range 0xD800-0xDFFF
				// are prohibited as they are reserved for use with UTF-16
				// surrogate pairs.

				// Determine the length of the UTF-8 sequence.
				octet := parser.raw_buffer[parser.raw_buffer_pos]
				switch {
				case octet&0x80 == 0x00:
					width = 1
				case octet&0xE0 == 0xC0:
					width = 2
				case octet&0xF0 == 0xE0:
					width = 3
				case octet&0xF8 == 0xF0:
					width = 4
				default:
					// The leading octet is invalid.
					return yaml_parser_set_reader_error(parser,
						"invalid leading UTF-8 octet",
						parser.offset, int(octet))
				}

				// Check if the raw buffer contains an incomplete character.
				if width > raw_unread {
					if parser.eof {
						return yaml_parser_set_reader_error(parser,
							"incomplete UTF-8 octet sequence",
							parser.offset, -1)
					}
					// More input may complete the sequence: refill and retry.
					break inner
				}

				// Decode the leading octet.
				switch {
				case octet&0x80 == 0x00:
					value = rune(octet & 0x7F)
				case octet&0xE0 == 0xC0:
					value = rune(octet & 0x1F)
				case octet&0xF0 == 0xE0:
					value = rune(octet & 0x0F)
				case octet&0xF8 == 0xF0:
					value = rune(octet & 0x07)
				default:
					value = 0
				}

				// Check and decode the trailing octets.
				for k := 1; k < width; k++ {
					octet = parser.raw_buffer[parser.raw_buffer_pos+k]

					// Check if the octet is valid.
					if (octet & 0xC0) != 0x80 {
						return yaml_parser_set_reader_error(parser,
							"invalid trailing UTF-8 octet",
							parser.offset+k, int(octet))
					}

					// Decode the octet.
					value = (value << 6) + rune(octet&0x3F)
				}

				// Check the length of the sequence against the value.
				// Rejects overlong encodings, which RFC 3629 forbids.
				switch {
				case width == 1:
				case width == 2 && value >= 0x80:
				case width == 3 && value >= 0x800:
				case width == 4 && value >= 0x10000:
				default:
					return yaml_parser_set_reader_error(parser,
						"invalid length of a UTF-8 sequence",
						parser.offset, -1)
				}

				// Check the range of the value.
				if value >= 0xD800 && value <= 0xDFFF || value > 0x10FFFF {
					return yaml_parser_set_reader_error(parser,
						"invalid Unicode character",
						parser.offset, int(value))
				}

			case yaml_UTF16LE_ENCODING, yaml_UTF16BE_ENCODING:
				// low/high are the byte offsets of the low and high octet
				// within a 16-bit code unit, per the stream's endianness.
				var low, high int
				if parser.encoding == yaml_UTF16LE_ENCODING {
					low, high = 0, 1
				} else {
					low, high = 1, 0
				}

				// The UTF-16 encoding is not as simple as one might
				// naively think. Check RFC 2781
				// (http://www.ietf.org/rfc/rfc2781.txt).
				//
				// Normally, two subsequent bytes describe a Unicode
				// character. However a special technique (called a
				// surrogate pair) is used for specifying character
				// values larger than 0xFFFF.
				//
				// A surrogate pair consists of two pseudo-characters:
				//      high surrogate area (0xD800-0xDBFF)
				//      low surrogate area (0xDC00-0xDFFF)
				//
				// The following formulas are used for decoding
				// and encoding characters using surrogate pairs:
				//
				//  U  = U' + 0x10000   (0x01 00 00 <= U <= 0x10 FF FF)
				//  U' = yyyyyyyyyyxxxxxxxxxx   (0 <= U' <= 0x0F FF FF)
				//  W1 = 110110yyyyyyyyyy
				//  W2 = 110111xxxxxxxxxx
				//
				// where U is the character value, W1 is the high surrogate
				// area, W2 is the low surrogate area.

				// Check for incomplete UTF-16 character.
				if raw_unread < 2 {
					if parser.eof {
						return yaml_parser_set_reader_error(parser,
							"incomplete UTF-16 character",
							parser.offset, -1)
					}
					break inner
				}

				// Get the character.
				value = rune(parser.raw_buffer[parser.raw_buffer_pos+low]) +
					(rune(parser.raw_buffer[parser.raw_buffer_pos+high]) << 8)

				// Check for unexpected low surrogate area.
				if value&0xFC00 == 0xDC00 {
					return yaml_parser_set_reader_error(parser,
						"unexpected low surrogate area",
						parser.offset, int(value))
				}

				// Check for a high surrogate area.
				if value&0xFC00 == 0xD800 {
					width = 4

					// Check for incomplete surrogate pair.
					if raw_unread < 4 {
						if parser.eof {
							return yaml_parser_set_reader_error(parser,
								"incomplete UTF-16 surrogate pair",
								parser.offset, -1)
						}
						break inner
					}

					// Get the next character.
					value2 := rune(parser.raw_buffer[parser.raw_buffer_pos+low+2]) +
						(rune(parser.raw_buffer[parser.raw_buffer_pos+high+2]) << 8)

					// Check for a low surrogate area.
					if value2&0xFC00 != 0xDC00 {
						return yaml_parser_set_reader_error(parser,
							"expected low surrogate area",
							parser.offset+2, int(value2))
					}

					// Generate the value of the surrogate pair.
					value = 0x10000 + ((value & 0x3FF) << 10) + (value2 & 0x3FF)
				} else {
					width = 2
				}

			default:
				panic("impossible")
			}

			// Check if the character is in the allowed range:
			//      #x9 | #xA | #xD | [#x20-#x7E]               (8 bit)
			//      | #x85 | [#xA0-#xD7FF] | [#xE000-#xFFFD]    (16 bit)
			//      | [#x10000-#x10FFFF]                        (32 bit)
			switch {
			case value == 0x09:
			case value == 0x0A:
			case value == 0x0D:
			case value >= 0x20 && value <= 0x7E:
			case value == 0x85:
			case value >= 0xA0 && value <= 0xD7FF:
			case value >= 0xE000 && value <= 0xFFFD:
			case value >= 0x10000 && value <= 0x10FFFF:
			default:
				return yaml_parser_set_reader_error(parser,
					"control characters are not allowed",
					parser.offset, int(value))
			}

			// Move the raw pointers.
			parser.raw_buffer_pos += width
			parser.offset += width

			// Finally put the character into the buffer (re-encoded as UTF-8).
			if value <= 0x7F {
				// 0000 0000-0000 007F . 0xxxxxxx
				parser.buffer[buffer_len+0] = byte(value)
				buffer_len += 1
			} else if value <= 0x7FF {
				// 0000 0080-0000 07FF . 110xxxxx 10xxxxxx
				parser.buffer[buffer_len+0] = byte(0xC0 + (value >> 6))
				parser.buffer[buffer_len+1] = byte(0x80 + (value & 0x3F))
				buffer_len += 2
			} else if value <= 0xFFFF {
				// 0000 0800-0000 FFFF . 1110xxxx 10xxxxxx 10xxxxxx
				parser.buffer[buffer_len+0] = byte(0xE0 + (value >> 12))
				parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 6) & 0x3F))
				parser.buffer[buffer_len+2] = byte(0x80 + (value & 0x3F))
				buffer_len += 3
			} else {
				// 0001 0000-0010 FFFF . 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
				parser.buffer[buffer_len+0] = byte(0xF0 + (value >> 18))
				parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 12) & 0x3F))
				parser.buffer[buffer_len+2] = byte(0x80 + ((value >> 6) & 0x3F))
				parser.buffer[buffer_len+3] = byte(0x80 + (value & 0x3F))
				buffer_len += 4
			}

			parser.unread++
		}

		// On EOF, put NUL into the buffer and return.
		if parser.eof {
			parser.buffer[buffer_len] = 0
			buffer_len++
			parser.unread++
			break
		}
	}
	// [Go] Read the documentation of this function above. To return true,
	// we need to have the given length in the buffer. Not doing that means
	// every single check that calls this function to make sure the buffer
	// has a given length is Go) panicking; or C) accessing invalid memory.
	// This happens here due to the EOF above breaking early.
	for buffer_len < length {
		parser.buffer[buffer_len] = 0
		buffer_len++
	}
	parser.buffer = parser.buffer[:buffer_len]
	return true
}
diff --git a/vendor/github.com/zclconf/go-cty-yaml/resolve.go b/vendor/github.com/zclconf/go-cty-yaml/resolve.go new file mode 100644 index 0000000..0f64383 --- /dev/null +++ b/vendor/github.com/zclconf/go-cty-yaml/resolve.go | |||
@@ -0,0 +1,288 @@ | |||
1 | package yaml | ||
2 | |||
3 | import ( | ||
4 | "encoding/base64" | ||
5 | "fmt" | ||
6 | "reflect" | ||
7 | "regexp" | ||
8 | "strconv" | ||
9 | "strings" | ||
10 | "time" | ||
11 | |||
12 | "github.com/zclconf/go-cty/cty" | ||
13 | ) | ||
14 | |||
// resolveMapItem pairs a literal scalar spelling with the cty value and the
// YAML tag it resolves to (e.g. "yes" resolves to cty.True with !!bool).
type resolveMapItem struct {
	value cty.Value
	tag   string
}

// resolveTable maps a scalar's first byte to a classification hint
// ('S' sign, 'D' digit, 'M' map entry, '.' possible float); see init below.
var resolveTable = make([]byte, 256)

// resolveMap maps exact scalar spellings to their resolved values.
var resolveMap = make(map[string]resolveMapItem)
22 | |||
23 | func init() { | ||
24 | t := resolveTable | ||
25 | t[int('+')] = 'S' // Sign | ||
26 | t[int('-')] = 'S' | ||
27 | for _, c := range "0123456789" { | ||
28 | t[int(c)] = 'D' // Digit | ||
29 | } | ||
30 | for _, c := range "yYnNtTfFoO~" { | ||
31 | t[int(c)] = 'M' // In map | ||
32 | } | ||
33 | t[int('.')] = '.' // Float (potentially in map) | ||
34 | |||
35 | var resolveMapList = []struct { | ||
36 | v cty.Value | ||
37 | tag string | ||
38 | l []string | ||
39 | }{ | ||
40 | {cty.True, yaml_BOOL_TAG, []string{"y", "Y", "yes", "Yes", "YES"}}, | ||
41 | {cty.True, yaml_BOOL_TAG, []string{"true", "True", "TRUE"}}, | ||
42 | {cty.True, yaml_BOOL_TAG, []string{"on", "On", "ON"}}, | ||
43 | {cty.False, yaml_BOOL_TAG, []string{"n", "N", "no", "No", "NO"}}, | ||
44 | {cty.False, yaml_BOOL_TAG, []string{"false", "False", "FALSE"}}, | ||
45 | {cty.False, yaml_BOOL_TAG, []string{"off", "Off", "OFF"}}, | ||
46 | {cty.NullVal(cty.DynamicPseudoType), yaml_NULL_TAG, []string{"", "~", "null", "Null", "NULL"}}, | ||
47 | {cty.PositiveInfinity, yaml_FLOAT_TAG, []string{".inf", ".Inf", ".INF"}}, | ||
48 | {cty.PositiveInfinity, yaml_FLOAT_TAG, []string{"+.inf", "+.Inf", "+.INF"}}, | ||
49 | {cty.NegativeInfinity, yaml_FLOAT_TAG, []string{"-.inf", "-.Inf", "-.INF"}}, | ||
50 | } | ||
51 | |||
52 | m := resolveMap | ||
53 | for _, item := range resolveMapList { | ||
54 | for _, s := range item.l { | ||
55 | m[s] = resolveMapItem{item.v, item.tag} | ||
56 | } | ||
57 | } | ||
58 | } | ||
59 | |||
// longTagPrefix is the URI prefix shared by all fully-qualified YAML core tags.
const longTagPrefix = "tag:yaml.org,2002:"

// shortTag converts a long-form tag ("tag:yaml.org,2002:str") to its short
// form ("!!str"); any other tag is returned unchanged.
func shortTag(tag string) string {
	// TODO This can easily be made faster and produce less garbage.
	if trimmed := strings.TrimPrefix(tag, longTagPrefix); trimmed != tag {
		return "!!" + trimmed
	}
	return tag
}
69 | |||
70 | func longTag(tag string) string { | ||
71 | if strings.HasPrefix(tag, "!!") { | ||
72 | return longTagPrefix + tag[2:] | ||
73 | } | ||
74 | return tag | ||
75 | } | ||
76 | |||
77 | func resolvableTag(tag string) bool { | ||
78 | switch tag { | ||
79 | case "", yaml_STR_TAG, yaml_BOOL_TAG, yaml_INT_TAG, yaml_FLOAT_TAG, yaml_NULL_TAG, yaml_TIMESTAMP_TAG, yaml_BINARY_TAG: | ||
80 | return true | ||
81 | } | ||
82 | return false | ||
83 | } | ||
84 | |||
// yamlStyleFloat matches a YAML-style decimal float: optional sign, digits
// with an optional fractional part (or a bare ".digits"), optional exponent.
var yamlStyleFloat = regexp.MustCompile(`^[-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?$`)
86 | |||
// resolveScalar resolves a YAML scalar (its tag, source text, and quoting
// style) into a cty.Value. Explicitly quoted scalars are treated as literal
// strings; fuzzy type-selection rules apply only to unquoted scalars.
// Returns cty.NilVal with an error when the scalar cannot be resolved.
func (c *Converter) resolveScalar(tag string, src string, style yaml_scalar_style_t) (cty.Value, error) {
	if !resolvableTag(tag) {
		return cty.NilVal, fmt.Errorf("unsupported tag %q", tag)
	}

	// Any data is accepted as a !!str or !!binary.
	// Otherwise, the prefix is enough of a hint about what it might be.
	hint := byte('N')
	if src != "" {
		hint = resolveTable[src[0]]
	}
	if hint != 0 && tag != yaml_STR_TAG && tag != yaml_BINARY_TAG {
		// Quoted scalars bypass fuzzy typing and resolve verbatim as strings.
		if style == yaml_SINGLE_QUOTED_SCALAR_STYLE || style == yaml_DOUBLE_QUOTED_SCALAR_STYLE {
			return cty.StringVal(src), nil
		}

		// Handle things we can lookup in a map.
		if item, ok := resolveMap[src]; ok {
			return item.value, nil
		}

		if tag == "" {
			for _, nan := range []string{".nan", ".NaN", ".NAN"} {
				if src == nan {
					// cty cannot represent NaN, so this is an error
					return cty.NilVal, fmt.Errorf("floating point NaN is not supported")
				}
			}
		}

		// Base 60 floats are intentionally not supported.

		switch hint {
		case 'M':
			// We've already checked the map above.

		case '.':
			// Not in the map, so maybe a normal float.
			if numberVal, err := cty.ParseNumberVal(src); err == nil {
				return numberVal, nil
			}

		case 'D', 'S':
			// Int, float, or timestamp.
			// Only try values as a timestamp if the value is unquoted or there's an explicit
			// !!timestamp tag.
			if tag == "" || tag == yaml_TIMESTAMP_TAG {
				t, ok := parseTimestamp(src)
				if ok {
					// cty has no timestamp type, but its functions stdlib
					// conventionally uses strings in an RFC3339 encoding
					// to represent time, so we'll follow that convention here.
					return cty.StringVal(t.Format(time.RFC3339)), nil
				}
			}

			// YAML permits "_" digit separators; strip them before parsing.
			plain := strings.Replace(src, "_", "", -1)
			if numberVal, err := cty.ParseNumberVal(plain); err == nil {
				return numberVal, nil
			}
			if strings.HasPrefix(plain, "0b") || strings.HasPrefix(plain, "-0b") {
				tag = yaml_INT_TAG // will handle parsing below in our tag switch
			}
		default:
			panic(fmt.Sprintf("cannot resolve tag %q with source %q", tag, src))
		}
	}

	// An untagged "<<" scalar is the YAML merge key; represent it with the
	// sentinel capsule value so the decoder can expand the merge later.
	if tag == "" && src == "<<" {
		return mergeMappingVal, nil
	}

	switch tag {
	case yaml_STR_TAG, yaml_BINARY_TAG:
		// If it's binary then we want to keep the base64 representation, because
		// cty has no binary type, but we will check that it's actually base64.
		if tag == yaml_BINARY_TAG {
			_, err := base64.StdEncoding.DecodeString(src)
			if err != nil {
				return cty.NilVal, fmt.Errorf("cannot parse %q as %s: not valid base64", src, tag)
			}
		}
		return cty.StringVal(src), nil
	case yaml_BOOL_TAG:
		item, ok := resolveMap[src]
		if !ok || item.tag != yaml_BOOL_TAG {
			return cty.NilVal, fmt.Errorf("cannot parse %q as %s", src, tag)
		}
		return item.value, nil
	case yaml_FLOAT_TAG, yaml_INT_TAG:
		// Note: We don't actually check that a value tagged INT is a whole
		// number here. We could, but cty generally doesn't care about the
		// int/float distinction, so we'll just be generous and accept it.
		plain := strings.Replace(src, "_", "", -1)
		if numberVal, err := cty.ParseNumberVal(plain); err == nil { // handles decimal integers and floats
			return numberVal, nil
		}
		if intv, err := strconv.ParseInt(plain, 0, 64); err == nil { // handles 0x and 00 prefixes
			return cty.NumberIntVal(intv), nil
		}
		if uintv, err := strconv.ParseUint(plain, 0, 64); err == nil { // handles 0x and 00 prefixes
			return cty.NumberUIntVal(uintv), nil
		}
		// Binary literals ("0b..."), which strconv only accepts via base 2.
		if strings.HasPrefix(plain, "0b") {
			intv, err := strconv.ParseInt(plain[2:], 2, 64)
			if err == nil {
				return cty.NumberIntVal(intv), nil
			}
			uintv, err := strconv.ParseUint(plain[2:], 2, 64)
			if err == nil {
				return cty.NumberUIntVal(uintv), nil
			}
		} else if strings.HasPrefix(plain, "-0b") {
			intv, err := strconv.ParseInt("-"+plain[3:], 2, 64)
			if err == nil {
				return cty.NumberIntVal(intv), nil
			}
		}
		return cty.NilVal, fmt.Errorf("cannot parse %q as %s", src, tag)
	case yaml_TIMESTAMP_TAG:
		t, ok := parseTimestamp(src)
		if ok {
			// cty has no timestamp type, but its functions stdlib
			// conventionally uses strings in an RFC3339 encoding
			// to represent time, so we'll follow that convention here.
			return cty.StringVal(t.Format(time.RFC3339)), nil
		}
		return cty.NilVal, fmt.Errorf("cannot parse %q as %s", src, tag)
	case yaml_NULL_TAG:
		return cty.NullVal(cty.DynamicPseudoType), nil
	case "":
		return cty.StringVal(src), nil
	default:
		return cty.NilVal, fmt.Errorf("unsupported tag %q", tag)
	}
}
223 | |||
224 | // encodeBase64 encodes s as base64 that is broken up into multiple lines | ||
225 | // as appropriate for the resulting length. | ||
226 | func encodeBase64(s string) string { | ||
227 | const lineLen = 70 | ||
228 | encLen := base64.StdEncoding.EncodedLen(len(s)) | ||
229 | lines := encLen/lineLen + 1 | ||
230 | buf := make([]byte, encLen*2+lines) | ||
231 | in := buf[0:encLen] | ||
232 | out := buf[encLen:] | ||
233 | base64.StdEncoding.Encode(in, []byte(s)) | ||
234 | k := 0 | ||
235 | for i := 0; i < len(in); i += lineLen { | ||
236 | j := i + lineLen | ||
237 | if j > len(in) { | ||
238 | j = len(in) | ||
239 | } | ||
240 | k += copy(out[k:], in[i:j]) | ||
241 | if lines > 1 { | ||
242 | out[k] = '\n' | ||
243 | k++ | ||
244 | } | ||
245 | } | ||
246 | return string(out[:k]) | ||
247 | } | ||
248 | |||
249 | // This is a subset of the formats allowed by the regular expression | ||
250 | // defined at http://yaml.org/type/timestamp.html. | ||
251 | var allowedTimestampFormats = []string{ | ||
252 | "2006-1-2T15:4:5.999999999Z07:00", // RCF3339Nano with short date fields. | ||
253 | "2006-1-2t15:4:5.999999999Z07:00", // RFC3339Nano with short date fields and lower-case "t". | ||
254 | "2006-1-2 15:4:5.999999999", // space separated with no time zone | ||
255 | "2006-1-2", // date only | ||
256 | // Notable exception: time.Parse cannot handle: "2001-12-14 21:59:43.10 -5" | ||
257 | // from the set of examples. | ||
258 | } | ||
259 | |||
260 | // parseTimestamp parses s as a timestamp string and | ||
261 | // returns the timestamp and reports whether it succeeded. | ||
262 | // Timestamp formats are defined at http://yaml.org/type/timestamp.html | ||
263 | func parseTimestamp(s string) (time.Time, bool) { | ||
264 | // TODO write code to check all the formats supported by | ||
265 | // http://yaml.org/type/timestamp.html instead of using time.Parse. | ||
266 | |||
267 | // Quick check: all date formats start with YYYY-. | ||
268 | i := 0 | ||
269 | for ; i < len(s); i++ { | ||
270 | if c := s[i]; c < '0' || c > '9' { | ||
271 | break | ||
272 | } | ||
273 | } | ||
274 | if i != 4 || i == len(s) || s[i] != '-' { | ||
275 | return time.Time{}, false | ||
276 | } | ||
277 | for _, format := range allowedTimestampFormats { | ||
278 | if t, err := time.Parse(format, s); err == nil { | ||
279 | return t, true | ||
280 | } | ||
281 | } | ||
282 | return time.Time{}, false | ||
283 | } | ||
284 | |||
// mergeMapping is a sentinel type standing in for the YAML merge key "<<",
// which has no direct cty representation.
type mergeMapping struct{}

// mergeMappingTy is the capsule type wrapping mergeMapping, and
// mergeMappingVal is the singleton value produced when a scalar resolves to
// the merge key (see resolveScalar).
var mergeMappingTy = cty.Capsule("merge mapping", reflect.TypeOf(mergeMapping{}))
var mergeMappingVal = cty.CapsuleVal(mergeMappingTy, &mergeMapping{})
diff --git a/vendor/github.com/zclconf/go-cty-yaml/scannerc.go b/vendor/github.com/zclconf/go-cty-yaml/scannerc.go new file mode 100644 index 0000000..077fd1d --- /dev/null +++ b/vendor/github.com/zclconf/go-cty-yaml/scannerc.go | |||
@@ -0,0 +1,2696 @@ | |||
1 | package yaml | ||
2 | |||
3 | import ( | ||
4 | "bytes" | ||
5 | "fmt" | ||
6 | ) | ||
7 | |||
8 | // Introduction | ||
9 | // ************ | ||
10 | // | ||
11 | // The following notes assume that you are familiar with the YAML specification | ||
12 | // (http://yaml.org/spec/1.2/spec.html). We mostly follow it, although in | ||
13 | // some cases we are less restrictive than it requires. | ||
14 | // | ||
15 | // The process of transforming a YAML stream into a sequence of events is | ||
16 | // divided on two steps: Scanning and Parsing. | ||
17 | // | ||
18 | // The Scanner transforms the input stream into a sequence of tokens, while the | ||
19 | // parser transform the sequence of tokens produced by the Scanner into a | ||
20 | // sequence of parsing events. | ||
21 | // | ||
22 | // The Scanner is rather clever and complicated. The Parser, on the contrary, | ||
23 | // is a straightforward implementation of a recursive-descendant parser (or, | ||
24 | // LL(1) parser, as it is usually called). | ||
25 | // | ||
26 | // Actually there are two issues of Scanning that might be called "clever", the | ||
27 | // rest is quite straightforward. The issues are "block collection start" and | ||
28 | // "simple keys". Both issues are explained below in details. | ||
29 | // | ||
30 | // Here the Scanning step is explained and implemented. We start with the list | ||
31 | // of all the tokens produced by the Scanner together with short descriptions. | ||
32 | // | ||
33 | // Now, tokens: | ||
34 | // | ||
35 | // STREAM-START(encoding) # The stream start. | ||
36 | // STREAM-END # The stream end. | ||
37 | // VERSION-DIRECTIVE(major,minor) # The '%YAML' directive. | ||
38 | // TAG-DIRECTIVE(handle,prefix) # The '%TAG' directive. | ||
39 | // DOCUMENT-START # '---' | ||
40 | // DOCUMENT-END # '...' | ||
41 | // BLOCK-SEQUENCE-START # Indentation increase denoting a block | ||
42 | // BLOCK-MAPPING-START # sequence or a block mapping. | ||
43 | // BLOCK-END # Indentation decrease. | ||
44 | // FLOW-SEQUENCE-START # '[' | ||
45 | // FLOW-SEQUENCE-END # ']' | ||
46 | //      FLOW-MAPPING-START              # '{' | ||
47 | //      FLOW-MAPPING-END                # '}' | ||
48 | // BLOCK-ENTRY # '-' | ||
49 | // FLOW-ENTRY # ',' | ||
50 | // KEY # '?' or nothing (simple keys). | ||
51 | // VALUE # ':' | ||
52 | // ALIAS(anchor) # '*anchor' | ||
53 | // ANCHOR(anchor) # '&anchor' | ||
54 | // TAG(handle,suffix) # '!handle!suffix' | ||
55 | // SCALAR(value,style) # A scalar. | ||
56 | // | ||
57 | // The following two tokens are "virtual" tokens denoting the beginning and the | ||
58 | // end of the stream: | ||
59 | // | ||
60 | // STREAM-START(encoding) | ||
61 | // STREAM-END | ||
62 | // | ||
63 | // We pass the information about the input stream encoding with the | ||
64 | // STREAM-START token. | ||
65 | // | ||
66 | // The next two tokens are responsible for tags: | ||
67 | // | ||
68 | // VERSION-DIRECTIVE(major,minor) | ||
69 | // TAG-DIRECTIVE(handle,prefix) | ||
70 | // | ||
71 | // Example: | ||
72 | // | ||
73 | // %YAML 1.1 | ||
74 | // %TAG ! !foo | ||
75 | // %TAG !yaml! tag:yaml.org,2002: | ||
76 | // --- | ||
77 | // | ||
78 | // The corresponding sequence of tokens: | ||
79 | // | ||
80 | // STREAM-START(utf-8) | ||
81 | // VERSION-DIRECTIVE(1,1) | ||
82 | // TAG-DIRECTIVE("!","!foo") | ||
83 | // TAG-DIRECTIVE("!yaml","tag:yaml.org,2002:") | ||
84 | // DOCUMENT-START | ||
85 | // STREAM-END | ||
86 | // | ||
87 | // Note that the VERSION-DIRECTIVE and TAG-DIRECTIVE tokens occupy a whole | ||
88 | // line. | ||
89 | // | ||
90 | // The document start and end indicators are represented by: | ||
91 | // | ||
92 | // DOCUMENT-START | ||
93 | // DOCUMENT-END | ||
94 | // | ||
95 | // Note that if a YAML stream contains an implicit document (without '---' | ||
96 | // and '...' indicators), no DOCUMENT-START and DOCUMENT-END tokens will be | ||
97 | // produced. | ||
98 | // | ||
99 | // In the following examples, we present whole documents together with the | ||
100 | // produced tokens. | ||
101 | // | ||
102 | // 1. An implicit document: | ||
103 | // | ||
104 | // 'a scalar' | ||
105 | // | ||
106 | // Tokens: | ||
107 | // | ||
108 | // STREAM-START(utf-8) | ||
109 | // SCALAR("a scalar",single-quoted) | ||
110 | // STREAM-END | ||
111 | // | ||
112 | // 2. An explicit document: | ||
113 | // | ||
114 | // --- | ||
115 | // 'a scalar' | ||
116 | // ... | ||
117 | // | ||
118 | // Tokens: | ||
119 | // | ||
120 | // STREAM-START(utf-8) | ||
121 | // DOCUMENT-START | ||
122 | // SCALAR("a scalar",single-quoted) | ||
123 | // DOCUMENT-END | ||
124 | // STREAM-END | ||
125 | // | ||
126 | // 3. Several documents in a stream: | ||
127 | // | ||
128 | // 'a scalar' | ||
129 | // --- | ||
130 | // 'another scalar' | ||
131 | // --- | ||
132 | // 'yet another scalar' | ||
133 | // | ||
134 | // Tokens: | ||
135 | // | ||
136 | // STREAM-START(utf-8) | ||
137 | // SCALAR("a scalar",single-quoted) | ||
138 | // DOCUMENT-START | ||
139 | // SCALAR("another scalar",single-quoted) | ||
140 | // DOCUMENT-START | ||
141 | // SCALAR("yet another scalar",single-quoted) | ||
142 | // STREAM-END | ||
143 | // | ||
144 | // We have already introduced the SCALAR token above. The following tokens are | ||
145 | // used to describe aliases, anchors, tag, and scalars: | ||
146 | // | ||
147 | // ALIAS(anchor) | ||
148 | // ANCHOR(anchor) | ||
149 | // TAG(handle,suffix) | ||
150 | // SCALAR(value,style) | ||
151 | // | ||
152 | // The following series of examples illustrate the usage of these tokens: | ||
153 | // | ||
154 | // 1. A recursive sequence: | ||
155 | // | ||
156 | // &A [ *A ] | ||
157 | // | ||
158 | // Tokens: | ||
159 | // | ||
160 | // STREAM-START(utf-8) | ||
161 | // ANCHOR("A") | ||
162 | // FLOW-SEQUENCE-START | ||
163 | // ALIAS("A") | ||
164 | // FLOW-SEQUENCE-END | ||
165 | // STREAM-END | ||
166 | // | ||
167 | // 2. A tagged scalar: | ||
168 | // | ||
169 | // !!float "3.14" # A good approximation. | ||
170 | // | ||
171 | // Tokens: | ||
172 | // | ||
173 | // STREAM-START(utf-8) | ||
174 | // TAG("!!","float") | ||
175 | // SCALAR("3.14",double-quoted) | ||
176 | // STREAM-END | ||
177 | // | ||
178 | // 3. Various scalar styles: | ||
179 | // | ||
180 | // --- # Implicit empty plain scalars do not produce tokens. | ||
181 | // --- a plain scalar | ||
182 | // --- 'a single-quoted scalar' | ||
183 | // --- "a double-quoted scalar" | ||
184 | // --- |- | ||
185 | // a literal scalar | ||
186 | // --- >- | ||
187 | // a folded | ||
188 | // scalar | ||
189 | // | ||
190 | // Tokens: | ||
191 | // | ||
192 | // STREAM-START(utf-8) | ||
193 | // DOCUMENT-START | ||
194 | // DOCUMENT-START | ||
195 | // SCALAR("a plain scalar",plain) | ||
196 | // DOCUMENT-START | ||
197 | // SCALAR("a single-quoted scalar",single-quoted) | ||
198 | // DOCUMENT-START | ||
199 | // SCALAR("a double-quoted scalar",double-quoted) | ||
200 | // DOCUMENT-START | ||
201 | // SCALAR("a literal scalar",literal) | ||
202 | // DOCUMENT-START | ||
203 | // SCALAR("a folded scalar",folded) | ||
204 | // STREAM-END | ||
205 | // | ||
206 | // Now it's time to review collection-related tokens. We will start with | ||
207 | // flow collections: | ||
208 | // | ||
209 | // FLOW-SEQUENCE-START | ||
210 | // FLOW-SEQUENCE-END | ||
211 | // FLOW-MAPPING-START | ||
212 | // FLOW-MAPPING-END | ||
213 | // FLOW-ENTRY | ||
214 | // KEY | ||
215 | // VALUE | ||
216 | // | ||
217 | // The tokens FLOW-SEQUENCE-START, FLOW-SEQUENCE-END, FLOW-MAPPING-START, and | ||
218 | // FLOW-MAPPING-END represent the indicators '[', ']', '{', and '}' | ||
219 | // correspondingly. FLOW-ENTRY represent the ',' indicator. Finally the | ||
220 | // indicators '?' and ':', which are used for denoting mapping keys and values, | ||
221 | // are represented by the KEY and VALUE tokens. | ||
222 | // | ||
223 | // The following examples show flow collections: | ||
224 | // | ||
225 | // 1. A flow sequence: | ||
226 | // | ||
227 | // [item 1, item 2, item 3] | ||
228 | // | ||
229 | // Tokens: | ||
230 | // | ||
231 | // STREAM-START(utf-8) | ||
232 | // FLOW-SEQUENCE-START | ||
233 | // SCALAR("item 1",plain) | ||
234 | // FLOW-ENTRY | ||
235 | // SCALAR("item 2",plain) | ||
236 | // FLOW-ENTRY | ||
237 | // SCALAR("item 3",plain) | ||
238 | // FLOW-SEQUENCE-END | ||
239 | // STREAM-END | ||
240 | // | ||
241 | // 2. A flow mapping: | ||
242 | // | ||
243 | // { | ||
244 | // a simple key: a value, # Note that the KEY token is produced. | ||
245 | // ? a complex key: another value, | ||
246 | // } | ||
247 | // | ||
248 | // Tokens: | ||
249 | // | ||
250 | // STREAM-START(utf-8) | ||
251 | // FLOW-MAPPING-START | ||
252 | // KEY | ||
253 | // SCALAR("a simple key",plain) | ||
254 | // VALUE | ||
255 | // SCALAR("a value",plain) | ||
256 | // FLOW-ENTRY | ||
257 | // KEY | ||
258 | // SCALAR("a complex key",plain) | ||
259 | // VALUE | ||
260 | // SCALAR("another value",plain) | ||
261 | // FLOW-ENTRY | ||
262 | // FLOW-MAPPING-END | ||
263 | // STREAM-END | ||
264 | // | ||
265 | // A simple key is a key which is not denoted by the '?' indicator. Note that | ||
266 | // the Scanner still produce the KEY token whenever it encounters a simple key. | ||
267 | // | ||
268 | // For scanning block collections, the following tokens are used (note that we | ||
269 | // repeat KEY and VALUE here): | ||
270 | // | ||
271 | // BLOCK-SEQUENCE-START | ||
272 | // BLOCK-MAPPING-START | ||
273 | // BLOCK-END | ||
274 | // BLOCK-ENTRY | ||
275 | // KEY | ||
276 | // VALUE | ||
277 | // | ||
278 | // The tokens BLOCK-SEQUENCE-START and BLOCK-MAPPING-START denote indentation | ||
279 | // increase that precedes a block collection (cf. the INDENT token in Python). | ||
280 | // The token BLOCK-END denote indentation decrease that ends a block collection | ||
281 | // (cf. the DEDENT token in Python). However YAML has some syntax peculiarities | ||
282 | // that make detection of these tokens more complex. | ||
283 | // | ||
284 | // The tokens BLOCK-ENTRY, KEY, and VALUE are used to represent the indicators | ||
285 | // '-', '?', and ':' correspondingly. | ||
286 | // | ||
287 | // The following examples show how the tokens BLOCK-SEQUENCE-START, | ||
288 | // BLOCK-MAPPING-START, and BLOCK-END are emitted by the Scanner: | ||
289 | // | ||
290 | // 1. Block sequences: | ||
291 | // | ||
292 | // - item 1 | ||
293 | // - item 2 | ||
294 | // - | ||
295 | // - item 3.1 | ||
296 | // - item 3.2 | ||
297 | // - | ||
298 | // key 1: value 1 | ||
299 | // key 2: value 2 | ||
300 | // | ||
301 | // Tokens: | ||
302 | // | ||
303 | // STREAM-START(utf-8) | ||
304 | // BLOCK-SEQUENCE-START | ||
305 | // BLOCK-ENTRY | ||
306 | // SCALAR("item 1",plain) | ||
307 | // BLOCK-ENTRY | ||
308 | // SCALAR("item 2",plain) | ||
309 | // BLOCK-ENTRY | ||
310 | // BLOCK-SEQUENCE-START | ||
311 | // BLOCK-ENTRY | ||
312 | // SCALAR("item 3.1",plain) | ||
313 | // BLOCK-ENTRY | ||
314 | // SCALAR("item 3.2",plain) | ||
315 | // BLOCK-END | ||
316 | // BLOCK-ENTRY | ||
317 | // BLOCK-MAPPING-START | ||
318 | // KEY | ||
319 | // SCALAR("key 1",plain) | ||
320 | // VALUE | ||
321 | // SCALAR("value 1",plain) | ||
322 | // KEY | ||
323 | // SCALAR("key 2",plain) | ||
324 | // VALUE | ||
325 | // SCALAR("value 2",plain) | ||
326 | // BLOCK-END | ||
327 | // BLOCK-END | ||
328 | // STREAM-END | ||
329 | // | ||
330 | // 2. Block mappings: | ||
331 | // | ||
332 | // a simple key: a value # The KEY token is produced here. | ||
333 | // ? a complex key | ||
334 | // : another value | ||
335 | // a mapping: | ||
336 | // key 1: value 1 | ||
337 | // key 2: value 2 | ||
338 | // a sequence: | ||
339 | // - item 1 | ||
340 | // - item 2 | ||
341 | // | ||
342 | // Tokens: | ||
343 | // | ||
344 | // STREAM-START(utf-8) | ||
345 | // BLOCK-MAPPING-START | ||
346 | // KEY | ||
347 | // SCALAR("a simple key",plain) | ||
348 | // VALUE | ||
349 | // SCALAR("a value",plain) | ||
350 | // KEY | ||
351 | // SCALAR("a complex key",plain) | ||
352 | // VALUE | ||
353 | // SCALAR("another value",plain) | ||
354 | // KEY | ||
355 | // SCALAR("a mapping",plain) | ||
356 | // BLOCK-MAPPING-START | ||
357 | // KEY | ||
358 | // SCALAR("key 1",plain) | ||
359 | // VALUE | ||
360 | // SCALAR("value 1",plain) | ||
361 | // KEY | ||
362 | // SCALAR("key 2",plain) | ||
363 | // VALUE | ||
364 | // SCALAR("value 2",plain) | ||
365 | // BLOCK-END | ||
366 | // KEY | ||
367 | // SCALAR("a sequence",plain) | ||
368 | // VALUE | ||
369 | // BLOCK-SEQUENCE-START | ||
370 | // BLOCK-ENTRY | ||
371 | // SCALAR("item 1",plain) | ||
372 | // BLOCK-ENTRY | ||
373 | // SCALAR("item 2",plain) | ||
374 | // BLOCK-END | ||
375 | // BLOCK-END | ||
376 | // STREAM-END | ||
377 | // | ||
378 | // YAML does not always require to start a new block collection from a new | ||
379 | // line. If the current line contains only '-', '?', and ':' indicators, a new | ||
380 | // block collection may start at the current line. The following examples | ||
381 | // illustrate this case: | ||
382 | // | ||
383 | // 1. Collections in a sequence: | ||
384 | // | ||
385 | // - - item 1 | ||
386 | // - item 2 | ||
387 | // - key 1: value 1 | ||
388 | // key 2: value 2 | ||
389 | // - ? complex key | ||
390 | // : complex value | ||
391 | // | ||
392 | // Tokens: | ||
393 | // | ||
394 | // STREAM-START(utf-8) | ||
395 | // BLOCK-SEQUENCE-START | ||
396 | // BLOCK-ENTRY | ||
397 | // BLOCK-SEQUENCE-START | ||
398 | // BLOCK-ENTRY | ||
399 | // SCALAR("item 1",plain) | ||
400 | // BLOCK-ENTRY | ||
401 | // SCALAR("item 2",plain) | ||
402 | // BLOCK-END | ||
403 | // BLOCK-ENTRY | ||
404 | // BLOCK-MAPPING-START | ||
405 | // KEY | ||
406 | // SCALAR("key 1",plain) | ||
407 | // VALUE | ||
408 | // SCALAR("value 1",plain) | ||
409 | // KEY | ||
410 | // SCALAR("key 2",plain) | ||
411 | // VALUE | ||
412 | // SCALAR("value 2",plain) | ||
413 | // BLOCK-END | ||
414 | // BLOCK-ENTRY | ||
415 | // BLOCK-MAPPING-START | ||
416 | // KEY | ||
417 | // SCALAR("complex key") | ||
418 | // VALUE | ||
419 | // SCALAR("complex value") | ||
420 | // BLOCK-END | ||
421 | // BLOCK-END | ||
422 | // STREAM-END | ||
423 | // | ||
424 | // 2. Collections in a mapping: | ||
425 | // | ||
426 | // ? a sequence | ||
427 | // : - item 1 | ||
428 | // - item 2 | ||
429 | // ? a mapping | ||
430 | // : key 1: value 1 | ||
431 | // key 2: value 2 | ||
432 | // | ||
433 | // Tokens: | ||
434 | // | ||
435 | // STREAM-START(utf-8) | ||
436 | // BLOCK-MAPPING-START | ||
437 | // KEY | ||
438 | // SCALAR("a sequence",plain) | ||
439 | // VALUE | ||
440 | // BLOCK-SEQUENCE-START | ||
441 | // BLOCK-ENTRY | ||
442 | // SCALAR("item 1",plain) | ||
443 | // BLOCK-ENTRY | ||
444 | // SCALAR("item 2",plain) | ||
445 | // BLOCK-END | ||
446 | // KEY | ||
447 | // SCALAR("a mapping",plain) | ||
448 | // VALUE | ||
449 | // BLOCK-MAPPING-START | ||
450 | // KEY | ||
451 | // SCALAR("key 1",plain) | ||
452 | // VALUE | ||
453 | // SCALAR("value 1",plain) | ||
454 | // KEY | ||
455 | // SCALAR("key 2",plain) | ||
456 | // VALUE | ||
457 | // SCALAR("value 2",plain) | ||
458 | // BLOCK-END | ||
459 | // BLOCK-END | ||
460 | // STREAM-END | ||
461 | // | ||
462 | // YAML also permits non-indented sequences if they are included into a block | ||
463 | // mapping. In this case, the token BLOCK-SEQUENCE-START is not produced: | ||
464 | // | ||
465 | // key: | ||
466 | // - item 1 # BLOCK-SEQUENCE-START is NOT produced here. | ||
467 | // - item 2 | ||
468 | // | ||
469 | // Tokens: | ||
470 | // | ||
471 | // STREAM-START(utf-8) | ||
472 | // BLOCK-MAPPING-START | ||
473 | // KEY | ||
474 | // SCALAR("key",plain) | ||
475 | // VALUE | ||
476 | // BLOCK-ENTRY | ||
477 | // SCALAR("item 1",plain) | ||
478 | // BLOCK-ENTRY | ||
479 | // SCALAR("item 2",plain) | ||
480 | // BLOCK-END | ||
481 | // | ||
482 | |||
483 | // Ensure that the buffer contains the required number of characters. | ||
484 | // Return true on success, false on failure (reader error or memory error). | ||
485 | func cache(parser *yaml_parser_t, length int) bool { | ||
486 | // [Go] This was inlined: !cache(A, B) -> unread < B && !update(A, B) | ||
487 | return parser.unread >= length || yaml_parser_update_buffer(parser, length) | ||
488 | } | ||
489 | |||
// Advance the buffer pointer.
// Consumes exactly one character: the mark's index/column and the unread
// count each change by one, while buffer_pos advances by the character's
// byte width (width() handles multi-byte sequences).
func skip(parser *yaml_parser_t) {
	parser.mark.index++
	parser.mark.column++
	parser.unread--
	parser.buffer_pos += width(parser.buffer[parser.buffer_pos])
}
497 | |||
// skip_line advances the buffer pointer past a line break, if one is at
// the current position, resetting the mark's column and bumping its line.
// A CR LF pair is consumed as a single break (two characters); any other
// break recognized by is_break is consumed as one character.
func skip_line(parser *yaml_parser_t) {
	if is_crlf(parser.buffer, parser.buffer_pos) {
		// CR LF: two characters consumed, one line break.
		parser.mark.index += 2
		parser.mark.column = 0
		parser.mark.line++
		parser.unread -= 2
		parser.buffer_pos += 2
	} else if is_break(parser.buffer, parser.buffer_pos) {
		// Single break character (possibly multi-byte, hence width()).
		parser.mark.index++
		parser.mark.column = 0
		parser.mark.line++
		parser.unread--
		parser.buffer_pos += width(parser.buffer[parser.buffer_pos])
	}
	// No break at the current position: nothing is consumed.
}
513 | |||
// Copy a character to a string buffer and advance pointers.
// Appends the character at the current buffer position to s (allocating a
// small initial capacity if s is empty) and returns the possibly-regrown
// slice. The mark and unread count are updated for one consumed character.
func read(parser *yaml_parser_t, s []byte) []byte {
	w := width(parser.buffer[parser.buffer_pos])
	if w == 0 {
		// width() returned 0: the leading byte is not a valid UTF-8 start.
		panic("invalid character sequence")
	}
	if len(s) == 0 {
		s = make([]byte, 0, 32)
	}
	if w == 1 && len(s)+w <= cap(s) {
		// Fast path: a single-byte character that fits within the existing
		// capacity — extend the slice in place and store the byte directly.
		s = s[:len(s)+1]
		s[len(s)-1] = parser.buffer[parser.buffer_pos]
		parser.buffer_pos++
	} else {
		// Slow path: multi-byte character or capacity exhausted; let append
		// copy all w bytes (and regrow if necessary).
		s = append(s, parser.buffer[parser.buffer_pos:parser.buffer_pos+w]...)
		parser.buffer_pos += w
	}
	// One character consumed, regardless of its byte width.
	parser.mark.index++
	parser.mark.column++
	parser.unread--
	return s
}
536 | |||
// Copy a line break character to a string buffer and advance pointers.
// All break forms are normalized to '\n' in s except LS/PS, which are
// copied through verbatim. If no break is at the current position, s is
// returned unchanged and nothing is consumed.
func read_line(parser *yaml_parser_t, s []byte) []byte {
	buf := parser.buffer
	pos := parser.buffer_pos
	switch {
	case buf[pos] == '\r' && buf[pos+1] == '\n':
		// CR LF . LF
		s = append(s, '\n')
		parser.buffer_pos += 2
		// Two source characters were consumed for one break, so account
		// for the extra one here; the shared bookkeeping below adds the
		// other index++/unread--.
		parser.mark.index++
		parser.unread--
	case buf[pos] == '\r' || buf[pos] == '\n':
		// CR|LF . LF
		s = append(s, '\n')
		parser.buffer_pos += 1
	case buf[pos] == '\xC2' && buf[pos+1] == '\x85':
		// NEL . LF (U+0085 is two bytes but a single character)
		s = append(s, '\n')
		parser.buffer_pos += 2
	case buf[pos] == '\xE2' && buf[pos+1] == '\x80' && (buf[pos+2] == '\xA8' || buf[pos+2] == '\xA9'):
		// LS|PS . LS|PS (copied through as-is, not normalized)
		s = append(s, buf[parser.buffer_pos:pos+3]...)
		parser.buffer_pos += 3
	default:
		// Not a line break: consume nothing.
		return s
	}
	// Shared bookkeeping: one break character consumed, new line started.
	parser.mark.index++
	parser.mark.column = 0
	parser.mark.line++
	parser.unread--
	return s
}
569 | |||
// Get the next token.
// Pops the next token from the parser's queue into *token, fetching more
// tokens from the scanner first if the queue is empty. Returns false only
// on a scanner/reader failure; after STREAM-END (or a previously recorded
// error) it returns true with *token left zeroed, so callers must consult
// parser state to distinguish that case.
func yaml_parser_scan(parser *yaml_parser_t, token *yaml_token_t) bool {
	// Erase the token object.
	*token = yaml_token_t{} // [Go] Is this necessary?

	// No tokens after STREAM-END or error.
	if parser.stream_end_produced || parser.error != yaml_NO_ERROR {
		return true
	}

	// Ensure that the tokens queue contains enough tokens.
	if !parser.token_available {
		if !yaml_parser_fetch_more_tokens(parser) {
			return false
		}
	}

	// Fetch the next token from the queue.
	*token = parser.tokens[parser.tokens_head]
	parser.tokens_head++
	parser.tokens_parsed++
	parser.token_available = false

	// Remember that the stream ended so later calls short-circuit above.
	if token.typ == yaml_STREAM_END_TOKEN {
		parser.stream_end_produced = true
	}
	return true
}
598 | |||
// Set the scanner error and return false.
// Records the error kind plus the context string/mark and the problem
// string on the parser; the problem position is the parser's current mark.
// Always returns false so callers can use it directly in a return.
func yaml_parser_set_scanner_error(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string) bool {
	parser.error = yaml_SCANNER_ERROR
	parser.context = context
	parser.context_mark = context_mark
	parser.problem = problem
	parser.problem_mark = parser.mark
	return false
}
608 | |||
609 | func yaml_parser_set_scanner_tag_error(parser *yaml_parser_t, directive bool, context_mark yaml_mark_t, problem string) bool { | ||
610 | context := "while parsing a tag" | ||
611 | if directive { | ||
612 | context = "while parsing a %TAG directive" | ||
613 | } | ||
614 | return yaml_parser_set_scanner_error(parser, context, context_mark, problem) | ||
615 | } | ||
616 | |||
// trace is a debugging helper: it prints args prefixed with "+++" on entry
// and returns a closure that prints the same args prefixed with "---",
// intended to be invoked (e.g. via defer) on exit.
func trace(args ...interface{}) func() {
	prefixed := func(prefix string) []interface{} {
		return append([]interface{}{prefix}, args...)
	}
	fmt.Println(prefixed("+++")...)
	leave := prefixed("---")
	return func() { fmt.Println(leave...) }
}
623 | |||
// Ensure that the tokens queue contains at least one token which can be
// returned to the Parser.
// More tokens are fetched while the queue is empty or while any still-
// possible simple key could start at the token the parser will read next;
// in the latter case the scanner cannot yet decide whether to emit a KEY
// token, so it must keep scanning. Returns false on scanner failure.
func yaml_parser_fetch_more_tokens(parser *yaml_parser_t) bool {
	// While we need more tokens to fetch, do it.
	for {
		// Check if we really need to fetch more tokens.
		need_more_tokens := false

		if parser.tokens_head == len(parser.tokens) {
			// Queue is empty.
			need_more_tokens = true
		} else {
			// Check if any potential simple key may occupy the head position.
			// First drop keys that can no longer be valid (stale check may
			// itself fail if a required key expired).
			if !yaml_parser_stale_simple_keys(parser) {
				return false
			}

			for i := range parser.simple_keys {
				simple_key := &parser.simple_keys[i]
				if simple_key.possible && simple_key.token_number == parser.tokens_parsed {
					need_more_tokens = true
					break
				}
			}
		}

		// We are finished.
		if !need_more_tokens {
			break
		}
		// Fetch the next token.
		if !yaml_parser_fetch_next_token(parser) {
			return false
		}
	}

	parser.token_available = true
	return true
}
663 | |||
// The dispatcher for token fetchers.
// Skips leading whitespace/comments, performs per-token housekeeping
// (stale simple keys, indentation unrolling, buffer refill), then examines
// the current character(s) to decide which fetch_* routine produces the
// next token. The order of the checks matters: more specific multi-
// character indicators (document start/end) are tested before single-
// character ones, and the plain-scalar test is the catch-all at the end.
// Returns false if any step records an error on the parser.
func yaml_parser_fetch_next_token(parser *yaml_parser_t) bool {
	// Ensure that the buffer is initialized.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}

	// Check if we just started scanning. Fetch STREAM-START then.
	if !parser.stream_start_produced {
		return yaml_parser_fetch_stream_start(parser)
	}

	// Eat whitespaces and comments until we reach the next token.
	if !yaml_parser_scan_to_next_token(parser) {
		return false
	}

	// Remove obsolete potential simple keys.
	if !yaml_parser_stale_simple_keys(parser) {
		return false
	}

	// Check the indentation level against the current column.
	if !yaml_parser_unroll_indent(parser, parser.mark.column) {
		return false
	}

	// Ensure that the buffer contains at least 4 characters. 4 is the length
	// of the longest indicators ('--- ' and '... ').
	if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) {
		return false
	}

	// Is it the end of the stream?
	if is_z(parser.buffer, parser.buffer_pos) {
		return yaml_parser_fetch_stream_end(parser)
	}

	// Is it a directive? Directives ('%') are only recognized in column 0.
	if parser.mark.column == 0 && parser.buffer[parser.buffer_pos] == '%' {
		return yaml_parser_fetch_directive(parser)
	}

	buf := parser.buffer
	pos := parser.buffer_pos

	// Is it the document start indicator?
	if parser.mark.column == 0 && buf[pos] == '-' && buf[pos+1] == '-' && buf[pos+2] == '-' && is_blankz(buf, pos+3) {
		return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_START_TOKEN)
	}

	// Is it the document end indicator?
	if parser.mark.column == 0 && buf[pos] == '.' && buf[pos+1] == '.' && buf[pos+2] == '.' && is_blankz(buf, pos+3) {
		return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_END_TOKEN)
	}

	// Is it the flow sequence start indicator?
	if buf[pos] == '[' {
		return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_SEQUENCE_START_TOKEN)
	}

	// Is it the flow mapping start indicator?
	if parser.buffer[parser.buffer_pos] == '{' {
		return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_MAPPING_START_TOKEN)
	}

	// Is it the flow sequence end indicator?
	if parser.buffer[parser.buffer_pos] == ']' {
		return yaml_parser_fetch_flow_collection_end(parser,
			yaml_FLOW_SEQUENCE_END_TOKEN)
	}

	// Is it the flow mapping end indicator?
	if parser.buffer[parser.buffer_pos] == '}' {
		return yaml_parser_fetch_flow_collection_end(parser,
			yaml_FLOW_MAPPING_END_TOKEN)
	}

	// Is it the flow entry indicator?
	if parser.buffer[parser.buffer_pos] == ',' {
		return yaml_parser_fetch_flow_entry(parser)
	}

	// Is it the block entry indicator? ('-' followed by a blank/break/EOF)
	if parser.buffer[parser.buffer_pos] == '-' && is_blankz(parser.buffer, parser.buffer_pos+1) {
		return yaml_parser_fetch_block_entry(parser)
	}

	// Is it the key indicator? In flow context a bare '?' suffices; in
	// block context it must be followed by a blank/break/EOF.
	if parser.buffer[parser.buffer_pos] == '?' && (parser.flow_level > 0 || is_blankz(parser.buffer, parser.buffer_pos+1)) {
		return yaml_parser_fetch_key(parser)
	}

	// Is it the value indicator? Same flow/block distinction as for '?'.
	if parser.buffer[parser.buffer_pos] == ':' && (parser.flow_level > 0 || is_blankz(parser.buffer, parser.buffer_pos+1)) {
		return yaml_parser_fetch_value(parser)
	}

	// Is it an alias?
	if parser.buffer[parser.buffer_pos] == '*' {
		return yaml_parser_fetch_anchor(parser, yaml_ALIAS_TOKEN)
	}

	// Is it an anchor?
	if parser.buffer[parser.buffer_pos] == '&' {
		return yaml_parser_fetch_anchor(parser, yaml_ANCHOR_TOKEN)
	}

	// Is it a tag?
	if parser.buffer[parser.buffer_pos] == '!' {
		return yaml_parser_fetch_tag(parser)
	}

	// Is it a literal scalar? (block context only)
	if parser.buffer[parser.buffer_pos] == '|' && parser.flow_level == 0 {
		return yaml_parser_fetch_block_scalar(parser, true)
	}

	// Is it a folded scalar? (block context only)
	if parser.buffer[parser.buffer_pos] == '>' && parser.flow_level == 0 {
		return yaml_parser_fetch_block_scalar(parser, false)
	}

	// Is it a single-quoted scalar?
	if parser.buffer[parser.buffer_pos] == '\'' {
		return yaml_parser_fetch_flow_scalar(parser, true)
	}

	// Is it a double-quoted scalar?
	if parser.buffer[parser.buffer_pos] == '"' {
		return yaml_parser_fetch_flow_scalar(parser, false)
	}

	// Is it a plain scalar?
	//
	// A plain scalar may start with any non-blank characters except
	//
	//      '-', '?', ':', ',', '[', ']', '{', '}',
	//      '#', '&', '*', '!', '|', '>', '\'', '\"',
	//      '%', '@', '`'.
	//
	// In the block context (and, for the '-' indicator, in the flow context
	// too), it may also start with the characters
	//
	//      '-', '?', ':'
	//
	// if it is followed by a non-space character.
	//
	// The last rule is more restrictive than the specification requires.
	// [Go] Make this logic more reasonable.
	//switch parser.buffer[parser.buffer_pos] {
	//case '-', '?', ':', ',', '?', '-', ',', ':', ']', '[', '}', '{', '&', '#', '!', '*', '>', '|', '"', '\'', '@', '%', '-', '`':
	//}
	if !(is_blankz(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == '-' ||
		parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == ':' ||
		parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == '[' ||
		parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' ||
		parser.buffer[parser.buffer_pos] == '}' || parser.buffer[parser.buffer_pos] == '#' ||
		parser.buffer[parser.buffer_pos] == '&' || parser.buffer[parser.buffer_pos] == '*' ||
		parser.buffer[parser.buffer_pos] == '!' || parser.buffer[parser.buffer_pos] == '|' ||
		parser.buffer[parser.buffer_pos] == '>' || parser.buffer[parser.buffer_pos] == '\'' ||
		parser.buffer[parser.buffer_pos] == '"' || parser.buffer[parser.buffer_pos] == '%' ||
		parser.buffer[parser.buffer_pos] == '@' || parser.buffer[parser.buffer_pos] == '`') ||
		(parser.buffer[parser.buffer_pos] == '-' && !is_blank(parser.buffer, parser.buffer_pos+1)) ||
		(parser.flow_level == 0 &&
			(parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == ':') &&
			!is_blankz(parser.buffer, parser.buffer_pos+1)) {
		return yaml_parser_fetch_plain_scalar(parser)
	}

	// If we don't determine the token type so far, it is an error.
	return yaml_parser_set_scanner_error(parser,
		"while scanning for the next token", parser.mark,
		"found character that cannot start any token")
}
839 | |||
840 | // Check the list of potential simple keys and remove the positions that | ||
841 | // cannot contain simple keys anymore. | ||
842 | func yaml_parser_stale_simple_keys(parser *yaml_parser_t) bool { | ||
843 | // Check for a potential simple key for each flow level. | ||
844 | for i := range parser.simple_keys { | ||
845 | simple_key := &parser.simple_keys[i] | ||
846 | |||
847 | // The specification requires that a simple key | ||
848 | // | ||
849 | // - is limited to a single line, | ||
850 | // - is shorter than 1024 characters. | ||
851 | if simple_key.possible && (simple_key.mark.line < parser.mark.line || simple_key.mark.index+1024 < parser.mark.index) { | ||
852 | |||
853 | // Check if the potential simple key to be removed is required. | ||
854 | if simple_key.required { | ||
855 | return yaml_parser_set_scanner_error(parser, | ||
856 | "while scanning a simple key", simple_key.mark, | ||
857 | "could not find expected ':'") | ||
858 | } | ||
859 | simple_key.possible = false | ||
860 | } | ||
861 | } | ||
862 | return true | ||
863 | } | ||
864 | |||
// Check if a simple key may start at the current position and add it if
// needed.
//
// The saved key records the queue position (token_number) where a KEY token
// must later be inserted if a ':' confirms the key (see
// yaml_parser_fetch_value).
func yaml_parser_save_simple_key(parser *yaml_parser_t) bool {
	// A simple key is required at the current position if the scanner is in
	// the block context and the current column coincides with the indentation
	// level.

	required := parser.flow_level == 0 && parser.indent == parser.mark.column

	//
	// If the current position may start a simple key, save it.
	//
	if parser.simple_key_allowed {
		simple_key := yaml_simple_key_t{
			possible: true,
			required: required,
			// Absolute token number: tokens already handed to the parser
			// plus tokens still sitting in the in-memory queue.
			token_number: parser.tokens_parsed + (len(parser.tokens) - parser.tokens_head),
		}
		simple_key.mark = parser.mark

		// Discard (and validate) any previous candidate on this flow level.
		if !yaml_parser_remove_simple_key(parser) {
			return false
		}
		// The top of the simple_keys stack always mirrors the current
		// flow level.
		parser.simple_keys[len(parser.simple_keys)-1] = simple_key
	}
	return true
}
892 | |||
893 | // Remove a potential simple key at the current flow level. | ||
894 | func yaml_parser_remove_simple_key(parser *yaml_parser_t) bool { | ||
895 | i := len(parser.simple_keys) - 1 | ||
896 | if parser.simple_keys[i].possible { | ||
897 | // If the key is required, it is an error. | ||
898 | if parser.simple_keys[i].required { | ||
899 | return yaml_parser_set_scanner_error(parser, | ||
900 | "while scanning a simple key", parser.simple_keys[i].mark, | ||
901 | "could not find expected ':'") | ||
902 | } | ||
903 | } | ||
904 | // Remove the key from the stack. | ||
905 | parser.simple_keys[i].possible = false | ||
906 | return true | ||
907 | } | ||
908 | |||
909 | // Increase the flow level and resize the simple key list if needed. | ||
910 | func yaml_parser_increase_flow_level(parser *yaml_parser_t) bool { | ||
911 | // Reset the simple key on the next level. | ||
912 | parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{}) | ||
913 | |||
914 | // Increase the flow level. | ||
915 | parser.flow_level++ | ||
916 | return true | ||
917 | } | ||
918 | |||
919 | // Decrease the flow level. | ||
920 | func yaml_parser_decrease_flow_level(parser *yaml_parser_t) bool { | ||
921 | if parser.flow_level > 0 { | ||
922 | parser.flow_level-- | ||
923 | parser.simple_keys = parser.simple_keys[:len(parser.simple_keys)-1] | ||
924 | } | ||
925 | return true | ||
926 | } | ||
927 | |||
// Push the current indentation level to the stack and set the new level
// if the current column is greater than the indentation level. In that case,
// append or insert the specified token into the token queue.
//
// number is the absolute token number at which to insert the token, or -1 to
// append it at the tail of the queue.
func yaml_parser_roll_indent(parser *yaml_parser_t, column, number int, typ yaml_token_type_t, mark yaml_mark_t) bool {
	// In the flow context, indentation is not tracked: do nothing.
	if parser.flow_level > 0 {
		return true
	}

	if parser.indent < column {
		// Push the current indentation level to the stack and set the new
		// indentation level.
		parser.indents = append(parser.indents, parser.indent)
		parser.indent = column

		// Create a token and insert it into the queue.
		token := yaml_token_t{
			typ:        typ,
			start_mark: mark,
			end_mark:   mark,
		}
		if number > -1 {
			// Translate the absolute token number into an index into the
			// in-memory queue (tokens_parsed tokens were already consumed).
			number -= parser.tokens_parsed
		}
		yaml_insert_token(parser, number, &token)
	}
	return true
}
956 | |||
// Pop indentation levels from the indents stack until the current level
// becomes less or equal to the column. For each indentation level, append
// the BLOCK-END token.
//
// Called with column == -1 to close every open block level (end of stream,
// document boundary, directive).
func yaml_parser_unroll_indent(parser *yaml_parser_t, column int) bool {
	// In the flow context, indentation is not tracked: do nothing.
	if parser.flow_level > 0 {
		return true
	}

	// Loop through the indentation levels in the stack.
	for parser.indent > column {
		// Create a BLOCK-END token and append it to the queue. It is
		// zero-width: start and end marks coincide at the current position.
		token := yaml_token_t{
			typ:        yaml_BLOCK_END_TOKEN,
			start_mark: parser.mark,
			end_mark:   parser.mark,
		}
		yaml_insert_token(parser, -1, &token)

		// Pop the indentation level.
		parser.indent = parser.indents[len(parser.indents)-1]
		parser.indents = parser.indents[:len(parser.indents)-1]
	}
	return true
}
982 | |||
983 | // Initialize the scanner and produce the STREAM-START token. | ||
984 | func yaml_parser_fetch_stream_start(parser *yaml_parser_t) bool { | ||
985 | |||
986 | // Set the initial indentation. | ||
987 | parser.indent = -1 | ||
988 | |||
989 | // Initialize the simple key stack. | ||
990 | parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{}) | ||
991 | |||
992 | // A simple key is allowed at the beginning of the stream. | ||
993 | parser.simple_key_allowed = true | ||
994 | |||
995 | // We have started. | ||
996 | parser.stream_start_produced = true | ||
997 | |||
998 | // Create the STREAM-START token and append it to the queue. | ||
999 | token := yaml_token_t{ | ||
1000 | typ: yaml_STREAM_START_TOKEN, | ||
1001 | start_mark: parser.mark, | ||
1002 | end_mark: parser.mark, | ||
1003 | encoding: parser.encoding, | ||
1004 | } | ||
1005 | yaml_insert_token(parser, -1, &token) | ||
1006 | return true | ||
1007 | } | ||
1008 | |||
1009 | // Produce the STREAM-END token and shut down the scanner. | ||
1010 | func yaml_parser_fetch_stream_end(parser *yaml_parser_t) bool { | ||
1011 | |||
1012 | // Force new line. | ||
1013 | if parser.mark.column != 0 { | ||
1014 | parser.mark.column = 0 | ||
1015 | parser.mark.line++ | ||
1016 | } | ||
1017 | |||
1018 | // Reset the indentation level. | ||
1019 | if !yaml_parser_unroll_indent(parser, -1) { | ||
1020 | return false | ||
1021 | } | ||
1022 | |||
1023 | // Reset simple keys. | ||
1024 | if !yaml_parser_remove_simple_key(parser) { | ||
1025 | return false | ||
1026 | } | ||
1027 | |||
1028 | parser.simple_key_allowed = false | ||
1029 | |||
1030 | // Create the STREAM-END token and append it to the queue. | ||
1031 | token := yaml_token_t{ | ||
1032 | typ: yaml_STREAM_END_TOKEN, | ||
1033 | start_mark: parser.mark, | ||
1034 | end_mark: parser.mark, | ||
1035 | } | ||
1036 | yaml_insert_token(parser, -1, &token) | ||
1037 | return true | ||
1038 | } | ||
1039 | |||
1040 | // Produce a VERSION-DIRECTIVE or TAG-DIRECTIVE token. | ||
1041 | func yaml_parser_fetch_directive(parser *yaml_parser_t) bool { | ||
1042 | // Reset the indentation level. | ||
1043 | if !yaml_parser_unroll_indent(parser, -1) { | ||
1044 | return false | ||
1045 | } | ||
1046 | |||
1047 | // Reset simple keys. | ||
1048 | if !yaml_parser_remove_simple_key(parser) { | ||
1049 | return false | ||
1050 | } | ||
1051 | |||
1052 | parser.simple_key_allowed = false | ||
1053 | |||
1054 | // Create the YAML-DIRECTIVE or TAG-DIRECTIVE token. | ||
1055 | token := yaml_token_t{} | ||
1056 | if !yaml_parser_scan_directive(parser, &token) { | ||
1057 | return false | ||
1058 | } | ||
1059 | // Append the token to the queue. | ||
1060 | yaml_insert_token(parser, -1, &token) | ||
1061 | return true | ||
1062 | } | ||
1063 | |||
1064 | // Produce the DOCUMENT-START or DOCUMENT-END token. | ||
1065 | func yaml_parser_fetch_document_indicator(parser *yaml_parser_t, typ yaml_token_type_t) bool { | ||
1066 | // Reset the indentation level. | ||
1067 | if !yaml_parser_unroll_indent(parser, -1) { | ||
1068 | return false | ||
1069 | } | ||
1070 | |||
1071 | // Reset simple keys. | ||
1072 | if !yaml_parser_remove_simple_key(parser) { | ||
1073 | return false | ||
1074 | } | ||
1075 | |||
1076 | parser.simple_key_allowed = false | ||
1077 | |||
1078 | // Consume the token. | ||
1079 | start_mark := parser.mark | ||
1080 | |||
1081 | skip(parser) | ||
1082 | skip(parser) | ||
1083 | skip(parser) | ||
1084 | |||
1085 | end_mark := parser.mark | ||
1086 | |||
1087 | // Create the DOCUMENT-START or DOCUMENT-END token. | ||
1088 | token := yaml_token_t{ | ||
1089 | typ: typ, | ||
1090 | start_mark: start_mark, | ||
1091 | end_mark: end_mark, | ||
1092 | } | ||
1093 | // Append the token to the queue. | ||
1094 | yaml_insert_token(parser, -1, &token) | ||
1095 | return true | ||
1096 | } | ||
1097 | |||
1098 | // Produce the FLOW-SEQUENCE-START or FLOW-MAPPING-START token. | ||
1099 | func yaml_parser_fetch_flow_collection_start(parser *yaml_parser_t, typ yaml_token_type_t) bool { | ||
1100 | // The indicators '[' and '{' may start a simple key. | ||
1101 | if !yaml_parser_save_simple_key(parser) { | ||
1102 | return false | ||
1103 | } | ||
1104 | |||
1105 | // Increase the flow level. | ||
1106 | if !yaml_parser_increase_flow_level(parser) { | ||
1107 | return false | ||
1108 | } | ||
1109 | |||
1110 | // A simple key may follow the indicators '[' and '{'. | ||
1111 | parser.simple_key_allowed = true | ||
1112 | |||
1113 | // Consume the token. | ||
1114 | start_mark := parser.mark | ||
1115 | skip(parser) | ||
1116 | end_mark := parser.mark | ||
1117 | |||
1118 | // Create the FLOW-SEQUENCE-START of FLOW-MAPPING-START token. | ||
1119 | token := yaml_token_t{ | ||
1120 | typ: typ, | ||
1121 | start_mark: start_mark, | ||
1122 | end_mark: end_mark, | ||
1123 | } | ||
1124 | // Append the token to the queue. | ||
1125 | yaml_insert_token(parser, -1, &token) | ||
1126 | return true | ||
1127 | } | ||
1128 | |||
1129 | // Produce the FLOW-SEQUENCE-END or FLOW-MAPPING-END token. | ||
1130 | func yaml_parser_fetch_flow_collection_end(parser *yaml_parser_t, typ yaml_token_type_t) bool { | ||
1131 | // Reset any potential simple key on the current flow level. | ||
1132 | if !yaml_parser_remove_simple_key(parser) { | ||
1133 | return false | ||
1134 | } | ||
1135 | |||
1136 | // Decrease the flow level. | ||
1137 | if !yaml_parser_decrease_flow_level(parser) { | ||
1138 | return false | ||
1139 | } | ||
1140 | |||
1141 | // No simple keys after the indicators ']' and '}'. | ||
1142 | parser.simple_key_allowed = false | ||
1143 | |||
1144 | // Consume the token. | ||
1145 | |||
1146 | start_mark := parser.mark | ||
1147 | skip(parser) | ||
1148 | end_mark := parser.mark | ||
1149 | |||
1150 | // Create the FLOW-SEQUENCE-END of FLOW-MAPPING-END token. | ||
1151 | token := yaml_token_t{ | ||
1152 | typ: typ, | ||
1153 | start_mark: start_mark, | ||
1154 | end_mark: end_mark, | ||
1155 | } | ||
1156 | // Append the token to the queue. | ||
1157 | yaml_insert_token(parser, -1, &token) | ||
1158 | return true | ||
1159 | } | ||
1160 | |||
1161 | // Produce the FLOW-ENTRY token. | ||
1162 | func yaml_parser_fetch_flow_entry(parser *yaml_parser_t) bool { | ||
1163 | // Reset any potential simple keys on the current flow level. | ||
1164 | if !yaml_parser_remove_simple_key(parser) { | ||
1165 | return false | ||
1166 | } | ||
1167 | |||
1168 | // Simple keys are allowed after ','. | ||
1169 | parser.simple_key_allowed = true | ||
1170 | |||
1171 | // Consume the token. | ||
1172 | start_mark := parser.mark | ||
1173 | skip(parser) | ||
1174 | end_mark := parser.mark | ||
1175 | |||
1176 | // Create the FLOW-ENTRY token and append it to the queue. | ||
1177 | token := yaml_token_t{ | ||
1178 | typ: yaml_FLOW_ENTRY_TOKEN, | ||
1179 | start_mark: start_mark, | ||
1180 | end_mark: end_mark, | ||
1181 | } | ||
1182 | yaml_insert_token(parser, -1, &token) | ||
1183 | return true | ||
1184 | } | ||
1185 | |||
// Produce the BLOCK-ENTRY token (the '-' indicator in a block sequence).
func yaml_parser_fetch_block_entry(parser *yaml_parser_t) bool {
	// Check if the scanner is in the block context.
	if parser.flow_level == 0 {
		// Check if we are allowed to start a new entry.
		if !parser.simple_key_allowed {
			return yaml_parser_set_scanner_error(parser, "", parser.mark,
				"block sequence entries are not allowed in this context")
		}
		// Add the BLOCK-SEQUENCE-START token if needed (i.e. when this '-'
		// opens a deeper indentation level than the current one).
		if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_SEQUENCE_START_TOKEN, parser.mark) {
			return false
		}
	} else {
		// It is an error for the '-' indicator to occur in the flow context,
		// but we let the Parser detect and report about it because the Parser
		// is able to point to the context.
	}

	// Reset any potential simple keys on the current flow level.
	if !yaml_parser_remove_simple_key(parser) {
		return false
	}

	// Simple keys are allowed after '-'.
	parser.simple_key_allowed = true

	// Consume the '-' indicator.
	start_mark := parser.mark
	skip(parser)
	end_mark := parser.mark

	// Create the BLOCK-ENTRY token and append it to the queue.
	token := yaml_token_t{
		typ:        yaml_BLOCK_ENTRY_TOKEN,
		start_mark: start_mark,
		end_mark:   end_mark,
	}
	yaml_insert_token(parser, -1, &token)
	return true
}
1227 | |||
// Produce the KEY token (the explicit '?' key indicator).
func yaml_parser_fetch_key(parser *yaml_parser_t) bool {

	// In the block context, additional checks are required.
	if parser.flow_level == 0 {
		// Check if we are allowed to start a new key (not necessarily simple).
		if !parser.simple_key_allowed {
			return yaml_parser_set_scanner_error(parser, "", parser.mark,
				"mapping keys are not allowed in this context")
		}
		// Add the BLOCK-MAPPING-START token if needed.
		if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) {
			return false
		}
	}

	// Reset any potential simple keys on the current flow level: an explicit
	// '?' key supersedes them.
	if !yaml_parser_remove_simple_key(parser) {
		return false
	}

	// Simple keys are allowed after '?' in the block context (but not in the
	// flow context).
	parser.simple_key_allowed = parser.flow_level == 0

	// Consume the '?' indicator.
	start_mark := parser.mark
	skip(parser)
	end_mark := parser.mark

	// Create the KEY token and append it to the queue.
	token := yaml_token_t{
		typ:        yaml_KEY_TOKEN,
		start_mark: start_mark,
		end_mark:   end_mark,
	}
	yaml_insert_token(parser, -1, &token)
	return true
}
1266 | |||
// Produce the VALUE token (the ':' indicator).
//
// If a simple key was pending, a KEY token is retroactively inserted into
// the queue at the position recorded when the key candidate was saved.
func yaml_parser_fetch_value(parser *yaml_parser_t) bool {

	simple_key := &parser.simple_keys[len(parser.simple_keys)-1]

	// Have we found a simple key?
	if simple_key.possible {
		// Create the KEY token and insert it into the queue at the saved
		// position (converted from an absolute token number to a queue index).
		token := yaml_token_t{
			typ:        yaml_KEY_TOKEN,
			start_mark: simple_key.mark,
			end_mark:   simple_key.mark,
		}
		yaml_insert_token(parser, simple_key.token_number-parser.tokens_parsed, &token)

		// In the block context, we may need to add the BLOCK-MAPPING-START token.
		// It must go in *before* the inserted KEY, hence the saved token number.
		if !yaml_parser_roll_indent(parser, simple_key.mark.column,
			simple_key.token_number,
			yaml_BLOCK_MAPPING_START_TOKEN, simple_key.mark) {
			return false
		}

		// Remove the simple key.
		simple_key.possible = false

		// A simple key cannot follow another simple key.
		parser.simple_key_allowed = false

	} else {
		// The ':' indicator follows a complex key.

		// In the block context, extra checks are required.
		if parser.flow_level == 0 {

			// Check if we are allowed to start a complex value.
			if !parser.simple_key_allowed {
				return yaml_parser_set_scanner_error(parser, "", parser.mark,
					"mapping values are not allowed in this context")
			}

			// Add the BLOCK-MAPPING-START token if needed.
			if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) {
				return false
			}
		}

		// Simple keys after ':' are allowed in the block context.
		parser.simple_key_allowed = parser.flow_level == 0
	}

	// Consume the ':' indicator.
	start_mark := parser.mark
	skip(parser)
	end_mark := parser.mark

	// Create the VALUE token and append it to the queue.
	token := yaml_token_t{
		typ:        yaml_VALUE_TOKEN,
		start_mark: start_mark,
		end_mark:   end_mark,
	}
	yaml_insert_token(parser, -1, &token)
	return true
}
1331 | |||
1332 | // Produce the ALIAS or ANCHOR token. | ||
1333 | func yaml_parser_fetch_anchor(parser *yaml_parser_t, typ yaml_token_type_t) bool { | ||
1334 | // An anchor or an alias could be a simple key. | ||
1335 | if !yaml_parser_save_simple_key(parser) { | ||
1336 | return false | ||
1337 | } | ||
1338 | |||
1339 | // A simple key cannot follow an anchor or an alias. | ||
1340 | parser.simple_key_allowed = false | ||
1341 | |||
1342 | // Create the ALIAS or ANCHOR token and append it to the queue. | ||
1343 | var token yaml_token_t | ||
1344 | if !yaml_parser_scan_anchor(parser, &token, typ) { | ||
1345 | return false | ||
1346 | } | ||
1347 | yaml_insert_token(parser, -1, &token) | ||
1348 | return true | ||
1349 | } | ||
1350 | |||
1351 | // Produce the TAG token. | ||
1352 | func yaml_parser_fetch_tag(parser *yaml_parser_t) bool { | ||
1353 | // A tag could be a simple key. | ||
1354 | if !yaml_parser_save_simple_key(parser) { | ||
1355 | return false | ||
1356 | } | ||
1357 | |||
1358 | // A simple key cannot follow a tag. | ||
1359 | parser.simple_key_allowed = false | ||
1360 | |||
1361 | // Create the TAG token and append it to the queue. | ||
1362 | var token yaml_token_t | ||
1363 | if !yaml_parser_scan_tag(parser, &token) { | ||
1364 | return false | ||
1365 | } | ||
1366 | yaml_insert_token(parser, -1, &token) | ||
1367 | return true | ||
1368 | } | ||
1369 | |||
1370 | // Produce the SCALAR(...,literal) or SCALAR(...,folded) tokens. | ||
1371 | func yaml_parser_fetch_block_scalar(parser *yaml_parser_t, literal bool) bool { | ||
1372 | // Remove any potential simple keys. | ||
1373 | if !yaml_parser_remove_simple_key(parser) { | ||
1374 | return false | ||
1375 | } | ||
1376 | |||
1377 | // A simple key may follow a block scalar. | ||
1378 | parser.simple_key_allowed = true | ||
1379 | |||
1380 | // Create the SCALAR token and append it to the queue. | ||
1381 | var token yaml_token_t | ||
1382 | if !yaml_parser_scan_block_scalar(parser, &token, literal) { | ||
1383 | return false | ||
1384 | } | ||
1385 | yaml_insert_token(parser, -1, &token) | ||
1386 | return true | ||
1387 | } | ||
1388 | |||
1389 | // Produce the SCALAR(...,single-quoted) or SCALAR(...,double-quoted) tokens. | ||
1390 | func yaml_parser_fetch_flow_scalar(parser *yaml_parser_t, single bool) bool { | ||
1391 | // A plain scalar could be a simple key. | ||
1392 | if !yaml_parser_save_simple_key(parser) { | ||
1393 | return false | ||
1394 | } | ||
1395 | |||
1396 | // A simple key cannot follow a flow scalar. | ||
1397 | parser.simple_key_allowed = false | ||
1398 | |||
1399 | // Create the SCALAR token and append it to the queue. | ||
1400 | var token yaml_token_t | ||
1401 | if !yaml_parser_scan_flow_scalar(parser, &token, single) { | ||
1402 | return false | ||
1403 | } | ||
1404 | yaml_insert_token(parser, -1, &token) | ||
1405 | return true | ||
1406 | } | ||
1407 | |||
1408 | // Produce the SCALAR(...,plain) token. | ||
1409 | func yaml_parser_fetch_plain_scalar(parser *yaml_parser_t) bool { | ||
1410 | // A plain scalar could be a simple key. | ||
1411 | if !yaml_parser_save_simple_key(parser) { | ||
1412 | return false | ||
1413 | } | ||
1414 | |||
1415 | // A simple key cannot follow a flow scalar. | ||
1416 | parser.simple_key_allowed = false | ||
1417 | |||
1418 | // Create the SCALAR token and append it to the queue. | ||
1419 | var token yaml_token_t | ||
1420 | if !yaml_parser_scan_plain_scalar(parser, &token) { | ||
1421 | return false | ||
1422 | } | ||
1423 | yaml_insert_token(parser, -1, &token) | ||
1424 | return true | ||
1425 | } | ||
1426 | |||
// Eat whitespaces and comments until the next token is found.
//
// Each character inspection is preceded by a buffer-refill check
// (parser.unread < N) because the look-ahead window may be exhausted at any
// point; this ordering must be preserved.
func yaml_parser_scan_to_next_token(parser *yaml_parser_t) bool {

	// Until the next token is not found.
	for {
		// Allow the BOM mark to start a line.
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
		if parser.mark.column == 0 && is_bom(parser.buffer, parser.buffer_pos) {
			skip(parser)
		}

		// Eat whitespaces.
		// Tabs are allowed:
		//  - in the flow context
		//  - in the block context, but not at the beginning of the line or
		//  after '-', '?', or ':' (complex value).
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}

		for parser.buffer[parser.buffer_pos] == ' ' || ((parser.flow_level > 0 || !parser.simple_key_allowed) && parser.buffer[parser.buffer_pos] == '\t') {
			skip(parser)
			if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
				return false
			}
		}

		// Eat a comment until a line break.
		if parser.buffer[parser.buffer_pos] == '#' {
			for !is_breakz(parser.buffer, parser.buffer_pos) {
				skip(parser)
				if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
					return false
				}
			}
		}

		// If it is a line break, eat it.
		if is_break(parser.buffer, parser.buffer_pos) {
			// Line breaks may be two bytes (CRLF), so ensure two are buffered.
			if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
				return false
			}
			skip_line(parser)

			// In the block context, a new line may start a simple key.
			if parser.flow_level == 0 {
				parser.simple_key_allowed = true
			}
		} else {
			break // We have found a token.
		}
	}

	return true
}
1484 | |||
// Scan a YAML-DIRECTIVE or TAG-DIRECTIVE token.
//
// Scope:
//      %YAML    1.1    # a comment \n
//      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
//      %TAG    !yaml!  tag:yaml.org,2002:  \n
//      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
//
// On success, *token holds a fully-populated VERSION-DIRECTIVE or
// TAG-DIRECTIVE token.
func yaml_parser_scan_directive(parser *yaml_parser_t, token *yaml_token_t) bool {
	// Eat '%'.
	start_mark := parser.mark
	skip(parser)

	// Scan the directive name.
	var name []byte
	if !yaml_parser_scan_directive_name(parser, start_mark, &name) {
		return false
	}

	// Is it a YAML directive?
	if bytes.Equal(name, []byte("YAML")) {
		// Scan the VERSION directive value.
		var major, minor int8
		if !yaml_parser_scan_version_directive_value(parser, start_mark, &major, &minor) {
			return false
		}
		end_mark := parser.mark

		// Create a VERSION-DIRECTIVE token.
		*token = yaml_token_t{
			typ:        yaml_VERSION_DIRECTIVE_TOKEN,
			start_mark: start_mark,
			end_mark:   end_mark,
			major:      major,
			minor:      minor,
		}

		// Is it a TAG directive?
	} else if bytes.Equal(name, []byte("TAG")) {
		// Scan the TAG directive value.
		var handle, prefix []byte
		if !yaml_parser_scan_tag_directive_value(parser, start_mark, &handle, &prefix) {
			return false
		}
		end_mark := parser.mark

		// Create a TAG-DIRECTIVE token.
		*token = yaml_token_t{
			typ:        yaml_TAG_DIRECTIVE_TOKEN,
			start_mark: start_mark,
			end_mark:   end_mark,
			value:      handle,
			prefix:     prefix,
		}

		// Unknown directive.
	} else {
		yaml_parser_set_scanner_error(parser, "while scanning a directive",
			start_mark, "found unknown directive name")
		return false
	}

	// Eat the rest of the line including any comments.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}

	// Skip trailing blanks.
	for is_blank(parser.buffer, parser.buffer_pos) {
		skip(parser)
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
	}

	// Skip a trailing comment, if any.
	if parser.buffer[parser.buffer_pos] == '#' {
		for !is_breakz(parser.buffer, parser.buffer_pos) {
			skip(parser)
			if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
				return false
			}
		}
	}

	// Check if we are at the end of the line.
	if !is_breakz(parser.buffer, parser.buffer_pos) {
		yaml_parser_set_scanner_error(parser, "while scanning a directive",
			start_mark, "did not find expected comment or line break")
		return false
	}

	// Eat a line break (which may be two bytes, e.g. CRLF).
	if is_break(parser.buffer, parser.buffer_pos) {
		if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
			return false
		}
		skip_line(parser)
	}

	return true
}
1585 | |||
// Scan the directive name.
//
// Scope:
//      %YAML   1.1     # a comment \n
//       ^^^^
//      %TAG    !yaml!  tag:yaml.org,2002: \n
//       ^^^
//
// On success, *name receives the scanned name bytes.
func yaml_parser_scan_directive_name(parser *yaml_parser_t, start_mark yaml_mark_t, name *[]byte) bool {
	// Consume the directive name.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}

	var s []byte
	for is_alpha(parser.buffer, parser.buffer_pos) {
		s = read(parser, s)
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
	}

	// Check if the name is empty.
	if len(s) == 0 {
		yaml_parser_set_scanner_error(parser, "while scanning a directive",
			start_mark, "could not find expected directive name")
		return false
	}

	// Check for a blank character after the name.
	if !is_blankz(parser.buffer, parser.buffer_pos) {
		yaml_parser_set_scanner_error(parser, "while scanning a directive",
			start_mark, "found unexpected non-alphabetical character")
		return false
	}
	*name = s
	return true
}
1624 | |||
// Scan the value of VERSION-DIRECTIVE.
//
// Scope:
//      %YAML   1.1     # a comment \n
//           ^^^^^^
//
// On success, *major and *minor receive the two version components.
func yaml_parser_scan_version_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, major, minor *int8) bool {
	// Eat whitespaces.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}
	for is_blank(parser.buffer, parser.buffer_pos) {
		skip(parser)
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
	}

	// Consume the major version number.
	if !yaml_parser_scan_version_directive_number(parser, start_mark, major) {
		return false
	}

	// Eat '.'. (The number scanner left at least one unread byte buffered.)
	if parser.buffer[parser.buffer_pos] != '.' {
		return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive",
			start_mark, "did not find expected digit or '.' character")
	}

	skip(parser)

	// Consume the minor version number.
	if !yaml_parser_scan_version_directive_number(parser, start_mark, minor) {
		return false
	}
	return true
}
1661 | |||
// Maximum number of digits accepted in one component of a %YAML version
// number (e.g. the "1" or "1" in "1.1").
const max_number_length = 2

// Scan the version number of VERSION-DIRECTIVE.
//
// Scope:
//      %YAML   1.1     # a comment \n
//              ^
//      %YAML   1.1     # a comment \n
//                ^
//
// On success, *number receives the parsed component value.
func yaml_parser_scan_version_directive_number(parser *yaml_parser_t, start_mark yaml_mark_t, number *int8) bool {

	// Repeat while the next character is digit.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}
	var value, length int8
	for is_digit(parser.buffer, parser.buffer_pos) {
		// Check if the number is too long.
		length++
		if length > max_number_length {
			return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive",
				start_mark, "found extremely long version number")
		}
		value = value*10 + int8(as_digit(parser.buffer, parser.buffer_pos))
		skip(parser)
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
	}

	// Check if the number was present.
	if length == 0 {
		return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive",
			start_mark, "did not find expected version number")
	}
	*number = value
	return true
}
1700 | |||
// Scan the value of a TAG-DIRECTIVE token.
//
// Scope:
//      %TAG    !yaml!  tag:yaml.org,2002:  \n
//          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
//
// On success, *handle and *prefix receive the two halves of the directive.
func yaml_parser_scan_tag_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, handle, prefix *[]byte) bool {
	var handle_value, prefix_value []byte

	// Eat whitespaces.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}

	for is_blank(parser.buffer, parser.buffer_pos) {
		skip(parser)
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
	}

	// Scan a handle (e.g. "!yaml!").
	if !yaml_parser_scan_tag_handle(parser, true, start_mark, &handle_value) {
		return false
	}

	// Expect a whitespace separating handle from prefix.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}
	if !is_blank(parser.buffer, parser.buffer_pos) {
		yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive",
			start_mark, "did not find expected whitespace")
		return false
	}

	// Eat whitespaces.
	for is_blank(parser.buffer, parser.buffer_pos) {
		skip(parser)
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
	}

	// Scan a prefix (a tag URI).
	if !yaml_parser_scan_tag_uri(parser, true, nil, start_mark, &prefix_value) {
		return false
	}

	// Expect a whitespace or line break after the prefix.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}
	if !is_blankz(parser.buffer, parser.buffer_pos) {
		yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive",
			start_mark, "did not find expected whitespace or line break")
		return false
	}

	// Publish results only after both halves scanned successfully.
	*handle = handle_value
	*prefix = prefix_value
	return true
}
1764 | |||
1765 | func yaml_parser_scan_anchor(parser *yaml_parser_t, token *yaml_token_t, typ yaml_token_type_t) bool { | ||
1766 | var s []byte | ||
1767 | |||
1768 | // Eat the indicator character. | ||
1769 | start_mark := parser.mark | ||
1770 | skip(parser) | ||
1771 | |||
1772 | // Consume the value. | ||
1773 | if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { | ||
1774 | return false | ||
1775 | } | ||
1776 | |||
1777 | for is_alpha(parser.buffer, parser.buffer_pos) { | ||
1778 | s = read(parser, s) | ||
1779 | if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { | ||
1780 | return false | ||
1781 | } | ||
1782 | } | ||
1783 | |||
1784 | end_mark := parser.mark | ||
1785 | |||
1786 | /* | ||
1787 | * Check if length of the anchor is greater than 0 and it is followed by | ||
1788 | * a whitespace character or one of the indicators: | ||
1789 | * | ||
1790 | * '?', ':', ',', ']', '}', '%', '@', '`'. | ||
1791 | */ | ||
1792 | |||
1793 | if len(s) == 0 || | ||
1794 | !(is_blankz(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == '?' || | ||
1795 | parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == ',' || | ||
1796 | parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '}' || | ||
1797 | parser.buffer[parser.buffer_pos] == '%' || parser.buffer[parser.buffer_pos] == '@' || | ||
1798 | parser.buffer[parser.buffer_pos] == '`') { | ||
1799 | context := "while scanning an alias" | ||
1800 | if typ == yaml_ANCHOR_TOKEN { | ||
1801 | context = "while scanning an anchor" | ||
1802 | } | ||
1803 | yaml_parser_set_scanner_error(parser, context, start_mark, | ||
1804 | "did not find expected alphabetic or numeric character") | ||
1805 | return false | ||
1806 | } | ||
1807 | |||
1808 | // Create a token. | ||
1809 | *token = yaml_token_t{ | ||
1810 | typ: typ, | ||
1811 | start_mark: start_mark, | ||
1812 | end_mark: end_mark, | ||
1813 | value: s, | ||
1814 | } | ||
1815 | |||
1816 | return true | ||
1817 | } | ||
1818 | |||
1819 | /* | ||
1820 | * Scan a TAG token. | ||
1821 | */ | ||
1822 | |||
func yaml_parser_scan_tag(parser *yaml_parser_t, token *yaml_token_t) bool {
	var handle, suffix []byte

	start_mark := parser.mark

	// Check if the tag is in the canonical (verbatim) form '!<uri>'.
	// Two characters of lookahead are needed to see past the leading '!'.
	if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
		return false
	}

	if parser.buffer[parser.buffer_pos+1] == '<' {
		// Verbatim form: the handle stays empty ('').

		// Eat '!<'.
		skip(parser)
		skip(parser)

		// Consume the tag value (everything up to, but not including, '>').
		if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) {
			return false
		}

		// Check for '>' and eat it.  yaml_parser_scan_tag_uri leaves at
		// least one character buffered on success, so this read is safe.
		if parser.buffer[parser.buffer_pos] != '>' {
			yaml_parser_set_scanner_error(parser, "while scanning a tag",
				start_mark, "did not find the expected '>'")
			return false
		}

		skip(parser)
	} else {
		// The tag has either the '!suffix' or the '!handle!suffix' form.

		// First, try to scan a handle.  On success the result always
		// starts with '!', so handle[0] below is safe to read.
		if !yaml_parser_scan_tag_handle(parser, false, start_mark, &handle) {
			return false
		}

		// A real secondary handle is '!...!' (at least two characters,
		// bracketed by '!').  Anything else was just the start of a URI.
		if handle[0] == '!' && len(handle) > 1 && handle[len(handle)-1] == '!' {
			// Scan the suffix now.
			if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) {
				return false
			}
		} else {
			// It wasn't a handle after all.  Scan the rest of the tag,
			// treating what we already read (minus the '!') as its head.
			if !yaml_parser_scan_tag_uri(parser, false, handle, start_mark, &suffix) {
				return false
			}

			// Set the handle to '!'.
			handle = []byte{'!'}

			// A special case: the '!' tag. Set the handle to '' and the
			// suffix to '!'.
			if len(suffix) == 0 {
				handle, suffix = suffix, handle
			}
		}
	}

	// Check the character which ends the tag: it must be a blank, a
	// line break, or the end of the stream.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}
	if !is_blankz(parser.buffer, parser.buffer_pos) {
		yaml_parser_set_scanner_error(parser, "while scanning a tag",
			start_mark, "did not find expected whitespace or line break")
		return false
	}

	end_mark := parser.mark

	// Create a token.  The handle goes in 'value' and the tag text in
	// 'suffix', matching how the parser consumes TAG tokens.
	*token = yaml_token_t{
		typ:        yaml_TAG_TOKEN,
		start_mark: start_mark,
		end_mark:   end_mark,
		value:      handle,
		suffix:     suffix,
	}
	return true
}
1906 | |||
1907 | // Scan a tag handle. | ||
1908 | func yaml_parser_scan_tag_handle(parser *yaml_parser_t, directive bool, start_mark yaml_mark_t, handle *[]byte) bool { | ||
1909 | // Check the initial '!' character. | ||
1910 | if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { | ||
1911 | return false | ||
1912 | } | ||
1913 | if parser.buffer[parser.buffer_pos] != '!' { | ||
1914 | yaml_parser_set_scanner_tag_error(parser, directive, | ||
1915 | start_mark, "did not find expected '!'") | ||
1916 | return false | ||
1917 | } | ||
1918 | |||
1919 | var s []byte | ||
1920 | |||
1921 | // Copy the '!' character. | ||
1922 | s = read(parser, s) | ||
1923 | |||
1924 | // Copy all subsequent alphabetical and numerical characters. | ||
1925 | if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { | ||
1926 | return false | ||
1927 | } | ||
1928 | for is_alpha(parser.buffer, parser.buffer_pos) { | ||
1929 | s = read(parser, s) | ||
1930 | if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { | ||
1931 | return false | ||
1932 | } | ||
1933 | } | ||
1934 | |||
1935 | // Check if the trailing character is '!' and copy it. | ||
1936 | if parser.buffer[parser.buffer_pos] == '!' { | ||
1937 | s = read(parser, s) | ||
1938 | } else { | ||
1939 | // It's either the '!' tag or not really a tag handle. If it's a %TAG | ||
1940 | // directive, it's an error. If it's a tag token, it must be a part of URI. | ||
1941 | if directive && string(s) != "!" { | ||
1942 | yaml_parser_set_scanner_tag_error(parser, directive, | ||
1943 | start_mark, "did not find expected '!'") | ||
1944 | return false | ||
1945 | } | ||
1946 | } | ||
1947 | |||
1948 | *handle = s | ||
1949 | return true | ||
1950 | } | ||
1951 | |||
1952 | // Scan a tag. | ||
1953 | func yaml_parser_scan_tag_uri(parser *yaml_parser_t, directive bool, head []byte, start_mark yaml_mark_t, uri *[]byte) bool { | ||
1954 | //size_t length = head ? strlen((char *)head) : 0 | ||
1955 | var s []byte | ||
1956 | hasTag := len(head) > 0 | ||
1957 | |||
1958 | // Copy the head if needed. | ||
1959 | // | ||
1960 | // Note that we don't copy the leading '!' character. | ||
1961 | if len(head) > 1 { | ||
1962 | s = append(s, head[1:]...) | ||
1963 | } | ||
1964 | |||
1965 | // Scan the tag. | ||
1966 | if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { | ||
1967 | return false | ||
1968 | } | ||
1969 | |||
1970 | // The set of characters that may appear in URI is as follows: | ||
1971 | // | ||
1972 | // '0'-'9', 'A'-'Z', 'a'-'z', '_', '-', ';', '/', '?', ':', '@', '&', | ||
1973 | // '=', '+', '$', ',', '.', '!', '~', '*', '\'', '(', ')', '[', ']', | ||
1974 | // '%'. | ||
1975 | // [Go] Convert this into more reasonable logic. | ||
1976 | for is_alpha(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == ';' || | ||
1977 | parser.buffer[parser.buffer_pos] == '/' || parser.buffer[parser.buffer_pos] == '?' || | ||
1978 | parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == '@' || | ||
1979 | parser.buffer[parser.buffer_pos] == '&' || parser.buffer[parser.buffer_pos] == '=' || | ||
1980 | parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '$' || | ||
1981 | parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == '.' || | ||
1982 | parser.buffer[parser.buffer_pos] == '!' || parser.buffer[parser.buffer_pos] == '~' || | ||
1983 | parser.buffer[parser.buffer_pos] == '*' || parser.buffer[parser.buffer_pos] == '\'' || | ||
1984 | parser.buffer[parser.buffer_pos] == '(' || parser.buffer[parser.buffer_pos] == ')' || | ||
1985 | parser.buffer[parser.buffer_pos] == '[' || parser.buffer[parser.buffer_pos] == ']' || | ||
1986 | parser.buffer[parser.buffer_pos] == '%' { | ||
1987 | // Check if it is a URI-escape sequence. | ||
1988 | if parser.buffer[parser.buffer_pos] == '%' { | ||
1989 | if !yaml_parser_scan_uri_escapes(parser, directive, start_mark, &s) { | ||
1990 | return false | ||
1991 | } | ||
1992 | } else { | ||
1993 | s = read(parser, s) | ||
1994 | } | ||
1995 | if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { | ||
1996 | return false | ||
1997 | } | ||
1998 | hasTag = true | ||
1999 | } | ||
2000 | |||
2001 | if !hasTag { | ||
2002 | yaml_parser_set_scanner_tag_error(parser, directive, | ||
2003 | start_mark, "did not find expected tag URI") | ||
2004 | return false | ||
2005 | } | ||
2006 | *uri = s | ||
2007 | return true | ||
2008 | } | ||
2009 | |||
2010 | // Decode an URI-escape sequence corresponding to a single UTF-8 character. | ||
2011 | func yaml_parser_scan_uri_escapes(parser *yaml_parser_t, directive bool, start_mark yaml_mark_t, s *[]byte) bool { | ||
2012 | |||
2013 | // Decode the required number of characters. | ||
2014 | w := 1024 | ||
2015 | for w > 0 { | ||
2016 | // Check for a URI-escaped octet. | ||
2017 | if parser.unread < 3 && !yaml_parser_update_buffer(parser, 3) { | ||
2018 | return false | ||
2019 | } | ||
2020 | |||
2021 | if !(parser.buffer[parser.buffer_pos] == '%' && | ||
2022 | is_hex(parser.buffer, parser.buffer_pos+1) && | ||
2023 | is_hex(parser.buffer, parser.buffer_pos+2)) { | ||
2024 | return yaml_parser_set_scanner_tag_error(parser, directive, | ||
2025 | start_mark, "did not find URI escaped octet") | ||
2026 | } | ||
2027 | |||
2028 | // Get the octet. | ||
2029 | octet := byte((as_hex(parser.buffer, parser.buffer_pos+1) << 4) + as_hex(parser.buffer, parser.buffer_pos+2)) | ||
2030 | |||
2031 | // If it is the leading octet, determine the length of the UTF-8 sequence. | ||
2032 | if w == 1024 { | ||
2033 | w = width(octet) | ||
2034 | if w == 0 { | ||
2035 | return yaml_parser_set_scanner_tag_error(parser, directive, | ||
2036 | start_mark, "found an incorrect leading UTF-8 octet") | ||
2037 | } | ||
2038 | } else { | ||
2039 | // Check if the trailing octet is correct. | ||
2040 | if octet&0xC0 != 0x80 { | ||
2041 | return yaml_parser_set_scanner_tag_error(parser, directive, | ||
2042 | start_mark, "found an incorrect trailing UTF-8 octet") | ||
2043 | } | ||
2044 | } | ||
2045 | |||
2046 | // Copy the octet and move the pointers. | ||
2047 | *s = append(*s, octet) | ||
2048 | skip(parser) | ||
2049 | skip(parser) | ||
2050 | skip(parser) | ||
2051 | w-- | ||
2052 | } | ||
2053 | return true | ||
2054 | } | ||
2055 | |||
2056 | // Scan a block scalar. | ||
// literal selects '|' (literal) semantics; false means '>' (folded).
func yaml_parser_scan_block_scalar(parser *yaml_parser_t, token *yaml_token_t, literal bool) bool {
	// Eat the indicator '|' or '>'.
	start_mark := parser.mark
	skip(parser)

	// Scan the additional block scalar indicators.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}

	// Check for a chomping indicator.
	// chomping: +1 for '+' (keep all trailing breaks), -1 for '-'
	// (strip them), 0 by default (clip to a single trailing break).
	// increment: the explicit indentation digit, or 0 if absent.
	// The two indicators may appear in either order.
	var chomping, increment int
	if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' {
		// Set the chomping method and eat the indicator.
		if parser.buffer[parser.buffer_pos] == '+' {
			chomping = +1
		} else {
			chomping = -1
		}
		skip(parser)

		// Check for an indentation indicator.
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
		if is_digit(parser.buffer, parser.buffer_pos) {
			// Check that the indentation is greater than 0.
			if parser.buffer[parser.buffer_pos] == '0' {
				yaml_parser_set_scanner_error(parser, "while scanning a block scalar",
					start_mark, "found an indentation indicator equal to 0")
				return false
			}

			// Get the indentation level and eat the indicator.
			increment = as_digit(parser.buffer, parser.buffer_pos)
			skip(parser)
		}

	} else if is_digit(parser.buffer, parser.buffer_pos) {
		// Do the same as above, but in the opposite order
		// (indentation digit first, then optional chomping).

		if parser.buffer[parser.buffer_pos] == '0' {
			yaml_parser_set_scanner_error(parser, "while scanning a block scalar",
				start_mark, "found an indentation indicator equal to 0")
			return false
		}
		increment = as_digit(parser.buffer, parser.buffer_pos)
		skip(parser)

		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
		if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' {
			if parser.buffer[parser.buffer_pos] == '+' {
				chomping = +1
			} else {
				chomping = -1
			}
			skip(parser)
		}
	}

	// Eat whitespaces and comments to the end of the line.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}
	for is_blank(parser.buffer, parser.buffer_pos) {
		skip(parser)
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
	}
	if parser.buffer[parser.buffer_pos] == '#' {
		for !is_breakz(parser.buffer, parser.buffer_pos) {
			skip(parser)
			if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
				return false
			}
		}
	}

	// Check if we are at the end of the line: only a comment or a line
	// break may follow the block scalar header.
	if !is_breakz(parser.buffer, parser.buffer_pos) {
		yaml_parser_set_scanner_error(parser, "while scanning a block scalar",
			start_mark, "did not find expected comment or line break")
		return false
	}

	// Eat a line break.
	if is_break(parser.buffer, parser.buffer_pos) {
		if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
			return false
		}
		skip_line(parser)
	}

	end_mark := parser.mark

	// Set the indentation level if it was specified explicitly; indent
	// stays 0 here when it must be auto-detected from the content.
	var indent int
	if increment > 0 {
		if parser.indent >= 0 {
			indent = parser.indent + increment
		} else {
			indent = increment
		}
	}

	// Scan the leading line breaks and determine the indentation level if needed.
	var s, leading_break, trailing_breaks []byte
	if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark) {
		return false
	}

	// Scan the block scalar content.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}
	var leading_blank, trailing_blank bool
	for parser.mark.column == indent && !is_z(parser.buffer, parser.buffer_pos) {
		// We are at the beginning of a non-empty line.

		// Is it a trailing whitespace?
		trailing_blank = is_blank(parser.buffer, parser.buffer_pos)

		// Check if we need to fold the leading line break.  Folding
		// applies only to '>' scalars, and only between two lines that
		// do not start with whitespace.
		if !literal && !leading_blank && !trailing_blank && len(leading_break) > 0 && leading_break[0] == '\n' {
			// Do we need to join the lines by space?
			if len(trailing_breaks) == 0 {
				s = append(s, ' ')
			}
		} else {
			s = append(s, leading_break...)
		}
		leading_break = leading_break[:0]

		// Append the remaining line breaks.
		s = append(s, trailing_breaks...)
		trailing_breaks = trailing_breaks[:0]

		// Is it a leading whitespace?
		leading_blank = is_blank(parser.buffer, parser.buffer_pos)

		// Consume the current line.
		for !is_breakz(parser.buffer, parser.buffer_pos) {
			s = read(parser, s)
			if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
				return false
			}
		}

		// Consume the line break (two characters for CRLF).
		if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
			return false
		}

		leading_break = read_line(parser, leading_break)

		// Eat the following indentation spaces and line breaks.
		if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark) {
			return false
		}
	}

	// Chomp the tail: keep the final break unless stripping ('-'), and
	// keep the remaining trailing breaks only when keeping ('+').
	if chomping != -1 {
		s = append(s, leading_break...)
	}
	if chomping == 1 {
		s = append(s, trailing_breaks...)
	}

	// Create a token.
	*token = yaml_token_t{
		typ:        yaml_SCALAR_TOKEN,
		start_mark: start_mark,
		end_mark:   end_mark,
		value:      s,
		style:      yaml_LITERAL_SCALAR_STYLE,
	}
	if !literal {
		token.style = yaml_FOLDED_SCALAR_STYLE
	}
	return true
}
2242 | |||
2243 | // Scan indentation spaces and line breaks for a block scalar. Determine the | ||
2244 | // indentation level if needed. | ||
// On entry *indent == 0 means the indentation level is not yet known and
// must be detected here; breaks accumulates the consumed line breaks and
// end_mark is advanced past each consumed line.
func yaml_parser_scan_block_scalar_breaks(parser *yaml_parser_t, indent *int, breaks *[]byte, start_mark yaml_mark_t, end_mark *yaml_mark_t) bool {
	*end_mark = parser.mark

	// Eat the indentation spaces and line breaks, tracking the widest
	// indentation seen (used only when auto-detecting the level).
	max_indent := 0
	for {
		// Eat the indentation spaces.  While *indent is 0 there is no
		// limit; otherwise stop at column *indent.
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
		for (*indent == 0 || parser.mark.column < *indent) && is_space(parser.buffer, parser.buffer_pos) {
			skip(parser)
			if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
				return false
			}
		}
		if parser.mark.column > max_indent {
			max_indent = parser.mark.column
		}

		// Check for a tab character messing the indentation: tabs may
		// not appear where indentation spaces are still expected.
		if (*indent == 0 || parser.mark.column < *indent) && is_tab(parser.buffer, parser.buffer_pos) {
			return yaml_parser_set_scanner_error(parser, "while scanning a block scalar",
				start_mark, "found a tab character where an indentation space is expected")
		}

		// Have we found a non-empty line?
		if !is_break(parser.buffer, parser.buffer_pos) {
			break
		}

		// Consume the line break (two characters for CRLF).
		if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
			return false
		}
		// [Go] Should really be returning breaks instead.
		*breaks = read_line(parser, *breaks)
		*end_mark = parser.mark
	}

	// Determine the indentation level if needed: the widest leading
	// line, but at least parser.indent+1 and never less than 1.
	if *indent == 0 {
		*indent = max_indent
		if *indent < parser.indent+1 {
			*indent = parser.indent + 1
		}
		if *indent < 1 {
			*indent = 1
		}
	}
	return true
}
2297 | |||
2298 | // Scan a quoted scalar. | ||
// single selects single-quoted semantics; false means double-quoted
// (which additionally supports backslash escape sequences).
func yaml_parser_scan_flow_scalar(parser *yaml_parser_t, token *yaml_token_t, single bool) bool {
	// Eat the left quote.
	start_mark := parser.mark
	skip(parser)

	// Consume the content of the quoted scalar.
	var s, leading_break, trailing_breaks, whitespaces []byte
	for {
		// Check that there are no document indicators ('---' / '...')
		// at the beginning of the line; four characters of lookahead
		// cover the indicator plus its required following blank.
		if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) {
			return false
		}

		if parser.mark.column == 0 &&
			((parser.buffer[parser.buffer_pos+0] == '-' &&
				parser.buffer[parser.buffer_pos+1] == '-' &&
				parser.buffer[parser.buffer_pos+2] == '-') ||
				(parser.buffer[parser.buffer_pos+0] == '.' &&
					parser.buffer[parser.buffer_pos+1] == '.' &&
					parser.buffer[parser.buffer_pos+2] == '.')) &&
			is_blankz(parser.buffer, parser.buffer_pos+3) {
			yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar",
				start_mark, "found unexpected document indicator")
			return false
		}

		// Check for EOF: a quoted scalar must be closed before the
		// stream ends.
		if is_z(parser.buffer, parser.buffer_pos) {
			yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar",
				start_mark, "found unexpected end of stream")
			return false
		}

		// Consume non-blank characters.
		leading_blanks := false
		for !is_blankz(parser.buffer, parser.buffer_pos) {
			if single && parser.buffer[parser.buffer_pos] == '\'' && parser.buffer[parser.buffer_pos+1] == '\'' {
				// It is an escaped single quote ('' -> ').
				s = append(s, '\'')
				skip(parser)
				skip(parser)

			} else if single && parser.buffer[parser.buffer_pos] == '\'' {
				// It is a right single quote.
				break
			} else if !single && parser.buffer[parser.buffer_pos] == '"' {
				// It is a right double quote.
				break

			} else if !single && parser.buffer[parser.buffer_pos] == '\\' && is_break(parser.buffer, parser.buffer_pos+1) {
				// It is an escaped line break: the break is removed
				// and no join space is inserted later.
				if parser.unread < 3 && !yaml_parser_update_buffer(parser, 3) {
					return false
				}
				skip(parser)
				skip_line(parser)
				leading_blanks = true
				break

			} else if !single && parser.buffer[parser.buffer_pos] == '\\' {
				// It is an escape sequence (double-quoted only).
				// code_length stays 0 for single-character escapes and
				// is set to the hex-digit count for \x, \u and \U.
				code_length := 0

				// Check the escape character.
				switch parser.buffer[parser.buffer_pos+1] {
				case '0':
					s = append(s, 0)
				case 'a':
					s = append(s, '\x07')
				case 'b':
					s = append(s, '\x08')
				case 't', '\t':
					s = append(s, '\x09')
				case 'n':
					s = append(s, '\x0A')
				case 'v':
					s = append(s, '\x0B')
				case 'f':
					s = append(s, '\x0C')
				case 'r':
					s = append(s, '\x0D')
				case 'e':
					s = append(s, '\x1B')
				case ' ':
					s = append(s, '\x20')
				case '"':
					s = append(s, '"')
				case '\'':
					s = append(s, '\'')
				case '\\':
					s = append(s, '\\')
				case 'N': // NEL (#x85)
					s = append(s, '\xC2')
					s = append(s, '\x85')
				case '_': // #xA0
					s = append(s, '\xC2')
					s = append(s, '\xA0')
				case 'L': // LS (#x2028)
					s = append(s, '\xE2')
					s = append(s, '\x80')
					s = append(s, '\xA8')
				case 'P': // PS (#x2029)
					s = append(s, '\xE2')
					s = append(s, '\x80')
					s = append(s, '\xA9')
				case 'x':
					code_length = 2
				case 'u':
					code_length = 4
				case 'U':
					code_length = 8
				default:
					yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar",
						start_mark, "found unknown escape character")
					return false
				}

				// Eat the backslash and the escape character.
				skip(parser)
				skip(parser)

				// Consume an arbitrary escape code (\x, \u or \U).
				if code_length > 0 {
					var value int

					// Scan the character value.
					if parser.unread < code_length && !yaml_parser_update_buffer(parser, code_length) {
						return false
					}
					for k := 0; k < code_length; k++ {
						if !is_hex(parser.buffer, parser.buffer_pos+k) {
							yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar",
								start_mark, "did not find expected hexdecimal number")
							return false
						}
						value = (value << 4) + as_hex(parser.buffer, parser.buffer_pos+k)
					}

					// Check the value (reject surrogates and values
					// beyond the Unicode range) and write the
					// character as UTF-8.
					if (value >= 0xD800 && value <= 0xDFFF) || value > 0x10FFFF {
						yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar",
							start_mark, "found invalid Unicode character escape code")
						return false
					}
					if value <= 0x7F {
						s = append(s, byte(value))
					} else if value <= 0x7FF {
						s = append(s, byte(0xC0+(value>>6)))
						s = append(s, byte(0x80+(value&0x3F)))
					} else if value <= 0xFFFF {
						s = append(s, byte(0xE0+(value>>12)))
						s = append(s, byte(0x80+((value>>6)&0x3F)))
						s = append(s, byte(0x80+(value&0x3F)))
					} else {
						s = append(s, byte(0xF0+(value>>18)))
						s = append(s, byte(0x80+((value>>12)&0x3F)))
						s = append(s, byte(0x80+((value>>6)&0x3F)))
						s = append(s, byte(0x80+(value&0x3F)))
					}

					// Advance the pointer past the hex digits.
					for k := 0; k < code_length; k++ {
						skip(parser)
					}
				}
			} else {
				// It is a non-escaped non-blank character.
				s = read(parser, s)
			}
			if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
				return false
			}
		}

		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}

		// Check if we are at the end of the scalar (the closing quote).
		if single {
			if parser.buffer[parser.buffer_pos] == '\'' {
				break
			}
		} else {
			if parser.buffer[parser.buffer_pos] == '"' {
				break
			}
		}

		// Consume blank characters, recording whitespace and line
		// breaks separately so they can be folded below.
		for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) {
			if is_blank(parser.buffer, parser.buffer_pos) {
				// Consume a space or a tab character.  Whitespace is
				// only kept before the first line break.
				if !leading_blanks {
					whitespaces = read(parser, whitespaces)
				} else {
					skip(parser)
				}
			} else {
				if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
					return false
				}

				// Check if it is a first line break; if so the pending
				// whitespace is discarded.
				if !leading_blanks {
					whitespaces = whitespaces[:0]
					leading_break = read_line(parser, leading_break)
					leading_blanks = true
				} else {
					trailing_breaks = read_line(parser, trailing_breaks)
				}
			}
			if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
				return false
			}
		}

		// Join the whitespaces or fold line breaks.
		if leading_blanks {
			// Do we need to fold line breaks?  A single '\n' folds to
			// a space; additional breaks are kept as-is.
			if len(leading_break) > 0 && leading_break[0] == '\n' {
				if len(trailing_breaks) == 0 {
					s = append(s, ' ')
				} else {
					s = append(s, trailing_breaks...)
				}
			} else {
				s = append(s, leading_break...)
				s = append(s, trailing_breaks...)
			}
			trailing_breaks = trailing_breaks[:0]
			leading_break = leading_break[:0]
		} else {
			s = append(s, whitespaces...)
			whitespaces = whitespaces[:0]
		}
	}

	// Eat the right quote.
	skip(parser)
	end_mark := parser.mark

	// Create a token.
	*token = yaml_token_t{
		typ:        yaml_SCALAR_TOKEN,
		start_mark: start_mark,
		end_mark:   end_mark,
		value:      s,
		style:      yaml_SINGLE_QUOTED_SCALAR_STYLE,
	}
	if !single {
		token.style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
	}
	return true
}
2553 | |||
// Scan a plain scalar.
//
// A plain scalar is an unquoted value such as `foo` or `hello world`. On
// success the scalar's bytes (with interior line breaks folded per YAML
// rules) are stored in token and true is returned; on a read or scanner
// error the failure is recorded on parser and false is returned.
func yaml_parser_scan_plain_scalar(parser *yaml_parser_t, token *yaml_token_t) bool {

	// s accumulates the scalar value. leading_break/trailing_breaks buffer
	// pending line breaks and whitespaces buffers pending blanks, so that
	// trailing whitespace can be dropped and breaks folded only when more
	// content follows.
	var s, leading_break, trailing_breaks, whitespaces []byte
	var leading_blanks bool
	// In block context, continuation lines must be indented past the parent.
	var indent = parser.indent + 1

	start_mark := parser.mark
	end_mark := parser.mark

	// Consume the content of the plain scalar.
	for {
		// Check for a document indicator ("---" or "...") at column 0
		// followed by a blank/EOF; it terminates the scalar.
		if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) {
			return false
		}
		if parser.mark.column == 0 &&
			((parser.buffer[parser.buffer_pos+0] == '-' &&
				parser.buffer[parser.buffer_pos+1] == '-' &&
				parser.buffer[parser.buffer_pos+2] == '-') ||
				(parser.buffer[parser.buffer_pos+0] == '.' &&
					parser.buffer[parser.buffer_pos+1] == '.' &&
					parser.buffer[parser.buffer_pos+2] == '.')) &&
			is_blankz(parser.buffer, parser.buffer_pos+3) {
			break
		}

		// Check for a comment.
		if parser.buffer[parser.buffer_pos] == '#' {
			break
		}

		// Consume non-blank characters.
		for !is_blankz(parser.buffer, parser.buffer_pos) {

			// Check for indicators that may end a plain scalar:
			// ":" followed by a blank always ends it; flow indicators
			// (',', '?', '[', ']', '{', '}') end it only inside a flow
			// collection.
			if (parser.buffer[parser.buffer_pos] == ':' && is_blankz(parser.buffer, parser.buffer_pos+1)) ||
				(parser.flow_level > 0 &&
					(parser.buffer[parser.buffer_pos] == ',' ||
						parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == '[' ||
						parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' ||
						parser.buffer[parser.buffer_pos] == '}')) {
				break
			}

			// Check if we need to join whitespaces and breaks.
			if leading_blanks || len(whitespaces) > 0 {
				if leading_blanks {
					// Do we need to fold line breaks?
					// A single '\n' folds to one space; with additional
					// breaks, only the trailing ones are kept verbatim.
					// Non-'\n' leading breaks are copied as-is.
					if leading_break[0] == '\n' {
						if len(trailing_breaks) == 0 {
							s = append(s, ' ')
						} else {
							s = append(s, trailing_breaks...)
						}
					} else {
						s = append(s, leading_break...)
						s = append(s, trailing_breaks...)
					}
					trailing_breaks = trailing_breaks[:0]
					leading_break = leading_break[:0]
					leading_blanks = false
				} else {
					// Only interior whitespace reaches the value; the
					// buffered blanks are flushed now that content follows.
					s = append(s, whitespaces...)
					whitespaces = whitespaces[:0]
				}
			}

			// Copy the character.
			s = read(parser, s)

			end_mark = parser.mark
			if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
				return false
			}
		}

		// Is it the end?
		if !(is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos)) {
			break
		}

		// Consume blank characters.
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}

		for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) {
			if is_blank(parser.buffer, parser.buffer_pos) {

				// Check for tab characters that abuse indentation.
				if leading_blanks && parser.mark.column < indent && is_tab(parser.buffer, parser.buffer_pos) {
					yaml_parser_set_scanner_error(parser, "while scanning a plain scalar",
						start_mark, "found a tab character that violates indentation")
					return false
				}

				// Consume a space or a tab character. Blanks after a line
				// break are indentation and are skipped, not buffered.
				if !leading_blanks {
					whitespaces = read(parser, whitespaces)
				} else {
					skip(parser)
				}
			} else {
				if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
					return false
				}

				// Check if it is a first line break. The first break also
				// discards any whitespace buffered before it (trailing
				// blanks on a line never reach the value).
				if !leading_blanks {
					whitespaces = whitespaces[:0]
					leading_break = read_line(parser, leading_break)
					leading_blanks = true
				} else {
					trailing_breaks = read_line(parser, trailing_breaks)
				}
			}
			if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
				return false
			}
		}

		// Check indentation level. In block context, a continuation line
		// indented less than required ends the scalar.
		if parser.flow_level == 0 && parser.mark.column < indent {
			break
		}
	}

	// Create a token.
	*token = yaml_token_t{
		typ:        yaml_SCALAR_TOKEN,
		start_mark: start_mark,
		end_mark:   end_mark,
		value:      s,
		style:      yaml_PLAIN_SCALAR_STYLE,
	}

	// Note that we change the 'simple_key_allowed' flag: a multi-line
	// plain scalar re-enables simple keys on the following line.
	if leading_blanks {
		parser.simple_key_allowed = true
	}
	return true
}
diff --git a/vendor/github.com/zclconf/go-cty-yaml/writerc.go b/vendor/github.com/zclconf/go-cty-yaml/writerc.go new file mode 100644 index 0000000..a2dde60 --- /dev/null +++ b/vendor/github.com/zclconf/go-cty-yaml/writerc.go | |||
@@ -0,0 +1,26 @@ | |||
1 | package yaml | ||
2 | |||
3 | // Set the writer error and return false. | ||
4 | func yaml_emitter_set_writer_error(emitter *yaml_emitter_t, problem string) bool { | ||
5 | emitter.error = yaml_WRITER_ERROR | ||
6 | emitter.problem = problem | ||
7 | return false | ||
8 | } | ||
9 | |||
10 | // Flush the output buffer. | ||
11 | func yaml_emitter_flush(emitter *yaml_emitter_t) bool { | ||
12 | if emitter.write_handler == nil { | ||
13 | panic("write handler not set") | ||
14 | } | ||
15 | |||
16 | // Check if the buffer is empty. | ||
17 | if emitter.buffer_pos == 0 { | ||
18 | return true | ||
19 | } | ||
20 | |||
21 | if err := emitter.write_handler(emitter, emitter.buffer[:emitter.buffer_pos]); err != nil { | ||
22 | return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error()) | ||
23 | } | ||
24 | emitter.buffer_pos = 0 | ||
25 | return true | ||
26 | } | ||
diff --git a/vendor/github.com/zclconf/go-cty-yaml/yaml.go b/vendor/github.com/zclconf/go-cty-yaml/yaml.go new file mode 100644 index 0000000..2c314cc --- /dev/null +++ b/vendor/github.com/zclconf/go-cty-yaml/yaml.go | |||
@@ -0,0 +1,215 @@ | |||
1 | // Package yaml can marshal and unmarshal cty values in YAML format. | ||
2 | package yaml | ||
3 | |||
4 | import ( | ||
5 | "errors" | ||
6 | "fmt" | ||
7 | "reflect" | ||
8 | "strings" | ||
9 | "sync" | ||
10 | |||
11 | "github.com/zclconf/go-cty/cty" | ||
12 | ) | ||
13 | |||
14 | // Unmarshal reads the document found within the given source buffer | ||
15 | // and attempts to convert it into a value conforming to the given type | ||
16 | // constraint. | ||
17 | // | ||
18 | // This is an alias for Unmarshal on the predefined Converter in "Standard". | ||
19 | // | ||
20 | // An error is returned if the given source contains any YAML document | ||
21 | // delimiters. | ||
22 | func Unmarshal(src []byte, ty cty.Type) (cty.Value, error) { | ||
23 | return Standard.Unmarshal(src, ty) | ||
24 | } | ||
25 | |||
26 | // Marshal serializes the given value into a YAML document, using a fixed | ||
27 | // mapping from cty types to YAML constructs. | ||
28 | // | ||
29 | // This is an alias for Marshal on the predefined Converter in "Standard". | ||
30 | // | ||
31 | // Note that unlike the function of the same name in the cty JSON package, | ||
32 | // this does not take a type constraint and therefore the YAML serialization | ||
33 | // cannot preserve late-bound type information in the serialization to be | ||
34 | // recovered from Unmarshal. Instead, any cty.DynamicPseudoType in the type | ||
35 | // constraint given to Unmarshal will be decoded as if the corresponding portion | ||
36 | // of the input were processed with ImpliedType to find a target type. | ||
37 | func Marshal(v cty.Value) ([]byte, error) { | ||
38 | return Standard.Marshal(v) | ||
39 | } | ||
40 | |||
41 | // ImpliedType analyzes the given source code and returns a suitable type that | ||
42 | // it could be decoded into. | ||
43 | // | ||
44 | // For a converter that is using standard YAML rather than cty-specific custom | ||
45 | // tags, only a subset of cty types can be produced: strings, numbers, bools, | ||
46 | // tuple types, and object types. | ||
47 | // | ||
48 | // This is an alias for ImpliedType on the predefined Converter in "Standard". | ||
49 | func ImpliedType(src []byte) (cty.Type, error) { | ||
50 | return Standard.ImpliedType(src) | ||
51 | } | ||
52 | |||
// yamlError is the panic payload used internally to unwind deeply nested
// encode/decode call stacks; handleErr converts it back into an ordinary
// returned error at the API boundary.
type yamlError struct {
	err error
}

// handleErr is meant to be deferred at the top of an exported entry point.
// It recovers a yamlError panic into *err; any other panic value is
// re-raised untouched.
func handleErr(err *error) {
	v := recover()
	if v == nil {
		return
	}
	e, ok := v.(yamlError)
	if !ok {
		panic(v)
	}
	*err = e.err
}

// fail aborts the current operation by panicking with the given error
// wrapped in a yamlError.
func fail(err error) {
	panic(yamlError{err})
}

// failf aborts the current operation with a formatted error message,
// prefixed with "yaml: ".
func failf(format string, args ...interface{}) {
	panic(yamlError{fmt.Errorf("yaml: "+format, args...)})
}
74 | |||
// --------------------------------------------------------------------------
// Maintain a mapping of keys to structure field indexes

// The code in this section was copied from mgo/bson.

// structInfo holds details for the serialization of fields of
// a given struct.
type structInfo struct {
	// FieldsMap indexes the serializable fields by their YAML key.
	FieldsMap map[string]fieldInfo
	// FieldsList holds the same fields in declaration order.
	FieldsList []fieldInfo

	// InlineMap is the number of the field in the struct that
	// contains an ,inline map, or -1 if there's none.
	InlineMap int
}

// fieldInfo describes how one struct field maps to a YAML key, as derived
// from the field's `yaml:"..."` tag (or its lowercased name when untagged).
type fieldInfo struct {
	Key       string // YAML key used for this field.
	Num       int    // Field index within its declaring struct.
	OmitEmpty bool   // Field carried the ",omitempty" flag.
	Flow      bool   // Field carried the ",flow" flag.
	// Id holds the unique field identifier, so we can cheaply
	// check for field duplicates without maintaining an extra map.
	Id int

	// Inline holds the field index if the field is part of an inlined struct.
	Inline []int
}

// structMap caches the computed structInfo per struct type; fieldMapMutex
// guards it for concurrent use.
var structMap = make(map[reflect.Type]*structInfo)
var fieldMapMutex sync.RWMutex

// getStructInfo returns the (possibly cached) field mapping for the given
// struct type. It returns an error for unsupported tag flags, duplicated
// keys, or invalid ",inline" usage. Unexported, non-anonymous fields and
// fields tagged `yaml:"-"` are skipped.
func getStructInfo(st reflect.Type) (*structInfo, error) {
	// Fast path: already computed. Two goroutines may race past this check
	// and both compute the info, which is harmless — the results are equal.
	fieldMapMutex.RLock()
	sinfo, found := structMap[st]
	fieldMapMutex.RUnlock()
	if found {
		return sinfo, nil
	}

	n := st.NumField()
	fieldsMap := make(map[string]fieldInfo)
	fieldsList := make([]fieldInfo, 0, n)
	inlineMap := -1
	for i := 0; i != n; i++ {
		field := st.Field(i)
		if field.PkgPath != "" && !field.Anonymous {
			continue // Private field
		}

		info := fieldInfo{Num: i}

		tag := field.Tag.Get("yaml")
		// Legacy support: a bare tag string with no key:value syntax is
		// treated as the yaml tag itself.
		if tag == "" && strings.Index(string(field.Tag), ":") < 0 {
			tag = string(field.Tag)
		}
		if tag == "-" {
			continue
		}

		inline := false
		fields := strings.Split(tag, ",")
		if len(fields) > 1 {
			for _, flag := range fields[1:] {
				switch flag {
				case "omitempty":
					info.OmitEmpty = true
				case "flow":
					info.Flow = true
				case "inline":
					inline = true
				default:
					return nil, fmt.Errorf("Unsupported flag %q in tag %q of type %s", flag, tag, st)
				}
			}
			tag = fields[0]
		}

		if inline {
			switch field.Type.Kind() {
			case reflect.Map:
				// Only one ,inline map is allowed, and its keys must be
				// strings so they can merge with the other YAML keys.
				if inlineMap >= 0 {
					return nil, errors.New("Multiple ,inline maps in struct " + st.String())
				}
				if field.Type.Key() != reflect.TypeOf("") {
					return nil, errors.New("Option ,inline needs a map with string keys in struct " + st.String())
				}
				inlineMap = info.Num
			case reflect.Struct:
				// Recursively lift the inner struct's fields to this level,
				// recording the path to each in Inline.
				sinfo, err := getStructInfo(field.Type)
				if err != nil {
					return nil, err
				}
				for _, finfo := range sinfo.FieldsList {
					if _, found := fieldsMap[finfo.Key]; found {
						msg := "Duplicated key '" + finfo.Key + "' in struct " + st.String()
						return nil, errors.New(msg)
					}
					if finfo.Inline == nil {
						finfo.Inline = []int{i, finfo.Num}
					} else {
						finfo.Inline = append([]int{i}, finfo.Inline...)
					}
					finfo.Id = len(fieldsList)
					fieldsMap[finfo.Key] = finfo
					fieldsList = append(fieldsList, finfo)
				}
			default:
				//return nil, errors.New("Option ,inline needs a struct value or map field")
				return nil, errors.New("Option ,inline needs a struct value field")
			}
			continue
		}

		if tag != "" {
			info.Key = tag
		} else {
			info.Key = strings.ToLower(field.Name)
		}

		if _, found = fieldsMap[info.Key]; found {
			msg := "Duplicated key '" + info.Key + "' in struct " + st.String()
			return nil, errors.New(msg)
		}

		info.Id = len(fieldsList)
		fieldsList = append(fieldsList, info)
		fieldsMap[info.Key] = info
	}

	sinfo = &structInfo{
		FieldsMap:  fieldsMap,
		FieldsList: fieldsList,
		InlineMap:  inlineMap,
	}

	fieldMapMutex.Lock()
	structMap[st] = sinfo
	fieldMapMutex.Unlock()
	return sinfo, nil
}
diff --git a/vendor/github.com/zclconf/go-cty-yaml/yamlh.go b/vendor/github.com/zclconf/go-cty-yaml/yamlh.go new file mode 100644 index 0000000..e25cee5 --- /dev/null +++ b/vendor/github.com/zclconf/go-cty-yaml/yamlh.go | |||
@@ -0,0 +1,738 @@ | |||
1 | package yaml | ||
2 | |||
3 | import ( | ||
4 | "fmt" | ||
5 | "io" | ||
6 | ) | ||
7 | |||
// The version directive data (from a "%YAML major.minor" directive).
type yaml_version_directive_t struct {
	major int8 // The major version number.
	minor int8 // The minor version number.
}

// The tag directive data (from a "%TAG handle prefix" directive).
type yaml_tag_directive_t struct {
	handle []byte // The tag handle.
	prefix []byte // The tag prefix.
}

// yaml_encoding_t identifies the character encoding of a YAML stream.
type yaml_encoding_t int

// The stream encoding.
const (
	// Let the parser choose the encoding.
	yaml_ANY_ENCODING yaml_encoding_t = iota

	yaml_UTF8_ENCODING    // The default UTF-8 encoding.
	yaml_UTF16LE_ENCODING // The UTF-16-LE encoding with BOM.
	yaml_UTF16BE_ENCODING // The UTF-16-BE encoding with BOM.
)
31 | |||
// yaml_break_t selects the line-break convention used when emitting.
type yaml_break_t int

// Line break types.
const (
	// Let the parser choose the break type.
	yaml_ANY_BREAK yaml_break_t = iota

	yaml_CR_BREAK   // Use CR for line breaks (Mac style).
	yaml_LN_BREAK   // Use LN for line breaks (Unix style).
	yaml_CRLN_BREAK // Use CR LN for line breaks (DOS style).
)

// yaml_error_type_t identifies which processing stage failed.
type yaml_error_type_t int

// Many bad things could happen with the parser and emitter.
const (
	// No error is produced.
	yaml_NO_ERROR yaml_error_type_t = iota

	yaml_MEMORY_ERROR   // Cannot allocate or reallocate a block of memory.
	yaml_READER_ERROR   // Cannot read or decode the input stream.
	yaml_SCANNER_ERROR  // Cannot scan the input stream.
	yaml_PARSER_ERROR   // Cannot parse the input stream.
	yaml_COMPOSER_ERROR // Cannot compose a YAML document.
	yaml_WRITER_ERROR   // Cannot write to the output stream.
	yaml_EMITTER_ERROR  // Cannot emit a YAML stream.
)

// The pointer position: a location within the input or output stream,
// used for error reporting and token/event boundaries.
type yaml_mark_t struct {
	index  int // The position index.
	line   int // The position line.
	column int // The position column.
}
66 | |||
// Node Styles

// yaml_style_t is the generic presentation-style type; the scalar, sequence
// and mapping style types below are all conversions of it.
type yaml_style_t int8

type yaml_scalar_style_t yaml_style_t

// Scalar styles.
const (
	// Let the emitter choose the style.
	yaml_ANY_SCALAR_STYLE yaml_scalar_style_t = iota

	yaml_PLAIN_SCALAR_STYLE         // The plain scalar style.
	yaml_SINGLE_QUOTED_SCALAR_STYLE // The single-quoted scalar style.
	yaml_DOUBLE_QUOTED_SCALAR_STYLE // The double-quoted scalar style.
	yaml_LITERAL_SCALAR_STYLE       // The literal scalar style.
	yaml_FOLDED_SCALAR_STYLE        // The folded scalar style.
)

type yaml_sequence_style_t yaml_style_t

// Sequence styles.
const (
	// Let the emitter choose the style.
	yaml_ANY_SEQUENCE_STYLE yaml_sequence_style_t = iota

	yaml_BLOCK_SEQUENCE_STYLE // The block sequence style.
	yaml_FLOW_SEQUENCE_STYLE  // The flow sequence style.
)

type yaml_mapping_style_t yaml_style_t

// Mapping styles.
const (
	// Let the emitter choose the style.
	yaml_ANY_MAPPING_STYLE yaml_mapping_style_t = iota

	yaml_BLOCK_MAPPING_STYLE // The block mapping style.
	yaml_FLOW_MAPPING_STYLE  // The flow mapping style.
)
106 | |||
// Tokens

// yaml_token_type_t identifies the kind of a scanner token.
type yaml_token_type_t int

// Token types.
const (
	// An empty token.
	yaml_NO_TOKEN yaml_token_type_t = iota

	yaml_STREAM_START_TOKEN // A STREAM-START token.
	yaml_STREAM_END_TOKEN   // A STREAM-END token.

	yaml_VERSION_DIRECTIVE_TOKEN // A VERSION-DIRECTIVE token.
	yaml_TAG_DIRECTIVE_TOKEN     // A TAG-DIRECTIVE token.
	yaml_DOCUMENT_START_TOKEN    // A DOCUMENT-START token.
	yaml_DOCUMENT_END_TOKEN      // A DOCUMENT-END token.

	yaml_BLOCK_SEQUENCE_START_TOKEN // A BLOCK-SEQUENCE-START token.
	yaml_BLOCK_MAPPING_START_TOKEN  // A BLOCK-MAPPING-START token.
	yaml_BLOCK_END_TOKEN            // A BLOCK-END token.

	yaml_FLOW_SEQUENCE_START_TOKEN // A FLOW-SEQUENCE-START token.
	yaml_FLOW_SEQUENCE_END_TOKEN   // A FLOW-SEQUENCE-END token.
	yaml_FLOW_MAPPING_START_TOKEN  // A FLOW-MAPPING-START token.
	yaml_FLOW_MAPPING_END_TOKEN    // A FLOW-MAPPING-END token.

	yaml_BLOCK_ENTRY_TOKEN // A BLOCK-ENTRY token.
	yaml_FLOW_ENTRY_TOKEN  // A FLOW-ENTRY token.
	yaml_KEY_TOKEN         // A KEY token.
	yaml_VALUE_TOKEN       // A VALUE token.

	yaml_ALIAS_TOKEN  // An ALIAS token.
	yaml_ANCHOR_TOKEN // An ANCHOR token.
	yaml_TAG_TOKEN    // A TAG token.
	yaml_SCALAR_TOKEN // A SCALAR token.
)

// tokenStrings maps each token type to its identifier name, indexed by the
// contiguous iota values above (mirrors eventStrings for event types).
var tokenStrings = []string{
	yaml_NO_TOKEN:                   "yaml_NO_TOKEN",
	yaml_STREAM_START_TOKEN:         "yaml_STREAM_START_TOKEN",
	yaml_STREAM_END_TOKEN:           "yaml_STREAM_END_TOKEN",
	yaml_VERSION_DIRECTIVE_TOKEN:    "yaml_VERSION_DIRECTIVE_TOKEN",
	yaml_TAG_DIRECTIVE_TOKEN:        "yaml_TAG_DIRECTIVE_TOKEN",
	yaml_DOCUMENT_START_TOKEN:       "yaml_DOCUMENT_START_TOKEN",
	yaml_DOCUMENT_END_TOKEN:         "yaml_DOCUMENT_END_TOKEN",
	yaml_BLOCK_SEQUENCE_START_TOKEN: "yaml_BLOCK_SEQUENCE_START_TOKEN",
	yaml_BLOCK_MAPPING_START_TOKEN:  "yaml_BLOCK_MAPPING_START_TOKEN",
	yaml_BLOCK_END_TOKEN:            "yaml_BLOCK_END_TOKEN",
	yaml_FLOW_SEQUENCE_START_TOKEN:  "yaml_FLOW_SEQUENCE_START_TOKEN",
	yaml_FLOW_SEQUENCE_END_TOKEN:    "yaml_FLOW_SEQUENCE_END_TOKEN",
	yaml_FLOW_MAPPING_START_TOKEN:   "yaml_FLOW_MAPPING_START_TOKEN",
	yaml_FLOW_MAPPING_END_TOKEN:     "yaml_FLOW_MAPPING_END_TOKEN",
	yaml_BLOCK_ENTRY_TOKEN:          "yaml_BLOCK_ENTRY_TOKEN",
	yaml_FLOW_ENTRY_TOKEN:           "yaml_FLOW_ENTRY_TOKEN",
	yaml_KEY_TOKEN:                  "yaml_KEY_TOKEN",
	yaml_VALUE_TOKEN:                "yaml_VALUE_TOKEN",
	yaml_ALIAS_TOKEN:                "yaml_ALIAS_TOKEN",
	yaml_ANCHOR_TOKEN:               "yaml_ANCHOR_TOKEN",
	yaml_TAG_TOKEN:                  "yaml_TAG_TOKEN",
	yaml_SCALAR_TOKEN:               "yaml_SCALAR_TOKEN",
}

// String returns the identifier name of the token type, or
// "<unknown token>" for values outside the defined range.
func (tt yaml_token_type_t) String() string {
	if tt < 0 || int(tt) >= len(tokenStrings) {
		return "<unknown token>"
	}
	return tokenStrings[tt]
}
193 | |||
// The token structure.
//
// A single struct represents every token kind; typ selects which of the
// optional fields below are meaningful.
type yaml_token_t struct {
	// The token type.
	typ yaml_token_type_t

	// The start/end of the token.
	start_mark, end_mark yaml_mark_t

	// The stream encoding (for yaml_STREAM_START_TOKEN).
	encoding yaml_encoding_t

	// The alias/anchor/scalar value or tag/tag directive handle
	// (for yaml_ALIAS_TOKEN, yaml_ANCHOR_TOKEN, yaml_SCALAR_TOKEN, yaml_TAG_TOKEN, yaml_TAG_DIRECTIVE_TOKEN).
	value []byte

	// The tag suffix (for yaml_TAG_TOKEN).
	suffix []byte

	// The tag directive prefix (for yaml_TAG_DIRECTIVE_TOKEN).
	prefix []byte

	// The scalar style (for yaml_SCALAR_TOKEN).
	style yaml_scalar_style_t

	// The version directive major/minor (for yaml_VERSION_DIRECTIVE_TOKEN).
	major, minor int8
}
221 | |||
// Events

// yaml_event_type_t identifies the kind of a parser/emitter event.
type yaml_event_type_t int8

// Event types.
const (
	// An empty event.
	yaml_NO_EVENT yaml_event_type_t = iota

	yaml_STREAM_START_EVENT   // A STREAM-START event.
	yaml_STREAM_END_EVENT     // A STREAM-END event.
	yaml_DOCUMENT_START_EVENT // A DOCUMENT-START event.
	yaml_DOCUMENT_END_EVENT   // A DOCUMENT-END event.
	yaml_ALIAS_EVENT          // An ALIAS event.
	yaml_SCALAR_EVENT         // A SCALAR event.
	yaml_SEQUENCE_START_EVENT // A SEQUENCE-START event.
	yaml_SEQUENCE_END_EVENT   // A SEQUENCE-END event.
	yaml_MAPPING_START_EVENT  // A MAPPING-START event.
	yaml_MAPPING_END_EVENT    // A MAPPING-END event.
)

// eventStrings gives a short human-readable name for each event type,
// indexed by the contiguous iota values above.
var eventStrings = []string{
	yaml_NO_EVENT:             "none",
	yaml_STREAM_START_EVENT:   "stream start",
	yaml_STREAM_END_EVENT:     "stream end",
	yaml_DOCUMENT_START_EVENT: "document start",
	yaml_DOCUMENT_END_EVENT:   "document end",
	yaml_ALIAS_EVENT:          "alias",
	yaml_SCALAR_EVENT:         "scalar",
	yaml_SEQUENCE_START_EVENT: "sequence start",
	yaml_SEQUENCE_END_EVENT:   "sequence end",
	yaml_MAPPING_START_EVENT:  "mapping start",
	yaml_MAPPING_END_EVENT:    "mapping end",
}

// String returns a short human-readable name for the event type, or a
// descriptive placeholder for values outside the defined range.
func (e yaml_event_type_t) String() string {
	if i := int(e); 0 <= i && i < len(eventStrings) {
		return eventStrings[i]
	}
	return fmt.Sprintf("unknown event %d", e)
}
263 | |||
// The event structure.
//
// A single struct represents every event kind; typ selects which of the
// optional fields below are meaningful.
type yaml_event_t struct {

	// The event type.
	typ yaml_event_type_t

	// The start and end of the event.
	start_mark, end_mark yaml_mark_t

	// The document encoding (for yaml_STREAM_START_EVENT).
	encoding yaml_encoding_t

	// The version directive (for yaml_DOCUMENT_START_EVENT).
	version_directive *yaml_version_directive_t

	// The list of tag directives (for yaml_DOCUMENT_START_EVENT).
	tag_directives []yaml_tag_directive_t

	// The anchor (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_ALIAS_EVENT).
	anchor []byte

	// The tag (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT).
	tag []byte

	// The scalar value (for yaml_SCALAR_EVENT).
	value []byte

	// Is the document start/end indicator implicit, or the tag optional?
	// (for yaml_DOCUMENT_START_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_SCALAR_EVENT).
	implicit bool

	// Is the tag optional for any non-plain style? (for yaml_SCALAR_EVENT).
	quoted_implicit bool

	// The style (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT).
	style yaml_style_t
}

// The accessors below reinterpret the generic style field for the
// corresponding event kind.

func (e *yaml_event_t) scalar_style() yaml_scalar_style_t     { return yaml_scalar_style_t(e.style) }
func (e *yaml_event_t) sequence_style() yaml_sequence_style_t { return yaml_sequence_style_t(e.style) }
func (e *yaml_event_t) mapping_style() yaml_mapping_style_t   { return yaml_mapping_style_t(e.style) }
305 | |||
// Nodes

// Standard YAML 1.1 tag URIs used to identify resolved node types.
const (
	yaml_NULL_TAG      = "tag:yaml.org,2002:null"      // The tag !!null with the only possible value: null.
	yaml_BOOL_TAG      = "tag:yaml.org,2002:bool"      // The tag !!bool with the values: true and false.
	yaml_STR_TAG       = "tag:yaml.org,2002:str"       // The tag !!str for string values.
	yaml_INT_TAG       = "tag:yaml.org,2002:int"       // The tag !!int for integer values.
	yaml_FLOAT_TAG     = "tag:yaml.org,2002:float"     // The tag !!float for float values.
	yaml_TIMESTAMP_TAG = "tag:yaml.org,2002:timestamp" // The tag !!timestamp for date and time values.

	yaml_SEQ_TAG = "tag:yaml.org,2002:seq" // The tag !!seq is used to denote sequences.
	yaml_MAP_TAG = "tag:yaml.org,2002:map" // The tag !!map is used to denote mapping.

	// Not in original libyaml.
	yaml_BINARY_TAG = "tag:yaml.org,2002:binary"
	yaml_MERGE_TAG  = "tag:yaml.org,2002:merge"

	yaml_DEFAULT_SCALAR_TAG   = yaml_STR_TAG // The default scalar tag is !!str.
	yaml_DEFAULT_SEQUENCE_TAG = yaml_SEQ_TAG // The default sequence tag is !!seq.
	yaml_DEFAULT_MAPPING_TAG  = yaml_MAP_TAG // The default mapping tag is !!map.
)
327 | |||
// yaml_node_type_t identifies the kind of a document node.
type yaml_node_type_t int

// Node types.
const (
	// An empty node.
	yaml_NO_NODE yaml_node_type_t = iota

	yaml_SCALAR_NODE   // A scalar node.
	yaml_SEQUENCE_NODE // A sequence node.
	yaml_MAPPING_NODE  // A mapping node.
)

// An element of a sequence node.
// NOTE(review): presumably an index into yaml_document_t.nodes — confirm
// against the composer code.
type yaml_node_item_t int

// An element of a mapping node.
// NOTE(review): key/value appear to be indexes into yaml_document_t.nodes —
// confirm against the composer code.
type yaml_node_pair_t struct {
	key   int // The key of the element.
	value int // The value of the element.
}
348 | |||
// The node structure.
//
// typ selects which of the scalar/sequence/mapping sub-structs is
// meaningful; the unused ones stay at their zero values.
type yaml_node_t struct {
	typ yaml_node_type_t // The node type.
	tag []byte           // The node tag.

	// The node data.

	// The scalar parameters (for yaml_SCALAR_NODE).
	scalar struct {
		value  []byte              // The scalar value.
		length int                 // The length of the scalar value.
		style  yaml_scalar_style_t // The scalar style.
	}

	// The sequence parameters (for YAML_SEQUENCE_NODE).
	sequence struct {
		items_data []yaml_node_item_t // The stack of sequence items.
		style      yaml_sequence_style_t // The sequence style.
	}

	// The mapping parameters (for yaml_MAPPING_NODE).
	// The start/end/top pointers mirror libyaml's C-style stack layout.
	mapping struct {
		pairs_data  []yaml_node_pair_t // The stack of mapping pairs (key, value).
		pairs_start *yaml_node_pair_t  // The beginning of the stack.
		pairs_end   *yaml_node_pair_t  // The end of the stack.
		pairs_top   *yaml_node_pair_t  // The top of the stack.
		style       yaml_mapping_style_t // The mapping style.
	}

	start_mark yaml_mark_t // The beginning of the node.
	end_mark   yaml_mark_t // The end of the node.

}
382 | |||
// The document structure: the composed node graph for one YAML document.
type yaml_document_t struct {

	// The document nodes.
	nodes []yaml_node_t

	// The version directive.
	version_directive *yaml_version_directive_t

	// The list of tag directives.
	tag_directives_data  []yaml_tag_directive_t
	tag_directives_start int // The beginning of the tag directives list.
	tag_directives_end   int // The end of the tag directives list.

	// Non-zero when the indicator was not present in the input.
	start_implicit int // Is the document start indicator implicit?
	end_implicit   int // Is the document end indicator implicit?

	// The start/end of the document.
	start_mark, end_mark yaml_mark_t
}
403 | |||
// The prototype of a read handler.
//
// The read handler is called when the parser needs to read more bytes from the
// source. The handler should write not more than size bytes to the buffer.
// The number of written bytes should be set to the size_read variable.
//
// [in,out]   data        A pointer to an application data specified by
//                        yaml_parser_set_input().
// [out]      buffer      The buffer to write the data from the source.
// [in]       size        The size of the buffer.
// [out]      size_read   The actual number of bytes read from the source.
//
// On success, the handler should return 1.  If the handler failed,
// the returned value should be 0.  On EOF, the handler should set the
// size_read to 0 and return 1.
//
// (The comment above describes the original C API; in this Go port the
// handler returns the byte count and an error instead.)
type yaml_read_handler_t func(parser *yaml_parser_t, buffer []byte) (n int, err error)

// This structure holds information about a potential simple key
// (a key that has no explicit '?' indicator).
type yaml_simple_key_t struct {
	possible     bool        // Is a simple key possible?
	required     bool        // Is a simple key required?
	token_number int         // The number of the token.
	mark         yaml_mark_t // The position mark.
}
428 | |||
// The states of the parser: what the parser expects to see next.
type yaml_parser_state_t int

const (
	yaml_PARSE_STREAM_START_STATE yaml_parser_state_t = iota

	yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE           // Expect the beginning of an implicit document.
	yaml_PARSE_DOCUMENT_START_STATE                    // Expect DOCUMENT-START.
	yaml_PARSE_DOCUMENT_CONTENT_STATE                  // Expect the content of a document.
	yaml_PARSE_DOCUMENT_END_STATE                      // Expect DOCUMENT-END.
	yaml_PARSE_BLOCK_NODE_STATE                        // Expect a block node.
	yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE // Expect a block node or indentless sequence.
	yaml_PARSE_FLOW_NODE_STATE                         // Expect a flow node.
	yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE        // Expect the first entry of a block sequence.
	yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE              // Expect an entry of a block sequence.
	yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE         // Expect an entry of an indentless sequence.
	yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE           // Expect the first key of a block mapping.
	yaml_PARSE_BLOCK_MAPPING_KEY_STATE                 // Expect a block mapping key.
	yaml_PARSE_BLOCK_MAPPING_VALUE_STATE               // Expect a block mapping value.
	yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE         // Expect the first entry of a flow sequence.
	yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE               // Expect an entry of a flow sequence.
	yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE   // Expect a key of an ordered mapping.
	yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE // Expect a value of an ordered mapping.
	yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE   // Expect the end of an ordered mapping entry.
	yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE            // Expect the first key of a flow mapping.
	yaml_PARSE_FLOW_MAPPING_KEY_STATE                  // Expect a key of a flow mapping.
	yaml_PARSE_FLOW_MAPPING_VALUE_STATE                // Expect a value of a flow mapping.
	yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE          // Expect an empty value of a flow mapping.
	yaml_PARSE_END_STATE                               // Expect nothing.
)
459 | |||
460 | func (ps yaml_parser_state_t) String() string { | ||
461 | switch ps { | ||
462 | case yaml_PARSE_STREAM_START_STATE: | ||
463 | return "yaml_PARSE_STREAM_START_STATE" | ||
464 | case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE: | ||
465 | return "yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE" | ||
466 | case yaml_PARSE_DOCUMENT_START_STATE: | ||
467 | return "yaml_PARSE_DOCUMENT_START_STATE" | ||
468 | case yaml_PARSE_DOCUMENT_CONTENT_STATE: | ||
469 | return "yaml_PARSE_DOCUMENT_CONTENT_STATE" | ||
470 | case yaml_PARSE_DOCUMENT_END_STATE: | ||
471 | return "yaml_PARSE_DOCUMENT_END_STATE" | ||
472 | case yaml_PARSE_BLOCK_NODE_STATE: | ||
473 | return "yaml_PARSE_BLOCK_NODE_STATE" | ||
474 | case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE: | ||
475 | return "yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE" | ||
476 | case yaml_PARSE_FLOW_NODE_STATE: | ||
477 | return "yaml_PARSE_FLOW_NODE_STATE" | ||
478 | case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE: | ||
479 | return "yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE" | ||
480 | case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE: | ||
481 | return "yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE" | ||
482 | case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE: | ||
483 | return "yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE" | ||
484 | case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE: | ||
485 | return "yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE" | ||
486 | case yaml_PARSE_BLOCK_MAPPING_KEY_STATE: | ||
487 | return "yaml_PARSE_BLOCK_MAPPING_KEY_STATE" | ||
488 | case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE: | ||
489 | return "yaml_PARSE_BLOCK_MAPPING_VALUE_STATE" | ||
490 | case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE: | ||
491 | return "yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE" | ||
492 | case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE: | ||
493 | return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE" | ||
494 | case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE: | ||
495 | return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE" | ||
496 | case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE: | ||
497 | return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE" | ||
498 | case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE: | ||
499 | return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE" | ||
500 | case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE: | ||
501 | return "yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE" | ||
502 | case yaml_PARSE_FLOW_MAPPING_KEY_STATE: | ||
503 | return "yaml_PARSE_FLOW_MAPPING_KEY_STATE" | ||
504 | case yaml_PARSE_FLOW_MAPPING_VALUE_STATE: | ||
505 | return "yaml_PARSE_FLOW_MAPPING_VALUE_STATE" | ||
506 | case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE: | ||
507 | return "yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE" | ||
508 | case yaml_PARSE_END_STATE: | ||
509 | return "yaml_PARSE_END_STATE" | ||
510 | } | ||
511 | return "<unknown parser state>" | ||
512 | } | ||
513 | |||
// This structure holds the data for one resolved alias: the anchor name,
// the id of the node it refers to, and where the anchor appeared.
type yaml_alias_data_t struct {
	anchor []byte      // The anchor name.
	index  int         // The id of the node the anchor refers to.
	mark   yaml_mark_t // The position where the anchor appeared.
}
520 | |||
// The parser structure.
//
// All members are internal. Manage the structure using the
// yaml_parser_ family of functions.
type yaml_parser_t struct {

	// Error handling

	error yaml_error_type_t // Error type.

	problem string // Error description.

	// The byte about which the problem occurred.
	problem_offset int
	problem_value  int
	problem_mark   yaml_mark_t

	// The error context.
	context      string
	context_mark yaml_mark_t

	// Reader stuff

	read_handler yaml_read_handler_t // Read handler.

	input_reader io.Reader // Input source when reading from an io.Reader.
	input        []byte    // Input data when reading from an in-memory buffer.
	input_pos    int

	eof bool // EOF flag

	buffer     []byte // The working buffer.
	buffer_pos int    // The current position of the working buffer.

	unread int // The number of unread characters in the buffer.

	raw_buffer     []byte // The raw buffer.
	raw_buffer_pos int    // The current position of the raw buffer.

	encoding yaml_encoding_t // The input encoding.

	offset int         // The offset of the current position (in bytes).
	mark   yaml_mark_t // The mark of the current position.

	// Scanner stuff

	stream_start_produced bool // Have we started to scan the input stream?
	stream_end_produced   bool // Have we reached the end of the input stream?

	flow_level int // The number of unclosed '[' and '{' indicators.

	tokens          []yaml_token_t // The tokens queue.
	tokens_head     int            // The head of the tokens queue.
	tokens_parsed   int            // The number of tokens fetched from the queue.
	token_available bool           // Does the tokens queue contain a token ready for dequeueing.

	indent  int   // The current indentation level.
	indents []int // The indentation levels stack.

	simple_key_allowed bool                // May a simple key occur at the current position?
	simple_keys        []yaml_simple_key_t // The stack of simple keys.

	// Parser stuff

	state          yaml_parser_state_t    // The current parser state.
	states         []yaml_parser_state_t  // The parser states stack.
	marks          []yaml_mark_t          // The stack of marks.
	tag_directives []yaml_tag_directive_t // The list of TAG directives.

	// Dumper stuff

	aliases []yaml_alias_data_t // The alias data.

	document *yaml_document_t // The currently parsed document.
}
596 | |||
// Emitter Definitions

// The prototype of a write handler.
//
// The write handler is called when the emitter needs to flush the
// accumulated characters to the output. The handler must write the whole
// of buffer to the output on behalf of the given emitter.
//
// On success the handler returns nil; on failure it returns the error that
// prevented the write. (This Go port replaces the 1/0 integer return
// convention of the original C prototype.)
type yaml_write_handler_t func(emitter *yaml_emitter_t, buffer []byte) error
614 | |||
// The state of the emitter's output state machine.
type yaml_emitter_state_t int

// The emitter states.
//
// The values are assigned sequentially from iota, starting at
// yaml_EMIT_STREAM_START_STATE == 0.
const (
	// Expect STREAM-START.
	yaml_EMIT_STREAM_START_STATE yaml_emitter_state_t = iota

	yaml_EMIT_FIRST_DOCUMENT_START_STATE       // Expect the first DOCUMENT-START or STREAM-END.
	yaml_EMIT_DOCUMENT_START_STATE             // Expect DOCUMENT-START or STREAM-END.
	yaml_EMIT_DOCUMENT_CONTENT_STATE           // Expect the content of a document.
	yaml_EMIT_DOCUMENT_END_STATE               // Expect DOCUMENT-END.
	yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE   // Expect the first item of a flow sequence.
	yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE         // Expect an item of a flow sequence.
	yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE     // Expect the first key of a flow mapping.
	yaml_EMIT_FLOW_MAPPING_KEY_STATE           // Expect a key of a flow mapping.
	yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE  // Expect a value for a simple key of a flow mapping.
	yaml_EMIT_FLOW_MAPPING_VALUE_STATE         // Expect a value of a flow mapping.
	yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE  // Expect the first item of a block sequence.
	yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE        // Expect an item of a block sequence.
	yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE    // Expect the first key of a block mapping.
	yaml_EMIT_BLOCK_MAPPING_KEY_STATE          // Expect the key of a block mapping.
	yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a block mapping.
	yaml_EMIT_BLOCK_MAPPING_VALUE_STATE        // Expect a value of a block mapping.
	yaml_EMIT_END_STATE                        // Expect nothing.
)
640 | |||
// The emitter structure.
//
// All members are internal. Manage the structure using the yaml_emitter_
// family of functions.
type yaml_emitter_t struct {

	// Error handling

	error   yaml_error_type_t // Error type.
	problem string            // Error description.

	// Writer stuff

	write_handler yaml_write_handler_t // Write handler.

	output_buffer *[]byte   // Output destination when emitting to an in-memory buffer.
	output_writer io.Writer // Output destination when emitting to an io.Writer.

	buffer     []byte // The working buffer.
	buffer_pos int    // The current position of the working buffer.

	raw_buffer     []byte // The raw buffer.
	raw_buffer_pos int    // The current position of the raw buffer.

	encoding yaml_encoding_t // The stream encoding.

	// Emitter stuff

	canonical   bool         // If the output is in the canonical style?
	best_indent int          // The number of indentation spaces.
	best_width  int          // The preferred width of the output lines.
	unicode     bool         // Allow unescaped non-ASCII characters?
	line_break  yaml_break_t // The preferred line break.

	state  yaml_emitter_state_t   // The current emitter state.
	states []yaml_emitter_state_t // The stack of states.

	events      []yaml_event_t // The event queue.
	events_head int            // The head of the event queue.

	indents []int // The stack of indentation levels.

	tag_directives []yaml_tag_directive_t // The list of tag directives.

	indent int // The current indentation level.

	flow_level int // The current flow level.

	root_context       bool // Is it the document root context?
	sequence_context   bool // Is it a sequence context?
	mapping_context    bool // Is it a mapping context?
	simple_key_context bool // Is it a simple mapping key context?

	line       int  // The current line.
	column     int  // The current column.
	whitespace bool // If the last character was a whitespace?
	indention  bool // If the last character was an indentation character (' ', '-', '?', ':')?
	open_ended bool // If an explicit document end is required?

	// Anchor analysis.
	anchor_data struct {
		anchor []byte // The anchor value.
		alias  bool   // Is it an alias?
	}

	// Tag analysis.
	tag_data struct {
		handle []byte // The tag handle.
		suffix []byte // The tag suffix.
	}

	// Scalar analysis.
	scalar_data struct {
		value                 []byte              // The scalar value.
		multiline             bool                // Does the scalar contain line breaks?
		flow_plain_allowed    bool                // Can the scalar be expressed in the flow plain style?
		block_plain_allowed   bool                // Can the scalar be expressed in the block plain style?
		single_quoted_allowed bool                // Can the scalar be expressed in the single quoted style?
		block_allowed         bool                // Can the scalar be expressed in the literal or folded styles?
		style                 yaml_scalar_style_t // The output style.
	}

	// Dumper stuff

	opened bool // If the stream was already opened?
	closed bool // If the stream was already closed?

	// The information associated with the document nodes.
	anchors *struct {
		references int  // The number of references.
		anchor     int  // The anchor id.
		serialized bool // If the node has been emitted?
	}

	last_anchor_id int // The last assigned anchor id.

	document *yaml_document_t // The currently emitted document.
}
diff --git a/vendor/github.com/zclconf/go-cty-yaml/yamlprivateh.go b/vendor/github.com/zclconf/go-cty-yaml/yamlprivateh.go new file mode 100644 index 0000000..8110ce3 --- /dev/null +++ b/vendor/github.com/zclconf/go-cty-yaml/yamlprivateh.go | |||
@@ -0,0 +1,173 @@ | |||
1 | package yaml | ||
2 | |||
const (
	// The size of the input raw buffer, in bytes.
	input_raw_buffer_size = 512

	// The size of the (decoded) input buffer, in bytes.
	// It should be possible to decode the whole raw buffer: the 3x factor
	// presumably covers the worst-case expansion when transcoding the raw
	// input to UTF-8 — TODO confirm against the reader.
	input_buffer_size = input_raw_buffer_size * 3

	// The size of the output buffer, in bytes.
	output_buffer_size = 128

	// The size of the output raw buffer, in bytes.
	// It should be possible to encode the whole output buffer.
	output_raw_buffer_size = (output_buffer_size*2 + 2)

	// The initial size of the parser/emitter stacks and queues; they grow
	// on demand from these capacities.
	initial_stack_size  = 16
	initial_queue_size  = 16
	initial_string_size = 16
)
23 | |||
// Check if the character at the specified position is an alphabetical
// character, a digit, '_', or '-'.
func is_alpha(b []byte, i int) bool {
	c := b[i]
	switch {
	case c >= '0' && c <= '9':
		return true
	case c >= 'A' && c <= 'Z':
		return true
	case c >= 'a' && c <= 'z':
		return true
	default:
		return c == '_' || c == '-'
	}
}
29 | |||
// Check if the character at the specified position is a decimal digit.
func is_digit(b []byte, i int) bool {
	return '0' <= b[i] && b[i] <= '9'
}
34 | |||
// Get the numeric value of a decimal digit character.
// The caller is expected to have checked is_digit first.
func as_digit(b []byte, i int) int {
	const zero = int('0')
	return int(b[i]) - zero
}
39 | |||
// Check if the character at the specified position is a hexadecimal digit.
func is_hex(b []byte, i int) bool {
	c := b[i]
	return ('0' <= c && c <= '9') ||
		('A' <= c && c <= 'F') ||
		('a' <= c && c <= 'f')
}
44 | |||
// Get the numeric value of a hexadecimal digit character.
// The caller is expected to have checked is_hex first.
func as_hex(b []byte, i int) int {
	switch c := b[i]; {
	case c >= 'A' && c <= 'F':
		return int(c-'A') + 10
	case c >= 'a' && c <= 'f':
		return int(c-'a') + 10
	default:
		return int(c) - '0'
	}
}
56 | |||
// Check if the byte at the specified position is ASCII (below 0x80).
func is_ascii(b []byte, i int) bool {
	return b[i] < 0x80
}
61 | |||
// Check if the character at the specified position can be printed unescaped.
//
// The check works directly on the raw UTF-8 bytes: single-byte characters
// are tested by value, and multi-byte sequences are classified by their
// lead byte (and, where needed, the following continuation bytes). The
// caller must guarantee that b[i+1] and b[i+2] are readable whenever the
// lead byte implies a multi-byte sequence.
func is_printable(b []byte, i int) bool {
	return ((b[i] == 0x0A) || // . == #x0A
		(b[i] >= 0x20 && b[i] <= 0x7E) || // #x20 <= . <= #x7E
		(b[i] == 0xC2 && b[i+1] >= 0xA0) || // #0xA0 <= . <= #xD7FF
		(b[i] > 0xC2 && b[i] < 0xED) || // lead bytes 0xC3..0xEC: multi-byte chars below the surrogate range
		(b[i] == 0xED && b[i+1] < 0xA0) || // 0xED with next byte < 0xA0 stays below the surrogates #xD800..#xDFFF
		(b[i] == 0xEE) || // lead byte 0xEE: #xE000..#xEFFF
		(b[i] == 0xEF && // #xE000 <= . <= #xFFFD
			!(b[i+1] == 0xBB && b[i+2] == 0xBF) && // && . != #xFEFF
			!(b[i+1] == 0xBF && (b[i+2] == 0xBE || b[i+2] == 0xBF)))) // && . != #xFFFE and . != #xFFFF
}
74 | |||
// Check if the byte at the specified position is the NUL terminator.
func is_z(b []byte, i int) bool {
	return b[i] == '\x00'
}
79 | |||
// Check if the buffer contains a UTF-8 byte-order mark (EF BB BF) at the
// specified position.
//
// The previous implementation always inspected b[0..2] and ignored the i
// parameter, so the check was only correct when called with i == 0. Since
// callers pass the current buffer position, the position is now honored;
// behavior is unchanged for i == 0. The caller must guarantee that at
// least three bytes are readable at position i.
func is_bom(b []byte, i int) bool {
	return b[i] == 0xEF && b[i+1] == 0xBB && b[i+2] == 0xBF
}
84 | |||
// Check if the byte at the specified position is an ASCII space.
func is_space(b []byte, i int) bool {
	return b[i] == 0x20 // ' '
}
89 | |||
// Check if the byte at the specified position is a horizontal tab.
func is_tab(b []byte, i int) bool {
	return b[i] == 0x09 // '\t'
}
94 | |||
// Check if the byte at the specified position is blank (space or tab).
func is_blank(b []byte, i int) bool {
	switch b[i] {
	case ' ', '\t':
		return true
	}
	return false
}
100 | |||
// Check if the bytes at the specified position encode a line break:
// CR, LF, NEL (#x85), LS (#x2028), or PS (#x2029).
//
// Continuation bytes are only read when the lead byte requires them, so no
// extra lookahead is needed for single-byte input.
func is_break(b []byte, i int) bool {
	switch b[i] {
	case '\r', '\n': // CR (#xD), LF (#xA)
		return true
	case 0xC2: // possible NEL (#x85)
		return b[i+1] == 0x85
	case 0xE2: // possible LS (#x2028) or PS (#x2029)
		return b[i+1] == 0x80 && (b[i+2] == 0xA8 || b[i+2] == 0xA9)
	}
	return false
}
109 | |||
// Check if the bytes at the specified position begin a CR LF pair.
// b[i+1] is only read once b[i] has matched CR.
func is_crlf(b []byte, i int) bool {
	if b[i] != '\r' {
		return false
	}
	return b[i+1] == '\n'
}
113 | |||
// Check if the character is a line break or NUL.
//
// Equivalent to is_break(b, i) || is_z(b, i), kept as a single inlined
// check for the scanner hot path.
func is_breakz(b []byte, i int) bool {
	switch b[i] {
	case '\r', '\n', 0x00: // CR (#xD), LF (#xA), NUL
		return true
	case 0xC2: // possible NEL (#x85)
		return b[i+1] == 0x85
	case 0xE2: // possible LS (#x2028) or PS (#x2029)
		return b[i+1] == 0x80 && (b[i+2] == 0xA8 || b[i+2] == 0xA9)
	}
	return false
}
126 | |||
// Check if the character is a line break, space, or NUL.
//
// Equivalent to is_space(b, i) || is_breakz(b, i), kept as a single
// inlined check for the scanner hot path.
func is_spacez(b []byte, i int) bool {
	switch b[i] {
	case ' ', '\r', '\n', 0x00: // space, CR (#xD), LF (#xA), NUL
		return true
	case 0xC2: // possible NEL (#x85)
		return b[i+1] == 0x85
	case 0xE2: // possible LS (#x2028) or PS (#x2029)
		return b[i+1] == 0x80 && (b[i+2] == 0xA8 || b[i+2] == 0xA9)
	}
	return false
}
140 | |||
// Check if the character is a line break, space, tab, or NUL.
//
// Equivalent to is_blank(b, i) || is_breakz(b, i), kept as a single
// inlined check for the scanner hot path.
func is_blankz(b []byte, i int) bool {
	switch b[i] {
	case ' ', '\t', '\r', '\n', 0x00: // space, tab, CR (#xD), LF (#xA), NUL
		return true
	case 0xC2: // possible NEL (#x85)
		return b[i+1] == 0x85
	case 0xE2: // possible LS (#x2028) or PS (#x2029)
		return b[i+1] == 0x80 && (b[i+2] == 0xA8 || b[i+2] == 0xA9)
	}
	return false
}
154 | |||
// Determine the width of the character.
//
// The argument is the first (lead) byte of a UTF-8 sequence; the result is
// the total number of bytes in that sequence (1-4), or 0 if the byte is
// not a valid lead byte (e.g. a continuation byte of the form 10xxxxxx).
func width(b byte) int {
	// Don't replace these by a switch without first
	// confirming that it is being inlined.
	if b&0x80 == 0x00 {
		return 1 // 0xxxxxxx: single-byte (ASCII)
	}
	if b&0xE0 == 0xC0 {
		return 2 // 110xxxxx: two-byte sequence
	}
	if b&0xF0 == 0xE0 {
		return 3 // 1110xxxx: three-byte sequence
	}
	if b&0xF8 == 0xF0 {
		return 4 // 11110xxx: four-byte sequence
	}
	return 0

}
diff --git a/vendor/github.com/zclconf/go-cty/cty/path.go b/vendor/github.com/zclconf/go-cty/cty/path.go index bf1a7c1..b314449 100644 --- a/vendor/github.com/zclconf/go-cty/cty/path.go +++ b/vendor/github.com/zclconf/go-cty/cty/path.go | |||
@@ -71,6 +71,48 @@ func (p Path) GetAttr(name string) Path { | |||
71 | return ret | 71 | return ret |
72 | } | 72 | } |
73 | 73 | ||
74 | // Equals compares 2 Paths for exact equality. | ||
75 | func (p Path) Equals(other Path) bool { | ||
76 | if len(p) != len(other) { | ||
77 | return false | ||
78 | } | ||
79 | |||
80 | for i := range p { | ||
81 | pv := p[i] | ||
82 | switch pv := pv.(type) { | ||
83 | case GetAttrStep: | ||
84 | ov, ok := other[i].(GetAttrStep) | ||
85 | if !ok || pv != ov { | ||
86 | return false | ||
87 | } | ||
88 | case IndexStep: | ||
89 | ov, ok := other[i].(IndexStep) | ||
90 | if !ok { | ||
91 | return false | ||
92 | } | ||
93 | |||
94 | if !pv.Key.RawEquals(ov.Key) { | ||
95 | return false | ||
96 | } | ||
97 | default: | ||
98 | // Any invalid steps default to evaluating false. | ||
99 | return false | ||
100 | } | ||
101 | } | ||
102 | |||
103 | return true | ||
104 | |||
105 | } | ||
106 | |||
107 | // HasPrefix determines if the path p contains the provided prefix. | ||
108 | func (p Path) HasPrefix(prefix Path) bool { | ||
109 | if len(prefix) > len(p) { | ||
110 | return false | ||
111 | } | ||
112 | |||
113 | return p[:len(prefix)].Equals(prefix) | ||
114 | } | ||
115 | |||
74 | // GetAttrPath is a convenience method to start a new Path with a GetAttrStep. | 116 | // GetAttrPath is a convenience method to start a new Path with a GetAttrStep. |
75 | func GetAttrPath(name string) Path { | 117 | func GetAttrPath(name string) Path { |
76 | return Path{}.GetAttr(name) | 118 | return Path{}.GetAttr(name) |