path: root/vendor/github.com/hashicorp
author    Alex Pilon <apilon@hashicorp.com>  2019-02-22 18:24:37 -0500
committer Alex Pilon <apilon@hashicorp.com>  2019-02-22 18:24:37 -0500
commit    15c0b25d011f37e7c20aeca9eaf461f78285b8d9 (patch)
tree      255c250a5c9d4801c74092d33b7337d8c14438ff /vendor/github.com/hashicorp
parent    07971ca38143c5faf951d152fba370ddcbe26ad5 (diff)
download  terraform-provider-statuscake-15c0b25d011f37e7c20aeca9eaf461f78285b8d9.tar.gz
          terraform-provider-statuscake-15c0b25d011f37e7c20aeca9eaf461f78285b8d9.tar.zst
          terraform-provider-statuscake-15c0b25d011f37e7c20aeca9eaf461f78285b8d9.zip
deps: github.com/hashicorp/terraform@sdk-v0.11-with-go-modules
Updated via: go get github.com/hashicorp/terraform@sdk-v0.11-with-go-modules and go mod tidy
Diffstat (limited to 'vendor/github.com/hashicorp')
-rw-r--r--vendor/github.com/hashicorp/go-cleanhttp/cleanhttp.go1
-rw-r--r--vendor/github.com/hashicorp/go-cleanhttp/go.mod1
-rw-r--r--vendor/github.com/hashicorp/go-cleanhttp/handlers.go43
-rw-r--r--vendor/github.com/hashicorp/go-getter/.travis.yml8
-rw-r--r--vendor/github.com/hashicorp/go-getter/README.md58
-rw-r--r--vendor/github.com/hashicorp/go-getter/appveyor.yml2
-rw-r--r--vendor/github.com/hashicorp/go-getter/client.go31
-rw-r--r--vendor/github.com/hashicorp/go-getter/decompress.go29
-rw-r--r--vendor/github.com/hashicorp/go-getter/decompress_gzip.go2
-rw-r--r--vendor/github.com/hashicorp/go-getter/decompress_tar.go138
-rw-r--r--vendor/github.com/hashicorp/go-getter/decompress_tbz2.go64
-rw-r--r--vendor/github.com/hashicorp/go-getter/decompress_testing.go36
-rw-r--r--vendor/github.com/hashicorp/go-getter/decompress_tgz.go62
-rw-r--r--vendor/github.com/hashicorp/go-getter/decompress_txz.go39
-rw-r--r--vendor/github.com/hashicorp/go-getter/decompress_xz.go49
-rw-r--r--vendor/github.com/hashicorp/go-getter/decompress_zip.go5
-rw-r--r--vendor/github.com/hashicorp/go-getter/detect.go6
-rw-r--r--vendor/github.com/hashicorp/go-getter/detect_file.go2
-rw-r--r--vendor/github.com/hashicorp/go-getter/get.go9
-rw-r--r--vendor/github.com/hashicorp/go-getter/get_git.go36
-rw-r--r--vendor/github.com/hashicorp/go-getter/get_hg.go10
-rw-r--r--vendor/github.com/hashicorp/go-getter/get_http.go49
-rw-r--r--vendor/github.com/hashicorp/go-getter/get_s3.go75
-rw-r--r--vendor/github.com/hashicorp/go-getter/source.go26
-rw-r--r--vendor/github.com/hashicorp/go-hclog/LICENSE21
-rw-r--r--vendor/github.com/hashicorp/go-hclog/README.md123
-rw-r--r--vendor/github.com/hashicorp/go-hclog/global.go34
-rw-r--r--vendor/github.com/hashicorp/go-hclog/int.go385
-rw-r--r--vendor/github.com/hashicorp/go-hclog/log.go138
-rw-r--r--vendor/github.com/hashicorp/go-hclog/stacktrace.go108
-rw-r--r--vendor/github.com/hashicorp/go-hclog/stdlog.go62
-rw-r--r--vendor/github.com/hashicorp/go-plugin/README.md49
-rw-r--r--vendor/github.com/hashicorp/go-plugin/client.go293
-rw-r--r--vendor/github.com/hashicorp/go-plugin/grpc_broker.go455
-rw-r--r--vendor/github.com/hashicorp/go-plugin/grpc_broker.pb.go190
-rw-r--r--vendor/github.com/hashicorp/go-plugin/grpc_broker.proto14
-rw-r--r--vendor/github.com/hashicorp/go-plugin/grpc_client.go107
-rw-r--r--vendor/github.com/hashicorp/go-plugin/grpc_server.go132
-rw-r--r--vendor/github.com/hashicorp/go-plugin/log_entry.go73
-rw-r--r--vendor/github.com/hashicorp/go-plugin/plugin.go33
-rw-r--r--vendor/github.com/hashicorp/go-plugin/protocol.go45
-rw-r--r--vendor/github.com/hashicorp/go-plugin/rpc_client.go47
-rw-r--r--vendor/github.com/hashicorp/go-plugin/rpc_server.go20
-rw-r--r--vendor/github.com/hashicorp/go-plugin/server.go135
-rw-r--r--vendor/github.com/hashicorp/go-plugin/testing.go86
-rw-r--r--vendor/github.com/hashicorp/go-safetemp/LICENSE362
-rw-r--r--vendor/github.com/hashicorp/go-safetemp/README.md10
-rw-r--r--vendor/github.com/hashicorp/go-safetemp/safetemp.go40
-rw-r--r--vendor/github.com/hashicorp/go-uuid/.travis.yml12
-rw-r--r--vendor/github.com/hashicorp/go-uuid/README.md4
-rw-r--r--vendor/github.com/hashicorp/go-uuid/go.mod1
-rw-r--r--vendor/github.com/hashicorp/go-uuid/uuid.go16
-rw-r--r--vendor/github.com/hashicorp/go-version/.travis.yml2
-rw-r--r--vendor/github.com/hashicorp/go-version/constraint.go34
-rw-r--r--vendor/github.com/hashicorp/go-version/go.mod1
-rw-r--r--vendor/github.com/hashicorp/go-version/version.go59
-rw-r--r--vendor/github.com/hashicorp/hcl2/LICENSE353
-rw-r--r--vendor/github.com/hashicorp/hcl2/gohcl/decode.go304
-rw-r--r--vendor/github.com/hashicorp/hcl2/gohcl/doc.go49
-rw-r--r--vendor/github.com/hashicorp/hcl2/gohcl/schema.go174
-rw-r--r--vendor/github.com/hashicorp/hcl2/gohcl/types.go16
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/diagnostic.go103
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/diagnostic_text.go168
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/didyoumean.go24
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/doc.go1
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/eval_context.go25
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/expr_call.go46
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/expr_list.go37
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/expr_map.go44
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/expr_unwrap.go68
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/didyoumean.go24
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/doc.go7
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression.go1275
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_ops.go258
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_template.go192
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_vars.go76
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_vars_gen.go99
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/file.go20
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/generate.go9
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/keywords.go21
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/navigation.go41
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/node.go22
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser.go1836
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser_template.go728
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser_traversal.go159
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/peeker.go212
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/public.go171
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_string_lit.go301
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_string_lit.rl105
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_tokens.go5443
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_tokens.rl376
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/spec.md923
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/structure.go379
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/token.go272
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/token_type_string.go69
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/unicode2ragel.rb335
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/unicode_derived.rl2135
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/variables.go86
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/walk.go77
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/json/ast.go121
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/json/didyoumean.go33
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/json/doc.go8
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/json/navigation.go70
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/json/parser.go491
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/json/peeker.go25
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/json/public.go94
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/json/scanner.go293
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/json/spec.md405
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/json/structure.go616
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/json/tokentype_string.go29
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/merged.go226
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/ops.go147
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/pos.go262
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/pos_scanner.go148
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/schema.go21
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/spec.md691
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/static_expr.go40
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/structure.go151
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/traversal.go352
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcl/traversal_for_expr.go121
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcldec/block_labels.go21
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcldec/decode.go36
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcldec/doc.go12
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcldec/gob.go23
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcldec/public.go78
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcldec/schema.go36
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcldec/spec.go998
-rw-r--r--vendor/github.com/hashicorp/hcl2/hcldec/variables.go34
-rw-r--r--vendor/github.com/hashicorp/hcl2/hclparse/parser.go123
-rw-r--r--vendor/github.com/hashicorp/hil/scanner/scanner.go6
-rw-r--r--vendor/github.com/hashicorp/terraform/config/append.go6
-rw-r--r--vendor/github.com/hashicorp/terraform/config/config.go396
-rw-r--r--vendor/github.com/hashicorp/terraform/config/config_string.go40
-rw-r--r--vendor/github.com/hashicorp/terraform/config/configschema/decoder_spec.go97
-rw-r--r--vendor/github.com/hashicorp/terraform/config/configschema/doc.go14
-rw-r--r--vendor/github.com/hashicorp/terraform/config/configschema/implied_type.go21
-rw-r--r--vendor/github.com/hashicorp/terraform/config/configschema/internal_validate.go92
-rw-r--r--vendor/github.com/hashicorp/terraform/config/configschema/nestingmode_string.go16
-rw-r--r--vendor/github.com/hashicorp/terraform/config/configschema/schema.go107
-rw-r--r--vendor/github.com/hashicorp/terraform/config/hcl2_shim_util.go134
-rw-r--r--vendor/github.com/hashicorp/terraform/config/hcl2shim/single_attr_body.go85
-rw-r--r--vendor/github.com/hashicorp/terraform/config/hcl2shim/values.go246
-rw-r--r--vendor/github.com/hashicorp/terraform/config/import_tree.go54
-rw-r--r--vendor/github.com/hashicorp/terraform/config/interpolate.go55
-rw-r--r--vendor/github.com/hashicorp/terraform/config/interpolate_funcs.go429
-rw-r--r--vendor/github.com/hashicorp/terraform/config/interpolate_walk.go4
-rw-r--r--vendor/github.com/hashicorp/terraform/config/loader.go5
-rw-r--r--vendor/github.com/hashicorp/terraform/config/loader_hcl.go126
-rw-r--r--vendor/github.com/hashicorp/terraform/config/loader_hcl2.go473
-rw-r--r--vendor/github.com/hashicorp/terraform/config/merge.go11
-rw-r--r--vendor/github.com/hashicorp/terraform/config/module/get.go20
-rw-r--r--vendor/github.com/hashicorp/terraform/config/module/inode.go2
-rw-r--r--vendor/github.com/hashicorp/terraform/config/module/module.go6
-rw-r--r--vendor/github.com/hashicorp/terraform/config/module/storage.go365
-rw-r--r--vendor/github.com/hashicorp/terraform/config/module/testing.go6
-rw-r--r--vendor/github.com/hashicorp/terraform/config/module/tree.go314
-rw-r--r--vendor/github.com/hashicorp/terraform/config/module/validate_provider_alias.go2
-rw-r--r--vendor/github.com/hashicorp/terraform/config/module/versions.go95
-rw-r--r--vendor/github.com/hashicorp/terraform/config/raw_config.go125
-rw-r--r--vendor/github.com/hashicorp/terraform/config/resource_mode_string.go4
-rw-r--r--vendor/github.com/hashicorp/terraform/config/testing.go2
-rw-r--r--vendor/github.com/hashicorp/terraform/dag/dag.go21
-rw-r--r--vendor/github.com/hashicorp/terraform/dag/marshal.go12
-rw-r--r--vendor/github.com/hashicorp/terraform/dag/walk.go16
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/experiment/experiment.go154
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/experiment/id.go34
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/hashcode/hashcode.go13
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/logging/logging.go8
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/logging/transport.go21
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/resource/id.go5
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/resource/state.go2
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/resource/testing.go348
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/resource/testing_config.go41
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/resource/testing_import_state.go15
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/resource/wait.go2
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/schema/backend.go2
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/schema/core_schema.go155
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/schema/data_source_resource_shim.go2
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/schema/field_reader.go11
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/schema/field_reader_config.go2
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/schema/field_reader_diff.go43
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/schema/field_reader_map.go2
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/schema/field_writer_map.go32
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/schema/getsource_string.go6
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/schema/provider.go40
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/schema/provisioner.go4
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/schema/resource.go98
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/schema/resource_data.go39
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/schema/resource_diff.go559
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/schema/schema.go252
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/schema/set.go31
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/schema/testing.go4
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/schema/valuetype_string.go4
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/shadow/closer.go83
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/shadow/compared_value.go128
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/shadow/keyed_value.go151
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/shadow/ordered_value.go66
-rw-r--r--vendor/github.com/hashicorp/terraform/helper/shadow/value.go87
-rw-r--r--vendor/github.com/hashicorp/terraform/httpclient/client.go18
-rw-r--r--vendor/github.com/hashicorp/terraform/httpclient/useragent.go40
-rw-r--r--vendor/github.com/hashicorp/terraform/plugin/client.go9
-rw-r--r--vendor/github.com/hashicorp/terraform/plugin/discovery/find.go27
-rw-r--r--vendor/github.com/hashicorp/terraform/plugin/discovery/get.go134
-rw-r--r--vendor/github.com/hashicorp/terraform/plugin/discovery/get_cache.go48
-rw-r--r--vendor/github.com/hashicorp/terraform/plugin/resource_provider.go39
-rw-r--r--vendor/github.com/hashicorp/terraform/registry/client.go227
-rw-r--r--vendor/github.com/hashicorp/terraform/registry/errors.go23
-rw-r--r--vendor/github.com/hashicorp/terraform/registry/regsrc/friendly_host.go140
-rw-r--r--vendor/github.com/hashicorp/terraform/registry/regsrc/module.go205
-rw-r--r--vendor/github.com/hashicorp/terraform/registry/regsrc/regsrc.go8
-rw-r--r--vendor/github.com/hashicorp/terraform/registry/response/module.go93
-rw-r--r--vendor/github.com/hashicorp/terraform/registry/response/module_list.go7
-rw-r--r--vendor/github.com/hashicorp/terraform/registry/response/module_provider.go14
-rw-r--r--vendor/github.com/hashicorp/terraform/registry/response/module_versions.go32
-rw-r--r--vendor/github.com/hashicorp/terraform/registry/response/pagination.go65
-rw-r--r--vendor/github.com/hashicorp/terraform/registry/response/redirect.go6
-rw-r--r--vendor/github.com/hashicorp/terraform/svchost/auth/cache.go45
-rw-r--r--vendor/github.com/hashicorp/terraform/svchost/auth/credentials.go63
-rw-r--r--vendor/github.com/hashicorp/terraform/svchost/auth/from_map.go18
-rw-r--r--vendor/github.com/hashicorp/terraform/svchost/auth/helper_program.go80
-rw-r--r--vendor/github.com/hashicorp/terraform/svchost/auth/static.go28
-rw-r--r--vendor/github.com/hashicorp/terraform/svchost/auth/token_credentials.go25
-rw-r--r--vendor/github.com/hashicorp/terraform/svchost/disco/disco.go259
-rw-r--r--vendor/github.com/hashicorp/terraform/svchost/disco/host.go264
-rw-r--r--vendor/github.com/hashicorp/terraform/svchost/label_iter.go69
-rw-r--r--vendor/github.com/hashicorp/terraform/svchost/svchost.go207
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/context.go189
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/context_import.go2
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/diff.go15
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/eval.go4
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/eval_apply.go27
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/eval_context.go13
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/eval_context_builtin.go102
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/eval_context_mock.go38
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/eval_diff.go26
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/eval_interpolate.go40
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/eval_local.go86
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/eval_output.go25
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/eval_provider.go59
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/eval_state.go35
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/eval_validate.go4
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/evaltree_provider.go41
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/features.go7
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/graph.go8
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/graph_builder_apply.go29
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/graph_builder_import.go7
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/graph_builder_plan.go23
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/graph_builder_refresh.go10
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/graph_walk_context.go3
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/graphtype_string.go4
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/instancetype_string.go4
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/interpolate.go84
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/module_dependencies.go1
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/node_data_refresh.go11
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/node_local.go66
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/node_module_destroy.go29
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/node_module_removed.go77
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/node_module_variable.go19
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/node_output.go80
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/node_output_orphan.go5
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/node_provider.go2
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/node_provider_abstract.go24
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/node_provider_disabled.go6
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/node_resource_abstract.go15
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/node_resource_apply.go55
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/node_resource_destroy.go13
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/node_resource_plan.go2
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/node_resource_plan_instance.go8
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/node_resource_refresh.go17
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/node_resource_validate.go3
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/path.go18
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/plan.go52
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/resource.go42
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/resource_address.go37
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/resource_provider.go23
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/resource_provider_mock.go20
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/schemas.go34
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/shadow.go28
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/shadow_components.go273
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/shadow_context.go158
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/shadow_resource_provider.go815
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/shadow_resource_provisioner.go282
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/state.go108
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/test_failure9
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/transform.go5
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/transform_attach_config_provider.go62
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/transform_deposed.go38
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/transform_destroy_edge.go10
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/transform_import_state.go45
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/transform_local.go40
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/transform_orphan_output.go45
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/transform_output.go46
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/transform_provider.go574
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/transform_provider_disable.go50
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/transform_reference.go100
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/transform_removed_modules.go32
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/transform_resource_count.go4
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/transform_targets.go56
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/ui_output_mock.go5
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/user_agent.go11
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/version.go27
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/version_required.go12
-rw-r--r--vendor/github.com/hashicorp/terraform/terraform/walkoperation_string.go4
-rw-r--r--vendor/github.com/hashicorp/terraform/tfdiags/diagnostic.go26
-rw-r--r--vendor/github.com/hashicorp/terraform/tfdiags/diagnostics.go181
-rw-r--r--vendor/github.com/hashicorp/terraform/tfdiags/doc.go16
-rw-r--r--vendor/github.com/hashicorp/terraform/tfdiags/error.go23
-rw-r--r--vendor/github.com/hashicorp/terraform/tfdiags/hcl.go77
-rw-r--r--vendor/github.com/hashicorp/terraform/tfdiags/rpc_friendly.go53
-rw-r--r--vendor/github.com/hashicorp/terraform/tfdiags/severity_string.go21
-rw-r--r--vendor/github.com/hashicorp/terraform/tfdiags/simple_warning.go25
-rw-r--r--vendor/github.com/hashicorp/terraform/tfdiags/source_range.go35
-rw-r--r--vendor/github.com/hashicorp/terraform/version/version.go36
313 files changed, 35484 insertions, 4076 deletions
diff --git a/vendor/github.com/hashicorp/go-cleanhttp/cleanhttp.go b/vendor/github.com/hashicorp/go-cleanhttp/cleanhttp.go
index 7d8a57c..8d306bf 100644
--- a/vendor/github.com/hashicorp/go-cleanhttp/cleanhttp.go
+++ b/vendor/github.com/hashicorp/go-cleanhttp/cleanhttp.go
@@ -26,6 +26,7 @@ func DefaultPooledTransport() *http.Transport {
 		DialContext: (&net.Dialer{
 			Timeout:   30 * time.Second,
 			KeepAlive: 30 * time.Second,
+			DualStack: true,
 		}).DialContext,
 		MaxIdleConns:          100,
 		IdleConnTimeout:       90 * time.Second,
diff --git a/vendor/github.com/hashicorp/go-cleanhttp/go.mod b/vendor/github.com/hashicorp/go-cleanhttp/go.mod
new file mode 100644
index 0000000..310f075
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-cleanhttp/go.mod
@@ -0,0 +1 @@
+module github.com/hashicorp/go-cleanhttp
diff --git a/vendor/github.com/hashicorp/go-cleanhttp/handlers.go b/vendor/github.com/hashicorp/go-cleanhttp/handlers.go
new file mode 100644
index 0000000..7eda377
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-cleanhttp/handlers.go
@@ -0,0 +1,43 @@
+package cleanhttp
+
+import (
+	"net/http"
+	"strings"
+	"unicode"
+)
+
+// HandlerInput provides input options to cleanhttp's handlers
+type HandlerInput struct {
+	ErrStatus int
+}
+
+// PrintablePathCheckHandler is a middleware that ensures the request path
+// contains only printable runes.
+func PrintablePathCheckHandler(next http.Handler, input *HandlerInput) http.Handler {
+	// Nil-check on input to make it optional
+	if input == nil {
+		input = &HandlerInput{
+			ErrStatus: http.StatusBadRequest,
+		}
+	}
+
+	// Default to http.StatusBadRequest on error
+	if input.ErrStatus == 0 {
+		input.ErrStatus = http.StatusBadRequest
+	}
+
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		// Check URL path for non-printable characters
+		idx := strings.IndexFunc(r.URL.Path, func(c rune) bool {
+			return !unicode.IsPrint(c)
+		})
+
+		if idx != -1 {
+			w.WriteHeader(input.ErrStatus)
+			return
+		}
+
+		next.ServeHTTP(w, r)
+		return
+	})
+}
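For context, a minimal sketch of how the new `PrintablePathCheckHandler` middleware might be wired into a server. The function name and nil-input behaviour come from handlers.go above; the mux, route, and listen address are illustrative and not part of the commit:

```go
package main

import (
	"net/http"

	cleanhttp "github.com/hashicorp/go-cleanhttp"
)

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte("ok"))
	})

	// Reject any request whose path contains non-printable runes.
	// Passing nil for the options falls back to http.StatusBadRequest.
	handler := cleanhttp.PrintablePathCheckHandler(mux, nil)

	http.ListenAndServe("127.0.0.1:8080", handler)
}
```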
diff --git a/vendor/github.com/hashicorp/go-getter/.travis.yml b/vendor/github.com/hashicorp/go-getter/.travis.yml
index 4438286..da804c2 100644
--- a/vendor/github.com/hashicorp/go-getter/.travis.yml
+++ b/vendor/github.com/hashicorp/go-getter/.travis.yml
@@ -10,8 +10,14 @@ addons:
 language: go
 
 go:
-  - 1.5
+  - 1.8.x
+  - 1.9.x
+  - master
 
 branches:
   only:
     - master
+
+matrix:
+  allow_failures:
+    - go: master
diff --git a/vendor/github.com/hashicorp/go-getter/README.md b/vendor/github.com/hashicorp/go-getter/README.md
index 4a0b6a6..40ace74 100644
--- a/vendor/github.com/hashicorp/go-getter/README.md
+++ b/vendor/github.com/hashicorp/go-getter/README.md
@@ -21,8 +21,7 @@ URLs. For example: "github.com/hashicorp/go-getter" would turn into a
 Git URL. Or "./foo" would turn into a file URL. These are extensible.
 
 This library is used by [Terraform](https://terraform.io) for
-downloading modules, [Otto](https://ottoproject.io) for dependencies and
-Appfile imports, and [Nomad](https://nomadproject.io) for downloading
+downloading modules and [Nomad](https://nomadproject.io) for downloading
 binaries.
 
 ## Installation and Usage
@@ -119,6 +118,37 @@ The protocol-specific options are documented below the URL format
 section. But because they are part of the URL, we point it out here so
 you know they exist.
 
+### Subdirectories
+
+If you want to download only a specific subdirectory from a downloaded
+directory, you can specify a subdirectory after a double-slash `//`.
+go-getter will first download the URL specified _before_ the double-slash
+(as if you didn't specify a double-slash), but will then copy the
+path after the double slash into the target directory.
+
+For example, if you're downloading this GitHub repository, but you only
+want to download the `test-fixtures` directory, you can do the following:
+
+```
+https://github.com/hashicorp/go-getter.git//test-fixtures
+```
+
+If you downloaded this to the `/tmp` directory, then the file
+`/tmp/archive.gz` would exist. Notice that this file is in the `test-fixtures`
+directory in this repository, but because we specified a subdirectory,
+go-getter automatically copied only that directory contents.
+
+Subdirectory paths may contain may also use filesystem glob patterns.
+The path must match _exactly one_ entry or go-getter will return an error.
+This is useful if you're not sure the exact directory name but it follows
+a predictable naming structure.
+
+For example, the following URL would also work:
+
+```
+https://github.com/hashicorp/go-getter.git//test-*
+```
+
 ### Checksumming
 
 For file downloads of any protocol, go-getter can automatically verify
@@ -154,9 +184,11 @@ The following archive formats are supported:
 
  * `tar.gz` and `tgz`
  * `tar.bz2` and `tbz2`
+ * `tar.xz` and `txz`
  * `zip`
  * `gz`
  * `bz2`
+ * `xz`
 
 For example, an example URL is shown below:
 
@@ -200,6 +232,9 @@ The options below are available to all protocols:
  * `checksum` - Checksum to verify the downloaded file or archive. See
    the entire section on checksumming above for format and more details.
 
+ * `filename` - When in file download mode, allows specifying the name of the
+   downloaded file on disk. Has no effect in directory mode.
+
 ### Local Files (`file`)
 
 None
@@ -222,13 +257,17 @@ None
 
 ### HTTP (`http`)
 
-None
+#### Basic Authentication
+
+To use HTTP basic authentication with go-getter, simply prepend `username:password@` to the
+hostname in the URL such as `https://Aladdin:OpenSesame@www.example.com/index.html`. All special
+characters, including the username and password, must be URL encoded.
 
 ### S3 (`s3`)
 
 S3 takes various access configurations in the URL. Note that it will also
-read these from standard AWS environment variables if they're set. If
-the query parameters are present, these take priority.
+read these from standard AWS environment variables if they're set. S3 compliant servers like Minio
+are also supported. If the query parameters are present, these take priority.
 
  * `aws_access_key_id` - AWS access key.
  * `aws_access_key_secret` - AWS access key secret.
@@ -240,6 +279,14 @@ If you use go-getter and want to use an EC2 IAM Instance Profile to avoid
 using credentials, then just omit these and the profile, if available will
 be used automatically.
 
+### Using S3 with Minio
+ If you use go-gitter for Minio support, you must consider the following:
+
+  * `aws_access_key_id` (required) - Minio access key.
+  * `aws_access_key_secret` (required) - Minio access key secret.
+  * `region` (optional - defaults to us-east-1) - Region identifier to use.
+  * `version` (optional - defaults to Minio default) - Configuration file format.
+
 #### S3 Bucket Examples
 
 S3 has several addressing schemes used to reference your bucket. These are
@@ -250,4 +297,5 @@ Some examples for these addressing schemes:
 - s3::https://s3-eu-west-1.amazonaws.com/bucket/foo
 - bucket.s3.amazonaws.com/foo
 - bucket.s3-eu-west-1.amazonaws.com/foo/bar
+- "s3::http://127.0.0.1:9000/test-bucket/hello.txt?aws_access_key_id=KEYID&aws_access_key_secret=SECRETKEY&region=us-east-2"
 
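To show what the new subdirectory syntax looks like from the API side, here is a hedged sketch using go-getter's top-level `Get` helper. The source string mirrors the README examples above; the destination path is an arbitrary placeholder:

```go
package main

import (
	"log"

	getter "github.com/hashicorp/go-getter"
)

func main() {
	// Download only the test-fixtures subdirectory of the repository into
	// ./fixtures using the double-slash syntax. A glob such as "//test-*"
	// works the same way as long as it matches exactly one entry.
	src := "git::https://github.com/hashicorp/go-getter.git//test-fixtures"

	if err := getter.Get("./fixtures", src); err != nil {
		log.Fatal(err)
	}
}
```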
diff --git a/vendor/github.com/hashicorp/go-getter/appveyor.yml b/vendor/github.com/hashicorp/go-getter/appveyor.yml
index 159dad4..ec48d45 100644
--- a/vendor/github.com/hashicorp/go-getter/appveyor.yml
+++ b/vendor/github.com/hashicorp/go-getter/appveyor.yml
@@ -1,5 +1,5 @@
 version: "build-{branch}-{build}"
-image: Visual Studio 2015
+image: Visual Studio 2017
 clone_folder: c:\gopath\github.com\hashicorp\go-getter
 environment:
   GOPATH: c:\gopath
diff --git a/vendor/github.com/hashicorp/go-getter/client.go b/vendor/github.com/hashicorp/go-getter/client.go
index 876812a..300301c 100644
--- a/vendor/github.com/hashicorp/go-getter/client.go
+++ b/vendor/github.com/hashicorp/go-getter/client.go
@@ -17,6 +17,7 @@ import (
17 "strings" 17 "strings"
18 18
19 urlhelper "github.com/hashicorp/go-getter/helper/url" 19 urlhelper "github.com/hashicorp/go-getter/helper/url"
20 "github.com/hashicorp/go-safetemp"
20) 21)
21 22
22// Client is a client for downloading things. 23// Client is a client for downloading things.
@@ -100,17 +101,14 @@ func (c *Client) Get() error {
 	dst := c.Dst
 	src, subDir := SourceDirSubdir(src)
 	if subDir != "" {
-		tmpDir, err := ioutil.TempDir("", "tf")
+		td, tdcloser, err := safetemp.Dir("", "getter")
 		if err != nil {
 			return err
 		}
-		if err := os.RemoveAll(tmpDir); err != nil {
-			return err
-		}
-		defer os.RemoveAll(tmpDir)
+		defer tdcloser.Close()
 
 		realDst = dst
-		dst = tmpDir
+		dst = td
 	}
 
 	u, err := urlhelper.Parse(src)
@@ -232,7 +230,18 @@ func (c *Client) Get() error {
 		// Destination is the base name of the URL path in "any" mode when
 		// a file source is detected.
 		if mode == ClientModeFile {
-			dst = filepath.Join(dst, filepath.Base(u.Path))
+			filename := filepath.Base(u.Path)
+
+			// Determine if we have a custom file name
+			if v := q.Get("filename"); v != "" {
+				// Delete the query parameter if we have it.
+				q.Del("filename")
+				u.RawQuery = q.Encode()
+
+				filename = v
+			}
+
+			dst = filepath.Join(dst, filename)
 		}
 	}
 
@@ -305,7 +314,13 @@ func (c *Client) Get() error {
 			return err
 		}
 
-		return copyDir(realDst, filepath.Join(dst, subDir), false)
+		// Process any globs
+		subDir, err := SubdirGlob(dst, subDir)
+		if err != nil {
+			return err
+		}
+
+		return copyDir(realDst, subDir, false)
 	}
 
 	return nil
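A small sketch of the new `filename` query parameter in use, via `GetAny` so the client resolves file mode itself and joins the custom name onto the destination directory, as in the client.go hunk above. The URL is a placeholder, not a real release endpoint:

```go
package main

import (
	"log"

	getter "github.com/hashicorp/go-getter"
)

func main() {
	// The "filename" query parameter is stripped from the URL before the
	// download, so the file lands at ./downloads/SHA256SUMS instead of the
	// URL's base name.
	src := "https://releases.example.com/terraform/0.11.14/terraform_SHA256SUMS?filename=SHA256SUMS"

	if err := getter.GetAny("./downloads", src); err != nil {
		log.Fatal(err)
	}
}
```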
diff --git a/vendor/github.com/hashicorp/go-getter/decompress.go b/vendor/github.com/hashicorp/go-getter/decompress.go
index d18174c..198bb0e 100644
--- a/vendor/github.com/hashicorp/go-getter/decompress.go
+++ b/vendor/github.com/hashicorp/go-getter/decompress.go
@@ -1,7 +1,15 @@
 package getter
 
+import (
+	"strings"
+)
+
 // Decompressor defines the interface that must be implemented to add
 // support for decompressing a type.
+//
+// Important: if you're implementing a decompressor, please use the
+// containsDotDot helper in this file to ensure that files can't be
+// decompressed outside of the specified directory.
 type Decompressor interface {
 	// Decompress should decompress src to dst. dir specifies whether dst
 	// is a directory or single file. src is guaranteed to be a single file
@@ -16,14 +24,35 @@ var Decompressors map[string]Decompressor
 func init() {
 	tbzDecompressor := new(TarBzip2Decompressor)
 	tgzDecompressor := new(TarGzipDecompressor)
+	txzDecompressor := new(TarXzDecompressor)
 
 	Decompressors = map[string]Decompressor{
 		"bz2":     new(Bzip2Decompressor),
 		"gz":      new(GzipDecompressor),
+		"xz":      new(XzDecompressor),
 		"tar.bz2": tbzDecompressor,
 		"tar.gz":  tgzDecompressor,
+		"tar.xz":  txzDecompressor,
 		"tbz2":    tbzDecompressor,
 		"tgz":     tgzDecompressor,
+		"txz":     txzDecompressor,
 		"zip":     new(ZipDecompressor),
 	}
 }
+
+// containsDotDot checks if the filepath value v contains a ".." entry.
+// This will check filepath components by splitting along / or \. This
+// function is copied directly from the Go net/http implementation.
+func containsDotDot(v string) bool {
+	if !strings.Contains(v, "..") {
+		return false
+	}
+	for _, ent := range strings.FieldsFunc(v, isSlashRune) {
+		if ent == ".." {
+			return true
+		}
+	}
+	return false
+}
+
+func isSlashRune(r rune) bool { return r == '/' || r == '\\' }
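A sketch of driving the extended decompressor registry directly, using the `Decompressors` map and the three-argument `Decompress` signature shown above. The archive and output paths are placeholders:

```go
package main

import (
	"log"

	getter "github.com/hashicorp/go-getter"
)

func main() {
	// Look up the decompressor registered for the new "txz" extension and
	// unpack an archive into a directory (dir = true).
	d, ok := getter.Decompressors["txz"]
	if !ok {
		log.Fatal("no decompressor registered for txz")
	}

	if err := d.Decompress("./unpacked", "./module.txz", true); err != nil {
		log.Fatal(err)
	}
}
```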
diff --git a/vendor/github.com/hashicorp/go-getter/decompress_gzip.go b/vendor/github.com/hashicorp/go-getter/decompress_gzip.go
index 2001054..5ebf709 100644
--- a/vendor/github.com/hashicorp/go-getter/decompress_gzip.go
+++ b/vendor/github.com/hashicorp/go-getter/decompress_gzip.go
@@ -9,7 +9,7 @@ import (
 )
 
 // GzipDecompressor is an implementation of Decompressor that can
-// decompress bz2 files.
+// decompress gzip files.
 type GzipDecompressor struct{}
 
 func (d *GzipDecompressor) Decompress(dst, src string, dir bool) error {
diff --git a/vendor/github.com/hashicorp/go-getter/decompress_tar.go b/vendor/github.com/hashicorp/go-getter/decompress_tar.go
new file mode 100644
index 0000000..39cb392
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/decompress_tar.go
@@ -0,0 +1,138 @@
+package getter
+
+import (
+	"archive/tar"
+	"fmt"
+	"io"
+	"os"
+	"path/filepath"
+)
+
+// untar is a shared helper for untarring an archive. The reader should provide
+// an uncompressed view of the tar archive.
+func untar(input io.Reader, dst, src string, dir bool) error {
+	tarR := tar.NewReader(input)
+	done := false
+	dirHdrs := []*tar.Header{}
+	for {
+		hdr, err := tarR.Next()
+		if err == io.EOF {
+			if !done {
+				// Empty archive
+				return fmt.Errorf("empty archive: %s", src)
+			}
+
+			break
+		}
+		if err != nil {
+			return err
+		}
+
+		if hdr.Typeflag == tar.TypeXGlobalHeader || hdr.Typeflag == tar.TypeXHeader {
+			// don't unpack extended headers as files
+			continue
+		}
+
+		path := dst
+		if dir {
+			// Disallow parent traversal
+			if containsDotDot(hdr.Name) {
+				return fmt.Errorf("entry contains '..': %s", hdr.Name)
+			}
+
+			path = filepath.Join(path, hdr.Name)
+		}
+
+		if hdr.FileInfo().IsDir() {
+			if !dir {
+				return fmt.Errorf("expected a single file: %s", src)
+			}
+
+			// A directory, just make the directory and continue unarchiving...
+			if err := os.MkdirAll(path, 0755); err != nil {
+				return err
+			}
+
+			// Record the directory information so that we may set its attributes
+			// after all files have been extracted
+			dirHdrs = append(dirHdrs, hdr)
+
+			continue
+		} else {
+			// There is no ordering guarantee that a file in a directory is
+			// listed before the directory
+			dstPath := filepath.Dir(path)
+
+			// Check that the directory exists, otherwise create it
+			if _, err := os.Stat(dstPath); os.IsNotExist(err) {
+				if err := os.MkdirAll(dstPath, 0755); err != nil {
+					return err
+				}
+			}
+		}
+
+		// We have a file. If we already decoded, then it is an error
+		if !dir && done {
+			return fmt.Errorf("expected a single file, got multiple: %s", src)
+		}
+
+		// Mark that we're done so future in single file mode errors
+		done = true
+
+		// Open the file for writing
+		dstF, err := os.Create(path)
+		if err != nil {
+			return err
+		}
+		_, err = io.Copy(dstF, tarR)
+		dstF.Close()
+		if err != nil {
+			return err
+		}
+
+		// Chmod the file
+		if err := os.Chmod(path, hdr.FileInfo().Mode()); err != nil {
+			return err
+		}
+
+		// Set the access and modification time
+		if err := os.Chtimes(path, hdr.AccessTime, hdr.ModTime); err != nil {
+			return err
+		}
+	}
+
+	// Adding a file or subdirectory changes the mtime of a directory
+	// We therefore wait until we've extracted everything and then set the mtime and atime attributes
+	for _, dirHdr := range dirHdrs {
+		path := filepath.Join(dst, dirHdr.Name)
+		if err := os.Chtimes(path, dirHdr.AccessTime, dirHdr.ModTime); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// tarDecompressor is an implementation of Decompressor that can
+// unpack tar files.
+type tarDecompressor struct{}
+
+func (d *tarDecompressor) Decompress(dst, src string, dir bool) error {
+	// If we're going into a directory we should make that first
+	mkdir := dst
+	if !dir {
+		mkdir = filepath.Dir(dst)
+	}
+	if err := os.MkdirAll(mkdir, 0755); err != nil {
+		return err
+	}
+
+	// File first
+	f, err := os.Open(src)
+	if err != nil {
+		return err
+	}
+	defer f.Close()
+
+	return untar(f, dst, src, dir)
+}
diff --git a/vendor/github.com/hashicorp/go-getter/decompress_tbz2.go b/vendor/github.com/hashicorp/go-getter/decompress_tbz2.go
index c46ed44..5391b5c 100644
--- a/vendor/github.com/hashicorp/go-getter/decompress_tbz2.go
+++ b/vendor/github.com/hashicorp/go-getter/decompress_tbz2.go
@@ -1,10 +1,7 @@
 package getter
 
 import (
-	"archive/tar"
 	"compress/bzip2"
-	"fmt"
-	"io"
 	"os"
 	"path/filepath"
 )
@@ -32,64 +29,5 @@ func (d *TarBzip2Decompressor) Decompress(dst, src string, dir bool) error {
 
 	// Bzip2 compression is second
 	bzipR := bzip2.NewReader(f)
-
-	// Once bzip decompressed we have a tar format
-	tarR := tar.NewReader(bzipR)
-	done := false
-	for {
-		hdr, err := tarR.Next()
-		if err == io.EOF {
-			if !done {
-				// Empty archive
-				return fmt.Errorf("empty archive: %s", src)
-			}
-
-			return nil
-		}
-		if err != nil {
-			return err
-		}
-
-		path := dst
-		if dir {
-			path = filepath.Join(path, hdr.Name)
-		}
-
-		if hdr.FileInfo().IsDir() {
-			if dir {
-				return fmt.Errorf("expected a single file: %s", src)
-			}
-
-			// A directory, just make the directory and continue unarchiving...
-			if err := os.MkdirAll(path, 0755); err != nil {
-				return err
-			}
-
-			continue
-		}
-
-		// We have a file. If we already decoded, then it is an error
-		if !dir && done {
-			return fmt.Errorf("expected a single file, got multiple: %s", src)
-		}
-
-		// Mark that we're done so future in single file mode errors
-		done = true
-
-		// Open the file for writing
-		dstF, err := os.Create(path)
-		if err != nil {
-			return err
-		}
-		_, err = io.Copy(dstF, tarR)
-		dstF.Close()
-		if err != nil {
-			return err
-		}
-
-		// Chmod the file
-		if err := os.Chmod(path, hdr.FileInfo().Mode()); err != nil {
-			return err
-		}
-	}
+	return untar(bzipR, dst, src, dir)
 }
diff --git a/vendor/github.com/hashicorp/go-getter/decompress_testing.go b/vendor/github.com/hashicorp/go-getter/decompress_testing.go
index 686d6c2..91cf33d 100644
--- a/vendor/github.com/hashicorp/go-getter/decompress_testing.go
+++ b/vendor/github.com/hashicorp/go-getter/decompress_testing.go
@@ -11,7 +11,9 @@ import (
11 "runtime" 11 "runtime"
12 "sort" 12 "sort"
13 "strings" 13 "strings"
14 "testing" 14 "time"
15
16 "github.com/mitchellh/go-testing-interface"
15) 17)
16 18
17// TestDecompressCase is a single test case for testing decompressors 19// TestDecompressCase is a single test case for testing decompressors
@@ -21,10 +23,11 @@ type TestDecompressCase struct {
 	Err     bool       // Err is whether we expect an error or not
 	DirList []string   // DirList is the list of files for Dir mode
 	FileMD5 string     // FileMD5 is the expected MD5 for a single file
+	Mtime   *time.Time // Mtime is the optionally expected mtime for a single file (or all files if in Dir mode)
 }
 
 // TestDecompressor is a helper function for testing generic decompressors.
-func TestDecompressor(t *testing.T, d Decompressor, cases []TestDecompressCase) {
+func TestDecompressor(t testing.T, d Decompressor, cases []TestDecompressCase) {
 	for _, tc := range cases {
 		t.Logf("Testing: %s", tc.Input)
 
@@ -67,6 +70,14 @@ func TestDecompressor(t *testing.T, d Decompressor, cases []TestDecompressCase)
 			}
 		}
 
+		if tc.Mtime != nil {
+			actual := fi.ModTime()
+			expected := *tc.Mtime
+			if actual != expected {
+				t.Fatalf("err %s: expected mtime '%s' for %s, got '%s'", tc.Input, expected.String(), dst, actual.String())
+			}
+		}
+
 		return
 	}
 
@@ -83,11 +94,26 @@ func TestDecompressor(t *testing.T, d Decompressor, cases []TestDecompressCase)
 		if !reflect.DeepEqual(actual, expected) {
 			t.Fatalf("bad %s\n\n%#v\n\n%#v", tc.Input, actual, expected)
 		}
+		// Check for correct atime/mtime
+		for _, dir := range actual {
+			path := filepath.Join(dst, dir)
+			if tc.Mtime != nil {
+				fi, err := os.Stat(path)
+				if err != nil {
+					t.Fatalf("err: %s", err)
+				}
+				actual := fi.ModTime()
+				expected := *tc.Mtime
+				if actual != expected {
+					t.Fatalf("err %s: expected mtime '%s' for %s, got '%s'", tc.Input, expected.String(), path, actual.String())
+				}
+			}
+		}
 		}()
 	}
 }
 
-func testListDir(t *testing.T, path string) []string {
+func testListDir(t testing.T, path string) []string {
 	var result []string
 	err := filepath.Walk(path, func(sub string, info os.FileInfo, err error) error {
 		if err != nil {
@@ -102,7 +128,7 @@ func testListDir(t *testing.T, path string) []string {
 
 		// If it is a dir, add trailing sep
 		if info.IsDir() {
-			sub += "/"
+			sub += string(os.PathSeparator)
 		}
 
 		result = append(result, sub)
@@ -116,7 +142,7 @@ func testListDir(t *testing.T, path string) []string {
 	return result
 }
 
-func testMD5(t *testing.T, path string) string {
+func testMD5(t testing.T, path string) string {
 	f, err := os.Open(path)
 	if err != nil {
 		t.Fatalf("err: %s", err)
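A hedged sketch of a test that exercises the new `Mtime` expectation. The fixture path and MD5 are placeholders, and the `Input`, `Dir`, and `FileMD5` fields are assumed from the unchanged portion of `TestDecompressCase`; a stdlib `*testing.T` satisfies the go-testing-interface type that `TestDecompressor` now accepts:

```go
package getter

import (
	"testing"
	"time"
)

func TestTarXzDecompressor_mtime(t *testing.T) {
	mtime := time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC)

	cases := []TestDecompressCase{
		{
			Input:   "./test-fixtures/decompress-txz/single.txz", // placeholder fixture
			Dir:     false,
			FileMD5: "d41d8cd98f00b204e9800998ecf8427e", // placeholder hash
			Mtime:   &mtime,
		},
	}

	TestDecompressor(t, new(TarXzDecompressor), cases)
}
```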
diff --git a/vendor/github.com/hashicorp/go-getter/decompress_tgz.go b/vendor/github.com/hashicorp/go-getter/decompress_tgz.go
index e8b1c31..65eb70d 100644
--- a/vendor/github.com/hashicorp/go-getter/decompress_tgz.go
+++ b/vendor/github.com/hashicorp/go-getter/decompress_tgz.go
@@ -1,10 +1,8 @@
 package getter
 
 import (
-	"archive/tar"
 	"compress/gzip"
 	"fmt"
-	"io"
 	"os"
 	"path/filepath"
 )
@@ -37,63 +35,5 @@ func (d *TarGzipDecompressor) Decompress(dst, src string, dir bool) error {
 	}
 	defer gzipR.Close()
 
-	// Once gzip decompressed we have a tar format
-	tarR := tar.NewReader(gzipR)
-	done := false
-	for {
-		hdr, err := tarR.Next()
-		if err == io.EOF {
-			if !done {
-				// Empty archive
-				return fmt.Errorf("empty archive: %s", src)
-			}
-
-			return nil
-		}
-		if err != nil {
-			return err
-		}
-
-		path := dst
-		if dir {
-			path = filepath.Join(path, hdr.Name)
-		}
-
-		if hdr.FileInfo().IsDir() {
-			if !dir {
-				return fmt.Errorf("expected a single file: %s", src)
-			}
-
-			// A directory, just make the directory and continue unarchiving...
-			if err := os.MkdirAll(path, 0755); err != nil {
-				return err
-			}
-
-			continue
-		}
-
-		// We have a file. If we already decoded, then it is an error
-		if !dir && done {
-			return fmt.Errorf("expected a single file, got multiple: %s", src)
-		}
-
-		// Mark that we're done so future in single file mode errors
-		done = true
-
-		// Open the file for writing
-		dstF, err := os.Create(path)
-		if err != nil {
-			return err
-		}
-		_, err = io.Copy(dstF, tarR)
-		dstF.Close()
-		if err != nil {
-			return err
-		}
-
-		// Chmod the file
-		if err := os.Chmod(path, hdr.FileInfo().Mode()); err != nil {
-			return err
-		}
-	}
+	return untar(gzipR, dst, src, dir)
 }
diff --git a/vendor/github.com/hashicorp/go-getter/decompress_txz.go b/vendor/github.com/hashicorp/go-getter/decompress_txz.go
new file mode 100644
index 0000000..5e151c1
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/decompress_txz.go
@@ -0,0 +1,39 @@
+package getter
+
+import (
+	"fmt"
+	"os"
+	"path/filepath"
+
+	"github.com/ulikunitz/xz"
+)
+
+// TarXzDecompressor is an implementation of Decompressor that can
+// decompress tar.xz files.
+type TarXzDecompressor struct{}
+
+func (d *TarXzDecompressor) Decompress(dst, src string, dir bool) error {
+	// If we're going into a directory we should make that first
+	mkdir := dst
+	if !dir {
+		mkdir = filepath.Dir(dst)
+	}
+	if err := os.MkdirAll(mkdir, 0755); err != nil {
+		return err
+	}
+
+	// File first
+	f, err := os.Open(src)
+	if err != nil {
+		return err
+	}
+	defer f.Close()
+
+	// xz compression is second
+	txzR, err := xz.NewReader(f)
+	if err != nil {
+		return fmt.Errorf("Error opening an xz reader for %s: %s", src, err)
+	}
+
+	return untar(txzR, dst, src, dir)
+}
diff --git a/vendor/github.com/hashicorp/go-getter/decompress_xz.go b/vendor/github.com/hashicorp/go-getter/decompress_xz.go
new file mode 100644
index 0000000..4e37aba
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/decompress_xz.go
@@ -0,0 +1,49 @@
+package getter
+
+import (
+	"fmt"
+	"io"
+	"os"
+	"path/filepath"
+
+	"github.com/ulikunitz/xz"
+)
+
+// XzDecompressor is an implementation of Decompressor that can
+// decompress xz files.
+type XzDecompressor struct{}
+
+func (d *XzDecompressor) Decompress(dst, src string, dir bool) error {
+	// Directory isn't supported at all
+	if dir {
+		return fmt.Errorf("xz-compressed files can only unarchive to a single file")
+	}
+
+	// If we're going into a directory we should make that first
+	if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil {
+		return err
+	}
+
+	// File first
+	f, err := os.Open(src)
+	if err != nil {
+		return err
+	}
+	defer f.Close()
+
+	// xz compression is second
+	xzR, err := xz.NewReader(f)
+	if err != nil {
+		return err
+	}
+
+	// Copy it out
+	dstF, err := os.Create(dst)
+	if err != nil {
+		return err
+	}
+	defer dstF.Close()
+
+	_, err = io.Copy(dstF, xzR)
+	return err
+}
diff --git a/vendor/github.com/hashicorp/go-getter/decompress_zip.go b/vendor/github.com/hashicorp/go-getter/decompress_zip.go
index a065c07..b0e70ca 100644
--- a/vendor/github.com/hashicorp/go-getter/decompress_zip.go
+++ b/vendor/github.com/hashicorp/go-getter/decompress_zip.go
@@ -42,6 +42,11 @@ func (d *ZipDecompressor) Decompress(dst, src string, dir bool) error {
 	for _, f := range zipR.File {
 		path := dst
 		if dir {
+			// Disallow parent traversal
+			if containsDotDot(f.Name) {
+				return fmt.Errorf("entry contains '..': %s", f.Name)
+			}
+
 			path = filepath.Join(path, f.Name)
 		}
 
diff --git a/vendor/github.com/hashicorp/go-getter/detect.go b/vendor/github.com/hashicorp/go-getter/detect.go
index 481b737..c369551 100644
--- a/vendor/github.com/hashicorp/go-getter/detect.go
+++ b/vendor/github.com/hashicorp/go-getter/detect.go
@@ -72,12 +72,18 @@ func Detect(src string, pwd string, ds []Detector) (string, error) {
 			subDir = detectSubdir
 		}
 	}
+
 	if subDir != "" {
 		u, err := url.Parse(result)
 		if err != nil {
 			return "", fmt.Errorf("Error parsing URL: %s", err)
 		}
 		u.Path += "//" + subDir
+
+		// a subdir may contain wildcards, but in order to support them we
+		// have to ensure the path isn't escaped.
+		u.RawPath = u.Path
+
 		result = u.String()
 	}
 
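A sketch of the detector behaviour the `RawPath` fix above preserves: passing a shorthand source with a glob subdirectory through `Detect` with the default `Detectors` list. The working-directory argument and the expected output shown in the comment are illustrative:

```go
package main

import (
	"fmt"
	"log"

	getter "github.com/hashicorp/go-getter"
)

func main() {
	// Turn a shorthand source with a glob subdirectory into a full URL.
	// Copying u.Path into u.RawPath keeps the "*" from being percent-escaped.
	src := "github.com/hashicorp/go-getter//test-*"

	result, err := getter.Detect(src, "/tmp/pwd", getter.Detectors)
	if err != nil {
		log.Fatal(err)
	}

	// Something like: git::https://github.com/hashicorp/go-getter.git//test-*
	fmt.Println(result)
}
```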
diff --git a/vendor/github.com/hashicorp/go-getter/detect_file.go b/vendor/github.com/hashicorp/go-getter/detect_file.go
index 756ea43..4ef41ea 100644
--- a/vendor/github.com/hashicorp/go-getter/detect_file.go
+++ b/vendor/github.com/hashicorp/go-getter/detect_file.go
@@ -32,7 +32,7 @@ func (d *FileDetector) Detect(src, pwd string) (string, bool, error) {
32 return "", true, err 32 return "", true, err
33 } 33 }
34 if fi.Mode()&os.ModeSymlink != 0 { 34 if fi.Mode()&os.ModeSymlink != 0 {
35 pwd, err = os.Readlink(pwd) 35 pwd, err = filepath.EvalSymlinks(pwd)
36 if err != nil { 36 if err != nil {
37 return "", true, err 37 return "", true, err
38 } 38 }
diff --git a/vendor/github.com/hashicorp/go-getter/get.go b/vendor/github.com/hashicorp/go-getter/get.go
index c3236f5..e6053d9 100644
--- a/vendor/github.com/hashicorp/go-getter/get.go
+++ b/vendor/github.com/hashicorp/go-getter/get.go
@@ -18,6 +18,8 @@ import (
18 "os/exec" 18 "os/exec"
19 "regexp" 19 "regexp"
20 "syscall" 20 "syscall"
21
22 cleanhttp "github.com/hashicorp/go-cleanhttp"
21) 23)
22 24
23// Getter defines the interface that schemes must implement to download 25// Getter defines the interface that schemes must implement to download
@@ -49,8 +51,13 @@ var Getters map[string]Getter
 // syntax is schema::url, example: git::https://foo.com
 var forcedRegexp = regexp.MustCompile(`^([A-Za-z0-9]+)::(.+)$`)
 
+// httpClient is the default client to be used by HttpGetters.
+var httpClient = cleanhttp.DefaultClient()
+
 func init() {
-	httpGetter := &HttpGetter{Netrc: true}
+	httpGetter := &HttpGetter{
+		Netrc: true,
+	}
 
 	Getters = map[string]Getter{
 		"file": new(FileGetter),
diff --git a/vendor/github.com/hashicorp/go-getter/get_git.go b/vendor/github.com/hashicorp/go-getter/get_git.go
index 0728139..cb1d029 100644
--- a/vendor/github.com/hashicorp/go-getter/get_git.go
+++ b/vendor/github.com/hashicorp/go-getter/get_git.go
@@ -11,6 +11,7 @@ import (
11 "strings" 11 "strings"
12 12
13 urlhelper "github.com/hashicorp/go-getter/helper/url" 13 urlhelper "github.com/hashicorp/go-getter/helper/url"
14 "github.com/hashicorp/go-safetemp"
14 "github.com/hashicorp/go-version" 15 "github.com/hashicorp/go-version"
15) 16)
16 17
@@ -105,13 +106,11 @@ func (g *GitGetter) Get(dst string, u *url.URL) error {
 // GetFile for Git doesn't support updating at this time. It will download
 // the file every time.
 func (g *GitGetter) GetFile(dst string, u *url.URL) error {
-	td, err := ioutil.TempDir("", "getter-git")
+	td, tdcloser, err := safetemp.Dir("", "getter")
 	if err != nil {
 		return err
 	}
-	if err := os.RemoveAll(td); err != nil {
-		return err
-	}
+	defer tdcloser.Close()
 
 	// Get the filename, and strip the filename from the URL so we can
 	// just get the repository directly.
@@ -180,17 +179,34 @@ func (g *GitGetter) fetchSubmodules(dst, sshKeyFile string) error {
180// setupGitEnv sets up the environment for the given command. This is used to 179// setupGitEnv sets up the environment for the given command. This is used to
181// pass configuration data to git and ssh and enables advanced cloning methods. 180// pass configuration data to git and ssh and enables advanced cloning methods.
182func setupGitEnv(cmd *exec.Cmd, sshKeyFile string) { 181func setupGitEnv(cmd *exec.Cmd, sshKeyFile string) {
183 var sshOpts []string 182 const gitSSHCommand = "GIT_SSH_COMMAND="
183 var sshCmd []string
184
185 // If we have an existing GIT_SSH_COMMAND, we need to append our options.
186 // We will also remove our old entry to make sure the behavior is the same
187 // with versions of Go < 1.9.
188 env := os.Environ()
189 for i, v := range env {
190 if strings.HasPrefix(v, gitSSHCommand) {
191 sshCmd = []string{v}
192
193 env[i], env[len(env)-1] = env[len(env)-1], env[i]
194 env = env[:len(env)-1]
195 break
196 }
197 }
198
199 if len(sshCmd) == 0 {
200 sshCmd = []string{gitSSHCommand + "ssh"}
201 }
184 202
185 if sshKeyFile != "" { 203 if sshKeyFile != "" {
186 // We have an SSH key temp file configured, tell ssh about this. 204 // We have an SSH key temp file configured, tell ssh about this.
187 sshOpts = append(sshOpts, "-i", sshKeyFile) 205 sshCmd = append(sshCmd, "-i", sshKeyFile)
188 } 206 }
189 207
190 cmd.Env = append(os.Environ(), 208 env = append(env, strings.Join(sshCmd, " "))
191 // Set the ssh command to use for clones. 209 cmd.Env = env
192 "GIT_SSH_COMMAND=ssh "+strings.Join(sshOpts, " "),
193 )
194} 210}
195 211
196// checkGitVersion is used to check the version of git installed on the system 212// checkGitVersion is used to check the version of git installed on the system
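Editor's note: the setupGitEnv change above merges into an existing GIT_SSH_COMMAND instead of overwriting it. A stand-alone sketch of that merge behavior (not part of the diff; the key path and environment values are placeholders):

```go
package main

import (
	"fmt"
	"strings"
)

// mergeGitSSHCommand mirrors what setupGitEnv now does: reuse an existing
// GIT_SSH_COMMAND entry (removing it from env) and append "-i <keyfile>".
func mergeGitSSHCommand(env []string, sshKeyFile string) []string {
	const prefix = "GIT_SSH_COMMAND="

	sshCmd := []string{prefix + "ssh"}
	for i, v := range env {
		if strings.HasPrefix(v, prefix) {
			sshCmd = []string{v}
			env = append(env[:i], env[i+1:]...)
			break
		}
	}

	if sshKeyFile != "" {
		sshCmd = append(sshCmd, "-i", sshKeyFile)
	}
	return append(env, strings.Join(sshCmd, " "))
}

func main() {
	env := []string{"HOME=/home/me", "GIT_SSH_COMMAND=ssh -o StrictHostKeyChecking=no"}
	fmt.Println(mergeGitSSHCommand(env, "/tmp/key.pem"))
	// [HOME=/home/me GIT_SSH_COMMAND=ssh -o StrictHostKeyChecking=no -i /tmp/key.pem]
}
```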
diff --git a/vendor/github.com/hashicorp/go-getter/get_hg.go b/vendor/github.com/hashicorp/go-getter/get_hg.go
index 820bdd4..f386922 100644
--- a/vendor/github.com/hashicorp/go-getter/get_hg.go
+++ b/vendor/github.com/hashicorp/go-getter/get_hg.go
@@ -2,7 +2,6 @@ package getter
2 2
3import ( 3import (
4 "fmt" 4 "fmt"
5 "io/ioutil"
6 "net/url" 5 "net/url"
7 "os" 6 "os"
8 "os/exec" 7 "os/exec"
@@ -10,6 +9,7 @@ import (
10 "runtime" 9 "runtime"
11 10
12 urlhelper "github.com/hashicorp/go-getter/helper/url" 11 urlhelper "github.com/hashicorp/go-getter/helper/url"
12 "github.com/hashicorp/go-safetemp"
13) 13)
14 14
15// HgGetter is a Getter implementation that will download a module from 15// HgGetter is a Getter implementation that will download a module from
@@ -64,13 +64,13 @@ func (g *HgGetter) Get(dst string, u *url.URL) error {
64// GetFile for Hg doesn't support updating at this time. It will download 64// GetFile for Hg doesn't support updating at this time. It will download
65// the file every time. 65// the file every time.
66func (g *HgGetter) GetFile(dst string, u *url.URL) error { 66func (g *HgGetter) GetFile(dst string, u *url.URL) error {
67 td, err := ioutil.TempDir("", "getter-hg") 67 // Create a temporary directory to store the full source. This has to be
68 // a non-existent directory.
69 td, tdcloser, err := safetemp.Dir("", "getter")
68 if err != nil { 70 if err != nil {
69 return err 71 return err
70 } 72 }
71 if err := os.RemoveAll(td); err != nil { 73 defer tdcloser.Close()
72 return err
73 }
74 74
75 // Get the filename, and strip the filename from the URL so we can 75 // Get the filename, and strip the filename from the URL so we can
76 // just get the repository directly. 76 // just get the repository directly.
diff --git a/vendor/github.com/hashicorp/go-getter/get_http.go b/vendor/github.com/hashicorp/go-getter/get_http.go
index 3c02034..d2e2879 100644
--- a/vendor/github.com/hashicorp/go-getter/get_http.go
+++ b/vendor/github.com/hashicorp/go-getter/get_http.go
@@ -4,12 +4,13 @@ import (
4 "encoding/xml" 4 "encoding/xml"
5 "fmt" 5 "fmt"
6 "io" 6 "io"
7 "io/ioutil"
8 "net/http" 7 "net/http"
9 "net/url" 8 "net/url"
10 "os" 9 "os"
11 "path/filepath" 10 "path/filepath"
12 "strings" 11 "strings"
12
13 "github.com/hashicorp/go-safetemp"
13) 14)
14 15
15// HttpGetter is a Getter implementation that will download from an HTTP 16// HttpGetter is a Getter implementation that will download from an HTTP
@@ -36,6 +37,10 @@ type HttpGetter struct {
36 // Netrc, if true, will lookup and use auth information found 37 // Netrc, if true, will lookup and use auth information found
37 // in the user's netrc file if available. 38 // in the user's netrc file if available.
38 Netrc bool 39 Netrc bool
40
41 // Client is the http.Client to use for Get requests.
42 // This defaults to a cleanhttp.DefaultClient if left unset.
43 Client *http.Client
39} 44}
40 45
41func (g *HttpGetter) ClientMode(u *url.URL) (ClientMode, error) { 46func (g *HttpGetter) ClientMode(u *url.URL) (ClientMode, error) {
@@ -57,13 +62,17 @@ func (g *HttpGetter) Get(dst string, u *url.URL) error {
57 } 62 }
58 } 63 }
59 64
65 if g.Client == nil {
66 g.Client = httpClient
67 }
68
60 // Add terraform-get to the parameter. 69 // Add terraform-get to the parameter.
61 q := u.Query() 70 q := u.Query()
62 q.Add("terraform-get", "1") 71 q.Add("terraform-get", "1")
63 u.RawQuery = q.Encode() 72 u.RawQuery = q.Encode()
64 73
65 // Get the URL 74 // Get the URL
66 resp, err := http.Get(u.String()) 75 resp, err := g.Client.Get(u.String())
67 if err != nil { 76 if err != nil {
68 return err 77 return err
69 } 78 }
@@ -98,7 +107,18 @@ func (g *HttpGetter) Get(dst string, u *url.URL) error {
98} 107}
99 108
100func (g *HttpGetter) GetFile(dst string, u *url.URL) error { 109func (g *HttpGetter) GetFile(dst string, u *url.URL) error {
101 resp, err := http.Get(u.String()) 110 if g.Netrc {
111 // Add auth from netrc if we can
112 if err := addAuthFromNetrc(u); err != nil {
113 return err
114 }
115 }
116
117 if g.Client == nil {
118 g.Client = httpClient
119 }
120
121 resp, err := g.Client.Get(u.String())
102 if err != nil { 122 if err != nil {
103 return err 123 return err
104 } 124 }
@@ -116,29 +136,40 @@ func (g *HttpGetter) GetFile(dst string, u *url.URL) error {
116 if err != nil { 136 if err != nil {
117 return err 137 return err
118 } 138 }
119 defer f.Close()
120 139
121 _, err = io.Copy(f, resp.Body) 140 n, err := io.Copy(f, resp.Body)
141 if err == nil && n < resp.ContentLength {
142 err = io.ErrShortWrite
143 }
144 if err1 := f.Close(); err == nil {
145 err = err1
146 }
122 return err 147 return err
123} 148}
124 149
125// getSubdir downloads the source into the destination, but with 150// getSubdir downloads the source into the destination, but with
126// the proper subdir. 151// the proper subdir.
127func (g *HttpGetter) getSubdir(dst, source, subDir string) error { 152func (g *HttpGetter) getSubdir(dst, source, subDir string) error {
128 // Create a temporary directory to store the full source 153 // Create a temporary directory to store the full source. This has to be
129 td, err := ioutil.TempDir("", "tf") 154 // a non-existent directory.
155 td, tdcloser, err := safetemp.Dir("", "getter")
130 if err != nil { 156 if err != nil {
131 return err 157 return err
132 } 158 }
133 defer os.RemoveAll(td) 159 defer tdcloser.Close()
134 160
135 // Download that into the given directory 161 // Download that into the given directory
136 if err := Get(td, source); err != nil { 162 if err := Get(td, source); err != nil {
137 return err 163 return err
138 } 164 }
139 165
166 // Process any globbing
167 sourcePath, err := SubdirGlob(td, subDir)
168 if err != nil {
169 return err
170 }
171
140 // Make sure the subdir path actually exists 172 // Make sure the subdir path actually exists
141 sourcePath := filepath.Join(td, subDir)
142 if _, err := os.Stat(sourcePath); err != nil { 173 if _, err := os.Stat(sourcePath); err != nil {
143 return fmt.Errorf( 174 return fmt.Errorf(
144 "Error downloading %s: %s", source, err) 175 "Error downloading %s: %s", source, err)
diff --git a/vendor/github.com/hashicorp/go-getter/get_s3.go b/vendor/github.com/hashicorp/go-getter/get_s3.go
index d3bffeb..ebb3217 100644
--- a/vendor/github.com/hashicorp/go-getter/get_s3.go
+++ b/vendor/github.com/hashicorp/go-getter/get_s3.go
@@ -28,7 +28,7 @@ func (g *S3Getter) ClientMode(u *url.URL) (ClientMode, error) {
28 } 28 }
29 29
30 // Create client config 30 // Create client config
31 config := g.getAWSConfig(region, creds) 31 config := g.getAWSConfig(region, u, creds)
32 sess := session.New(config) 32 sess := session.New(config)
33 client := s3.New(sess) 33 client := s3.New(sess)
34 34
@@ -84,7 +84,7 @@ func (g *S3Getter) Get(dst string, u *url.URL) error {
84 return err 84 return err
85 } 85 }
86 86
87 config := g.getAWSConfig(region, creds) 87 config := g.getAWSConfig(region, u, creds)
88 sess := session.New(config) 88 sess := session.New(config)
89 client := s3.New(sess) 89 client := s3.New(sess)
90 90
@@ -139,7 +139,7 @@ func (g *S3Getter) GetFile(dst string, u *url.URL) error {
139 return err 139 return err
140 } 140 }
141 141
142 config := g.getAWSConfig(region, creds) 142 config := g.getAWSConfig(region, u, creds)
143 sess := session.New(config) 143 sess := session.New(config)
144 client := s3.New(sess) 144 client := s3.New(sess)
145 return g.getObject(client, dst, bucket, path, version) 145 return g.getObject(client, dst, bucket, path, version)
@@ -174,7 +174,7 @@ func (g *S3Getter) getObject(client *s3.S3, dst, bucket, key, version string) er
174 return err 174 return err
175} 175}
176 176
177func (g *S3Getter) getAWSConfig(region string, creds *credentials.Credentials) *aws.Config { 177func (g *S3Getter) getAWSConfig(region string, url *url.URL, creds *credentials.Credentials) *aws.Config {
178 conf := &aws.Config{} 178 conf := &aws.Config{}
179 if creds == nil { 179 if creds == nil {
180 // Grab the metadata URL 180 // Grab the metadata URL
@@ -195,6 +195,14 @@ func (g *S3Getter) getAWSConfig(region string, creds *credentials.Credentials) *
195 }) 195 })
196 } 196 }
197 197
198 if creds != nil {
199 conf.Endpoint = &url.Host
200 conf.S3ForcePathStyle = aws.Bool(true)
201 if url.Scheme == "http" {
202 conf.DisableSSL = aws.Bool(true)
203 }
204 }
205
198 conf.Credentials = creds 206 conf.Credentials = creds
199 if region != "" { 207 if region != "" {
200 conf.Region = aws.String(region) 208 conf.Region = aws.String(region)
@@ -204,29 +212,48 @@ func (g *S3Getter) getAWSConfig(region string, creds *credentials.Credentials) *
204} 212}
205 213
206func (g *S3Getter) parseUrl(u *url.URL) (region, bucket, path, version string, creds *credentials.Credentials, err error) { 214func (g *S3Getter) parseUrl(u *url.URL) (region, bucket, path, version string, creds *credentials.Credentials, err error) {
207 // Expected host style: s3.amazonaws.com. They always have 3 parts, 215 // This just checks whether we are dealing with S3 or
208 // although the first may differ if we're accessing a specific region. 216 // any other S3-compliant service. S3 has a predictable
209 hostParts := strings.Split(u.Host, ".") 217 // URL, while others do not.
210 if len(hostParts) != 3 { 218 if strings.Contains(u.Host, "amazonaws.com") {
211 err = fmt.Errorf("URL is not a valid S3 URL") 219 // Expected host style: s3.amazonaws.com. They always have 3 parts,
212 return 220 // although the first may differ if we're accessing a specific region.
213 } 221 hostParts := strings.Split(u.Host, ".")
222 if len(hostParts) != 3 {
223 err = fmt.Errorf("URL is not a valid S3 URL")
224 return
225 }
214 226
215 // Parse the region out of the first part of the host 227 // Parse the region out of the first part of the host
216 region = strings.TrimPrefix(strings.TrimPrefix(hostParts[0], "s3-"), "s3") 228 region = strings.TrimPrefix(strings.TrimPrefix(hostParts[0], "s3-"), "s3")
217 if region == "" { 229 if region == "" {
218 region = "us-east-1" 230 region = "us-east-1"
219 } 231 }
220 232
221 pathParts := strings.SplitN(u.Path, "/", 3) 233 pathParts := strings.SplitN(u.Path, "/", 3)
222 if len(pathParts) != 3 { 234 if len(pathParts) != 3 {
223 err = fmt.Errorf("URL is not a valid S3 URL") 235 err = fmt.Errorf("URL is not a valid S3 URL")
224 return 236 return
225 } 237 }
238
239 bucket = pathParts[1]
240 path = pathParts[2]
241 version = u.Query().Get("version")
226 242
227 bucket = pathParts[1] 243 } else {
228 path = pathParts[2] 244 pathParts := strings.SplitN(u.Path, "/", 3)
229 version = u.Query().Get("version") 245 if len(pathParts) != 3 {
246 err = fmt.Errorf("URL is not a valid S3-compliant URL")
247 return
248 }
249 bucket = pathParts[1]
250 path = pathParts[2]
251 version = u.Query().Get("version")
252 region = u.Query().Get("region")
253 if region == "" {
254 region = "us-east-1"
255 }
256 }
230 257
231 _, hasAwsId := u.Query()["aws_access_key_id"] 258 _, hasAwsId := u.Query()["aws_access_key_id"]
232 _, hasAwsSecret := u.Query()["aws_access_key_secret"] 259 _, hasAwsSecret := u.Query()["aws_access_key_secret"]
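Editor's note: the parseUrl change above accepts S3-compatible endpoints whose host is not amazonaws.com, taking the region and credentials from the query string and forcing path-style addressing. A sketch of the resulting URL shape; the endpoint, bucket, key, and credentials below are placeholders:

```go
package main

import getter "github.com/hashicorp/go-getter"

func main() {
	// Host is the S3-compliant endpoint, path is /<bucket>/<key>, and the
	// region plus credentials ride in the query string.
	src := "s3::http://127.0.0.1:9000/test-bucket/hello.txt" +
		"?region=us-east-1&aws_access_key_id=KEYID&aws_access_key_secret=SECRETKEY"

	if err := getter.GetFile("/tmp/hello.txt", src); err != nil {
		panic(err)
	}
}
```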
diff --git a/vendor/github.com/hashicorp/go-getter/source.go b/vendor/github.com/hashicorp/go-getter/source.go
index 4d5ee3c..c63f2bb 100644
--- a/vendor/github.com/hashicorp/go-getter/source.go
+++ b/vendor/github.com/hashicorp/go-getter/source.go
@@ -1,6 +1,8 @@
1package getter 1package getter
2 2
3import ( 3import (
4 "fmt"
5 "path/filepath"
4 "strings" 6 "strings"
5) 7)
6 8
@@ -34,3 +36,27 @@ func SourceDirSubdir(src string) (string, string) {
34 36
35 return src, subdir 37 return src, subdir
36} 38}
39
40// SubdirGlob returns the actual subdir with globbing processed.
41//
42// dst should be a destination directory that is already populated (the
43// download is complete) and subDir should be the set subDir. If subDir
44// is an empty string, this returns an empty string.
45//
46// The returned path is the full absolute path.
47func SubdirGlob(dst, subDir string) (string, error) {
48 matches, err := filepath.Glob(filepath.Join(dst, subDir))
49 if err != nil {
50 return "", err
51 }
52
53 if len(matches) == 0 {
54 return "", fmt.Errorf("subdir %q not found", subDir)
55 }
56
57 if len(matches) > 1 {
58 return "", fmt.Errorf("subdir %q matches multiple paths", subDir)
59 }
60
61 return matches[0], nil
62}
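Editor's note: SubdirGlob is the new helper the HTTP getter uses above to resolve a possibly-globbed subdir to exactly one directory. A short usage sketch; the directory layout is assumed for illustration:

```go
package main

import (
	"fmt"

	getter "github.com/hashicorp/go-getter"
)

func main() {
	// Suppose /tmp/src was just populated by a download and contains a
	// single directory matching "modules/vpc-*".
	path, err := getter.SubdirGlob("/tmp/src", "modules/vpc-*")
	if err != nil {
		// Zero matches or multiple matches are both reported as errors.
		panic(err)
	}
	fmt.Println(path) // the single matching path
}
```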
diff --git a/vendor/github.com/hashicorp/go-hclog/LICENSE b/vendor/github.com/hashicorp/go-hclog/LICENSE
new file mode 100644
index 0000000..abaf1e4
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-hclog/LICENSE
@@ -0,0 +1,21 @@
1MIT License
2
3Copyright (c) 2017 HashiCorp
4
5Permission is hereby granted, free of charge, to any person obtaining a copy
6of this software and associated documentation files (the "Software"), to deal
7in the Software without restriction, including without limitation the rights
8to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9copies of the Software, and to permit persons to whom the Software is
10furnished to do so, subject to the following conditions:
11
12The above copyright notice and this permission notice shall be included in all
13copies or substantial portions of the Software.
14
15THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21SOFTWARE.
diff --git a/vendor/github.com/hashicorp/go-hclog/README.md b/vendor/github.com/hashicorp/go-hclog/README.md
new file mode 100644
index 0000000..614342b
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-hclog/README.md
@@ -0,0 +1,123 @@
1# go-hclog
2
3[![Go Documentation](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)][godocs]
4
5[godocs]: https://godoc.org/github.com/hashicorp/go-hclog
6
7`go-hclog` is a package for Go that provides a simple key/value logging
8interface for use in development and production environments.
9
10It provides logging levels that let you decrease the amount of output emitted,
11unlike the standard library `log` package.
12
13It does not provide `Printf` style logging, only key/value logging that is
14exposed as arguments to the logging functions for simplicity.
15
16It provides a human readable output mode for use in development as well as
17JSON output mode for production.
18
19## Stability Note
20
21While this library is fully open source and HashiCorp will be maintaining it
22(since we are and will be making extensive use of it), the API and output
23format is subject to minor changes as we fully bake and vet it in our projects.
24This notice will be removed once it's fully integrated into our major projects
25and no further changes are anticipated.
26
27## Installation and Docs
28
29Install using `go get github.com/hashicorp/go-hclog`.
30
31Full documentation is available at
32http://godoc.org/github.com/hashicorp/go-hclog
33
34## Usage
35
36### Use the global logger
37
38```go
39hclog.Default().Info("hello world")
40```
41
42```text
432017-07-05T16:15:55.167-0700 [INFO ] hello world
44```
45
46(Note timestamps are removed in future examples for brevity.)
47
48### Create a new logger
49
50```go
51appLogger := hclog.New(&hclog.LoggerOptions{
52 Name: "my-app",
53 Level: hclog.LevelFromString("DEBUG"),
54})
55```
56
57### Emit an Info level message with 2 key/value pairs
58
59```go
60input := "5.5"
61_, err := strconv.ParseInt(input, 10, 32)
62if err != nil {
63 appLogger.Info("Invalid input for ParseInt", "input", input, "error", err)
64}
65```
66
67```text
68... [INFO ] my-app: Invalid input for ParseInt: input=5.5 error="strconv.ParseInt: parsing "5.5": invalid syntax"
69```
70
71### Create a new Logger for a major subsystem
72
73```go
74subsystemLogger := appLogger.Named("transport")
75subsystemLogger.Info("we are transporting something")
76```
77
78```text
79... [INFO ] my-app.transport: we are transporting something
80```
81
82Notice that logs emitted by `subsystemLogger` contain `my-app.transport`,
83reflecting both the application and subsystem names.
84
85### Create a new Logger with fixed key/value pairs
86
87Using `With()` will include a specific key-value pair in all messages emitted
88by that logger.
89
90```go
91requestID := "5fb446b6-6eba-821d-df1b-cd7501b6a363"
92requestLogger := subsystemLogger.With("request", requestID)
93requestLogger.Info("we are transporting a request")
94```
95
96```text
97... [INFO ] my-app.transport: we are transporting a request: request=5fb446b6-6eba-821d-df1b-cd7501b6a363
98```
99
100This allows sub Loggers to be context specific without having to thread that
101into all the callers.
102
103### Use this with code that uses the standard library logger
104
105If you want to use the standard library's `log.Logger` interface you can wrap
106`hclog.Logger` by calling the `StandardLogger()` method. This allows you to use
107it with the familiar `Println()`, `Printf()`, etc. For example:
108
109```go
110stdLogger := appLogger.StandardLogger(&hclog.StandardLoggerOptions{
111 InferLevels: true,
112})
113// Printf() is provided by stdlib log.Logger interface, not hclog.Logger
114stdLogger.Printf("[DEBUG] %+v", stdLogger)
115```
116
117```text
118... [DEBUG] my-app: &{mu:{state:0 sema:0} prefix: flag:0 out:0xc42000a0a0 buf:[]}
119```
120
121Notice that if `appLogger` is initialized with the `INFO` log level _and_ you
122specify `InferLevels: true`, you will not see any output here. You must change
123`appLogger` to `DEBUG` to see output. See the docs for more information.
diff --git a/vendor/github.com/hashicorp/go-hclog/global.go b/vendor/github.com/hashicorp/go-hclog/global.go
new file mode 100644
index 0000000..55ce439
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-hclog/global.go
@@ -0,0 +1,34 @@
1package hclog
2
3import (
4 "sync"
5)
6
7var (
8 protect sync.Once
9 def Logger
10
11 // The options used to create the Default logger. These are
12 // read only when the Default logger is created, so set them
13 // as soon as the process starts.
14 DefaultOptions = &LoggerOptions{
15 Level: DefaultLevel,
16 Output: DefaultOutput,
17 }
18)
19
20// Return a logger that is held globally. This can be a good starting
21// place, and then you can use .With() and .Named() to create sub-loggers
22// to be used in more specific contexts.
23func Default() Logger {
24 protect.Do(func() {
25 def = New(DefaultOptions)
26 })
27
28 return def
29}
30
31// A short alias for Default()
32func L() Logger {
33 return Default()
34}
diff --git a/vendor/github.com/hashicorp/go-hclog/int.go b/vendor/github.com/hashicorp/go-hclog/int.go
new file mode 100644
index 0000000..9f90c28
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-hclog/int.go
@@ -0,0 +1,385 @@
1package hclog
2
3import (
4 "bufio"
5 "encoding/json"
6 "fmt"
7 "log"
8 "os"
9 "runtime"
10 "strconv"
11 "strings"
12 "sync"
13 "time"
14)
15
16var (
17 _levelToBracket = map[Level]string{
18 Debug: "[DEBUG]",
19 Trace: "[TRACE]",
20 Info: "[INFO ]",
21 Warn: "[WARN ]",
22 Error: "[ERROR]",
23 }
24)
25
26// Given the options (nil for defaults), create a new Logger
27func New(opts *LoggerOptions) Logger {
28 if opts == nil {
29 opts = &LoggerOptions{}
30 }
31
32 output := opts.Output
33 if output == nil {
34 output = os.Stderr
35 }
36
37 level := opts.Level
38 if level == NoLevel {
39 level = DefaultLevel
40 }
41
42 return &intLogger{
43 m: new(sync.Mutex),
44 json: opts.JSONFormat,
45 caller: opts.IncludeLocation,
46 name: opts.Name,
47 w: bufio.NewWriter(output),
48 level: level,
49 }
50}
51
52// The internal logger implementation. Internal in that it is defined entirely
53// by this package.
54type intLogger struct {
55 json bool
56 caller bool
57 name string
58
59 // this is a pointer so that it's shared by any derived loggers, since
60 // those derived loggers share the bufio.Writer as well.
61 m *sync.Mutex
62 w *bufio.Writer
63 level Level
64
65 implied []interface{}
66}
67
68// Make sure that intLogger is a Logger
69var _ Logger = &intLogger{}
70
71// The time format to use for logging. This is a version of RFC3339 that
72// contains millisecond precision
73const TimeFormat = "2006-01-02T15:04:05.000Z0700"
74
75// Log a message and a set of key/value pairs if the given level is at
76// or more severe than the threshold configured in the Logger.
77func (z *intLogger) Log(level Level, msg string, args ...interface{}) {
78 if level < z.level {
79 return
80 }
81
82 t := time.Now()
83
84 z.m.Lock()
85 defer z.m.Unlock()
86
87 if z.json {
88 z.logJson(t, level, msg, args...)
89 } else {
90 z.log(t, level, msg, args...)
91 }
92
93 z.w.Flush()
94}
95
96// Cleanup a path by returning the last 2 segments of the path only.
97func trimCallerPath(path string) string {
98 // lovely borrowed from zap
99 // nb. To make sure we trim the path correctly on Windows too, we
100 // counter-intuitively need to use '/' and *not* os.PathSeparator here,
101 // because the path given originates from Go stdlib, specifically
102 // runtime.Caller() which (as of Mar/17) returns forward slashes even on
103 // Windows.
104 //
105 // See https://github.com/golang/go/issues/3335
106 // and https://github.com/golang/go/issues/18151
107 //
108 // for discussion on the issue on Go side.
109 //
110
111 // Find the last separator.
112 //
113 idx := strings.LastIndexByte(path, '/')
114 if idx == -1 {
115 return path
116 }
117
118 // Find the penultimate separator.
119 idx = strings.LastIndexByte(path[:idx], '/')
120 if idx == -1 {
121 return path
122 }
123
124 return path[idx+1:]
125}
126
127// Non-JSON logging format function
128func (z *intLogger) log(t time.Time, level Level, msg string, args ...interface{}) {
129 z.w.WriteString(t.Format(TimeFormat))
130 z.w.WriteByte(' ')
131
132 s, ok := _levelToBracket[level]
133 if ok {
134 z.w.WriteString(s)
135 } else {
136 z.w.WriteString("[UNKN ]")
137 }
138
139 if z.caller {
140 if _, file, line, ok := runtime.Caller(3); ok {
141 z.w.WriteByte(' ')
142 z.w.WriteString(trimCallerPath(file))
143 z.w.WriteByte(':')
144 z.w.WriteString(strconv.Itoa(line))
145 z.w.WriteByte(':')
146 }
147 }
148
149 z.w.WriteByte(' ')
150
151 if z.name != "" {
152 z.w.WriteString(z.name)
153 z.w.WriteString(": ")
154 }
155
156 z.w.WriteString(msg)
157
158 args = append(z.implied, args...)
159
160 var stacktrace CapturedStacktrace
161
162 if args != nil && len(args) > 0 {
163 if len(args)%2 != 0 {
164 cs, ok := args[len(args)-1].(CapturedStacktrace)
165 if ok {
166 args = args[:len(args)-1]
167 stacktrace = cs
168 } else {
169 args = append(args, "<unknown>")
170 }
171 }
172
173 z.w.WriteByte(':')
174
175 FOR:
176 for i := 0; i < len(args); i = i + 2 {
177 var val string
178
179 switch st := args[i+1].(type) {
180 case string:
181 val = st
182 case int:
183 val = strconv.FormatInt(int64(st), 10)
184 case int64:
185 val = strconv.FormatInt(int64(st), 10)
186 case int32:
187 val = strconv.FormatInt(int64(st), 10)
188 case int16:
189 val = strconv.FormatInt(int64(st), 10)
190 case int8:
191 val = strconv.FormatInt(int64(st), 10)
192 case uint:
193 val = strconv.FormatUint(uint64(st), 10)
194 case uint64:
195 val = strconv.FormatUint(uint64(st), 10)
196 case uint32:
197 val = strconv.FormatUint(uint64(st), 10)
198 case uint16:
199 val = strconv.FormatUint(uint64(st), 10)
200 case uint8:
201 val = strconv.FormatUint(uint64(st), 10)
202 case CapturedStacktrace:
203 stacktrace = st
204 continue FOR
205 default:
206 val = fmt.Sprintf("%v", st)
207 }
208
209 z.w.WriteByte(' ')
210 z.w.WriteString(args[i].(string))
211 z.w.WriteByte('=')
212
213 if strings.ContainsAny(val, " \t\n\r") {
214 z.w.WriteByte('"')
215 z.w.WriteString(val)
216 z.w.WriteByte('"')
217 } else {
218 z.w.WriteString(val)
219 }
220 }
221 }
222
223 z.w.WriteString("\n")
224
225 if stacktrace != "" {
226 z.w.WriteString(string(stacktrace))
227 }
228}
229
230// JSON logging function
231func (z *intLogger) logJson(t time.Time, level Level, msg string, args ...interface{}) {
232 vals := map[string]interface{}{
233 "@message": msg,
234 "@timestamp": t.Format("2006-01-02T15:04:05.000000Z07:00"),
235 }
236
237 var levelStr string
238 switch level {
239 case Error:
240 levelStr = "error"
241 case Warn:
242 levelStr = "warn"
243 case Info:
244 levelStr = "info"
245 case Debug:
246 levelStr = "debug"
247 case Trace:
248 levelStr = "trace"
249 default:
250 levelStr = "all"
251 }
252
253 vals["@level"] = levelStr
254
255 if z.name != "" {
256 vals["@module"] = z.name
257 }
258
259 if z.caller {
260 if _, file, line, ok := runtime.Caller(3); ok {
261 vals["@caller"] = fmt.Sprintf("%s:%d", file, line)
262 }
263 }
264
265 if args != nil && len(args) > 0 {
266 if len(args)%2 != 0 {
267 cs, ok := args[len(args)-1].(CapturedStacktrace)
268 if ok {
269 args = args[:len(args)-1]
270 vals["stacktrace"] = cs
271 } else {
272 args = append(args, "<unknown>")
273 }
274 }
275
276 for i := 0; i < len(args); i = i + 2 {
277 if _, ok := args[i].(string); !ok {
278 // As this is the logging function not much we can do here
279 // without injecting into logs...
280 continue
281 }
282 vals[args[i].(string)] = args[i+1]
283 }
284 }
285
286 err := json.NewEncoder(z.w).Encode(vals)
287 if err != nil {
288 panic(err)
289 }
290}
291
292// Emit the message and args at DEBUG level
293func (z *intLogger) Debug(msg string, args ...interface{}) {
294 z.Log(Debug, msg, args...)
295}
296
297// Emit the message and args at TRACE level
298func (z *intLogger) Trace(msg string, args ...interface{}) {
299 z.Log(Trace, msg, args...)
300}
301
302// Emit the message and args at INFO level
303func (z *intLogger) Info(msg string, args ...interface{}) {
304 z.Log(Info, msg, args...)
305}
306
307// Emit the message and args at WARN level
308func (z *intLogger) Warn(msg string, args ...interface{}) {
309 z.Log(Warn, msg, args...)
310}
311
312// Emit the message and args at ERROR level
313func (z *intLogger) Error(msg string, args ...interface{}) {
314 z.Log(Error, msg, args...)
315}
316
317// Indicate that the logger would emit TRACE level logs
318func (z *intLogger) IsTrace() bool {
319 return z.level == Trace
320}
321
322// Indicate that the logger would emit DEBUG level logs
323func (z *intLogger) IsDebug() bool {
324 return z.level <= Debug
325}
326
327// Indicate that the logger would emit INFO level logs
328func (z *intLogger) IsInfo() bool {
329 return z.level <= Info
330}
331
332// Indicate that the logger would emit WARN level logs
333func (z *intLogger) IsWarn() bool {
334 return z.level <= Warn
335}
336
337// Indicate that the logger would emit ERROR level logs
338func (z *intLogger) IsError() bool {
339 return z.level <= Error
340}
341
342// Return a sub-Logger for which every emitted log message will contain
343// the given key/value pairs. This is used to create a context specific
344// Logger.
345func (z *intLogger) With(args ...interface{}) Logger {
346 var nz intLogger = *z
347
348 nz.implied = append(nz.implied, args...)
349
350 return &nz
351}
352
353// Create a new sub-Logger with a name descending from the current name.
354// This is used to create a subsystem specific Logger.
355func (z *intLogger) Named(name string) Logger {
356 var nz intLogger = *z
357
358 if nz.name != "" {
359 nz.name = nz.name + "." + name
360 }
361
362 return &nz
363}
364
365// Create a new sub-Logger with an explicit name. This ignores the current
366// name. This is used to create a standalone logger that doesn't fall
367// within the normal hierarchy.
368func (z *intLogger) ResetNamed(name string) Logger {
369 var nz intLogger = *z
370
371 nz.name = name
372
373 return &nz
374}
375
376// Create a *log.Logger that will send its data through this Logger. This
377// allows packages that expect to be using the standard library log to actually
378// use this logger.
379func (z *intLogger) StandardLogger(opts *StandardLoggerOptions) *log.Logger {
380 if opts == nil {
381 opts = &StandardLoggerOptions{}
382 }
383
384 return log.New(&stdlogAdapter{z, opts.InferLevels}, "", 0)
385}
diff --git a/vendor/github.com/hashicorp/go-hclog/log.go b/vendor/github.com/hashicorp/go-hclog/log.go
new file mode 100644
index 0000000..6bb16ba
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-hclog/log.go
@@ -0,0 +1,138 @@
1package hclog
2
3import (
4 "io"
5 "log"
6 "os"
7 "strings"
8)
9
10var (
11 DefaultOutput = os.Stderr
12 DefaultLevel = Info
13)
14
15type Level int
16
17const (
18 // This is a special level used to indicate that no level has been
19 // set and allow for a default to be used.
20 NoLevel Level = 0
21
22 // The most verbose level. Intended to be used for the tracing of actions
23 // in code, such as function enters/exits, etc.
24 Trace Level = 1
25
26 // For programmer low-level analysis.
27 Debug Level = 2
28
29 // For information about steady state operations.
30 Info Level = 3
31
32 // For information about rare but handled events.
33 Warn Level = 4
34
35 // For information about unrecoverable events.
36 Error Level = 5
37)
38
39// LevelFromString returns a Level type for the named log level, or "NoLevel" if
40// the level string is invalid. This facilitates setting the log level via
41// config or environment variable by name in a predictable way.
42func LevelFromString(levelStr string) Level {
43 // We don't care about case. Accept "INFO" or "info"
44 levelStr = strings.ToLower(strings.TrimSpace(levelStr))
45 switch levelStr {
46 case "trace":
47 return Trace
48 case "debug":
49 return Debug
50 case "info":
51 return Info
52 case "warn":
53 return Warn
54 case "error":
55 return Error
56 default:
57 return NoLevel
58 }
59}
60
61// The main Logger interface. All code should be written against this interface only.
62type Logger interface {
63 // Args are alternating key, val pairs
64 // keys must be strings
65 // vals can be any type, but display is implementation specific
66 // Emit a message and key/value pairs at the TRACE level
67 Trace(msg string, args ...interface{})
68
69 // Emit a message and key/value pairs at the DEBUG level
70 Debug(msg string, args ...interface{})
71
72 // Emit a message and key/value pairs at the INFO level
73 Info(msg string, args ...interface{})
74
75 // Emit a message and key/value pairs at the WARN level
76 Warn(msg string, args ...interface{})
77
78 // Emit a message and key/value pairs at the ERROR level
79 Error(msg string, args ...interface{})
80
81 // Indicate if TRACE logs would be emitted. This and the other Is* guards
82 // are used to elide expensive logging code based on the current level.
83 IsTrace() bool
84
85 // Indicate if DEBUG logs would be emitted. This and the other Is* guards
86 IsDebug() bool
87
88 // Indicate if INFO logs would be emitted. This and the other Is* guards
89 IsInfo() bool
90
91 // Indicate if WARN logs would be emitted. This and the other Is* guards
92 IsWarn() bool
93
94 // Indicate if ERROR logs would be emitted. This and the other Is* guards
95 IsError() bool
96
97 // Creates a sublogger that will always have the given key/value pairs
98 With(args ...interface{}) Logger
99
100 // Create a logger that will prepend the name string on the front of all messages.
101 // If the logger already has a name, the new value will be appended to the current
102 // name. That way, a major subsystem can use this to decorate all its own logs
103 // without losing context.
104 Named(name string) Logger
105
106 // Create a logger that will prepend the name string on the front of all messages.
107 // This sets the name of the logger to the value directly, unlike Named which honors
108 // the current name as well.
109 ResetNamed(name string) Logger
110
111 // Return a value that conforms to the stdlib log.Logger interface
112 StandardLogger(opts *StandardLoggerOptions) *log.Logger
113}
114
115type StandardLoggerOptions struct {
116 // Indicate that some minimal parsing should be done on strings to try
117 // and detect their level and re-emit them.
118 // This supports strings like [ERROR], [ERR], [TRACE], [WARN], [INFO],
119 // and [DEBUG], stripping the prefix off before re-emitting at that level.
120 InferLevels bool
121}
122
123type LoggerOptions struct {
124 // Name of the subsystem to prefix logs with
125 Name string
126
127 // The threshold for the logger. Anything less severe is suppressed
128 Level Level
129
130 // Where to write the logs to. Defaults to os.Stderr if nil
131 Output io.Writer
132
133 // Control if the output should be in JSON.
134 JSONFormat bool
135
136 // Include file and line information in each log line
137 IncludeLocation bool
138}
diff --git a/vendor/github.com/hashicorp/go-hclog/stacktrace.go b/vendor/github.com/hashicorp/go-hclog/stacktrace.go
new file mode 100644
index 0000000..8af1a3b
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-hclog/stacktrace.go
@@ -0,0 +1,108 @@
1// Copyright (c) 2016 Uber Technologies, Inc.
2//
3// Permission is hereby granted, free of charge, to any person obtaining a copy
4// of this software and associated documentation files (the "Software"), to deal
5// in the Software without restriction, including without limitation the rights
6// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7// copies of the Software, and to permit persons to whom the Software is
8// furnished to do so, subject to the following conditions:
9//
10// The above copyright notice and this permission notice shall be included in
11// all copies or substantial portions of the Software.
12//
13// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
19// THE SOFTWARE.
20
21package hclog
22
23import (
24 "bytes"
25 "runtime"
26 "strconv"
27 "strings"
28 "sync"
29)
30
31var (
32 _stacktraceIgnorePrefixes = []string{
33 "runtime.goexit",
34 "runtime.main",
35 }
36 _stacktracePool = sync.Pool{
37 New: func() interface{} {
38 return newProgramCounters(64)
39 },
40 }
41)
42
43// A stacktrace gathered by a previous call to log.Stacktrace. If passed
44// to a logging function, the stacktrace will be appended.
45type CapturedStacktrace string
46
47// Gather a stacktrace of the current goroutine and return it to be passed
48// to a logging function.
49func Stacktrace() CapturedStacktrace {
50 return CapturedStacktrace(takeStacktrace())
51}
52
53func takeStacktrace() string {
54 programCounters := _stacktracePool.Get().(*programCounters)
55 defer _stacktracePool.Put(programCounters)
56
57 var buffer bytes.Buffer
58
59 for {
60 // Skip the call to runtime.Counters and takeStacktrace so that the
61 // program counters start at the caller of takeStacktrace.
62 n := runtime.Callers(2, programCounters.pcs)
63 if n < cap(programCounters.pcs) {
64 programCounters.pcs = programCounters.pcs[:n]
65 break
66 }
67 // Don't put the too-short counter slice back into the pool; this lets
68 // the pool adjust if we consistently take deep stacktraces.
69 programCounters = newProgramCounters(len(programCounters.pcs) * 2)
70 }
71
72 i := 0
73 frames := runtime.CallersFrames(programCounters.pcs)
74 for frame, more := frames.Next(); more; frame, more = frames.Next() {
75 if shouldIgnoreStacktraceFunction(frame.Function) {
76 continue
77 }
78 if i != 0 {
79 buffer.WriteByte('\n')
80 }
81 i++
82 buffer.WriteString(frame.Function)
83 buffer.WriteByte('\n')
84 buffer.WriteByte('\t')
85 buffer.WriteString(frame.File)
86 buffer.WriteByte(':')
87 buffer.WriteString(strconv.Itoa(int(frame.Line)))
88 }
89
90 return buffer.String()
91}
92
93func shouldIgnoreStacktraceFunction(function string) bool {
94 for _, prefix := range _stacktraceIgnorePrefixes {
95 if strings.HasPrefix(function, prefix) {
96 return true
97 }
98 }
99 return false
100}
101
102type programCounters struct {
103 pcs []uintptr
104}
105
106func newProgramCounters(size int) *programCounters {
107 return &programCounters{make([]uintptr, size)}
108}
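Editor's note: as the CapturedStacktrace comment above says, a captured trace is appended to the log entry when passed as a value to a logging call. A minimal sketch (logger name is arbitrary):

```go
package main

import hclog "github.com/hashicorp/go-hclog"

func main() {
	logger := hclog.New(&hclog.LoggerOptions{Name: "demo"})

	// Passed as a trailing value, the stacktrace is detected by type and
	// appended after the formatted log line.
	logger.Error("something went wrong", "stacktrace", hclog.Stacktrace())
}
```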
diff --git a/vendor/github.com/hashicorp/go-hclog/stdlog.go b/vendor/github.com/hashicorp/go-hclog/stdlog.go
new file mode 100644
index 0000000..2bb927f
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-hclog/stdlog.go
@@ -0,0 +1,62 @@
1package hclog
2
3import (
4 "bytes"
5 "strings"
6)
7
8// Provides an io.Writer to shim the data out of *log.Logger
9// and back into our Logger. This is basically the only way to
10// build upon *log.Logger.
11type stdlogAdapter struct {
12 hl Logger
13 inferLevels bool
14}
15
16// Take the data, infer the levels if configured, and send it through
17// a regular Logger
18func (s *stdlogAdapter) Write(data []byte) (int, error) {
19 str := string(bytes.TrimRight(data, " \t\n"))
20
21 if s.inferLevels {
22 level, str := s.pickLevel(str)
23 switch level {
24 case Trace:
25 s.hl.Trace(str)
26 case Debug:
27 s.hl.Debug(str)
28 case Info:
29 s.hl.Info(str)
30 case Warn:
31 s.hl.Warn(str)
32 case Error:
33 s.hl.Error(str)
34 default:
35 s.hl.Info(str)
36 }
37 } else {
38 s.hl.Info(str)
39 }
40
41 return len(data), nil
42}
43
44// Detect, based on conventions, what log level this is
45func (s *stdlogAdapter) pickLevel(str string) (Level, string) {
46 switch {
47 case strings.HasPrefix(str, "[DEBUG]"):
48 return Debug, strings.TrimSpace(str[7:])
49 case strings.HasPrefix(str, "[TRACE]"):
50 return Trace, strings.TrimSpace(str[7:])
51 case strings.HasPrefix(str, "[INFO]"):
52 return Info, strings.TrimSpace(str[6:])
53 case strings.HasPrefix(str, "[WARN]"):
54 return Warn, strings.TrimSpace(str[7:])
55 case strings.HasPrefix(str, "[ERROR]"):
56 return Error, strings.TrimSpace(str[7:])
57 case strings.HasPrefix(str, "[ERR]"):
58 return Error, strings.TrimSpace(str[5:])
59 default:
60 return Info, str
61 }
62}
diff --git a/vendor/github.com/hashicorp/go-plugin/README.md b/vendor/github.com/hashicorp/go-plugin/README.md
index 2058cfb..e4558db 100644
--- a/vendor/github.com/hashicorp/go-plugin/README.md
+++ b/vendor/github.com/hashicorp/go-plugin/README.md
@@ -1,10 +1,9 @@
1# Go Plugin System over RPC 1# Go Plugin System over RPC
2 2
3`go-plugin` is a Go (golang) plugin system over RPC. It is the plugin system 3`go-plugin` is a Go (golang) plugin system over RPC. It is the plugin system
4that has been in use by HashiCorp tooling for over 3 years. While initially 4that has been in use by HashiCorp tooling for over 4 years. While initially
5created for [Packer](https://www.packer.io), it has since been used by 5created for [Packer](https://www.packer.io), it is additionally in use by
6[Terraform](https://www.terraform.io) and [Otto](https://www.ottoproject.io), 6[Terraform](https://www.terraform.io), [Nomad](https://www.nomadproject.io), and
7with plans to also use it for [Nomad](https://www.nomadproject.io) and
8[Vault](https://www.vaultproject.io). 7[Vault](https://www.vaultproject.io).
9 8
10While the plugin system is over RPC, it is currently only designed to work 9While the plugin system is over RPC, it is currently only designed to work
@@ -24,6 +23,11 @@ interface as if it were going to run in the same process. For a plugin user:
24you just use and call functions on an interface as if it were in the same 23you just use and call functions on an interface as if it were in the same
25process. This plugin system handles the communication in between. 24process. This plugin system handles the communication in between.
26 25
26**Cross-language support.** Plugins can be written (and consumed) by
27almost every major language. This library supports serving plugins via
28[gRPC](http://www.grpc.io). gRPC-based plugins enable plugins to be written
29in any language.
30
27**Complex arguments and return values are supported.** This library 31**Complex arguments and return values are supported.** This library
28provides APIs for handling complex arguments and return values such 32provides APIs for handling complex arguments and return values such
29as interfaces, `io.Reader/Writer`, etc. We do this by giving you a library 33as interfaces, `io.Reader/Writer`, etc. We do this by giving you a library
@@ -37,7 +41,10 @@ and the plugin can call back into the host process.
37**Built-in Logging.** Any plugins that use the `log` standard library 41**Built-in Logging.** Any plugins that use the `log` standard library
38will have log data automatically sent to the host process. The host 42will have log data automatically sent to the host process. The host
39process will mirror this output prefixed with the path to the plugin 43process will mirror this output prefixed with the path to the plugin
40binary. This makes debugging with plugins simple. 44binary. This makes debugging with plugins simple. If the host system
45uses [hclog](https://github.com/hashicorp/go-hclog) then the log data
46will be structured. If the plugin also uses hclog, logs from the plugin
47will be sent to the host hclog and be structured.
41 48
42**Protocol Versioning.** A very basic "protocol version" is supported that 49**Protocol Versioning.** A very basic "protocol version" is supported that
43can be incremented to invalidate any previous plugins. This is useful when 50can be incremented to invalidate any previous plugins. This is useful when
@@ -62,13 +69,18 @@ This requires the host/plugin to know this is possible and daemonize
62properly. `NewClient` takes a `ReattachConfig` to determine if and how to 69properly. `NewClient` takes a `ReattachConfig` to determine if and how to
63reattach. 70reattach.
64 71
72**Cryptographically Secure Plugins.** Plugins can be verified with an expected
73checksum and RPC communications can be configured to use TLS. The host process
74must be properly secured to protect this configuration.
75
65## Architecture 76## Architecture
66 77
67The HashiCorp plugin system works by launching subprocesses and communicating 78The HashiCorp plugin system works by launching subprocesses and communicating
68over RPC (using standard `net/rpc`). A single connection is made between 79over RPC (using standard `net/rpc` or [gRPC](http://www.grpc.io)). A single
69any plugin and the host process, and we use a 80connection is made between any plugin and the host process. For net/rpc-based
70[connection multiplexing](https://github.com/hashicorp/yamux) 81plugins, we use a [connection multiplexing](https://github.com/hashicorp/yamux)
71library to multiplex any other connections on top. 82library to multiplex any other connections on top. For gRPC-based plugins,
83the HTTP2 protocol handles multiplexing.
72 84
73This architecture has a number of benefits: 85This architecture has a number of benefits:
74 86
@@ -76,8 +88,8 @@ This architecture has a number of benefits:
76 panic the plugin user. 88 panic the plugin user.
77 89
78 * Plugins are very easy to write: just write a Go application and `go build`. 90 * Plugins are very easy to write: just write a Go application and `go build`.
79 Theoretically you could also use another language as long as it can 91 Or use any other language to write a gRPC server with a tiny amount of
80 communicate the Go `net/rpc` protocol but this hasn't yet been tried. 92 boilerplate to support go-plugin.
81 93
82 * Plugins are very easy to install: just put the binary in a location where 94 * Plugins are very easy to install: just put the binary in a location where
83 the host will find it (depends on the host but this library also provides 95 the host will find it (depends on the host but this library also provides
@@ -85,8 +97,8 @@ This architecture has a number of benefits:
85 97
86 * Plugins can be relatively secure: The plugin only has access to the 98 * Plugins can be relatively secure: The plugin only has access to the
87 interfaces and args given to it, not to the entire memory space of the 99 interfaces and args given to it, not to the entire memory space of the
88 process. More security features are planned (see the coming soon section 100 process. Additionally, go-plugin can communicate with the plugin over
89 below). 101 TLS.
90 102
91## Usage 103## Usage
92 104
@@ -97,10 +109,9 @@ high-level steps that must be done. Examples are available in the
97 1. Choose the interface(s) you want to expose for plugins. 109 1. Choose the interface(s) you want to expose for plugins.
98 110
99 2. For each interface, implement an implementation of that interface 111 2. For each interface, implement an implementation of that interface
100 that communicates over an `*rpc.Client` (from the standard `net/rpc` 112 that communicates over a `net/rpc` connection or a
101 package) for every function call. Likewise, implement the RPC server 113 [gRPC](http://www.grpc.io) connection or both. You'll have to implement
102 struct this communicates to which is then communicating to a real, 114 both a client and server implementation.
103 concrete implementation.
104 115
105 3. Create a `Plugin` implementation that knows how to create the RPC 116 3. Create a `Plugin` implementation that knows how to create the RPC
106 client/server for a given plugin type. 117 client/server for a given plugin type.
@@ -125,10 +136,6 @@ improvements we can make.
125 136
126At this point in time, the roadmap for the plugin system is: 137At this point in time, the roadmap for the plugin system is:
127 138
128**Cryptographically Secure Plugins.** We'll implement signing plugins
129and loading signed plugins in order to allow Vault to make use of multi-process
130in a secure way.
131
132**Semantic Versioning.** Plugins will be able to implement a semantic version. 139**Semantic Versioning.** Plugins will be able to implement a semantic version.
133This plugin system will give host processes a system for constraining 140This plugin system will give host processes a system for constraining
134versions. This is in addition to the protocol versioning already present 141versions. This is in addition to the protocol versioning already present
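Editor's note: the checksum verification the README now advertises is configured through the SecureConfig type added to client.go below. A hedged sketch; the binary path and checksum are placeholders, and the handshake/plugin map are omitted for brevity:

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"os/exec"

	plugin "github.com/hashicorp/go-plugin"
)

func main() {
	// The checksum should come from a trusted source; this value is a placeholder.
	checksum, err := hex.DecodeString(
		"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef")
	if err != nil {
		panic(err)
	}

	client := plugin.NewClient(&plugin.ClientConfig{
		Cmd: exec.Command("./my-plugin"),
		SecureConfig: &plugin.SecureConfig{
			Checksum: checksum,
			Hash:     sha256.New(),
		},
		// HandshakeConfig and Plugins omitted for brevity.
	})
	defer client.Kill()
}
```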
diff --git a/vendor/github.com/hashicorp/go-plugin/client.go b/vendor/github.com/hashicorp/go-plugin/client.go
index 9f8a0f2..b3e3b78 100644
--- a/vendor/github.com/hashicorp/go-plugin/client.go
+++ b/vendor/github.com/hashicorp/go-plugin/client.go
@@ -2,8 +2,12 @@ package plugin
2 2
3import ( 3import (
4 "bufio" 4 "bufio"
5 "context"
6 "crypto/subtle"
7 "crypto/tls"
5 "errors" 8 "errors"
6 "fmt" 9 "fmt"
10 "hash"
7 "io" 11 "io"
8 "io/ioutil" 12 "io/ioutil"
9 "log" 13 "log"
@@ -17,6 +21,8 @@ import (
17 "sync/atomic" 21 "sync/atomic"
18 "time" 22 "time"
19 "unicode" 23 "unicode"
24
25 hclog "github.com/hashicorp/go-hclog"
20) 26)
21 27
22// If this is 1, then we've called CleanupClients. This can be used 28// If this is 1, then we've called CleanupClients. This can be used
@@ -35,6 +41,22 @@ var (
35 // ErrProcessNotFound is returned when a client is instantiated to 41 // ErrProcessNotFound is returned when a client is instantiated to
36 // reattach to an existing process and it isn't found. 42 // reattach to an existing process and it isn't found.
37 ErrProcessNotFound = errors.New("Reattachment process not found") 43 ErrProcessNotFound = errors.New("Reattachment process not found")
44
45 // ErrChecksumsDoNotMatch is returned when the binary's checksum doesn't match
46 // the one provided in the SecureConfig.
47 ErrChecksumsDoNotMatch = errors.New("checksums did not match")
48
49 // ErrSecureConfigNoChecksum is returned when an empty checksum is provided to the
50 // SecureConfig.
51 ErrSecureConfigNoChecksum = errors.New("no checksum provided")
52
53 // ErrSecureConfigNoHash is returned when a nil Hash object is provided to the
54 // SecureConfig.
55 ErrSecureConfigNoHash = errors.New("no hash implementation provided")
56
57 // ErrSecureConfigAndReattach is returned when both Reattach and
58 // SecureConfig are set.
59 ErrSecureConfigAndReattach = errors.New("only one of Reattach or SecureConfig can be set")
38) 60)
39 61
40// Client handles the lifecycle of a plugin application. It launches 62// Client handles the lifecycle of a plugin application. It launches
@@ -55,7 +77,10 @@ type Client struct {
55 l sync.Mutex 77 l sync.Mutex
56 address net.Addr 78 address net.Addr
57 process *os.Process 79 process *os.Process
58 client *RPCClient 80 client ClientProtocol
81 protocol Protocol
82 logger hclog.Logger
83 doneCtx context.Context
59} 84}
60 85
61// ClientConfig is the configuration used to initialize a new 86// ClientConfig is the configuration used to initialize a new
@@ -79,6 +104,13 @@ type ClientConfig struct {
79 Cmd *exec.Cmd 104 Cmd *exec.Cmd
80 Reattach *ReattachConfig 105 Reattach *ReattachConfig
81 106
107 // SecureConfig is configuration for verifying the integrity of the
108 // executable. It can not be used with Reattach.
109 SecureConfig *SecureConfig
110
111 // TLSConfig is used to enable TLS on the RPC client.
112 TLSConfig *tls.Config
113
82 // Managed represents if the client should be managed by the 114 // Managed represents if the client should be managed by the
83 // plugin package or not. If true, then by calling CleanupClients, 115 // plugin package or not. If true, then by calling CleanupClients,
84 // it will automatically be cleaned up. Otherwise, the client 116 // it will automatically be cleaned up. Otherwise, the client
@@ -109,14 +141,74 @@ type ClientConfig struct {
109 // sync any of these streams. 141 // sync any of these streams.
110 SyncStdout io.Writer 142 SyncStdout io.Writer
111 SyncStderr io.Writer 143 SyncStderr io.Writer
144
145 // AllowedProtocols is a list of allowed protocols. If this isn't set,
146 // then only netrpc is allowed. This is so that older go-plugin systems
147 // can show friendly errors if they see a plugin with an unknown
148 // protocol.
149 //
150 // By setting this, you can cause an error immediately on plugin start
151 // if an unsupported protocol is used with a good error message.
152 //
153 // If this isn't set at all (nil value), then only net/rpc is accepted.
154 // This is done for legacy reasons. You must explicitly opt-in to
155 // new protocols.
156 AllowedProtocols []Protocol
157
158 // Logger is the logger that the client will use. If none is provided,
159 // it will default to hclog's default logger.
160 Logger hclog.Logger
112} 161}
113 162
114// ReattachConfig is used to configure a client to reattach to an 163// ReattachConfig is used to configure a client to reattach to an
115// already-running plugin process. You can retrieve this information by 164// already-running plugin process. You can retrieve this information by
116// calling ReattachConfig on Client. 165// calling ReattachConfig on Client.
117type ReattachConfig struct { 166type ReattachConfig struct {
118 Addr net.Addr 167 Protocol Protocol
119 Pid int 168 Addr net.Addr
169 Pid int
170}
171
172// SecureConfig is used to configure a client to verify the integrity of an
173// executable before running. It does this by verifying the checksum is
174// expected. Hash is used to specify the hashing method to use when checksumming
175// the file. The configuration is verified by the client by calling the
176// SecureConfig.Check() function.
177//
178// The host process should ensure the checksum was provided by a trusted and
179// authoritative source. The binary should be installed in such a way that it
180// can not be modified by an unauthorized user between the time of this check
181// and the time of execution.
182type SecureConfig struct {
183 Checksum []byte
184 Hash hash.Hash
185}
186
187// Check takes the filepath to an executable and returns true if the checksum of
188// the file matches the checksum provided in the SecureConfig.
189func (s *SecureConfig) Check(filePath string) (bool, error) {
190 if len(s.Checksum) == 0 {
191 return false, ErrSecureConfigNoChecksum
192 }
193
194 if s.Hash == nil {
195 return false, ErrSecureConfigNoHash
196 }
197
198 file, err := os.Open(filePath)
199 if err != nil {
200 return false, err
201 }
202 defer file.Close()
203
204 _, err = io.Copy(s.Hash, file)
205 if err != nil {
206 return false, err
207 }
208
209 sum := s.Hash.Sum(nil)
210
211 return subtle.ConstantTimeCompare(sum, s.Checksum) == 1, nil
120} 212}
121 213
122// This makes sure all the managed subprocesses are killed and properly 214// This makes sure all the managed subprocesses are killed and properly
@@ -174,7 +266,22 @@ func NewClient(config *ClientConfig) (c *Client) {
174 config.SyncStderr = ioutil.Discard 266 config.SyncStderr = ioutil.Discard
175 } 267 }
176 268
177 c = &Client{config: config} 269 if config.AllowedProtocols == nil {
270 config.AllowedProtocols = []Protocol{ProtocolNetRPC}
271 }
272
273 if config.Logger == nil {
274 config.Logger = hclog.New(&hclog.LoggerOptions{
275 Output: hclog.DefaultOutput,
276 Level: hclog.Trace,
277 Name: "plugin",
278 })
279 }
280
281 c = &Client{
282 config: config,
283 logger: config.Logger,
284 }
178 if config.Managed { 285 if config.Managed {
179 managedClientsLock.Lock() 286 managedClientsLock.Lock()
180 managedClients = append(managedClients, c) 287 managedClients = append(managedClients, c)
@@ -184,11 +291,11 @@ func NewClient(config *ClientConfig) (c *Client) {
184 return 291 return
185} 292}
186 293
187// Client returns an RPC client for the plugin. 294// Client returns the protocol client for this connection.
188// 295//
189// Subsequent calls to this will return the same RPC client. 296// Subsequent calls to this will return the same client.
190func (c *Client) Client() (*RPCClient, error) { 297func (c *Client) Client() (ClientProtocol, error) {
191 addr, err := c.Start() 298 _, err := c.Start()
192 if err != nil { 299 if err != nil {
193 return nil, err 300 return nil, err
194 } 301 }
@@ -200,29 +307,18 @@ func (c *Client) Client() (*RPCClient, error) {
200 return c.client, nil 307 return c.client, nil
201 } 308 }
202 309
203 // Connect to the client 310 switch c.protocol {
204 conn, err := net.Dial(addr.Network(), addr.String()) 311 case ProtocolNetRPC:
205 if err != nil { 312 c.client, err = newRPCClient(c)
206 return nil, err
207 }
208 if tcpConn, ok := conn.(*net.TCPConn); ok {
209 // Make sure to set keep alive so that the connection doesn't die
210 tcpConn.SetKeepAlive(true)
211 }
212 313
213 // Create the actual RPC client 314 case ProtocolGRPC:
214 c.client, err = NewRPCClient(conn, c.config.Plugins) 315 c.client, err = newGRPCClient(c.doneCtx, c)
215 if err != nil { 316
216 conn.Close() 317 default:
217 return nil, err 318 return nil, fmt.Errorf("unknown server protocol: %s", c.protocol)
218 } 319 }
219 320
220 // Begin the stream syncing so that stdin, out, err work properly
221 err = c.client.SyncStreams(
222 c.config.SyncStdout,
223 c.config.SyncStderr)
224 if err != nil { 321 if err != nil {
225 c.client.Close()
226 c.client = nil 322 c.client = nil
227 return nil, err 323 return nil, err
228 } 324 }
@@ -274,8 +370,7 @@ func (c *Client) Kill() {
274 if err != nil { 370 if err != nil {
275 // If there was an error just log it. We're going to force 371 // If there was an error just log it. We're going to force
276 // kill in a moment anyways. 372 // kill in a moment anyways.
277 log.Printf( 373 c.logger.Warn("error closing client during Kill", "err", err)
278 "[WARN] plugin: error closing client during Kill: %s", err)
279 } 374 }
280 } 375 }
281 } 376 }
@@ -318,13 +413,21 @@ func (c *Client) Start() (addr net.Addr, err error) {
318 { 413 {
319 cmdSet := c.config.Cmd != nil 414 cmdSet := c.config.Cmd != nil
320 attachSet := c.config.Reattach != nil 415 attachSet := c.config.Reattach != nil
416 secureSet := c.config.SecureConfig != nil
321 if cmdSet == attachSet { 417 if cmdSet == attachSet {
322 return nil, fmt.Errorf("Only one of Cmd or Reattach must be set") 418 return nil, fmt.Errorf("Only one of Cmd or Reattach must be set")
323 } 419 }
420
421 if secureSet && attachSet {
422 return nil, ErrSecureConfigAndReattach
423 }
324 } 424 }
325 425
326 // Create the logging channel for when we kill 426 // Create the logging channel for when we kill
327 c.doneLogging = make(chan struct{}) 427 c.doneLogging = make(chan struct{})
428 // Create a context for when we kill
429 var ctxCancel context.CancelFunc
430 c.doneCtx, ctxCancel = context.WithCancel(context.Background())
328 431
329 if c.config.Reattach != nil { 432 if c.config.Reattach != nil {
330 // Verify the process still exists. If not, then it is an error 433 // Verify the process still exists. If not, then it is an error
@@ -350,7 +453,7 @@ func (c *Client) Start() (addr net.Addr, err error) {
350 pidWait(pid) 453 pidWait(pid)
351 454
352 // Log so we can see it 455 // Log so we can see it
353 log.Printf("[DEBUG] plugin: reattached plugin process exited\n") 456 c.logger.Debug("reattached plugin process exited")
354 457
355 // Mark it 458 // Mark it
356 c.l.Lock() 459 c.l.Lock()
@@ -359,11 +462,19 @@ func (c *Client) Start() (addr net.Addr, err error) {
359 462
360 // Close the logging channel since that doesn't work on reattach 463 // Close the logging channel since that doesn't work on reattach
361 close(c.doneLogging) 464 close(c.doneLogging)
465
466 // Cancel the context
467 ctxCancel()
362 }(p.Pid) 468 }(p.Pid)
363 469
364 // Set the address and process 470 // Set the address and process
365 c.address = c.config.Reattach.Addr 471 c.address = c.config.Reattach.Addr
366 c.process = p 472 c.process = p
473 c.protocol = c.config.Reattach.Protocol
474 if c.protocol == "" {
475 // Default the protocol to net/rpc for backwards compatibility
476 c.protocol = ProtocolNetRPC
477 }
367 478
368 return c.address, nil 479 return c.address, nil
369 } 480 }
@@ -384,7 +495,15 @@ func (c *Client) Start() (addr net.Addr, err error) {
384 cmd.Stderr = stderr_w 495 cmd.Stderr = stderr_w
385 cmd.Stdout = stdout_w 496 cmd.Stdout = stdout_w
386 497
387 log.Printf("[DEBUG] plugin: starting plugin: %s %#v", cmd.Path, cmd.Args) 498 if c.config.SecureConfig != nil {
499 if ok, err := c.config.SecureConfig.Check(cmd.Path); err != nil {
500 return nil, fmt.Errorf("error verifying checksum: %s", err)
501 } else if !ok {
502 return nil, ErrChecksumsDoNotMatch
503 }
504 }
505
506 c.logger.Debug("starting plugin", "path", cmd.Path, "args", cmd.Args)
388 err = cmd.Start() 507 err = cmd.Start()
389 if err != nil { 508 if err != nil {
390 return 509 return
@@ -418,12 +537,15 @@ func (c *Client) Start() (addr net.Addr, err error) {
418 cmd.Wait() 537 cmd.Wait()
419 538
420 // Log and make sure to flush the logs right away 539
421 log.Printf("[DEBUG] plugin: %s: plugin process exited\n", cmd.Path) 540 c.logger.Debug("plugin process exited", "path", cmd.Path)
422 os.Stderr.Sync() 541 os.Stderr.Sync()
423 542
424 // Mark that we exited 543 // Mark that we exited
425 close(exitCh) 544 close(exitCh)
426 545
546 // Cancel the context, marking that we exited
547 ctxCancel()
548
427 // Set that we exited, which takes a lock 549 // Set that we exited, which takes a lock
428 c.l.Lock() 550 c.l.Lock()
429 defer c.l.Unlock() 551 defer c.l.Unlock()
@@ -465,7 +587,7 @@ func (c *Client) Start() (addr net.Addr, err error) {
465 timeout := time.After(c.config.StartTimeout) 587 timeout := time.After(c.config.StartTimeout)
466 588
467 // Start looking for the address 589 // Start looking for the address
468 log.Printf("[DEBUG] plugin: waiting for RPC address for: %s", cmd.Path) 590 c.logger.Debug("waiting for RPC address", "path", cmd.Path)
469 select { 591 select {
470 case <-timeout: 592 case <-timeout:
471 err = errors.New("timeout while waiting for plugin to start") 593 err = errors.New("timeout while waiting for plugin to start")
@@ -475,7 +597,7 @@ func (c *Client) Start() (addr net.Addr, err error) {
475 // Trim the line and split by "|" in order to get the parts of 597 // Trim the line and split by "|" in order to get the parts of
476 // the output. 598 // the output.
477 line := strings.TrimSpace(string(lineBytes)) 599 line := strings.TrimSpace(string(lineBytes))
478 parts := strings.SplitN(line, "|", 4) 600 parts := strings.SplitN(line, "|", 6)
479 if len(parts) < 4 { 601 if len(parts) < 4 {
480 err = fmt.Errorf( 602 err = fmt.Errorf(
481 "Unrecognized remote plugin message: %s\n\n"+ 603 "Unrecognized remote plugin message: %s\n\n"+
@@ -495,7 +617,7 @@ func (c *Client) Start() (addr net.Addr, err error) {
495 617
496 if int(coreProtocol) != CoreProtocolVersion { 618 if int(coreProtocol) != CoreProtocolVersion {
497 err = fmt.Errorf("Incompatible core API version with plugin. "+ 619 err = fmt.Errorf("Incompatible core API version with plugin. "+
498 "Plugin version: %s, Ours: %d\n\n"+ 620 "Plugin version: %s, Core version: %d\n\n"+
499 "To fix this, the plugin usually only needs to be recompiled.\n"+ 621 "To fix this, the plugin usually only needs to be recompiled.\n"+
500 "Please report this to the plugin author.", parts[0], CoreProtocolVersion) 622 "Please report this to the plugin author.", parts[0], CoreProtocolVersion)
501 return 623 return
@@ -513,7 +635,7 @@ func (c *Client) Start() (addr net.Addr, err error) {
513 // Test the API version 635 // Test the API version
514 if uint(protocol) != c.config.ProtocolVersion { 636 if uint(protocol) != c.config.ProtocolVersion {
515 err = fmt.Errorf("Incompatible API version with plugin. "+ 637 err = fmt.Errorf("Incompatible API version with plugin. "+
516 "Plugin version: %s, Ours: %d", parts[1], c.config.ProtocolVersion) 638 "Plugin version: %s, Core version: %d", parts[1], c.config.ProtocolVersion)
517 return 639 return
518 } 640 }
519 641
@@ -525,6 +647,27 @@ func (c *Client) Start() (addr net.Addr, err error) {
525 default: 647 default:
526 err = fmt.Errorf("Unknown address type: %s", parts[3]) 648 err = fmt.Errorf("Unknown address type: %s", parts[3])
527 } 649 }
650
651 // If we have a server type, then record that. We default to net/rpc
652 // for backwards compatibility.
653 c.protocol = ProtocolNetRPC
654 if len(parts) >= 5 {
655 c.protocol = Protocol(parts[4])
656 }
657
658 found := false
659 for _, p := range c.config.AllowedProtocols {
660 if p == c.protocol {
661 found = true
662 break
663 }
664 }
665 if !found {
666 err = fmt.Errorf("Unsupported plugin protocol %q. Supported: %v",
667 c.protocol, c.config.AllowedProtocols)
668 return
669 }
670
528 } 671 }
529 672
530 c.address = addr 673 c.address = addr
@@ -555,9 +698,57 @@ func (c *Client) ReattachConfig() *ReattachConfig {
555 } 698 }
556 699
557 return &ReattachConfig{ 700 return &ReattachConfig{
558 Addr: c.address, 701 Protocol: c.protocol,
559 Pid: c.config.Cmd.Process.Pid, 702 Addr: c.address,
703 Pid: c.config.Cmd.Process.Pid,
704 }
705}
706
707// Protocol returns the protocol of server on the remote end. This will
708// start the plugin process if it isn't already started. Errors from
709// starting the plugin are suppressed and ProtocolInvalid is returned. It
710// is recommended you call Start explicitly before calling Protocol to ensure
711// no errors occur.
712func (c *Client) Protocol() Protocol {
713 _, err := c.Start()
714 if err != nil {
715 return ProtocolInvalid
716 }
717
718 return c.protocol
719}
720
721func netAddrDialer(addr net.Addr) func(string, time.Duration) (net.Conn, error) {
722 return func(_ string, _ time.Duration) (net.Conn, error) {
723 // Connect to the client
724 conn, err := net.Dial(addr.Network(), addr.String())
725 if err != nil {
726 return nil, err
727 }
728 if tcpConn, ok := conn.(*net.TCPConn); ok {
729 // Make sure to set keep alive so that the connection doesn't die
730 tcpConn.SetKeepAlive(true)
731 }
732
733 return conn, nil
734 }
735}
736
737// dialer is compatible with grpc.WithDialer and creates the connection
738// to the plugin.
739func (c *Client) dialer(_ string, timeout time.Duration) (net.Conn, error) {
740 conn, err := netAddrDialer(c.address)("", timeout)
741 if err != nil {
742 return nil, err
560 } 743 }
744
745 // If we have a TLS config we wrap our connection. We only do this
746 // for net/rpc since gRPC uses its own mechanism for TLS.
747 if c.protocol == ProtocolNetRPC && c.config.TLSConfig != nil {
748 conn = tls.Client(conn, c.config.TLSConfig)
749 }
750
751 return conn, nil
561} 752}
562 753
563func (c *Client) logStderr(r io.Reader) { 754func (c *Client) logStderr(r io.Reader) {
@@ -566,9 +757,31 @@ func (c *Client) logStderr(r io.Reader) {
566 line, err := bufR.ReadString('\n') 757 line, err := bufR.ReadString('\n')
567 if line != "" { 758 if line != "" {
568 c.config.Stderr.Write([]byte(line)) 759 c.config.Stderr.Write([]byte(line))
569
570 line = strings.TrimRightFunc(line, unicode.IsSpace) 760 line = strings.TrimRightFunc(line, unicode.IsSpace)
571 log.Printf("[DEBUG] plugin: %s: %s", filepath.Base(c.config.Cmd.Path), line) 761
762 l := c.logger.Named(filepath.Base(c.config.Cmd.Path))
763
764 entry, err := parseJSON(line)
765 // If output is not JSON format, print directly to Debug
766 if err != nil {
767 l.Debug(line)
768 } else {
769 out := flattenKVPairs(entry.KVPairs)
770
771 l = l.With("timestamp", entry.Timestamp.Format(hclog.TimeFormat))
772 switch hclog.LevelFromString(entry.Level) {
773 case hclog.Trace:
774 l.Trace(entry.Message, out...)
775 case hclog.Debug:
776 l.Debug(entry.Message, out...)
777 case hclog.Info:
778 l.Info(entry.Message, out...)
779 case hclog.Warn:
780 l.Warn(entry.Message, out...)
781 case hclog.Error:
782 l.Error(entry.Message, out...)
783 }
784 }
572 } 785 }
573 786
574 if err == io.EOF { 787 if err == io.EOF {
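Putting the client changes together, a hedged sketch of host-side usage: AllowedProtocols opts in to gRPC, and Client() now returns the protocol-specific ClientProtocol. The handshake values, the "kv" name, the "./my-plugin" path, and KVGRPCPlugin (a hypothetical GRPCPlugin implementation, sketched after the plugin.go diff below) are all placeholders:

```go
package main

import (
    "log"
    "os/exec"

    plugin "github.com/hashicorp/go-plugin"
)

// Hypothetical handshake; the values only need to match what the plugin
// binary announces.
var handshake = plugin.HandshakeConfig{
    ProtocolVersion:  1,
    MagicCookieKey:   "EXAMPLE_PLUGIN",
    MagicCookieValue: "example",
}

func main() {
    client := plugin.NewClient(&plugin.ClientConfig{
        HandshakeConfig: handshake,
        // KVGRPCPlugin is a hypothetical GRPCPlugin implementation (see the
        // sketch after the plugin.go diff); "./my-plugin" is a placeholder.
        Plugins: map[string]plugin.Plugin{"kv": &KVGRPCPlugin{}},
        Cmd:     exec.Command("./my-plugin"),
        // Opt in to gRPC; when nil, NewClient defaults this to net/rpc only.
        AllowedProtocols: []plugin.Protocol{plugin.ProtocolNetRPC, plugin.ProtocolGRPC},
    })
    defer client.Kill()

    // Client() starts the subprocess, reads the handshake line, and returns an
    // *RPCClient or *GRPCClient behind the new ClientProtocol interface.
    proto, err := client.Client()
    if err != nil {
        log.Fatal(err)
    }
    raw, err := proto.Dispense("kv")
    if err != nil {
        log.Fatal(err)
    }
    _ = raw // assert raw to the interface the plugin exposes
}
```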
diff --git a/vendor/github.com/hashicorp/go-plugin/grpc_broker.go b/vendor/github.com/hashicorp/go-plugin/grpc_broker.go
new file mode 100644
index 0000000..49fd21c
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-plugin/grpc_broker.go
@@ -0,0 +1,455 @@
1package plugin
2
3import (
4 "context"
5 "crypto/tls"
6 "errors"
7 "fmt"
8 "log"
9 "net"
10 "sync"
11 "sync/atomic"
12 "time"
13
14 "github.com/oklog/run"
15 "google.golang.org/grpc"
16 "google.golang.org/grpc/credentials"
17)
18
19// streamer interface is used in the broker to send/receive connection
20// information.
21type streamer interface {
22 Send(*ConnInfo) error
23 Recv() (*ConnInfo, error)
24 Close()
25}
26
27// sendErr is used to pass errors back during a send.
28type sendErr struct {
29 i *ConnInfo
30 ch chan error
31}
32
33// gRPCBrokerServer is used by the plugin to start a stream and to send
34// connection information to/from the plugin. Implements GRPCBrokerServer and
35// streamer interfaces.
36type gRPCBrokerServer struct {
37 // send is used to send connection info to the gRPC stream.
38 send chan *sendErr
39
40 // recv is used to receive connection info from the gRPC stream.
41 recv chan *ConnInfo
42
43 // quit closes down the stream.
44 quit chan struct{}
45
46 // o is used to ensure we close the quit channel only once.
47 o sync.Once
48}
49
50func newGRPCBrokerServer() *gRPCBrokerServer {
51 return &gRPCBrokerServer{
52 send: make(chan *sendErr),
53 recv: make(chan *ConnInfo),
54 quit: make(chan struct{}),
55 }
56}
57
58// StartStream implements the GRPCBrokerServer interface and will block until
59// the quit channel is closed or the context reports Done. The stream will pass
60// connection information to/from the client.
61func (s *gRPCBrokerServer) StartStream(stream GRPCBroker_StartStreamServer) error {
62 doneCh := stream.Context().Done()
63 defer s.Close()
64
65 // Process send stream
66 go func() {
67 for {
68 select {
69 case <-doneCh:
70 return
71 case <-s.quit:
72 return
73 case se := <-s.send:
74 err := stream.Send(se.i)
75 se.ch <- err
76 }
77 }
78 }()
79
80 // Process receive stream
81 for {
82 i, err := stream.Recv()
83 if err != nil {
84 return err
85 }
86 select {
87 case <-doneCh:
88 return nil
89 case <-s.quit:
90 return nil
91 case s.recv <- i:
92 }
93 }
94
95 return nil
96}
97
98// Send is used by the GRPCBroker to pass connection information into the stream
99// to the client.
100func (s *gRPCBrokerServer) Send(i *ConnInfo) error {
101 ch := make(chan error)
102 defer close(ch)
103
104 select {
105 case <-s.quit:
106 return errors.New("broker closed")
107 case s.send <- &sendErr{
108 i: i,
109 ch: ch,
110 }:
111 }
112
113 return <-ch
114}
115
116// Recv is used by the GRPCBroker to pass connection information that has been
117// sent from the client over the stream to the broker.
118func (s *gRPCBrokerServer) Recv() (*ConnInfo, error) {
119 select {
120 case <-s.quit:
121 return nil, errors.New("broker closed")
122 case i := <-s.recv:
123 return i, nil
124 }
125}
126
127// Close closes the quit channel, shutting down the stream.
128func (s *gRPCBrokerServer) Close() {
129 s.o.Do(func() {
130 close(s.quit)
131 })
132}
133
134// gRPCBrokerClientImpl is used by the client to start a stream and to send
135// connection information to/from the client. Implements GRPCBrokerClient and
136// streamer interfaces.
137type gRPCBrokerClientImpl struct {
138 // client is the underlying GRPC client used to make calls to the server.
139 client GRPCBrokerClient
140
141 // send is used to send connection info to the gRPC stream.
142 send chan *sendErr
143
144 // recv is used to receive connection info from the gRPC stream.
145 recv chan *ConnInfo
146
147 // quit closes down the stream.
148 quit chan struct{}
149
150 // o is used to ensure we close the quit channel only once.
151 o sync.Once
152}
153
154func newGRPCBrokerClient(conn *grpc.ClientConn) *gRPCBrokerClientImpl {
155 return &gRPCBrokerClientImpl{
156 client: NewGRPCBrokerClient(conn),
157 send: make(chan *sendErr),
158 recv: make(chan *ConnInfo),
159 quit: make(chan struct{}),
160 }
161}
162
163// StartStream implements the GRPCBrokerClient interface and will block until
164// the quit channel is closed or the context reports Done. The stream will pass
165// connection information to/from the plugin.
166func (s *gRPCBrokerClientImpl) StartStream() error {
167 ctx, cancelFunc := context.WithCancel(context.Background())
168 defer cancelFunc()
169 defer s.Close()
170
171 stream, err := s.client.StartStream(ctx)
172 if err != nil {
173 return err
174 }
175 doneCh := stream.Context().Done()
176
177 go func() {
178 for {
179 select {
180 case <-doneCh:
181 return
182 case <-s.quit:
183 return
184 case se := <-s.send:
185 err := stream.Send(se.i)
186 se.ch <- err
187 }
188 }
189 }()
190
191 for {
192 i, err := stream.Recv()
193 if err != nil {
194 return err
195 }
196 select {
197 case <-doneCh:
198 return nil
199 case <-s.quit:
200 return nil
201 case s.recv <- i:
202 }
203 }
204
205 return nil
206}
207
208// Send is used by the GRPCBroker to pass connection information into the stream
209// to the plugin.
210func (s *gRPCBrokerClientImpl) Send(i *ConnInfo) error {
211 ch := make(chan error)
212 defer close(ch)
213
214 select {
215 case <-s.quit:
216 return errors.New("broker closed")
217 case s.send <- &sendErr{
218 i: i,
219 ch: ch,
220 }:
221 }
222
223 return <-ch
224}
225
226// Recv is used by the GRPCBroker to pass connection information that has been
227// sent from the plugin to the broker.
228func (s *gRPCBrokerClientImpl) Recv() (*ConnInfo, error) {
229 select {
230 case <-s.quit:
231 return nil, errors.New("broker closed")
232 case i := <-s.recv:
233 return i, nil
234 }
235}
236
237// Close closes the quit channel, shutting down the stream.
238func (s *gRPCBrokerClientImpl) Close() {
239 s.o.Do(func() {
240 close(s.quit)
241 })
242}
243
244// GRPCBroker is responsible for brokering connections by unique ID.
245//
246// It is used by plugins to create multiple gRPC connections and data
247// streams between the plugin process and the host process.
248//
249// This allows a plugin to request a channel with a specific ID to connect to
250// or accept a connection from, and the broker handles the details of
251// holding these channels open while they're being negotiated.
252//
253// The Plugin interface has access to these for both Server and Client.
254// The broker can be used by either (optionally) to reserve and connect to
255// new streams. This is useful for complex args and return values,
256// or anything else you might need a data stream for.
257type GRPCBroker struct {
258 nextId uint32
259 streamer streamer
260 streams map[uint32]*gRPCBrokerPending
261 tls *tls.Config
262 doneCh chan struct{}
263 o sync.Once
264
265 sync.Mutex
266}
267
268type gRPCBrokerPending struct {
269 ch chan *ConnInfo
270 doneCh chan struct{}
271}
272
273func newGRPCBroker(s streamer, tls *tls.Config) *GRPCBroker {
274 return &GRPCBroker{
275 streamer: s,
276 streams: make(map[uint32]*gRPCBrokerPending),
277 tls: tls,
278 doneCh: make(chan struct{}),
279 }
280}
281
282// Accept accepts a connection by ID.
283//
284// This should not be called multiple times with the same ID at one time.
285func (b *GRPCBroker) Accept(id uint32) (net.Listener, error) {
286 listener, err := serverListener()
287 if err != nil {
288 return nil, err
289 }
290
291 err = b.streamer.Send(&ConnInfo{
292 ServiceId: id,
293 Network: listener.Addr().Network(),
294 Address: listener.Addr().String(),
295 })
296 if err != nil {
297 return nil, err
298 }
299
300 return listener, nil
301}
302
303// AcceptAndServe is used to accept a specific stream ID and immediately
304// serve a gRPC server on that stream ID. This is used to easily serve
305// complex arguments. Each AcceptAndServe call opens a new listener socket and
306// sends the connection info down the stream to the dialer. Since a new
307// connection is opened every call, these calls should be used sparingly.
308// Multiple gRPC server implementations can be registered to a single
309// AcceptAndServe call.
310func (b *GRPCBroker) AcceptAndServe(id uint32, s func([]grpc.ServerOption) *grpc.Server) {
311 listener, err := b.Accept(id)
312 if err != nil {
313 log.Printf("[ERR] plugin: plugin acceptAndServe error: %s", err)
314 return
315 }
316 defer listener.Close()
317
318 var opts []grpc.ServerOption
319 if b.tls != nil {
320 opts = []grpc.ServerOption{grpc.Creds(credentials.NewTLS(b.tls))}
321 }
322
323 server := s(opts)
324
325 // Here we use a run group to close this goroutine if the server is shutdown
326 // or the broker is shutdown.
327 var g run.Group
328 {
329 // Serve on the listener, if shutting down call GracefulStop.
330 g.Add(func() error {
331 return server.Serve(listener)
332 }, func(err error) {
333 server.GracefulStop()
334 })
335 }
336 {
337 // block on the closeCh or the doneCh. If we are shutting down close the
338 // closeCh.
339 closeCh := make(chan struct{})
340 g.Add(func() error {
341 select {
342 case <-b.doneCh:
343 case <-closeCh:
344 }
345 return nil
346 }, func(err error) {
347 close(closeCh)
348 })
349 }
350
351 // Block until we are done
352 g.Run()
353}
354
355// Close closes the stream and all servers.
356func (b *GRPCBroker) Close() error {
357 b.streamer.Close()
358 b.o.Do(func() {
359 close(b.doneCh)
360 })
361 return nil
362}
363
364// Dial opens a connection by ID.
365func (b *GRPCBroker) Dial(id uint32) (conn *grpc.ClientConn, err error) {
366 var c *ConnInfo
367
368 // Open the stream
369 p := b.getStream(id)
370 select {
371 case c = <-p.ch:
372 close(p.doneCh)
373 case <-time.After(5 * time.Second):
374 return nil, fmt.Errorf("timeout waiting for connection info")
375 }
376
377 var addr net.Addr
378 switch c.Network {
379 case "tcp":
380 addr, err = net.ResolveTCPAddr("tcp", c.Address)
381 case "unix":
382 addr, err = net.ResolveUnixAddr("unix", c.Address)
383 default:
384 err = fmt.Errorf("Unknown address type: %s", c.Address)
385 }
386 if err != nil {
387 return nil, err
388 }
389
390 return dialGRPCConn(b.tls, netAddrDialer(addr))
391}
392
393// NextId returns a unique ID to use next.
394//
395// It is possible for very long-running plugin hosts to wrap this value,
396// though it would require a very large number of calls. In practice
397// we've never seen it happen.
398func (m *GRPCBroker) NextId() uint32 {
399 return atomic.AddUint32(&m.nextId, 1)
400}
401
402// Run starts the brokering and should be executed in a goroutine, since it
403// blocks forever, or until the session closes.
404//
405// Uses of GRPCBroker never need to call this. It is called internally by
406// the plugin host/client.
407func (m *GRPCBroker) Run() {
408 for {
409 stream, err := m.streamer.Recv()
410 if err != nil {
411 // Once we receive an error, just exit
412 break
413 }
414
415 // Initialize the waiter
416 p := m.getStream(stream.ServiceId)
417 select {
418 case p.ch <- stream:
419 default:
420 }
421
422 go m.timeoutWait(stream.ServiceId, p)
423 }
424}
425
426func (m *GRPCBroker) getStream(id uint32) *gRPCBrokerPending {
427 m.Lock()
428 defer m.Unlock()
429
430 p, ok := m.streams[id]
431 if ok {
432 return p
433 }
434
435 m.streams[id] = &gRPCBrokerPending{
436 ch: make(chan *ConnInfo, 1),
437 doneCh: make(chan struct{}),
438 }
439 return m.streams[id]
440}
441
442func (m *GRPCBroker) timeoutWait(id uint32, p *gRPCBrokerPending) {
443 // Wait for the stream to either be picked up and connected, or
444 // for a timeout.
445 select {
446 case <-p.doneCh:
447 case <-time.After(5 * time.Second):
448 }
449
450 m.Lock()
451 defer m.Unlock()
452
453 // Delete the stream so no one else can grab it
454 delete(m.streams, id)
455}
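A hedged sketch of how this broker is typically used from plugin code: one side reserves an ID with NextId and serves an extra gRPC server on it via AcceptAndServe, passes the ID to its peer over the existing plugin RPC, and the peer dials it. The function names are illustrative and concrete service registration is left as a comment:

```go
package brokerexample

import (
    plugin "github.com/hashicorp/go-plugin"
    "google.golang.org/grpc"
)

// serveExtra reserves a stream ID and serves an additional gRPC server on it.
// The returned ID would be sent to the peer inside a normal plugin RPC so the
// peer can Dial the same ID.
func serveExtra(broker *plugin.GRPCBroker) uint32 {
    id := broker.NextId()
    go broker.AcceptAndServe(id, func(opts []grpc.ServerOption) *grpc.Server {
        s := grpc.NewServer(opts...)
        // Register protoc-generated services on s here.
        return s
    })
    return id
}

// dialExtra is the other side: given the ID received over the plugin RPC, it
// opens a dedicated gRPC connection brokered by go-plugin.
func dialExtra(broker *plugin.GRPCBroker, id uint32) (*grpc.ClientConn, error) {
    return broker.Dial(id)
}
```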
diff --git a/vendor/github.com/hashicorp/go-plugin/grpc_broker.pb.go b/vendor/github.com/hashicorp/go-plugin/grpc_broker.pb.go
new file mode 100644
index 0000000..d490daf
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-plugin/grpc_broker.pb.go
@@ -0,0 +1,190 @@
1// Code generated by protoc-gen-go. DO NOT EDIT.
2// source: grpc_broker.proto
3
4/*
5Package plugin is a generated protocol buffer package.
6
7It is generated from these files:
8 grpc_broker.proto
9
10It has these top-level messages:
11 ConnInfo
12*/
13package plugin
14
15import proto "github.com/golang/protobuf/proto"
16import fmt "fmt"
17import math "math"
18
19import (
20 context "golang.org/x/net/context"
21 grpc "google.golang.org/grpc"
22)
23
24// Reference imports to suppress errors if they are not otherwise used.
25var _ = proto.Marshal
26var _ = fmt.Errorf
27var _ = math.Inf
28
29// This is a compile-time assertion to ensure that this generated file
30// is compatible with the proto package it is being compiled against.
31// A compilation error at this line likely means your copy of the
32// proto package needs to be updated.
33const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
34
35type ConnInfo struct {
36 ServiceId uint32 `protobuf:"varint,1,opt,name=service_id,json=serviceId" json:"service_id,omitempty"`
37 Network string `protobuf:"bytes,2,opt,name=network" json:"network,omitempty"`
38 Address string `protobuf:"bytes,3,opt,name=address" json:"address,omitempty"`
39}
40
41func (m *ConnInfo) Reset() { *m = ConnInfo{} }
42func (m *ConnInfo) String() string { return proto.CompactTextString(m) }
43func (*ConnInfo) ProtoMessage() {}
44func (*ConnInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
45
46func (m *ConnInfo) GetServiceId() uint32 {
47 if m != nil {
48 return m.ServiceId
49 }
50 return 0
51}
52
53func (m *ConnInfo) GetNetwork() string {
54 if m != nil {
55 return m.Network
56 }
57 return ""
58}
59
60func (m *ConnInfo) GetAddress() string {
61 if m != nil {
62 return m.Address
63 }
64 return ""
65}
66
67func init() {
68 proto.RegisterType((*ConnInfo)(nil), "plugin.ConnInfo")
69}
70
71// Reference imports to suppress errors if they are not otherwise used.
72var _ context.Context
73var _ grpc.ClientConn
74
75// This is a compile-time assertion to ensure that this generated file
76// is compatible with the grpc package it is being compiled against.
77const _ = grpc.SupportPackageIsVersion4
78
79// Client API for GRPCBroker service
80
81type GRPCBrokerClient interface {
82 StartStream(ctx context.Context, opts ...grpc.CallOption) (GRPCBroker_StartStreamClient, error)
83}
84
85type gRPCBrokerClient struct {
86 cc *grpc.ClientConn
87}
88
89func NewGRPCBrokerClient(cc *grpc.ClientConn) GRPCBrokerClient {
90 return &gRPCBrokerClient{cc}
91}
92
93func (c *gRPCBrokerClient) StartStream(ctx context.Context, opts ...grpc.CallOption) (GRPCBroker_StartStreamClient, error) {
94 stream, err := grpc.NewClientStream(ctx, &_GRPCBroker_serviceDesc.Streams[0], c.cc, "/plugin.GRPCBroker/StartStream", opts...)
95 if err != nil {
96 return nil, err
97 }
98 x := &gRPCBrokerStartStreamClient{stream}
99 return x, nil
100}
101
102type GRPCBroker_StartStreamClient interface {
103 Send(*ConnInfo) error
104 Recv() (*ConnInfo, error)
105 grpc.ClientStream
106}
107
108type gRPCBrokerStartStreamClient struct {
109 grpc.ClientStream
110}
111
112func (x *gRPCBrokerStartStreamClient) Send(m *ConnInfo) error {
113 return x.ClientStream.SendMsg(m)
114}
115
116func (x *gRPCBrokerStartStreamClient) Recv() (*ConnInfo, error) {
117 m := new(ConnInfo)
118 if err := x.ClientStream.RecvMsg(m); err != nil {
119 return nil, err
120 }
121 return m, nil
122}
123
124// Server API for GRPCBroker service
125
126type GRPCBrokerServer interface {
127 StartStream(GRPCBroker_StartStreamServer) error
128}
129
130func RegisterGRPCBrokerServer(s *grpc.Server, srv GRPCBrokerServer) {
131 s.RegisterService(&_GRPCBroker_serviceDesc, srv)
132}
133
134func _GRPCBroker_StartStream_Handler(srv interface{}, stream grpc.ServerStream) error {
135 return srv.(GRPCBrokerServer).StartStream(&gRPCBrokerStartStreamServer{stream})
136}
137
138type GRPCBroker_StartStreamServer interface {
139 Send(*ConnInfo) error
140 Recv() (*ConnInfo, error)
141 grpc.ServerStream
142}
143
144type gRPCBrokerStartStreamServer struct {
145 grpc.ServerStream
146}
147
148func (x *gRPCBrokerStartStreamServer) Send(m *ConnInfo) error {
149 return x.ServerStream.SendMsg(m)
150}
151
152func (x *gRPCBrokerStartStreamServer) Recv() (*ConnInfo, error) {
153 m := new(ConnInfo)
154 if err := x.ServerStream.RecvMsg(m); err != nil {
155 return nil, err
156 }
157 return m, nil
158}
159
160var _GRPCBroker_serviceDesc = grpc.ServiceDesc{
161 ServiceName: "plugin.GRPCBroker",
162 HandlerType: (*GRPCBrokerServer)(nil),
163 Methods: []grpc.MethodDesc{},
164 Streams: []grpc.StreamDesc{
165 {
166 StreamName: "StartStream",
167 Handler: _GRPCBroker_StartStream_Handler,
168 ServerStreams: true,
169 ClientStreams: true,
170 },
171 },
172 Metadata: "grpc_broker.proto",
173}
174
175func init() { proto.RegisterFile("grpc_broker.proto", fileDescriptor0) }
176
177var fileDescriptor0 = []byte{
178 // 170 bytes of a gzipped FileDescriptorProto
179 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x4c, 0x2f, 0x2a, 0x48,
180 0x8e, 0x4f, 0x2a, 0xca, 0xcf, 0x4e, 0x2d, 0xd2, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x2b,
181 0xc8, 0x29, 0x4d, 0xcf, 0xcc, 0x53, 0x8a, 0xe5, 0xe2, 0x70, 0xce, 0xcf, 0xcb, 0xf3, 0xcc, 0x4b,
182 0xcb, 0x17, 0x92, 0xe5, 0xe2, 0x2a, 0x4e, 0x2d, 0x2a, 0xcb, 0x4c, 0x4e, 0x8d, 0xcf, 0x4c, 0x91,
183 0x60, 0x54, 0x60, 0xd4, 0xe0, 0x0d, 0xe2, 0x84, 0x8a, 0x78, 0xa6, 0x08, 0x49, 0x70, 0xb1, 0xe7,
184 0xa5, 0x96, 0x94, 0xe7, 0x17, 0x65, 0x4b, 0x30, 0x29, 0x30, 0x6a, 0x70, 0x06, 0xc1, 0xb8, 0x20,
185 0x99, 0xc4, 0x94, 0x94, 0xa2, 0xd4, 0xe2, 0x62, 0x09, 0x66, 0x88, 0x0c, 0x94, 0x6b, 0xe4, 0xcc,
186 0xc5, 0xe5, 0x1e, 0x14, 0xe0, 0xec, 0x04, 0xb6, 0x5a, 0xc8, 0x94, 0x8b, 0x3b, 0xb8, 0x24, 0xb1,
187 0xa8, 0x24, 0xb8, 0xa4, 0x28, 0x35, 0x31, 0x57, 0x48, 0x40, 0x0f, 0xe2, 0x08, 0x3d, 0x98, 0x0b,
188 0xa4, 0x30, 0x44, 0x34, 0x18, 0x0d, 0x18, 0x93, 0xd8, 0xc0, 0x4e, 0x36, 0x06, 0x04, 0x00, 0x00,
189 0xff, 0xff, 0x7b, 0x5d, 0xfb, 0xe1, 0xc7, 0x00, 0x00, 0x00,
190}
diff --git a/vendor/github.com/hashicorp/go-plugin/grpc_broker.proto b/vendor/github.com/hashicorp/go-plugin/grpc_broker.proto
new file mode 100644
index 0000000..f578348
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-plugin/grpc_broker.proto
@@ -0,0 +1,14 @@
1syntax = "proto3";
2package plugin;
3
4message ConnInfo {
5 uint32 service_id = 1;
6 string network = 2;
7 string address = 3;
8}
9
10service GRPCBroker {
11 rpc StartStream(stream ConnInfo) returns (stream ConnInfo);
12}
13
14
diff --git a/vendor/github.com/hashicorp/go-plugin/grpc_client.go b/vendor/github.com/hashicorp/go-plugin/grpc_client.go
new file mode 100644
index 0000000..44294d0
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-plugin/grpc_client.go
@@ -0,0 +1,107 @@
1package plugin
2
3import (
4 "crypto/tls"
5 "fmt"
6 "net"
7 "time"
8
9 "golang.org/x/net/context"
10 "google.golang.org/grpc"
11 "google.golang.org/grpc/credentials"
12 "google.golang.org/grpc/health/grpc_health_v1"
13)
14
15func dialGRPCConn(tls *tls.Config, dialer func(string, time.Duration) (net.Conn, error)) (*grpc.ClientConn, error) {
16 // Build dialing options.
17 opts := make([]grpc.DialOption, 0, 5)
18
19 // We use a custom dialer so that we can connect over unix domain sockets
20 opts = append(opts, grpc.WithDialer(dialer))
21
22 // go-plugin expects to block the connection
23 opts = append(opts, grpc.WithBlock())
24
25 // Fail right away
26 opts = append(opts, grpc.FailOnNonTempDialError(true))
27
28 // If we have no TLS configuration set, we need to explicitly tell grpc
29 // that we're connecting with an insecure connection.
30 if tls == nil {
31 opts = append(opts, grpc.WithInsecure())
32 } else {
33 opts = append(opts, grpc.WithTransportCredentials(
34 credentials.NewTLS(tls)))
35 }
36
37 // Connect. Note the first parameter is unused because we use a custom
38 // dialer that has the state to see the address.
39 conn, err := grpc.Dial("unused", opts...)
40 if err != nil {
41 return nil, err
42 }
43
44 return conn, nil
45}
46
47// newGRPCClient creates a new GRPCClient. The Client argument is expected
48// to be successfully started already with a lock held.
49func newGRPCClient(doneCtx context.Context, c *Client) (*GRPCClient, error) {
50 conn, err := dialGRPCConn(c.config.TLSConfig, c.dialer)
51 if err != nil {
52 return nil, err
53 }
54
55 // Start the broker.
56 brokerGRPCClient := newGRPCBrokerClient(conn)
57 broker := newGRPCBroker(brokerGRPCClient, c.config.TLSConfig)
58 go broker.Run()
59 go brokerGRPCClient.StartStream()
60
61 return &GRPCClient{
62 Conn: conn,
63 Plugins: c.config.Plugins,
64 doneCtx: doneCtx,
65 broker: broker,
66 }, nil
67}
68
69// GRPCClient connects to a GRPCServer over gRPC to dispense plugin types.
70type GRPCClient struct {
71 Conn *grpc.ClientConn
72 Plugins map[string]Plugin
73
74 doneCtx context.Context
75 broker *GRPCBroker
76}
77
78// ClientProtocol impl.
79func (c *GRPCClient) Close() error {
80 c.broker.Close()
81 return c.Conn.Close()
82}
83
84// ClientProtocol impl.
85func (c *GRPCClient) Dispense(name string) (interface{}, error) {
86 raw, ok := c.Plugins[name]
87 if !ok {
88 return nil, fmt.Errorf("unknown plugin type: %s", name)
89 }
90
91 p, ok := raw.(GRPCPlugin)
92 if !ok {
93 return nil, fmt.Errorf("plugin %q doesn't support gRPC", name)
94 }
95
96 return p.GRPCClient(c.doneCtx, c.broker, c.Conn)
97}
98
99// ClientProtocol impl.
100func (c *GRPCClient) Ping() error {
101 client := grpc_health_v1.NewHealthClient(c.Conn)
102 _, err := client.Check(context.Background(), &grpc_health_v1.HealthCheckRequest{
103 Service: GRPCServiceName,
104 })
105
106 return err
107}
diff --git a/vendor/github.com/hashicorp/go-plugin/grpc_server.go b/vendor/github.com/hashicorp/go-plugin/grpc_server.go
new file mode 100644
index 0000000..3a72739
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-plugin/grpc_server.go
@@ -0,0 +1,132 @@
1package plugin
2
3import (
4 "bytes"
5 "crypto/tls"
6 "encoding/json"
7 "fmt"
8 "io"
9 "net"
10
11 "google.golang.org/grpc"
12 "google.golang.org/grpc/credentials"
13 "google.golang.org/grpc/health"
14 "google.golang.org/grpc/health/grpc_health_v1"
15)
16
17// GRPCServiceName is the name of the service that the health check should
18// return as passing.
19const GRPCServiceName = "plugin"
20
21// DefaultGRPCServer can be used with the "GRPCServer" field for Server
22// as a default factory method to create a gRPC server with no extra options.
23func DefaultGRPCServer(opts []grpc.ServerOption) *grpc.Server {
24 return grpc.NewServer(opts...)
25}
26
27// GRPCServer is a ServerType implementation that serves plugins over
28// gRPC. This allows plugins to easily be written for other languages.
29//
30// The GRPCServer outputs a custom configuration as a base64-encoded
31// JSON structure represented by the GRPCServerConfig config structure.
32type GRPCServer struct {
33 // Plugins are the list of plugins to serve.
34 Plugins map[string]Plugin
35
36 // Server is the actual server that will accept connections. This
37 // will be used for plugin registration as well.
38 Server func([]grpc.ServerOption) *grpc.Server
39
40 // TLS should be the TLS configuration if available. If this is nil,
41 // the connection will not have transport security.
42 TLS *tls.Config
43
44 // DoneCh is the channel that is closed when this server has exited.
45 DoneCh chan struct{}
46
47 // Stdout/StderrLis are the readers for stdout/stderr that will be copied
48 // to the stdout/stderr connection that is output.
49 Stdout io.Reader
50 Stderr io.Reader
51
52 config GRPCServerConfig
53 server *grpc.Server
54 broker *GRPCBroker
55}
56
57// ServerProtocol impl.
58func (s *GRPCServer) Init() error {
59 // Create our server
60 var opts []grpc.ServerOption
61 if s.TLS != nil {
62 opts = append(opts, grpc.Creds(credentials.NewTLS(s.TLS)))
63 }
64 s.server = s.Server(opts)
65
66 // Register the health service
67 healthCheck := health.NewServer()
68 healthCheck.SetServingStatus(
69 GRPCServiceName, grpc_health_v1.HealthCheckResponse_SERVING)
70 grpc_health_v1.RegisterHealthServer(s.server, healthCheck)
71
72 // Register the broker service
73 brokerServer := newGRPCBrokerServer()
74 RegisterGRPCBrokerServer(s.server, brokerServer)
75 s.broker = newGRPCBroker(brokerServer, s.TLS)
76 go s.broker.Run()
77
78 // Register all our plugins onto the gRPC server.
79 for k, raw := range s.Plugins {
80 p, ok := raw.(GRPCPlugin)
81 if !ok {
82 return fmt.Errorf("%q is not a GRPC-compatible plugin", k)
83 }
84
85 if err := p.GRPCServer(s.broker, s.server); err != nil {
86 return fmt.Errorf("error registring %q: %s", k, err)
87 }
88 }
89
90 return nil
91}
92
93// Stop calls Stop on the underlying grpc.Server
94func (s *GRPCServer) Stop() {
95 s.server.Stop()
96}
97
98// GracefulStop calls GracefulStop on the underlying grpc.Server
99func (s *GRPCServer) GracefulStop() {
100 s.server.GracefulStop()
101}
102
103// Config is the GRPCServerConfig encoded as JSON then base64.
104func (s *GRPCServer) Config() string {
105 // Create a buffer that will contain our final contents
106 var buf bytes.Buffer
107
108 // Wrap the base64 encoding with JSON encoding.
109 if err := json.NewEncoder(&buf).Encode(s.config); err != nil {
110 // We panic since this shouldn't happen under any scenario. We
111 // carefully control the structure being encoded here and it should
112 // always be successful.
113 panic(err)
114 }
115
116 return buf.String()
117}
118
119func (s *GRPCServer) Serve(lis net.Listener) {
120 // Start serving in a goroutine
121 go s.server.Serve(lis)
122
123 // Wait until graceful completion
124 <-s.DoneCh
125}
126
127// GRPCServerConfig is the extra configuration passed along for consumers
128// to facilitate using GRPC plugins.
129type GRPCServerConfig struct {
130 StdoutAddr string `json:"stdout_addr"`
131 StderrAddr string `json:"stderr_addr"`
132}
diff --git a/vendor/github.com/hashicorp/go-plugin/log_entry.go b/vendor/github.com/hashicorp/go-plugin/log_entry.go
new file mode 100644
index 0000000..2996c14
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-plugin/log_entry.go
@@ -0,0 +1,73 @@
1package plugin
2
3import (
4 "encoding/json"
5 "time"
6)
7
8// logEntry is the JSON payload that gets sent to Stderr from the plugin to the host
9type logEntry struct {
10 Message string `json:"@message"`
11 Level string `json:"@level"`
12 Timestamp time.Time `json:"timestamp"`
13 KVPairs []*logEntryKV `json:"kv_pairs"`
14}
15
16// logEntryKV is a key value pair within the Output payload
17type logEntryKV struct {
18 Key string `json:"key"`
19 Value interface{} `json:"value"`
20}
21
22// flattenKVPairs is used to flatten KVPair slice into []interface{}
23// for hclog consumption.
24func flattenKVPairs(kvs []*logEntryKV) []interface{} {
25 var result []interface{}
26 for _, kv := range kvs {
27 result = append(result, kv.Key)
28 result = append(result, kv.Value)
29 }
30
31 return result
32}
33
34// parseJSON handles parsing JSON output
35func parseJSON(input string) (*logEntry, error) {
36 var raw map[string]interface{}
37 entry := &logEntry{}
38
39 err := json.Unmarshal([]byte(input), &raw)
40 if err != nil {
41 return nil, err
42 }
43
44 // Parse hclog-specific objects
45 if v, ok := raw["@message"]; ok {
46 entry.Message = v.(string)
47 delete(raw, "@message")
48 }
49
50 if v, ok := raw["@level"]; ok {
51 entry.Level = v.(string)
52 delete(raw, "@level")
53 }
54
55 if v, ok := raw["@timestamp"]; ok {
56 t, err := time.Parse("2006-01-02T15:04:05.000000Z07:00", v.(string))
57 if err != nil {
58 return nil, err
59 }
60 entry.Timestamp = t
61 delete(raw, "@timestamp")
62 }
63
64 // Parse dynamic KV args from the hclog payload.
65 for k, v := range raw {
66 entry.KVPairs = append(entry.KVPairs, &logEntryKV{
67 Key: k,
68 Value: v,
69 })
70 }
71
72 return entry, nil
73}
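A standalone sketch of the stderr payload that logStderr and parseJSON expect when the plugin logs through hclog with JSONFormat enabled; the struct here mirrors (but is not) the package's unexported logEntry type, and the sample line is invented:

```go
package main

import (
    "encoding/json"
    "fmt"
)

// entry mirrors the shape of the unexported logEntry type for illustration.
type entry struct {
    Message   string `json:"@message"`
    Level     string `json:"@level"`
    Timestamp string `json:"@timestamp"`
}

func main() {
    line := `{"@level":"debug","@message":"plugin configured","@timestamp":"2019-02-22T18:24:37.000000-05:00","path":"./my-plugin"}`

    var e entry
    if err := json.Unmarshal([]byte(line), &e); err != nil {
        panic(err)
    }
    // Keys other than @message, @level and @timestamp ("path" here) become
    // KV pairs on the re-emitted hclog entry.
    fmt.Println(e.Level, e.Message)
}
```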
diff --git a/vendor/github.com/hashicorp/go-plugin/plugin.go b/vendor/github.com/hashicorp/go-plugin/plugin.go
index 37c8fd6..79d9674 100644
--- a/vendor/github.com/hashicorp/go-plugin/plugin.go
+++ b/vendor/github.com/hashicorp/go-plugin/plugin.go
@@ -9,7 +9,11 @@
9package plugin 9package plugin
10 10
11import ( 11import (
12 "context"
13 "errors"
12 "net/rpc" 14 "net/rpc"
15
16 "google.golang.org/grpc"
13) 17)
14 18
15// Plugin is the interface that is implemented to serve/connect to an 19// Plugin is the interface that is implemented to serve/connect to an
@@ -23,3 +27,32 @@ type Plugin interface {
23 // serving that communicates to the server end of the plugin. 27 // serving that communicates to the server end of the plugin.
24 Client(*MuxBroker, *rpc.Client) (interface{}, error) 28 Client(*MuxBroker, *rpc.Client) (interface{}, error)
25} 29}
30
31// GRPCPlugin is the interface that is implemented to serve/connect to
32// a plugin over gRPC.
33type GRPCPlugin interface {
34 // GRPCServer should register this plugin for serving with the
35 // given GRPCServer. Unlike Plugin.Server, this is only called once
36 // since gRPC plugins serve singletons.
37 GRPCServer(*GRPCBroker, *grpc.Server) error
38
39 // GRPCClient should return the interface implementation for the plugin
40 // you're serving via gRPC. The provided context will be canceled by
41 // go-plugin in the event of the plugin process exiting.
42 GRPCClient(context.Context, *GRPCBroker, *grpc.ClientConn) (interface{}, error)
43}
44
45// NetRPCUnsupportedPlugin implements Plugin but returns errors for the
46// Server and Client functions. This will effectively disable support for
47// net/rpc based plugins.
48//
49// This struct can be embedded in your struct.
50type NetRPCUnsupportedPlugin struct{}
51
52func (p NetRPCUnsupportedPlugin) Server(*MuxBroker) (interface{}, error) {
53 return nil, errors.New("net/rpc plugin protocol not supported")
54}
55
56func (p NetRPCUnsupportedPlugin) Client(*MuxBroker, *rpc.Client) (interface{}, error) {
57 return nil, errors.New("net/rpc plugin protocol not supported")
58}
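A hedged sketch of a gRPC-only plugin built on these additions: embedding NetRPCUnsupportedPlugin covers the net/rpc half of the Plugin interface with "not supported" errors, leaving only the two GRPCPlugin hooks to implement. The KVGRPCPlugin name and the registration comments are placeholders:

```go
package kvplugin

import (
    "context"

    plugin "github.com/hashicorp/go-plugin"
    "google.golang.org/grpc"
)

// KVGRPCPlugin is a hypothetical gRPC-only plugin. Embedding
// NetRPCUnsupportedPlugin satisfies Plugin.Server/Client with
// "not supported" errors, so only the GRPCPlugin hooks need bodies.
type KVGRPCPlugin struct {
    plugin.NetRPCUnsupportedPlugin
}

func (p *KVGRPCPlugin) GRPCServer(broker *plugin.GRPCBroker, s *grpc.Server) error {
    // Register the protoc-generated service implementation on s here.
    return nil
}

func (p *KVGRPCPlugin) GRPCClient(ctx context.Context, broker *plugin.GRPCBroker, c *grpc.ClientConn) (interface{}, error) {
    // Wrap c in the protoc-generated client and return the typed interface;
    // returning the raw connection keeps this sketch self-contained.
    return c, nil
}
```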
diff --git a/vendor/github.com/hashicorp/go-plugin/protocol.go b/vendor/github.com/hashicorp/go-plugin/protocol.go
new file mode 100644
index 0000000..0cfc19e
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-plugin/protocol.go
@@ -0,0 +1,45 @@
1package plugin
2
3import (
4 "io"
5 "net"
6)
7
8// Protocol is an enum representing the types of protocols.
9type Protocol string
10
11const (
12 ProtocolInvalid Protocol = ""
13 ProtocolNetRPC Protocol = "netrpc"
14 ProtocolGRPC Protocol = "grpc"
15)
16
17// ServerProtocol is an interface that must be implemented for new plugin
18// protocols to be servers.
19type ServerProtocol interface {
20 // Init is called once to configure and initialize the protocol, but
21 // not start listening. This is the point at which all validation should
22 // be done and errors returned.
23 Init() error
24
25 // Config is extra configuration to be outputted to stdout. This will
26 // be automatically base64 encoded to ensure it can be parsed properly.
27 // This can be an empty string if additional configuration is not needed.
28 Config() string
29
30 // Serve is called to serve connections on the given listener. This should
31 // continue until the listener is closed.
32 Serve(net.Listener)
33}
34
35// ClientProtocol is an interface that must be implemented for new plugin
36// protocols to be clients.
37type ClientProtocol interface {
38 io.Closer
39
40 // Dispense dispenses a new instance of the plugin with the given name.
41 Dispense(string) (interface{}, error)
42
43 // Ping checks that the client connection is still healthy.
44 Ping() error
45}
diff --git a/vendor/github.com/hashicorp/go-plugin/rpc_client.go b/vendor/github.com/hashicorp/go-plugin/rpc_client.go
index 29f9bf0..f30a4b1 100644
--- a/vendor/github.com/hashicorp/go-plugin/rpc_client.go
+++ b/vendor/github.com/hashicorp/go-plugin/rpc_client.go
@@ -1,6 +1,7 @@
1package plugin 1package plugin
2 2
3import ( 3import (
4 "crypto/tls"
4 "fmt" 5 "fmt"
5 "io" 6 "io"
6 "net" 7 "net"
@@ -19,6 +20,42 @@ type RPCClient struct {
19 stdout, stderr net.Conn 20 stdout, stderr net.Conn
20} 21}
21 22
23// newRPCClient creates a new RPCClient. The Client argument is expected
24// to be successfully started already with a lock held.
25func newRPCClient(c *Client) (*RPCClient, error) {
26 // Connect to the client
27 conn, err := net.Dial(c.address.Network(), c.address.String())
28 if err != nil {
29 return nil, err
30 }
31 if tcpConn, ok := conn.(*net.TCPConn); ok {
32 // Make sure to set keep alive so that the connection doesn't die
33 tcpConn.SetKeepAlive(true)
34 }
35
36 if c.config.TLSConfig != nil {
37 conn = tls.Client(conn, c.config.TLSConfig)
38 }
39
40 // Create the actual RPC client
41 result, err := NewRPCClient(conn, c.config.Plugins)
42 if err != nil {
43 conn.Close()
44 return nil, err
45 }
46
47 // Begin the stream syncing so that stdin, out, err work properly
48 err = result.SyncStreams(
49 c.config.SyncStdout,
50 c.config.SyncStderr)
51 if err != nil {
52 result.Close()
53 return nil, err
54 }
55
56 return result, nil
57}
58
22// NewRPCClient creates a client from an already-open connection-like value. 59// NewRPCClient creates a client from an already-open connection-like value.
23// Dial is typically used instead. 60// Dial is typically used instead.
24func NewRPCClient(conn io.ReadWriteCloser, plugins map[string]Plugin) (*RPCClient, error) { 61func NewRPCClient(conn io.ReadWriteCloser, plugins map[string]Plugin) (*RPCClient, error) {
@@ -121,3 +158,13 @@ func (c *RPCClient) Dispense(name string) (interface{}, error) {
121 158
122 return p.Client(c.broker, rpc.NewClient(conn)) 159 return p.Client(c.broker, rpc.NewClient(conn))
123} 160}
161
162// Ping pings the connection to ensure it is still alive.
163//
164// The error from the RPC call is returned exactly if you want to inspect
165// it for further error analysis. Any error returned from here would indicate
166// that the connection to the plugin is not healthy.
167func (c *RPCClient) Ping() error {
168 var empty struct{}
169 return c.control.Call("Control.Ping", true, &empty)
170}
diff --git a/vendor/github.com/hashicorp/go-plugin/rpc_server.go b/vendor/github.com/hashicorp/go-plugin/rpc_server.go
index 3984dc8..5bb18dd 100644
--- a/vendor/github.com/hashicorp/go-plugin/rpc_server.go
+++ b/vendor/github.com/hashicorp/go-plugin/rpc_server.go
@@ -34,10 +34,14 @@ type RPCServer struct {
34 lock sync.Mutex 34 lock sync.Mutex
35} 35}
36 36
37// Accept accepts connections on a listener and serves requests for 37// ServerProtocol impl.
38// each incoming connection. Accept blocks; the caller typically invokes 38func (s *RPCServer) Init() error { return nil }
39// it in a go statement. 39
40func (s *RPCServer) Accept(lis net.Listener) { 40// ServerProtocol impl.
41func (s *RPCServer) Config() string { return "" }
42
43// ServerProtocol impl.
44func (s *RPCServer) Serve(lis net.Listener) {
41 for { 45 for {
42 conn, err := lis.Accept() 46 conn, err := lis.Accept()
43 if err != nil { 47 if err != nil {
@@ -122,6 +126,14 @@ type controlServer struct {
122 server *RPCServer 126 server *RPCServer
123} 127}
124 128
129// Ping can be called to verify the connection (and likely the binary)
130// is still alive to a plugin.
131func (c *controlServer) Ping(
132 null bool, response *struct{}) error {
133 *response = struct{}{}
134 return nil
135}
136
125func (c *controlServer) Quit( 137func (c *controlServer) Quit(
126 null bool, response *struct{}) error { 138 null bool, response *struct{}) error {
127 // End the server 139 // End the server
diff --git a/vendor/github.com/hashicorp/go-plugin/server.go b/vendor/github.com/hashicorp/go-plugin/server.go
index b5c5270..1e808b9 100644
--- a/vendor/github.com/hashicorp/go-plugin/server.go
+++ b/vendor/github.com/hashicorp/go-plugin/server.go
@@ -1,6 +1,8 @@
1package plugin 1package plugin
2 2
3import ( 3import (
4 "crypto/tls"
5 "encoding/base64"
4 "errors" 6 "errors"
5 "fmt" 7 "fmt"
6 "io/ioutil" 8 "io/ioutil"
@@ -11,6 +13,10 @@ import (
11 "runtime" 13 "runtime"
12 "strconv" 14 "strconv"
13 "sync/atomic" 15 "sync/atomic"
16
17 "github.com/hashicorp/go-hclog"
18
19 "google.golang.org/grpc"
14) 20)
15 21
16// CoreProtocolVersion is the ProtocolVersion of the plugin system itself. 22// CoreProtocolVersion is the ProtocolVersion of the plugin system itself.
@@ -45,14 +51,41 @@ type ServeConfig struct {
45 // HandshakeConfig is the configuration that must match clients. 51 // HandshakeConfig is the configuration that must match clients.
46 HandshakeConfig 52 HandshakeConfig
47 53
54 // TLSProvider is a function that returns a configured tls.Config.
55 TLSProvider func() (*tls.Config, error)
56
48 // Plugins are the plugins that are served. 57 // Plugins are the plugins that are served.
49 Plugins map[string]Plugin 58 Plugins map[string]Plugin
59
60 // GRPCServer should be non-nil to enable serving the plugins over
61 // gRPC. This is a function to create the server when needed with the
62 // given server options. The server options populated by go-plugin will
63 // be for TLS if set. You may modify the input slice.
64 //
65 // Note that the grpc.Server will automatically be registered with
66 // the gRPC health checking service. This is not optional since go-plugin
67 // relies on this to implement Ping().
68 GRPCServer func([]grpc.ServerOption) *grpc.Server
69
70 // Logger is used to pass a logger into the server. If none is provided the
71 // server will create a default logger.
72 Logger hclog.Logger
73}
74
75// Protocol returns the protocol that this server should speak.
76func (c *ServeConfig) Protocol() Protocol {
77 result := ProtocolNetRPC
78 if c.GRPCServer != nil {
79 result = ProtocolGRPC
80 }
81
82 return result
50} 83}
51 84
52// Serve serves the plugins given by ServeConfig. 85// Serve serves the plugins given by ServeConfig.
53// 86//
54// Serve doesn't return until the plugin is done being executed. Any 87// Serve doesn't return until the plugin is done being executed. Any
55// errors will be outputted to the log. 88// errors will be outputted to os.Stderr.
56// 89//
57// This is the method that plugins should call in their main() functions. 90// This is the method that plugins should call in their main() functions.
58func Serve(opts *ServeConfig) { 91func Serve(opts *ServeConfig) {
@@ -77,6 +110,16 @@ func Serve(opts *ServeConfig) {
77 // Logging goes to the original stderr 110 // Logging goes to the original stderr
78 log.SetOutput(os.Stderr) 111 log.SetOutput(os.Stderr)
79 112
113 logger := opts.Logger
114 if logger == nil {
115 // internal logger to os.Stderr
116 logger = hclog.New(&hclog.LoggerOptions{
117 Level: hclog.Trace,
118 Output: os.Stderr,
119 JSONFormat: true,
120 })
121 }
122
80 // Create our new stdout, stderr files. These will override our built-in 123 // Create our new stdout, stderr files. These will override our built-in
81 // stdout/stderr so that it works across the stream boundary. 124 // stdout/stderr so that it works across the stream boundary.
82 stdout_r, stdout_w, err := os.Pipe() 125 stdout_r, stdout_w, err := os.Pipe()
@@ -93,30 +136,86 @@ func Serve(opts *ServeConfig) {
93 // Register a listener so we can accept a connection 136 // Register a listener so we can accept a connection
94 listener, err := serverListener() 137 listener, err := serverListener()
95 if err != nil { 138 if err != nil {
96 log.Printf("[ERR] plugin: plugin init: %s", err) 139 logger.Error("plugin init error", "error", err)
97 return 140 return
98 } 141 }
99 defer listener.Close() 142
143 // Close the listener on return. We wrap this in a func() on purpose
144 // because the "listener" reference may change to TLS.
145 defer func() {
146 listener.Close()
147 }()
148
149 var tlsConfig *tls.Config
150 if opts.TLSProvider != nil {
151 tlsConfig, err = opts.TLSProvider()
152 if err != nil {
153 logger.Error("plugin tls init", "error", err)
154 return
155 }
156 }
100 157
101 // Create the channel to tell us when we're done 158 // Create the channel to tell us when we're done
102 doneCh := make(chan struct{}) 159 doneCh := make(chan struct{})
103 160
104 // Create the RPC server to dispense 161 // Build the server type
105 server := &RPCServer{ 162 var server ServerProtocol
106 Plugins: opts.Plugins, 163 switch opts.Protocol() {
107 Stdout: stdout_r, 164 case ProtocolNetRPC:
108 Stderr: stderr_r, 165 // If we have a TLS configuration then we wrap the listener
109 DoneCh: doneCh, 166 // ourselves and do it at that level.
167 if tlsConfig != nil {
168 listener = tls.NewListener(listener, tlsConfig)
169 }
170
171 // Create the RPC server to dispense
172 server = &RPCServer{
173 Plugins: opts.Plugins,
174 Stdout: stdout_r,
175 Stderr: stderr_r,
176 DoneCh: doneCh,
177 }
178
179 case ProtocolGRPC:
180 // Create the gRPC server
181 server = &GRPCServer{
182 Plugins: opts.Plugins,
183 Server: opts.GRPCServer,
184 TLS: tlsConfig,
185 Stdout: stdout_r,
186 Stderr: stderr_r,
187 DoneCh: doneCh,
188 }
189
190 default:
191 panic("unknown server protocol: " + opts.Protocol())
110 } 192 }
111 193
194 // Initialize the servers
195 if err := server.Init(); err != nil {
196 logger.Error("protocol init", "error", err)
197 return
198 }
199
200 // Build the extra configuration
201 extra := ""
202 if v := server.Config(); v != "" {
203 extra = base64.StdEncoding.EncodeToString([]byte(v))
204 }
205 if extra != "" {
206 extra = "|" + extra
207 }
208
209 logger.Debug("plugin address", "network", listener.Addr().Network(), "address", listener.Addr().String())
210
112 // Output the address and service name to stdout so that core can bring it up. 211 // Output the address and service name to stdout so that core can bring it up.
113 log.Printf("[DEBUG] plugin: plugin address: %s %s\n", 212 fmt.Printf("%d|%d|%s|%s|%s%s\n",
114 listener.Addr().Network(), listener.Addr().String())
115 fmt.Printf("%d|%d|%s|%s\n",
116 CoreProtocolVersion, 213 CoreProtocolVersion,
117 opts.ProtocolVersion, 214 opts.ProtocolVersion,
118 listener.Addr().Network(), 215 listener.Addr().Network(),
119 listener.Addr().String()) 216 listener.Addr().String(),
217 opts.Protocol(),
218 extra)
120 os.Stdout.Sync() 219 os.Stdout.Sync()
121 220
122 // Eat the interrupts 221 // Eat the interrupts
@@ -127,9 +226,7 @@ func Serve(opts *ServeConfig) {
127 for { 226 for {
128 <-ch 227 <-ch
129 newCount := atomic.AddInt32(&count, 1) 228 newCount := atomic.AddInt32(&count, 1)
130 log.Printf( 229 logger.Debug("plugin received interrupt signal, ignoring", "count", newCount)
131 "[DEBUG] plugin: received interrupt signal (count: %d). Ignoring.",
132 newCount)
133 } 230 }
134 }() 231 }()
135 232
@@ -137,10 +234,8 @@ func Serve(opts *ServeConfig) {
137 os.Stdout = stdout_w 234 os.Stdout = stdout_w
138 os.Stderr = stderr_w 235 os.Stderr = stderr_w
139 236
140 // Serve 237 // Accept connections and wait for completion
141 go server.Accept(listener) 238 go server.Serve(listener)
142
143 // Wait for the graceful exit
144 <-doneCh 239 <-doneCh
145} 240}
146 241
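A minimal plugin-side main() against the updated Serve: a non-nil GRPCServer flips ServeConfig.Protocol() to grpc, which Serve now advertises in the extended handshake line on stdout. The handshake values are placeholders and KVGRPCPlugin is the hypothetical GRPCPlugin from the plugin.go sketch, assumed to be available in this package:

```go
package main

import (
    plugin "github.com/hashicorp/go-plugin"
)

// Hypothetical handshake; it must match the host's ClientConfig.HandshakeConfig.
var handshake = plugin.HandshakeConfig{
    ProtocolVersion:  1,
    MagicCookieKey:   "EXAMPLE_PLUGIN",
    MagicCookieValue: "example",
}

func main() {
    plugin.Serve(&plugin.ServeConfig{
        HandshakeConfig: handshake,
        // KVGRPCPlugin is the hypothetical GRPCPlugin sketched earlier.
        Plugins: map[string]plugin.Plugin{"kv": &KVGRPCPlugin{}},
        // A non-nil GRPCServer makes ServeConfig.Protocol() return
        // ProtocolGRPC, announced in the handshake line printed to stdout.
        GRPCServer: plugin.DefaultGRPCServer,
    })
}
```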
diff --git a/vendor/github.com/hashicorp/go-plugin/testing.go b/vendor/github.com/hashicorp/go-plugin/testing.go
index 9086a1b..df29593 100644
--- a/vendor/github.com/hashicorp/go-plugin/testing.go
+++ b/vendor/github.com/hashicorp/go-plugin/testing.go
@@ -2,9 +2,12 @@ package plugin
2 2
3import ( 3import (
4 "bytes" 4 "bytes"
5 "context"
5 "net" 6 "net"
6 "net/rpc" 7 "net/rpc"
7 "testing" 8
9 "github.com/mitchellh/go-testing-interface"
10 "google.golang.org/grpc"
8) 11)
9 12
10// The testing file contains test helpers that you can use outside of 13// The testing file contains test helpers that you can use outside of
@@ -12,7 +15,7 @@ import (
12 15
13// TestConn is a helper function for returning a client and server 16// TestConn is a helper function for returning a client and server
14// net.Conn connected to each other. 17// net.Conn connected to each other.
15func TestConn(t *testing.T) (net.Conn, net.Conn) { 18func TestConn(t testing.T) (net.Conn, net.Conn) {
16 // Listen to any local port. This listener will be closed 19 // Listen to any local port. This listener will be closed
17 // after a single connection is established. 20 // after a single connection is established.
18 l, err := net.Listen("tcp", "127.0.0.1:0") 21 l, err := net.Listen("tcp", "127.0.0.1:0")
@@ -46,7 +49,7 @@ func TestConn(t *testing.T) (net.Conn, net.Conn) {
46} 49}
47 50
48// TestRPCConn returns a rpc client and server connected to each other. 51// TestRPCConn returns a rpc client and server connected to each other.
49func TestRPCConn(t *testing.T) (*rpc.Client, *rpc.Server) { 52func TestRPCConn(t testing.T) (*rpc.Client, *rpc.Server) {
50 clientConn, serverConn := TestConn(t) 53 clientConn, serverConn := TestConn(t)
51 54
52 server := rpc.NewServer() 55 server := rpc.NewServer()
@@ -58,7 +61,7 @@ func TestRPCConn(t *testing.T) (*rpc.Client, *rpc.Server) {
58 61
59// TestPluginRPCConn returns a plugin RPC client and server that are connected 62// TestPluginRPCConn returns a plugin RPC client and server that are connected
60// together and configured. 63// together and configured.
61func TestPluginRPCConn(t *testing.T, ps map[string]Plugin) (*RPCClient, *RPCServer) { 64func TestPluginRPCConn(t testing.T, ps map[string]Plugin) (*RPCClient, *RPCServer) {
62 // Create two net.Conns we can use to shuttle our control connection 65 // Create two net.Conns we can use to shuttle our control connection
63 clientConn, serverConn := TestConn(t) 66 clientConn, serverConn := TestConn(t)
64 67
@@ -74,3 +77,78 @@ func TestPluginRPCConn(t *testing.T, ps map[string]Plugin) (*RPCClient, *RPCServ
74 77
75 return client, server 78 return client, server
76} 79}
80
81// TestGRPCConn returns a gRPC client conn and grpc server that are connected
82// together and configured. The register function is used to register services
83// prior to the Serve call. This is used to test gRPC connections.
84func TestGRPCConn(t testing.T, register func(*grpc.Server)) (*grpc.ClientConn, *grpc.Server) {
85 // Create a listener
86 l, err := net.Listen("tcp", "127.0.0.1:0")
87 if err != nil {
88 t.Fatalf("err: %s", err)
89 }
90
91 server := grpc.NewServer()
92 register(server)
93 go server.Serve(l)
94
95 // Connect to the server
96 conn, err := grpc.Dial(
97 l.Addr().String(),
98 grpc.WithBlock(),
99 grpc.WithInsecure())
100 if err != nil {
101 t.Fatalf("err: %s", err)
102 }
103
104 // Connection successful, close the listener
105 l.Close()
106
107 return conn, server
108}
109
110// TestPluginGRPCConn returns a plugin gRPC client and server that are connected
111// together and configured. This is used to test gRPC connections.
112func TestPluginGRPCConn(t testing.T, ps map[string]Plugin) (*GRPCClient, *GRPCServer) {
113 // Create a listener
114 l, err := net.Listen("tcp", "127.0.0.1:0")
115 if err != nil {
116 t.Fatalf("err: %s", err)
117 }
118
119 // Start up the server
120 server := &GRPCServer{
121 Plugins: ps,
122 Server: DefaultGRPCServer,
123 Stdout: new(bytes.Buffer),
124 Stderr: new(bytes.Buffer),
125 }
126 if err := server.Init(); err != nil {
127 t.Fatalf("err: %s", err)
128 }
129 go server.Serve(l)
130
131 // Connect to the server
132 conn, err := grpc.Dial(
133 l.Addr().String(),
134 grpc.WithBlock(),
135 grpc.WithInsecure())
136 if err != nil {
137 t.Fatalf("err: %s", err)
138 }
139
140 brokerGRPCClient := newGRPCBrokerClient(conn)
141 broker := newGRPCBroker(brokerGRPCClient, nil)
142 go broker.Run()
143 go brokerGRPCClient.StartStream()
144
145 // Create the client
146 client := &GRPCClient{
147 Conn: conn,
148 Plugins: ps,
149 broker: broker,
150 doneCtx: context.Background(),
151 }
152
153 return client, server
154}
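
The new gRPC test helpers mirror the existing RPC ones: TestGRPCConn takes a callback that registers services before Serve is called and returns an already-dialed client connection, while TestPluginGRPCConn wires a full GRPCClient/GRPCServer pair around a plugin map. A minimal sketch of how a consumer's test might use TestGRPCConn, assuming the standard gRPC health service purely for illustration (not part of this diff):

package plugin_test

import (
	"testing"

	plugin "github.com/hashicorp/go-plugin"
	"google.golang.org/grpc"
	"google.golang.org/grpc/health"
	healthpb "google.golang.org/grpc/health/grpc_health_v1"
)

func TestGRPCConnHealth(t *testing.T) {
	// Services must be registered in the callback, before the helper calls Serve.
	conn, server := plugin.TestGRPCConn(t, func(s *grpc.Server) {
		healthpb.RegisterHealthServer(s, health.NewServer())
	})
	defer server.Stop()
	defer conn.Close()

	// conn is already connected (grpc.WithBlock), so clients can be built directly.
	_ = healthpb.NewHealthClient(conn)
}
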
diff --git a/vendor/github.com/hashicorp/go-safetemp/LICENSE b/vendor/github.com/hashicorp/go-safetemp/LICENSE
new file mode 100644
index 0000000..be2cc4d
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-safetemp/LICENSE
@@ -0,0 +1,362 @@
1Mozilla Public License, version 2.0
2
31. Definitions
4
51.1. "Contributor"
6
7 means each individual or legal entity that creates, contributes to the
8 creation of, or owns Covered Software.
9
101.2. "Contributor Version"
11
12 means the combination of the Contributions of others (if any) used by a
13 Contributor and that particular Contributor's Contribution.
14
151.3. "Contribution"
16
17 means Covered Software of a particular Contributor.
18
191.4. "Covered Software"
20
21 means Source Code Form to which the initial Contributor has attached the
22 notice in Exhibit A, the Executable Form of such Source Code Form, and
23 Modifications of such Source Code Form, in each case including portions
24 thereof.
25
261.5. "Incompatible With Secondary Licenses"
27 means
28
29 a. that the initial Contributor has attached the notice described in
30 Exhibit B to the Covered Software; or
31
32 b. that the Covered Software was made available under the terms of
33 version 1.1 or earlier of the License, but not also under the terms of
34 a Secondary License.
35
361.6. "Executable Form"
37
38 means any form of the work other than Source Code Form.
39
401.7. "Larger Work"
41
42 means a work that combines Covered Software with other material, in a
43 separate file or files, that is not Covered Software.
44
451.8. "License"
46
47 means this document.
48
491.9. "Licensable"
50
51 means having the right to grant, to the maximum extent possible, whether
52 at the time of the initial grant or subsequently, any and all of the
53 rights conveyed by this License.
54
551.10. "Modifications"
56
57 means any of the following:
58
59 a. any file in Source Code Form that results from an addition to,
60 deletion from, or modification of the contents of Covered Software; or
61
62 b. any new file in Source Code Form that contains any Covered Software.
63
641.11. "Patent Claims" of a Contributor
65
66 means any patent claim(s), including without limitation, method,
67 process, and apparatus claims, in any patent Licensable by such
68 Contributor that would be infringed, but for the grant of the License,
69 by the making, using, selling, offering for sale, having made, import,
70 or transfer of either its Contributions or its Contributor Version.
71
721.12. "Secondary License"
73
74 means either the GNU General Public License, Version 2.0, the GNU Lesser
75 General Public License, Version 2.1, the GNU Affero General Public
76 License, Version 3.0, or any later versions of those licenses.
77
781.13. "Source Code Form"
79
80 means the form of the work preferred for making modifications.
81
821.14. "You" (or "Your")
83
84 means an individual or a legal entity exercising rights under this
85 License. For legal entities, "You" includes any entity that controls, is
86 controlled by, or is under common control with You. For purposes of this
87 definition, "control" means (a) the power, direct or indirect, to cause
88 the direction or management of such entity, whether by contract or
89 otherwise, or (b) ownership of more than fifty percent (50%) of the
90 outstanding shares or beneficial ownership of such entity.
91
92
932. License Grants and Conditions
94
952.1. Grants
96
97 Each Contributor hereby grants You a world-wide, royalty-free,
98 non-exclusive license:
99
100 a. under intellectual property rights (other than patent or trademark)
101 Licensable by such Contributor to use, reproduce, make available,
102 modify, display, perform, distribute, and otherwise exploit its
103 Contributions, either on an unmodified basis, with Modifications, or
104 as part of a Larger Work; and
105
106 b. under Patent Claims of such Contributor to make, use, sell, offer for
107 sale, have made, import, and otherwise transfer either its
108 Contributions or its Contributor Version.
109
1102.2. Effective Date
111
112 The licenses granted in Section 2.1 with respect to any Contribution
113 become effective for each Contribution on the date the Contributor first
114 distributes such Contribution.
115
1162.3. Limitations on Grant Scope
117
118 The licenses granted in this Section 2 are the only rights granted under
119 this License. No additional rights or licenses will be implied from the
120 distribution or licensing of Covered Software under this License.
121 Notwithstanding Section 2.1(b) above, no patent license is granted by a
122 Contributor:
123
124 a. for any code that a Contributor has removed from Covered Software; or
125
126 b. for infringements caused by: (i) Your and any other third party's
127 modifications of Covered Software, or (ii) the combination of its
128 Contributions with other software (except as part of its Contributor
129 Version); or
130
131 c. under Patent Claims infringed by Covered Software in the absence of
132 its Contributions.
133
134 This License does not grant any rights in the trademarks, service marks,
135 or logos of any Contributor (except as may be necessary to comply with
136 the notice requirements in Section 3.4).
137
1382.4. Subsequent Licenses
139
140 No Contributor makes additional grants as a result of Your choice to
141 distribute the Covered Software under a subsequent version of this
142 License (see Section 10.2) or under the terms of a Secondary License (if
143 permitted under the terms of Section 3.3).
144
1452.5. Representation
146
147 Each Contributor represents that the Contributor believes its
148 Contributions are its original creation(s) or it has sufficient rights to
149 grant the rights to its Contributions conveyed by this License.
150
1512.6. Fair Use
152
153 This License is not intended to limit any rights You have under
154 applicable copyright doctrines of fair use, fair dealing, or other
155 equivalents.
156
1572.7. Conditions
158
159 Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
160 Section 2.1.
161
162
1633. Responsibilities
164
1653.1. Distribution of Source Form
166
167 All distribution of Covered Software in Source Code Form, including any
168 Modifications that You create or to which You contribute, must be under
169 the terms of this License. You must inform recipients that the Source
170 Code Form of the Covered Software is governed by the terms of this
171 License, and how they can obtain a copy of this License. You may not
172 attempt to alter or restrict the recipients' rights in the Source Code
173 Form.
174
1753.2. Distribution of Executable Form
176
177 If You distribute Covered Software in Executable Form then:
178
179 a. such Covered Software must also be made available in Source Code Form,
180 as described in Section 3.1, and You must inform recipients of the
181 Executable Form how they can obtain a copy of such Source Code Form by
182 reasonable means in a timely manner, at a charge no more than the cost
183 of distribution to the recipient; and
184
185 b. You may distribute such Executable Form under the terms of this
186 License, or sublicense it under different terms, provided that the
187 license for the Executable Form does not attempt to limit or alter the
188 recipients' rights in the Source Code Form under this License.
189
1903.3. Distribution of a Larger Work
191
192 You may create and distribute a Larger Work under terms of Your choice,
193 provided that You also comply with the requirements of this License for
194 the Covered Software. If the Larger Work is a combination of Covered
195 Software with a work governed by one or more Secondary Licenses, and the
196 Covered Software is not Incompatible With Secondary Licenses, this
197 License permits You to additionally distribute such Covered Software
198 under the terms of such Secondary License(s), so that the recipient of
199 the Larger Work may, at their option, further distribute the Covered
200 Software under the terms of either this License or such Secondary
201 License(s).
202
2033.4. Notices
204
205 You may not remove or alter the substance of any license notices
206 (including copyright notices, patent notices, disclaimers of warranty, or
207 limitations of liability) contained within the Source Code Form of the
208 Covered Software, except that You may alter any license notices to the
209 extent required to remedy known factual inaccuracies.
210
2113.5. Application of Additional Terms
212
213 You may choose to offer, and to charge a fee for, warranty, support,
214 indemnity or liability obligations to one or more recipients of Covered
215 Software. However, You may do so only on Your own behalf, and not on
216 behalf of any Contributor. You must make it absolutely clear that any
217 such warranty, support, indemnity, or liability obligation is offered by
218 You alone, and You hereby agree to indemnify every Contributor for any
219 liability incurred by such Contributor as a result of warranty, support,
220 indemnity or liability terms You offer. You may include additional
221 disclaimers of warranty and limitations of liability specific to any
222 jurisdiction.
223
2244. Inability to Comply Due to Statute or Regulation
225
226 If it is impossible for You to comply with any of the terms of this License
227 with respect to some or all of the Covered Software due to statute,
228 judicial order, or regulation then You must: (a) comply with the terms of
229 this License to the maximum extent possible; and (b) describe the
230 limitations and the code they affect. Such description must be placed in a
231 text file included with all distributions of the Covered Software under
232 this License. Except to the extent prohibited by statute or regulation,
233 such description must be sufficiently detailed for a recipient of ordinary
234 skill to be able to understand it.
235
2365. Termination
237
2385.1. The rights granted under this License will terminate automatically if You
239 fail to comply with any of its terms. However, if You become compliant,
240 then the rights granted under this License from a particular Contributor
241 are reinstated (a) provisionally, unless and until such Contributor
242 explicitly and finally terminates Your grants, and (b) on an ongoing
243 basis, if such Contributor fails to notify You of the non-compliance by
244 some reasonable means prior to 60 days after You have come back into
245 compliance. Moreover, Your grants from a particular Contributor are
246 reinstated on an ongoing basis if such Contributor notifies You of the
247 non-compliance by some reasonable means, this is the first time You have
248 received notice of non-compliance with this License from such
249 Contributor, and You become compliant prior to 30 days after Your receipt
250 of the notice.
251
2525.2. If You initiate litigation against any entity by asserting a patent
253 infringement claim (excluding declaratory judgment actions,
254 counter-claims, and cross-claims) alleging that a Contributor Version
255 directly or indirectly infringes any patent, then the rights granted to
256 You by any and all Contributors for the Covered Software under Section
257 2.1 of this License shall terminate.
258
2595.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
260 license agreements (excluding distributors and resellers) which have been
261 validly granted by You or Your distributors under this License prior to
262 termination shall survive termination.
263
2646. Disclaimer of Warranty
265
266 Covered Software is provided under this License on an "as is" basis,
267 without warranty of any kind, either expressed, implied, or statutory,
268 including, without limitation, warranties that the Covered Software is free
269 of defects, merchantable, fit for a particular purpose or non-infringing.
270 The entire risk as to the quality and performance of the Covered Software
271 is with You. Should any Covered Software prove defective in any respect,
272 You (not any Contributor) assume the cost of any necessary servicing,
273 repair, or correction. This disclaimer of warranty constitutes an essential
274 part of this License. No use of any Covered Software is authorized under
275 this License except under this disclaimer.
276
2777. Limitation of Liability
278
279 Under no circumstances and under no legal theory, whether tort (including
280 negligence), contract, or otherwise, shall any Contributor, or anyone who
281 distributes Covered Software as permitted above, be liable to You for any
282 direct, indirect, special, incidental, or consequential damages of any
283 character including, without limitation, damages for lost profits, loss of
284 goodwill, work stoppage, computer failure or malfunction, or any and all
285 other commercial damages or losses, even if such party shall have been
286 informed of the possibility of such damages. This limitation of liability
287 shall not apply to liability for death or personal injury resulting from
288 such party's negligence to the extent applicable law prohibits such
289 limitation. Some jurisdictions do not allow the exclusion or limitation of
290 incidental or consequential damages, so this exclusion and limitation may
291 not apply to You.
292
2938. Litigation
294
295 Any litigation relating to this License may be brought only in the courts
296 of a jurisdiction where the defendant maintains its principal place of
297 business and such litigation shall be governed by laws of that
298 jurisdiction, without reference to its conflict-of-law provisions. Nothing
299 in this Section shall prevent a party's ability to bring cross-claims or
300 counter-claims.
301
3029. Miscellaneous
303
304 This License represents the complete agreement concerning the subject
305 matter hereof. If any provision of this License is held to be
306 unenforceable, such provision shall be reformed only to the extent
307 necessary to make it enforceable. Any law or regulation which provides that
308 the language of a contract shall be construed against the drafter shall not
309 be used to construe this License against a Contributor.
310
311
31210. Versions of the License
313
31410.1. New Versions
315
316 Mozilla Foundation is the license steward. Except as provided in Section
317 10.3, no one other than the license steward has the right to modify or
318 publish new versions of this License. Each version will be given a
319 distinguishing version number.
320
32110.2. Effect of New Versions
322
323 You may distribute the Covered Software under the terms of the version
324 of the License under which You originally received the Covered Software,
325 or under the terms of any subsequent version published by the license
326 steward.
327
32810.3. Modified Versions
329
330 If you create software not governed by this License, and you want to
331 create a new license for such software, you may create and use a
332 modified version of this License if you rename the license and remove
333 any references to the name of the license steward (except to note that
334 such modified license differs from this License).
335
33610.4. Distributing Source Code Form that is Incompatible With Secondary
337 Licenses If You choose to distribute Source Code Form that is
338 Incompatible With Secondary Licenses under the terms of this version of
339 the License, the notice described in Exhibit B of this License must be
340 attached.
341
342Exhibit A - Source Code Form License Notice
343
344 This Source Code Form is subject to the
345 terms of the Mozilla Public License, v.
346 2.0. If a copy of the MPL was not
347 distributed with this file, You can
348 obtain one at
349 http://mozilla.org/MPL/2.0/.
350
351If it is not possible or desirable to put the notice in a particular file,
352then You may include the notice in a location (such as a LICENSE file in a
353relevant directory) where a recipient would be likely to look for such a
354notice.
355
356You may add additional accurate notices of copyright ownership.
357
358Exhibit B - "Incompatible With Secondary Licenses" Notice
359
360 This Source Code Form is "Incompatible
361 With Secondary Licenses", as defined by
362 the Mozilla Public License, v. 2.0.
diff --git a/vendor/github.com/hashicorp/go-safetemp/README.md b/vendor/github.com/hashicorp/go-safetemp/README.md
new file mode 100644
index 0000000..02ece33
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-safetemp/README.md
@@ -0,0 +1,10 @@
1# go-safetemp
2[![Godoc](https://godoc.org/github.com/hashicorp/go-safetemp?status.svg)](https://godoc.org/github.com/hashicorp/go-safetemp)
3
4Functions for safely working with temporary directories and files.
5
6## Why?
7
8The Go standard library provides the excellent `ioutil` package for
9working with temporary directories and files. This library builds on top
10of it to provide safer abstractions.
diff --git a/vendor/github.com/hashicorp/go-safetemp/safetemp.go b/vendor/github.com/hashicorp/go-safetemp/safetemp.go
new file mode 100644
index 0000000..c4ae72b
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-safetemp/safetemp.go
@@ -0,0 +1,40 @@
1package safetemp
2
3import (
4 "io"
5 "io/ioutil"
6 "os"
7 "path/filepath"
8)
9
10// Dir creates a new temporary directory that isn't yet created. This
11// can be used with calls that expect a non-existent directory.
12//
13// The directory is created as a child of a temporary directory created
14// within the directory dir starting with prefix. The temporary directory
15// returned is always named "temp". The parent directory has the specified
16// prefix.
17//
18// The returned io.Closer should be used to clean up the returned directory.
19// This will properly remove the returned directory and any other temporary
20// files created.
21//
22// If an error is returned, the Closer does not need to be called (and will
23// be nil).
24func Dir(dir, prefix string) (string, io.Closer, error) {
25 // Create the temporary directory
26 td, err := ioutil.TempDir(dir, prefix)
27 if err != nil {
28 return "", nil, err
29 }
30
31 return filepath.Join(td, "temp"), pathCloser(td), nil
32}
33
34// pathCloser implements io.Closer to remove the given path on Close.
35type pathCloser string
36
37// Close deletes this path.
38func (p pathCloser) Close() error {
39 return os.RemoveAll(string(p))
40}
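
go-safetemp's Dir reserves a path that does not exist yet, which suits callers (such as go-getter) that want to hand a non-existent destination to code that creates it. A minimal usage sketch, with the prefix and the MkdirAll step as illustrative assumptions:

package main

import (
	"fmt"
	"log"
	"os"

	safetemp "github.com/hashicorp/go-safetemp"
)

func main() {
	// Returns <parent>/temp where <parent> is a fresh temporary directory.
	td, closer, err := safetemp.Dir("", "example")
	if err != nil {
		log.Fatal(err)
	}
	// Close removes the parent directory and anything created under it.
	defer closer.Close()

	// The caller creates the directory itself before using it.
	if err := os.MkdirAll(td, 0755); err != nil {
		log.Fatal(err)
	}
	fmt.Println("working in", td)
}
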
diff --git a/vendor/github.com/hashicorp/go-uuid/.travis.yml b/vendor/github.com/hashicorp/go-uuid/.travis.yml
new file mode 100644
index 0000000..7698490
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-uuid/.travis.yml
@@ -0,0 +1,12 @@
1language: go
2
3sudo: false
4
5go:
6 - 1.4
7 - 1.5
8 - 1.6
9 - tip
10
11script:
12 - go test -bench . -benchmem -v ./...
diff --git a/vendor/github.com/hashicorp/go-uuid/README.md b/vendor/github.com/hashicorp/go-uuid/README.md
index 21fdda4..fbde8b9 100644
--- a/vendor/github.com/hashicorp/go-uuid/README.md
+++ b/vendor/github.com/hashicorp/go-uuid/README.md
@@ -1,6 +1,6 @@
1# uuid 1# uuid [![Build Status](https://travis-ci.org/hashicorp/go-uuid.svg?branch=master)](https://travis-ci.org/hashicorp/go-uuid)
2 2
3Generates UUID-format strings using purely high quality random bytes. 3Generates UUID-format strings using high quality, _purely random_ bytes. It is **not** intended to be RFC compliant, merely to use a well-understood string representation of a 128-bit value. It can also parse UUID-format strings into their component bytes.
4 4
5Documentation 5Documentation
6============= 6=============
diff --git a/vendor/github.com/hashicorp/go-uuid/go.mod b/vendor/github.com/hashicorp/go-uuid/go.mod
new file mode 100644
index 0000000..dd57f9d
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-uuid/go.mod
@@ -0,0 +1 @@
module github.com/hashicorp/go-uuid
diff --git a/vendor/github.com/hashicorp/go-uuid/uuid.go b/vendor/github.com/hashicorp/go-uuid/uuid.go
index 322b522..ff9364c 100644
--- a/vendor/github.com/hashicorp/go-uuid/uuid.go
+++ b/vendor/github.com/hashicorp/go-uuid/uuid.go
@@ -6,13 +6,21 @@ import (
6 "fmt" 6 "fmt"
7) 7)
8 8
9// GenerateUUID is used to generate a random UUID 9// GenerateRandomBytes is used to generate random bytes of given size.
10func GenerateUUID() (string, error) { 10func GenerateRandomBytes(size int) ([]byte, error) {
11 buf := make([]byte, 16) 11 buf := make([]byte, size)
12 if _, err := rand.Read(buf); err != nil { 12 if _, err := rand.Read(buf); err != nil {
13 return "", fmt.Errorf("failed to read random bytes: %v", err) 13 return nil, fmt.Errorf("failed to read random bytes: %v", err)
14 } 14 }
15 return buf, nil
16}
15 17
18// GenerateUUID is used to generate a random UUID
19func GenerateUUID() (string, error) {
20 buf, err := GenerateRandomBytes(16)
21 if err != nil {
22 return "", err
23 }
16 return FormatUUID(buf) 24 return FormatUUID(buf)
17} 25}
18 26
diff --git a/vendor/github.com/hashicorp/go-version/.travis.yml b/vendor/github.com/hashicorp/go-version/.travis.yml
index 9f30eec..542ca8b 100644
--- a/vendor/github.com/hashicorp/go-version/.travis.yml
+++ b/vendor/github.com/hashicorp/go-version/.travis.yml
@@ -6,6 +6,8 @@ go:
6 - 1.2 6 - 1.2
7 - 1.3 7 - 1.3
8 - 1.4 8 - 1.4
9 - 1.9
10 - "1.10"
9 11
10script: 12script:
11 - go test 13 - go test
diff --git a/vendor/github.com/hashicorp/go-version/constraint.go b/vendor/github.com/hashicorp/go-version/constraint.go
index 8c73df0..d055759 100644
--- a/vendor/github.com/hashicorp/go-version/constraint.go
+++ b/vendor/github.com/hashicorp/go-version/constraint.go
@@ -2,6 +2,7 @@ package version
2 2
3import ( 3import (
4 "fmt" 4 "fmt"
5 "reflect"
5 "regexp" 6 "regexp"
6 "strings" 7 "strings"
7) 8)
@@ -113,6 +114,26 @@ func parseSingle(v string) (*Constraint, error) {
113 }, nil 114 }, nil
114} 115}
115 116
117func prereleaseCheck(v, c *Version) bool {
118 switch vPre, cPre := v.Prerelease() != "", c.Prerelease() != ""; {
119 case cPre && vPre:
120 // A constraint with a pre-release can only match a pre-release version
121 // with the same base segments.
122 return reflect.DeepEqual(c.Segments64(), v.Segments64())
123
124 case !cPre && vPre:
125 // A constraint without a pre-release can only match a version without a
126 // pre-release.
127 return false
128
129 case cPre && !vPre:
130 // OK, except with the pessimistic operator
131 case !cPre && !vPre:
132 // OK
133 }
134 return true
135}
136
116//------------------------------------------------------------------- 137//-------------------------------------------------------------------
117// Constraint functions 138// Constraint functions
118//------------------------------------------------------------------- 139//-------------------------------------------------------------------
@@ -126,22 +147,27 @@ func constraintNotEqual(v, c *Version) bool {
126} 147}
127 148
128func constraintGreaterThan(v, c *Version) bool { 149func constraintGreaterThan(v, c *Version) bool {
129 return v.Compare(c) == 1 150 return prereleaseCheck(v, c) && v.Compare(c) == 1
130} 151}
131 152
132func constraintLessThan(v, c *Version) bool { 153func constraintLessThan(v, c *Version) bool {
133 return v.Compare(c) == -1 154 return prereleaseCheck(v, c) && v.Compare(c) == -1
134} 155}
135 156
136func constraintGreaterThanEqual(v, c *Version) bool { 157func constraintGreaterThanEqual(v, c *Version) bool {
137 return v.Compare(c) >= 0 158 return prereleaseCheck(v, c) && v.Compare(c) >= 0
138} 159}
139 160
140func constraintLessThanEqual(v, c *Version) bool { 161func constraintLessThanEqual(v, c *Version) bool {
141 return v.Compare(c) <= 0 162 return prereleaseCheck(v, c) && v.Compare(c) <= 0
142} 163}
143 164
144func constraintPessimistic(v, c *Version) bool { 165func constraintPessimistic(v, c *Version) bool {
166 // Using a pessimistic constraint with a pre-release restricts versions to pre-releases
167 if !prereleaseCheck(v, c) || (c.Prerelease() != "" && v.Prerelease() == "") {
168 return false
169 }
170
145 // If the version being checked is naturally less than the constraint, then there 171 // If the version being checked is naturally less than the constraint, then there
146 // is no way for the version to be valid against the constraint 172 // is no way for the version to be valid against the constraint
147 if v.LessThan(c) { 173 if v.LessThan(c) {
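
The prereleaseCheck guard changes how constraints treat pre-release versions: a constraint without a pre-release no longer matches pre-release versions, and a constraint with a pre-release only matches pre-releases sharing the same base segments. A hedged sketch with illustrative version strings:

package main

import (
	"fmt"
	"log"

	version "github.com/hashicorp/go-version"
)

func main() {
	c, err := version.NewConstraint(">= 1.0")
	if err != nil {
		log.Fatal(err)
	}
	stable, _ := version.NewVersion("1.2.0")
	pre, _ := version.NewVersion("1.2.0-beta1")

	fmt.Println(c.Check(stable)) // true
	fmt.Println(c.Check(pre))    // false: plain constraints now exclude pre-releases
}
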
diff --git a/vendor/github.com/hashicorp/go-version/go.mod b/vendor/github.com/hashicorp/go-version/go.mod
new file mode 100644
index 0000000..f528555
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-version/go.mod
@@ -0,0 +1 @@
module github.com/hashicorp/go-version
diff --git a/vendor/github.com/hashicorp/go-version/version.go b/vendor/github.com/hashicorp/go-version/version.go
index ae2f6b6..4d1e6e2 100644
--- a/vendor/github.com/hashicorp/go-version/version.go
+++ b/vendor/github.com/hashicorp/go-version/version.go
@@ -15,8 +15,8 @@ var versionRegexp *regexp.Regexp
15// The raw regular expression string used for testing the validity 15// The raw regular expression string used for testing the validity
16// of a version. 16// of a version.
17const VersionRegexpRaw string = `v?([0-9]+(\.[0-9]+)*?)` + 17const VersionRegexpRaw string = `v?([0-9]+(\.[0-9]+)*?)` +
18 `(-?([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` + 18 `(-([0-9]+[0-9A-Za-z\-~]*(\.[0-9A-Za-z\-~]+)*)|(-?([A-Za-z\-~]+[0-9A-Za-z\-~]*(\.[0-9A-Za-z\-~]+)*)))?` +
19 `(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` + 19 `(\+([0-9A-Za-z\-~]+(\.[0-9A-Za-z\-~]+)*))?` +
20 `?` 20 `?`
21 21
22// Version represents a single version. 22// Version represents a single version.
@@ -25,6 +25,7 @@ type Version struct {
25 pre string 25 pre string
26 segments []int64 26 segments []int64
27 si int 27 si int
28 original string
28} 29}
29 30
30func init() { 31func init() {
@@ -59,11 +60,17 @@ func NewVersion(v string) (*Version, error) {
59 segments = append(segments, 0) 60 segments = append(segments, 0)
60 } 61 }
61 62
63 pre := matches[7]
64 if pre == "" {
65 pre = matches[4]
66 }
67
62 return &Version{ 68 return &Version{
63 metadata: matches[7], 69 metadata: matches[10],
64 pre: matches[4], 70 pre: pre,
65 segments: segments, 71 segments: segments,
66 si: si, 72 si: si,
73 original: v,
67 }, nil 74 }, nil
68} 75}
69 76
@@ -166,24 +173,42 @@ func comparePart(preSelf string, preOther string) int {
166 return 0 173 return 0
167 } 174 }
168 175
176 var selfInt int64
177 selfNumeric := true
178 selfInt, err := strconv.ParseInt(preSelf, 10, 64)
179 if err != nil {
180 selfNumeric = false
181 }
182
183 var otherInt int64
184 otherNumeric := true
185 otherInt, err = strconv.ParseInt(preOther, 10, 64)
186 if err != nil {
187 otherNumeric = false
188 }
189
169 // if a part is empty, we use the other to decide 190 // if a part is empty, we use the other to decide
170 if preSelf == "" { 191 if preSelf == "" {
171 _, notIsNumeric := strconv.ParseInt(preOther, 10, 64) 192 if otherNumeric {
172 if notIsNumeric == nil {
173 return -1 193 return -1
174 } 194 }
175 return 1 195 return 1
176 } 196 }
177 197
178 if preOther == "" { 198 if preOther == "" {
179 _, notIsNumeric := strconv.ParseInt(preSelf, 10, 64) 199 if selfNumeric {
180 if notIsNumeric == nil {
181 return 1 200 return 1
182 } 201 }
183 return -1 202 return -1
184 } 203 }
185 204
186 if preSelf > preOther { 205 if selfNumeric && !otherNumeric {
206 return -1
207 } else if !selfNumeric && otherNumeric {
208 return 1
209 } else if !selfNumeric && !otherNumeric && preSelf > preOther {
210 return 1
211 } else if selfInt > otherInt {
187 return 1 212 return 1
188 } 213 }
189 214
@@ -283,11 +308,19 @@ func (v *Version) Segments() []int {
283// for a version "1.2.3-beta", segments will return a slice of 308// for a version "1.2.3-beta", segments will return a slice of
284// 1, 2, 3. 309// 1, 2, 3.
285func (v *Version) Segments64() []int64 { 310func (v *Version) Segments64() []int64 {
286 return v.segments 311 result := make([]int64, len(v.segments))
312 copy(result, v.segments)
313 return result
287} 314}
288 315
289// String returns the full version string included pre-release 316// String returns the full version string included pre-release
290// and metadata information. 317// and metadata information.
318//
319// This value is rebuilt according to the parsed segments and other
320// information. Therefore, ambiguities in the version string such as
321// prefixed zeroes (1.04.0 => 1.4.0), `v` prefix (v1.0.0 => 1.0.0), and
322// missing parts (1.0 => 1.0.0) will be made into a canonicalized form
323// as shown in the parenthesized examples.
291func (v *Version) String() string { 324func (v *Version) String() string {
292 var buf bytes.Buffer 325 var buf bytes.Buffer
293 fmtParts := make([]string, len(v.segments)) 326 fmtParts := make([]string, len(v.segments))
@@ -306,3 +339,9 @@ func (v *Version) String() string {
306 339
307 return buf.String() 340 return buf.String()
308} 341}
342
343// Original returns the original parsed version as-is, including any
344// potential whitespace, `v` prefix, etc.
345func (v *Version) Original() string {
346 return v.original
347}
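
Version now remembers the raw input: String rebuilds a canonical form from the parsed segments, while the new Original accessor returns the string exactly as given to NewVersion. A small sketch (the input value is an illustrative assumption):

package main

import (
	"fmt"
	"log"

	version "github.com/hashicorp/go-version"
)

func main() {
	v, err := version.NewVersion("v1.04.0")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(v.String())   // "1.4.0": canonicalized, "v" prefix and leading zero dropped
	fmt.Println(v.Original()) // "v1.04.0": the input preserved verbatim
}
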
diff --git a/vendor/github.com/hashicorp/hcl2/LICENSE b/vendor/github.com/hashicorp/hcl2/LICENSE
new file mode 100644
index 0000000..82b4de9
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/LICENSE
@@ -0,0 +1,353 @@
1Mozilla Public License, version 2.0
2
31. Definitions
4
51.1. “Contributor”
6
7 means each individual or legal entity that creates, contributes to the
8 creation of, or owns Covered Software.
9
101.2. “Contributor Version”
11
12 means the combination of the Contributions of others (if any) used by a
13 Contributor and that particular Contributor’s Contribution.
14
151.3. “Contribution”
16
17 means Covered Software of a particular Contributor.
18
191.4. “Covered Software”
20
21 means Source Code Form to which the initial Contributor has attached the
22 notice in Exhibit A, the Executable Form of such Source Code Form, and
23 Modifications of such Source Code Form, in each case including portions
24 thereof.
25
261.5. “Incompatible With Secondary Licenses”
27 means
28
29 a. that the initial Contributor has attached the notice described in
30 Exhibit B to the Covered Software; or
31
32 b. that the Covered Software was made available under the terms of version
33 1.1 or earlier of the License, but not also under the terms of a
34 Secondary License.
35
361.6. “Executable Form”
37
38 means any form of the work other than Source Code Form.
39
401.7. “Larger Work”
41
42 means a work that combines Covered Software with other material, in a separate
43 file or files, that is not Covered Software.
44
451.8. “License”
46
47 means this document.
48
491.9. “Licensable”
50
51 means having the right to grant, to the maximum extent possible, whether at the
52 time of the initial grant or subsequently, any and all of the rights conveyed by
53 this License.
54
551.10. “Modifications”
56
57 means any of the following:
58
59 a. any file in Source Code Form that results from an addition to, deletion
60 from, or modification of the contents of Covered Software; or
61
62 b. any new file in Source Code Form that contains any Covered Software.
63
641.11. “Patent Claims” of a Contributor
65
66 means any patent claim(s), including without limitation, method, process,
67 and apparatus claims, in any patent Licensable by such Contributor that
68 would be infringed, but for the grant of the License, by the making,
69 using, selling, offering for sale, having made, import, or transfer of
70 either its Contributions or its Contributor Version.
71
721.12. “Secondary License”
73
74 means either the GNU General Public License, Version 2.0, the GNU Lesser
75 General Public License, Version 2.1, the GNU Affero General Public
76 License, Version 3.0, or any later versions of those licenses.
77
781.13. “Source Code Form”
79
80 means the form of the work preferred for making modifications.
81
821.14. “You” (or “Your”)
83
84 means an individual or a legal entity exercising rights under this
85 License. For legal entities, “You” includes any entity that controls, is
86 controlled by, or is under common control with You. For purposes of this
87 definition, “control” means (a) the power, direct or indirect, to cause
88 the direction or management of such entity, whether by contract or
89 otherwise, or (b) ownership of more than fifty percent (50%) of the
90 outstanding shares or beneficial ownership of such entity.
91
92
932. License Grants and Conditions
94
952.1. Grants
96
97 Each Contributor hereby grants You a world-wide, royalty-free,
98 non-exclusive license:
99
100 a. under intellectual property rights (other than patent or trademark)
101 Licensable by such Contributor to use, reproduce, make available,
102 modify, display, perform, distribute, and otherwise exploit its
103 Contributions, either on an unmodified basis, with Modifications, or as
104 part of a Larger Work; and
105
106 b. under Patent Claims of such Contributor to make, use, sell, offer for
107 sale, have made, import, and otherwise transfer either its Contributions
108 or its Contributor Version.
109
1102.2. Effective Date
111
112 The licenses granted in Section 2.1 with respect to any Contribution become
113 effective for each Contribution on the date the Contributor first distributes
114 such Contribution.
115
1162.3. Limitations on Grant Scope
117
118 The licenses granted in this Section 2 are the only rights granted under this
119 License. No additional rights or licenses will be implied from the distribution
120 or licensing of Covered Software under this License. Notwithstanding Section
121 2.1(b) above, no patent license is granted by a Contributor:
122
123 a. for any code that a Contributor has removed from Covered Software; or
124
125 b. for infringements caused by: (i) Your and any other third party’s
126 modifications of Covered Software, or (ii) the combination of its
127 Contributions with other software (except as part of its Contributor
128 Version); or
129
130 c. under Patent Claims infringed by Covered Software in the absence of its
131 Contributions.
132
133 This License does not grant any rights in the trademarks, service marks, or
134 logos of any Contributor (except as may be necessary to comply with the
135 notice requirements in Section 3.4).
136
1372.4. Subsequent Licenses
138
139 No Contributor makes additional grants as a result of Your choice to
140 distribute the Covered Software under a subsequent version of this License
141 (see Section 10.2) or under the terms of a Secondary License (if permitted
142 under the terms of Section 3.3).
143
1442.5. Representation
145
146 Each Contributor represents that the Contributor believes its Contributions
147 are its original creation(s) or it has sufficient rights to grant the
148 rights to its Contributions conveyed by this License.
149
1502.6. Fair Use
151
152 This License is not intended to limit any rights You have under applicable
153 copyright doctrines of fair use, fair dealing, or other equivalents.
154
1552.7. Conditions
156
157 Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
158 Section 2.1.
159
160
1613. Responsibilities
162
1633.1. Distribution of Source Form
164
165 All distribution of Covered Software in Source Code Form, including any
166 Modifications that You create or to which You contribute, must be under the
167 terms of this License. You must inform recipients that the Source Code Form
168 of the Covered Software is governed by the terms of this License, and how
169 they can obtain a copy of this License. You may not attempt to alter or
170 restrict the recipients’ rights in the Source Code Form.
171
1723.2. Distribution of Executable Form
173
174 If You distribute Covered Software in Executable Form then:
175
176 a. such Covered Software must also be made available in Source Code Form,
177 as described in Section 3.1, and You must inform recipients of the
178 Executable Form how they can obtain a copy of such Source Code Form by
179 reasonable means in a timely manner, at a charge no more than the cost
180 of distribution to the recipient; and
181
182 b. You may distribute such Executable Form under the terms of this License,
183 or sublicense it under different terms, provided that the license for
184 the Executable Form does not attempt to limit or alter the recipients’
185 rights in the Source Code Form under this License.
186
1873.3. Distribution of a Larger Work
188
189 You may create and distribute a Larger Work under terms of Your choice,
190 provided that You also comply with the requirements of this License for the
191 Covered Software. If the Larger Work is a combination of Covered Software
192 with a work governed by one or more Secondary Licenses, and the Covered
193 Software is not Incompatible With Secondary Licenses, this License permits
194 You to additionally distribute such Covered Software under the terms of
195 such Secondary License(s), so that the recipient of the Larger Work may, at
196 their option, further distribute the Covered Software under the terms of
197 either this License or such Secondary License(s).
198
1993.4. Notices
200
201 You may not remove or alter the substance of any license notices (including
202 copyright notices, patent notices, disclaimers of warranty, or limitations
203 of liability) contained within the Source Code Form of the Covered
204 Software, except that You may alter any license notices to the extent
205 required to remedy known factual inaccuracies.
206
2073.5. Application of Additional Terms
208
209 You may choose to offer, and to charge a fee for, warranty, support,
210 indemnity or liability obligations to one or more recipients of Covered
211 Software. However, You may do so only on Your own behalf, and not on behalf
212 of any Contributor. You must make it absolutely clear that any such
213 warranty, support, indemnity, or liability obligation is offered by You
214 alone, and You hereby agree to indemnify every Contributor for any
215 liability incurred by such Contributor as a result of warranty, support,
216 indemnity or liability terms You offer. You may include additional
217 disclaimers of warranty and limitations of liability specific to any
218 jurisdiction.
219
2204. Inability to Comply Due to Statute or Regulation
221
222 If it is impossible for You to comply with any of the terms of this License
223 with respect to some or all of the Covered Software due to statute, judicial
224 order, or regulation then You must: (a) comply with the terms of this License
225 to the maximum extent possible; and (b) describe the limitations and the code
226 they affect. Such description must be placed in a text file included with all
227 distributions of the Covered Software under this License. Except to the
228 extent prohibited by statute or regulation, such description must be
229 sufficiently detailed for a recipient of ordinary skill to be able to
230 understand it.
231
2325. Termination
233
2345.1. The rights granted under this License will terminate automatically if You
235 fail to comply with any of its terms. However, if You become compliant,
236 then the rights granted under this License from a particular Contributor
237 are reinstated (a) provisionally, unless and until such Contributor
238 explicitly and finally terminates Your grants, and (b) on an ongoing basis,
239 if such Contributor fails to notify You of the non-compliance by some
240 reasonable means prior to 60 days after You have come back into compliance.
241 Moreover, Your grants from a particular Contributor are reinstated on an
242 ongoing basis if such Contributor notifies You of the non-compliance by
243 some reasonable means, this is the first time You have received notice of
244 non-compliance with this License from such Contributor, and You become
245 compliant prior to 30 days after Your receipt of the notice.
246
2475.2. If You initiate litigation against any entity by asserting a patent
248 infringement claim (excluding declaratory judgment actions, counter-claims,
249 and cross-claims) alleging that a Contributor Version directly or
250 indirectly infringes any patent, then the rights granted to You by any and
251 all Contributors for the Covered Software under Section 2.1 of this License
252 shall terminate.
253
2545.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
255 license agreements (excluding distributors and resellers) which have been
256 validly granted by You or Your distributors under this License prior to
257 termination shall survive termination.
258
2596. Disclaimer of Warranty
260
261 Covered Software is provided under this License on an “as is” basis, without
262 warranty of any kind, either expressed, implied, or statutory, including,
263 without limitation, warranties that the Covered Software is free of defects,
264 merchantable, fit for a particular purpose or non-infringing. The entire
265 risk as to the quality and performance of the Covered Software is with You.
266 Should any Covered Software prove defective in any respect, You (not any
267 Contributor) assume the cost of any necessary servicing, repair, or
268 correction. This disclaimer of warranty constitutes an essential part of this
269 License. No use of any Covered Software is authorized under this License
270 except under this disclaimer.
271
2727. Limitation of Liability
273
274 Under no circumstances and under no legal theory, whether tort (including
275 negligence), contract, or otherwise, shall any Contributor, or anyone who
276 distributes Covered Software as permitted above, be liable to You for any
277 direct, indirect, special, incidental, or consequential damages of any
278 character including, without limitation, damages for lost profits, loss of
279 goodwill, work stoppage, computer failure or malfunction, or any and all
280 other commercial damages or losses, even if such party shall have been
281 informed of the possibility of such damages. This limitation of liability
282 shall not apply to liability for death or personal injury resulting from such
283 party’s negligence to the extent applicable law prohibits such limitation.
284 Some jurisdictions do not allow the exclusion or limitation of incidental or
285 consequential damages, so this exclusion and limitation may not apply to You.
286
2878. Litigation
288
289 Any litigation relating to this License may be brought only in the courts of
290 a jurisdiction where the defendant maintains its principal place of business
291 and such litigation shall be governed by laws of that jurisdiction, without
292 reference to its conflict-of-law provisions. Nothing in this Section shall
293 prevent a party’s ability to bring cross-claims or counter-claims.
294
2959. Miscellaneous
296
297 This License represents the complete agreement concerning the subject matter
298 hereof. If any provision of this License is held to be unenforceable, such
299 provision shall be reformed only to the extent necessary to make it
300 enforceable. Any law or regulation which provides that the language of a
301 contract shall be construed against the drafter shall not be used to construe
302 this License against a Contributor.
303
304
30510. Versions of the License
306
30710.1. New Versions
308
309 Mozilla Foundation is the license steward. Except as provided in Section
310 10.3, no one other than the license steward has the right to modify or
311 publish new versions of this License. Each version will be given a
312 distinguishing version number.
313
31410.2. Effect of New Versions
315
316 You may distribute the Covered Software under the terms of the version of
317 the License under which You originally received the Covered Software, or
318 under the terms of any subsequent version published by the license
319 steward.
320
32110.3. Modified Versions
322
323 If you create software not governed by this License, and you want to
324 create a new license for such software, you may create and use a modified
325 version of this License if you rename the license and remove any
326 references to the name of the license steward (except to note that such
327 modified license differs from this License).
328
32910.4. Distributing Source Code Form that is Incompatible With Secondary Licenses
330 If You choose to distribute Source Code Form that is Incompatible With
331 Secondary Licenses under the terms of this version of the License, the
332 notice described in Exhibit B of this License must be attached.
333
334Exhibit A - Source Code Form License Notice
335
336 This Source Code Form is subject to the
337 terms of the Mozilla Public License, v.
338 2.0. If a copy of the MPL was not
339 distributed with this file, You can
340 obtain one at
341 http://mozilla.org/MPL/2.0/.
342
343If it is not possible or desirable to put the notice in a particular file, then
344You may include the notice in a location (such as a LICENSE file in a relevant
345directory) where a recipient would be likely to look for such a notice.
346
347You may add additional accurate notices of copyright ownership.
348
349Exhibit B - “Incompatible With Secondary Licenses” Notice
350
351 This Source Code Form is “Incompatible
352 With Secondary Licenses”, as defined by
353 the Mozilla Public License, v. 2.0.
diff --git a/vendor/github.com/hashicorp/hcl2/gohcl/decode.go b/vendor/github.com/hashicorp/hcl2/gohcl/decode.go
new file mode 100644
index 0000000..3a149a8
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/gohcl/decode.go
@@ -0,0 +1,304 @@
1package gohcl
2
3import (
4 "fmt"
5 "reflect"
6
7 "github.com/zclconf/go-cty/cty"
8
9 "github.com/hashicorp/hcl2/hcl"
10 "github.com/zclconf/go-cty/cty/convert"
11 "github.com/zclconf/go-cty/cty/gocty"
12)
13
14// DecodeBody extracts the configuration within the given body into the given
15// value. This value must be a non-nil pointer to either a struct or
16// a map, where in the former case the configuration will be decoded using
17// struct tags and in the latter case only attributes are allowed and their
18// values are decoded into the map.
19//
20// The given EvalContext is used to resolve any variables or functions in
21// expressions encountered while decoding. This may be nil to require only
22// constant values, for simple applications that do not support variables or
23// functions.
24//
25// The returned diagnostics should be inspected with its HasErrors method to
26// determine if the populated value is valid and complete. If error diagnostics
27// are returned then the given value may have been partially-populated but
28// may still be accessed by a careful caller for static analysis and editor
29// integration use-cases.
30func DecodeBody(body hcl.Body, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics {
31 rv := reflect.ValueOf(val)
32 if rv.Kind() != reflect.Ptr {
33 panic(fmt.Sprintf("target value must be a pointer, not %s", rv.Type().String()))
34 }
35
36 return decodeBodyToValue(body, ctx, rv.Elem())
37}
38
39func decodeBodyToValue(body hcl.Body, ctx *hcl.EvalContext, val reflect.Value) hcl.Diagnostics {
40 et := val.Type()
41 switch et.Kind() {
42 case reflect.Struct:
43 return decodeBodyToStruct(body, ctx, val)
44 case reflect.Map:
45 return decodeBodyToMap(body, ctx, val)
46 default:
47 panic(fmt.Sprintf("target value must be pointer to struct or map, not %s", et.String()))
48 }
49}
50
51func decodeBodyToStruct(body hcl.Body, ctx *hcl.EvalContext, val reflect.Value) hcl.Diagnostics {
52 schema, partial := ImpliedBodySchema(val.Interface())
53
54 var content *hcl.BodyContent
55 var leftovers hcl.Body
56 var diags hcl.Diagnostics
57 if partial {
58 content, leftovers, diags = body.PartialContent(schema)
59 } else {
60 content, diags = body.Content(schema)
61 }
62 if content == nil {
63 return diags
64 }
65
66 tags := getFieldTags(val.Type())
67
68 if tags.Remain != nil {
69 fieldIdx := *tags.Remain
70 field := val.Type().Field(fieldIdx)
71 fieldV := val.Field(fieldIdx)
72 switch {
73 case bodyType.AssignableTo(field.Type):
74 fieldV.Set(reflect.ValueOf(leftovers))
75 case attrsType.AssignableTo(field.Type):
76 attrs, attrsDiags := leftovers.JustAttributes()
77 if len(attrsDiags) > 0 {
78 diags = append(diags, attrsDiags...)
79 }
80 fieldV.Set(reflect.ValueOf(attrs))
81 default:
82 diags = append(diags, decodeBodyToValue(leftovers, ctx, fieldV)...)
83 }
84 }
85
86 for name, fieldIdx := range tags.Attributes {
87 attr := content.Attributes[name]
88 field := val.Type().Field(fieldIdx)
89 fieldV := val.Field(fieldIdx)
90
91 if attr == nil {
92 if !exprType.AssignableTo(field.Type) {
93 continue
94 }
95
96 // As a special case, if the target is of type hcl.Expression then
97 // we'll assign an actual expression that evaluates to a cty null,
98 // so the caller can deal with it within the cty realm rather
99 // than within the Go realm.
100 synthExpr := hcl.StaticExpr(cty.NullVal(cty.DynamicPseudoType), body.MissingItemRange())
101 fieldV.Set(reflect.ValueOf(synthExpr))
102 continue
103 }
104
105 switch {
106 case attrType.AssignableTo(field.Type):
107 fieldV.Set(reflect.ValueOf(attr))
108 case exprType.AssignableTo(field.Type):
109 fieldV.Set(reflect.ValueOf(attr.Expr))
110 default:
111 diags = append(diags, DecodeExpression(
112 attr.Expr, ctx, fieldV.Addr().Interface(),
113 )...)
114 }
115 }
116
117 blocksByType := content.Blocks.ByType()
118
119 for typeName, fieldIdx := range tags.Blocks {
120 blocks := blocksByType[typeName]
121 field := val.Type().Field(fieldIdx)
122
123 ty := field.Type
124 isSlice := false
125 isPtr := false
126 if ty.Kind() == reflect.Slice {
127 isSlice = true
128 ty = ty.Elem()
129 }
130 if ty.Kind() == reflect.Ptr {
131 isPtr = true
132 ty = ty.Elem()
133 }
134
135 if len(blocks) > 1 && !isSlice {
136 diags = append(diags, &hcl.Diagnostic{
137 Severity: hcl.DiagError,
138 Summary: fmt.Sprintf("Duplicate %s block", typeName),
139 Detail: fmt.Sprintf(
140 "Only one %s block is allowed. Another was defined at %s.",
141 typeName, blocks[0].DefRange.String(),
142 ),
143 Subject: &blocks[1].DefRange,
144 })
145 continue
146 }
147
148 if len(blocks) == 0 {
149 if isSlice || isPtr {
150 val.Field(fieldIdx).Set(reflect.Zero(field.Type))
151 } else {
152 diags = append(diags, &hcl.Diagnostic{
153 Severity: hcl.DiagError,
154 Summary: fmt.Sprintf("Missing %s block", typeName),
155 Detail: fmt.Sprintf("A %s block is required.", typeName),
156 Subject: body.MissingItemRange().Ptr(),
157 })
158 }
159 continue
160 }
161
162 switch {
163
164 case isSlice:
165 elemType := ty
166 if isPtr {
167 elemType = reflect.PtrTo(ty)
168 }
169 sli := reflect.MakeSlice(reflect.SliceOf(elemType), len(blocks), len(blocks))
170
171 for i, block := range blocks {
172 if isPtr {
173 v := reflect.New(ty)
174 diags = append(diags, decodeBlockToValue(block, ctx, v.Elem())...)
175 sli.Index(i).Set(v)
176 } else {
177 diags = append(diags, decodeBlockToValue(block, ctx, sli.Index(i))...)
178 }
179 }
180
181 val.Field(fieldIdx).Set(sli)
182
183 default:
184 block := blocks[0]
185 if isPtr {
186 v := reflect.New(ty)
187 diags = append(diags, decodeBlockToValue(block, ctx, v.Elem())...)
188 val.Field(fieldIdx).Set(v)
189 } else {
190 diags = append(diags, decodeBlockToValue(block, ctx, val.Field(fieldIdx))...)
191 }
192
193 }
194
195 }
196
197 return diags
198}
199
200func decodeBodyToMap(body hcl.Body, ctx *hcl.EvalContext, v reflect.Value) hcl.Diagnostics {
201 attrs, diags := body.JustAttributes()
202 if attrs == nil {
203 return diags
204 }
205
206 mv := reflect.MakeMap(v.Type())
207
208 for k, attr := range attrs {
209 switch {
210 case attrType.AssignableTo(v.Type().Elem()):
211 mv.SetMapIndex(reflect.ValueOf(k), reflect.ValueOf(attr))
212 case exprType.AssignableTo(v.Type().Elem()):
213 mv.SetMapIndex(reflect.ValueOf(k), reflect.ValueOf(attr.Expr))
214 default:
215 ev := reflect.New(v.Type().Elem())
216 diags = append(diags, DecodeExpression(attr.Expr, ctx, ev.Interface())...)
217 mv.SetMapIndex(reflect.ValueOf(k), ev.Elem())
218 }
219 }
220
221 v.Set(mv)
222
223 return diags
224}
225
226func decodeBlockToValue(block *hcl.Block, ctx *hcl.EvalContext, v reflect.Value) hcl.Diagnostics {
227 var diags hcl.Diagnostics
228
229 ty := v.Type()
230
231 switch {
232 case blockType.AssignableTo(ty):
233 v.Elem().Set(reflect.ValueOf(block))
234 case bodyType.AssignableTo(ty):
235 v.Elem().Set(reflect.ValueOf(block.Body))
236 case attrsType.AssignableTo(ty):
237 attrs, attrsDiags := block.Body.JustAttributes()
238 if len(attrsDiags) > 0 {
239 diags = append(diags, attrsDiags...)
240 }
241 v.Elem().Set(reflect.ValueOf(attrs))
242 default:
243 diags = append(diags, decodeBodyToValue(block.Body, ctx, v)...)
244
245 if len(block.Labels) > 0 {
246 blockTags := getFieldTags(ty)
247 for li, lv := range block.Labels {
248 lfieldIdx := blockTags.Labels[li].FieldIndex
249 v.Field(lfieldIdx).Set(reflect.ValueOf(lv))
250 }
251 }
252
253 }
254
255 return diags
256}
257
258// DecodeExpression extracts the value of the given expression into the given
259// value. This value must be something that gocty is able to decode into,
260// since the final decoding is delegated to that package.
261//
262// The given EvalContext is used to resolve any variables or functions in
263// expressions encountered while decoding. This may be nil to require only
264// constant values, for simple applications that do not support variables or
265// functions.
266//
267// The returned diagnostics should be inspected with its HasErrors method to
268// determine if the populated value is valid and complete. If error diagnostics
269// are returned then the given value may have been partially-populated but
270// may still be accessed by a careful caller for static analysis and editor
271// integration use-cases.
272func DecodeExpression(expr hcl.Expression, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics {
273 srcVal, diags := expr.Value(ctx)
274
275 convTy, err := gocty.ImpliedType(val)
276 if err != nil {
277 panic(fmt.Sprintf("unsuitable DecodeExpression target: %s", err))
278 }
279
280 srcVal, err = convert.Convert(srcVal, convTy)
281 if err != nil {
282 diags = append(diags, &hcl.Diagnostic{
283 Severity: hcl.DiagError,
284 Summary: "Unsuitable value type",
285 Detail: fmt.Sprintf("Unsuitable value: %s", err.Error()),
286 Subject: expr.StartRange().Ptr(),
287 Context: expr.Range().Ptr(),
288 })
289 return diags
290 }
291
292 err = gocty.FromCtyValue(srcVal, val)
293 if err != nil {
294 diags = append(diags, &hcl.Diagnostic{
295 Severity: hcl.DiagError,
296 Summary: "Unsuitable value type",
297 Detail: fmt.Sprintf("Unsuitable value: %s", err.Error()),
298 Subject: expr.StartRange().Ptr(),
299 Context: expr.Range().Ptr(),
300 })
301 }
302
303 return diags
304}
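
To see the decode path end to end, here is a hedged, self-contained sketch of calling gohcl.DecodeBody on a tagged struct; the block names, fields, and HCL source are illustrative assumptions, and the parser comes from the sibling hclparse package rather than this file:

package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl2/gohcl"
	"github.com/hashicorp/hcl2/hclparse"
)

type Service struct {
	Name string `hcl:"name,label"`
	Port int    `hcl:"port,attr"`
}

type Config struct {
	Services []Service `hcl:"service,block"`
}

func main() {
	src := `
service "web" {
  port = 8080
}
`
	parser := hclparse.NewParser()
	file, diags := parser.ParseHCL([]byte(src), "example.hcl")
	if diags.HasErrors() {
		log.Fatal(diags)
	}

	var cfg Config
	// A nil EvalContext restricts expressions to constant values.
	if diags := gohcl.DecodeBody(file.Body, nil, &cfg); diags.HasErrors() {
		log.Fatal(diags)
	}
	fmt.Println(cfg.Services[0].Name, cfg.Services[0].Port)
}
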
diff --git a/vendor/github.com/hashicorp/hcl2/gohcl/doc.go b/vendor/github.com/hashicorp/hcl2/gohcl/doc.go
new file mode 100644
index 0000000..8500214
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/gohcl/doc.go
@@ -0,0 +1,49 @@
1// Package gohcl allows decoding HCL configurations into Go data structures.
2//
3// It provides a convenient and concise way of describing the schema for
4// configuration and then accessing the resulting data via native Go
5// types.
6//
7// A struct field tag scheme is used, similar to other decoding and
8// unmarshalling libraries. The tags are formatted as in the following example:
9//
10// ThingType string `hcl:"thing_type,attr"`
11//
12// Within each tag there are two comma-separated tokens. The first is the
13// name of the corresponding construct in configuration, while the second
14// is a keyword giving the kind of construct expected. The following
15// kind keywords are supported:
16//
17// attr (the default) indicates that the value is to be populated from an attribute
18// block indicates that the value is to be populated from a block
19// label indicates that the value is to be populated from a block label
20// remain indicates that the value is to be populated from the remaining body after populating other fields
21//
22// "attr" fields may either be of type hcl.Expression, in which case the raw
23// expression is assigned, or of any type accepted by gocty, in which case
24// gocty will be used to assign the value to a native Go type.
25//
26// "block" fields may be of type *hcl.Block or hcl.Body, in which case the
27// corresponding raw value is assigned, or may be a struct that recursively
28// uses the same tags. Block fields may also be slices of any of these types,
29// in which case multiple blocks of the corresponding type are decoded into
30// the slice.
31//
32// "label" fields are considered only in a struct used as the type of a field
33// marked as "block", and are used sequentially to capture the labels of
34// the blocks being decoded. In this case, the name token is used only as
35// an identifier for the label in diagnostic messages.
36//
37// "remain" can be placed on a single field that may be either of type
38// hcl.Body or hcl.Attributes, in which case any remaining body content is
39// placed into this field for delayed processing. If no "remain" field is
40// present then any attributes or blocks not matched by another valid tag
41// will cause an error diagnostic.
42//
43// Broadly-speaking this package deals with two types of error. The first is
44// errors in the configuration itself, which are returned as diagnostics
45// written with the configuration author as the target audience. The second
46// is bugs in the calling program, such as invalid struct tags, which are
47// surfaced via panics since there can be no useful runtime handling of such
48// errors and they should certainly not be returned to the user as diagnostics.
49package gohcl
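
To make the tag scheme concrete, here is a small sketch that decodes a parsed body into a tagged struct with gohcl.DecodeBody. The struct shape, field names and sample configuration are invented for illustration, and the companion hclparse package is assumed for parsing.

    package main

    import (
        "fmt"

        "github.com/hashicorp/hcl2/gohcl"
        "github.com/hashicorp/hcl2/hclparse"
    )

    // Config uses one attribute plus a repeatable, labelled "service" block.
    type Config struct {
        Region   string    `hcl:"region,attr"`
        Services []Service `hcl:"service,block"`
    }

    type Service struct {
        Name string `hcl:"name,label"`
        Port int    `hcl:"port,attr"`
    }

    func main() {
        src := []byte(`
    region = "eu-west-1"

    service "web" {
      port = 8080
    }
    `)
        parser := hclparse.NewParser()
        f, diags := parser.ParseHCL(src, "example.hcl")
        if diags.HasErrors() {
            panic(diags)
        }

        var cfg Config
        if diags := gohcl.DecodeBody(f.Body, nil, &cfg); diags.HasErrors() {
            panic(diags)
        }
        fmt.Println(cfg.Region, cfg.Services[0].Name, cfg.Services[0].Port)
    }
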
diff --git a/vendor/github.com/hashicorp/hcl2/gohcl/schema.go b/vendor/github.com/hashicorp/hcl2/gohcl/schema.go
new file mode 100644
index 0000000..88164cb
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/gohcl/schema.go
@@ -0,0 +1,174 @@
1package gohcl
2
3import (
4 "fmt"
5 "reflect"
6 "sort"
7 "strings"
8
9 "github.com/hashicorp/hcl2/hcl"
10)
11
12// ImpliedBodySchema produces a hcl.BodySchema derived from the type of the
13// given value, which must be a struct value or a pointer to one. If an
14// inappropriate value is passed, this function will panic.
15//
16// The second return argument indicates whether the given struct includes
17// a "remain" field, and thus the returned schema is non-exhaustive.
18//
19// This uses the tags on the fields of the struct to discover how each
20// field's value should be expressed within configuration. If an invalid
21// mapping is attempted, this function will panic.
22func ImpliedBodySchema(val interface{}) (schema *hcl.BodySchema, partial bool) {
23 ty := reflect.TypeOf(val)
24
25 if ty.Kind() == reflect.Ptr {
26 ty = ty.Elem()
27 }
28
29 if ty.Kind() != reflect.Struct {
30 panic(fmt.Sprintf("given value must be struct, not %T", val))
31 }
32
33 var attrSchemas []hcl.AttributeSchema
34 var blockSchemas []hcl.BlockHeaderSchema
35
36 tags := getFieldTags(ty)
37
38 attrNames := make([]string, 0, len(tags.Attributes))
39 for n := range tags.Attributes {
40 attrNames = append(attrNames, n)
41 }
42 sort.Strings(attrNames)
43 for _, n := range attrNames {
44 idx := tags.Attributes[n]
45 optional := tags.Optional[n]
46 field := ty.Field(idx)
47
48 var required bool
49
50 switch {
51 case field.Type.AssignableTo(exprType):
52 // If we're decoding to hcl.Expression then absence can be
53 // indicated via a null value, so we don't specify that
54 // the field is required during decoding.
55 required = false
56 case field.Type.Kind() != reflect.Ptr && !optional:
57 required = true
58 default:
59 required = false
60 }
61
62 attrSchemas = append(attrSchemas, hcl.AttributeSchema{
63 Name: n,
64 Required: required,
65 })
66 }
67
68 blockNames := make([]string, 0, len(tags.Blocks))
69 for n := range tags.Blocks {
70 blockNames = append(blockNames, n)
71 }
72 sort.Strings(blockNames)
73 for _, n := range blockNames {
74 idx := tags.Blocks[n]
75 field := ty.Field(idx)
76 fty := field.Type
77 if fty.Kind() == reflect.Slice {
78 fty = fty.Elem()
79 }
80 if fty.Kind() == reflect.Ptr {
81 fty = fty.Elem()
82 }
83 if fty.Kind() != reflect.Struct {
84 panic(fmt.Sprintf(
85 "hcl 'block' tag kind cannot be applied to %s field %s: struct required", field.Type.String(), field.Name,
86 ))
87 }
88 ftags := getFieldTags(fty)
89 var labelNames []string
90 if len(ftags.Labels) > 0 {
91 labelNames = make([]string, len(ftags.Labels))
92 for i, l := range ftags.Labels {
93 labelNames[i] = l.Name
94 }
95 }
96
97 blockSchemas = append(blockSchemas, hcl.BlockHeaderSchema{
98 Type: n,
99 LabelNames: labelNames,
100 })
101 }
102
103 partial = tags.Remain != nil
104 schema = &hcl.BodySchema{
105 Attributes: attrSchemas,
106 Blocks: blockSchemas,
107 }
108 return schema, partial
109}
110
111type fieldTags struct {
112 Attributes map[string]int
113 Blocks map[string]int
114 Labels []labelField
115 Remain *int
116 Optional map[string]bool
117}
118
119type labelField struct {
120 FieldIndex int
121 Name string
122}
123
124func getFieldTags(ty reflect.Type) *fieldTags {
125 ret := &fieldTags{
126 Attributes: map[string]int{},
127 Blocks: map[string]int{},
128 Optional: map[string]bool{},
129 }
130
131 ct := ty.NumField()
132 for i := 0; i < ct; i++ {
133 field := ty.Field(i)
134 tag := field.Tag.Get("hcl")
135 if tag == "" {
136 continue
137 }
138
139 comma := strings.Index(tag, ",")
140 var name, kind string
141 if comma != -1 {
142 name = tag[:comma]
143 kind = tag[comma+1:]
144 } else {
145 name = tag
146 kind = "attr"
147 }
148
149 switch kind {
150 case "attr":
151 ret.Attributes[name] = i
152 case "block":
153 ret.Blocks[name] = i
154 case "label":
155 ret.Labels = append(ret.Labels, labelField{
156 FieldIndex: i,
157 Name: name,
158 })
159 case "remain":
160 if ret.Remain != nil {
161 panic("only one 'remain' tag is permitted")
162 }
163 idx := i // copy, because this loop will continue assigning to i
164 ret.Remain = &idx
165 case "optional":
166 ret.Attributes[name] = i
167 ret.Optional[name] = true
168 default:
169 panic(fmt.Sprintf("invalid hcl field tag kind %q on %s %q", kind, field.Type.String(), field.Name))
170 }
171 }
172
173 return ret
174}
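
As an illustration of how the tags map to a schema, this sketch runs ImpliedBodySchema over a hypothetical struct; every name here is invented for the example.

    package main

    import (
        "fmt"

        "github.com/hashicorp/hcl2/gohcl"
        "github.com/hashicorp/hcl2/hcl"
    )

    type Service struct {
        Name string `hcl:"name,label"`
        Port int    `hcl:"port,attr"`
    }

    type Config struct {
        Region   string    `hcl:"region,attr"` // required: non-pointer, not optional
        LogLevel string    `hcl:"log_level,optional"`
        Services []Service `hcl:"service,block"` // one label: "name"
        Remain   hcl.Body  `hcl:"remain"`        // makes the schema partial
    }

    func main() {
        schema, partial := gohcl.ImpliedBodySchema(&Config{})
        fmt.Println(len(schema.Attributes), len(schema.Blocks), partial) // 2 1 true
    }
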
diff --git a/vendor/github.com/hashicorp/hcl2/gohcl/types.go b/vendor/github.com/hashicorp/hcl2/gohcl/types.go
new file mode 100644
index 0000000..a94f275
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/gohcl/types.go
@@ -0,0 +1,16 @@
1package gohcl
2
3import (
4 "reflect"
5
6 "github.com/hashicorp/hcl2/hcl"
7)
8
9var victimExpr hcl.Expression
10var victimBody hcl.Body
11
12var exprType = reflect.TypeOf(&victimExpr).Elem()
13var bodyType = reflect.TypeOf(&victimBody).Elem()
14var blockType = reflect.TypeOf((*hcl.Block)(nil))
15var attrType = reflect.TypeOf((*hcl.Attribute)(nil))
16var attrsType = reflect.TypeOf(hcl.Attributes(nil))
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/diagnostic.go b/vendor/github.com/hashicorp/hcl2/hcl/diagnostic.go
new file mode 100644
index 0000000..6ecf744
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/diagnostic.go
@@ -0,0 +1,103 @@
1package hcl
2
3import (
4 "fmt"
5)
6
7// DiagnosticSeverity represents the severity of a diagnostic.
8type DiagnosticSeverity int
9
10const (
11 // DiagInvalid is the invalid zero value of DiagnosticSeverity
12 DiagInvalid DiagnosticSeverity = iota
13
14 // DiagError indicates that the problem reported by a diagnostic prevents
15 // further progress in parsing and/or evaluating the subject.
16 DiagError
17
18 // DiagWarning indicates that the problem reported by a diagnostic warrants
19 // user attention but does not prevent further progress. It is most
20 // commonly used for showing deprecation notices.
21 DiagWarning
22)
23
24// Diagnostic represents information to be presented to a user about an
25// error or anomaly in parsing or evaluating configuration.
26type Diagnostic struct {
27 Severity DiagnosticSeverity
28
29 // Summary and detail contain the English-language description of the
30 // problem. Summary is a terse description of the general problem and
31 // detail is a more elaborate, often-multi-sentence description of
32// the problem and what might be done to solve it.
33 Summary string
34 Detail string
35 Subject *Range
36 Context *Range
37}
38
39// Diagnostics is a list of Diagnostic instances.
40type Diagnostics []*Diagnostic
41
42// error implementation, so that diagnostics can be returned via APIs
43// that normally deal in vanilla Go errors.
44//
45// This presents only minimal context about the error, for compatibility
46// with usual expectations about how errors will present as strings.
47func (d *Diagnostic) Error() string {
48 return fmt.Sprintf("%s: %s; %s", d.Subject, d.Summary, d.Detail)
49}
50
51// error implementation, so that sets of diagnostics can be returned via
52// APIs that normally deal in vanilla Go errors.
53func (d Diagnostics) Error() string {
54 count := len(d)
55 switch {
56 case count == 0:
57 return "no diagnostics"
58 case count == 1:
59 return d[0].Error()
60 default:
61 return fmt.Sprintf("%s, and %d other diagnostic(s)", d[0].Error(), count-1)
62 }
63}
64
65// Append appends a new error to a Diagnostics and returns the whole Diagnostics.
66//
67// This is provided as a convenience for returning from a function that
68// collects and then returns a set of diagnostics:
69//
70// return nil, diags.Append(&hcl.Diagnostic{ ... })
71//
72// Note that this modifies the array underlying the diagnostics slice, so
73// must be used carefully within a single codepath. It is incorrect (and rude)
74// to extend a diagnostics created by a different subsystem.
75func (d Diagnostics) Append(diag *Diagnostic) Diagnostics {
76 return append(d, diag)
77}
78
79// Extend concatenates the given Diagnostics with the receiver and returns
80// the whole new Diagnostics.
81//
82// This is similar to Append but accepts multiple diagnostics to add. It has
83// all the same caveats and constraints.
84func (d Diagnostics) Extend(diags Diagnostics) Diagnostics {
85 return append(d, diags...)
86}
87
88// HasErrors returns true if the receiver contains any diagnostics of
89// severity DiagError.
90func (d Diagnostics) HasErrors() bool {
91 for _, diag := range d {
92 if diag.Severity == DiagError {
93 return true
94 }
95 }
96 return false
97}
98
99// A DiagnosticWriter emits diagnostics somehow.
100type DiagnosticWriter interface {
101 WriteDiagnostic(*Diagnostic) error
102 WriteDiagnostics(Diagnostics) error
103}
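
A brief sketch of how calling code typically builds up and inspects Diagnostics; the messages are invented.

    package main

    import (
        "fmt"

        "github.com/hashicorp/hcl2/hcl"
    )

    func main() {
        var diags hcl.Diagnostics

        // Append returns the extended slice, so callers reassign the result.
        diags = diags.Append(&hcl.Diagnostic{
            Severity: hcl.DiagWarning,
            Summary:  "Deprecated argument",
            Detail:   "This argument is deprecated; use its replacement instead.",
        })
        diags = diags.Append(&hcl.Diagnostic{
            Severity: hcl.DiagError,
            Summary:  "Missing required argument",
            Detail:   "A required argument was not set.",
        })

        fmt.Println(diags.HasErrors()) // true: one DiagError is present
        fmt.Println(diags.Error())     // first diagnostic plus a count of the rest
    }
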
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/diagnostic_text.go b/vendor/github.com/hashicorp/hcl2/hcl/diagnostic_text.go
new file mode 100644
index 0000000..dfa473a
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/diagnostic_text.go
@@ -0,0 +1,168 @@
1package hcl
2
3import (
4 "bufio"
5 "errors"
6 "fmt"
7 "io"
8
9 wordwrap "github.com/mitchellh/go-wordwrap"
10)
11
12type diagnosticTextWriter struct {
13 files map[string]*File
14 wr io.Writer
15 width uint
16 color bool
17}
18
19// NewDiagnosticTextWriter creates a DiagnosticWriter that writes diagnostics
20// to the given writer as formatted text.
21//
22// It is designed to produce text appropriate to print in a monospaced font
23// in a terminal of a particular width, or optionally with no width limit.
24//
25// The given width may be zero to disable word-wrapping of the detail text
26// and truncation of source code snippets.
27//
28// If color is set to true, the output will include VT100 escape sequences to
29// color-code the severity indicators. It is suggested to turn this off if
30// the target writer is not a terminal.
31func NewDiagnosticTextWriter(wr io.Writer, files map[string]*File, width uint, color bool) DiagnosticWriter {
32 return &diagnosticTextWriter{
33 files: files,
34 wr: wr,
35 width: width,
36 color: color,
37 }
38}
39
40func (w *diagnosticTextWriter) WriteDiagnostic(diag *Diagnostic) error {
41 if diag == nil {
42 return errors.New("nil diagnostic")
43 }
44
45 var colorCode, highlightCode, resetCode string
46 if w.color {
47 switch diag.Severity {
48 case DiagError:
49 colorCode = "\x1b[31m"
50 case DiagWarning:
51 colorCode = "\x1b[33m"
52 }
53 resetCode = "\x1b[0m"
54 highlightCode = "\x1b[1;4m"
55 }
56
57 var severityStr string
58 switch diag.Severity {
59 case DiagError:
60 severityStr = "Error"
61 case DiagWarning:
62 severityStr = "Warning"
63 default:
64 // should never happen
65 severityStr = "???????"
66 }
67
68 fmt.Fprintf(w.wr, "%s%s%s: %s\n\n", colorCode, severityStr, resetCode, diag.Summary)
69
70 if diag.Subject != nil {
71 snipRange := *diag.Subject
72 highlightRange := snipRange
73 if diag.Context != nil {
74 // Show enough of the source code to include both the subject
75 // and context ranges, which overlap in all reasonable
76 // situations.
77 snipRange = RangeOver(snipRange, *diag.Context)
78 }
79 // We can't illustrate an empty range, so we'll turn such ranges into
80 // single-character ranges, which might not be totally valid (may point
81 // off the end of a line, or off the end of the file) but are good
82 // enough for the bounds checks we do below.
83 if snipRange.Empty() {
84 snipRange.End.Byte++
85 snipRange.End.Column++
86 }
87 if highlightRange.Empty() {
88 highlightRange.End.Byte++
89 highlightRange.End.Column++
90 }
91
92 file := w.files[diag.Subject.Filename]
93 if file == nil || file.Bytes == nil {
94 fmt.Fprintf(w.wr, " on %s line %d:\n (source code not available)\n\n", diag.Subject.Filename, diag.Subject.Start.Line)
95 } else {
96
97 var contextLine string
98 if diag.Subject != nil {
99 contextLine = contextString(file, diag.Subject.Start.Byte)
100 if contextLine != "" {
101 contextLine = ", in " + contextLine
102 }
103 }
104
105 fmt.Fprintf(w.wr, " on %s line %d%s:\n", diag.Subject.Filename, diag.Subject.Start.Line, contextLine)
106
107 src := file.Bytes
108 sc := NewRangeScanner(src, diag.Subject.Filename, bufio.ScanLines)
109
110 for sc.Scan() {
111 lineRange := sc.Range()
112 if !lineRange.Overlaps(snipRange) {
113 continue
114 }
115
116 beforeRange, highlightedRange, afterRange := lineRange.PartitionAround(highlightRange)
117 if highlightedRange.Empty() {
118 fmt.Fprintf(w.wr, "%4d: %s\n", lineRange.Start.Line, sc.Bytes())
119 } else {
120 before := beforeRange.SliceBytes(src)
121 highlighted := highlightedRange.SliceBytes(src)
122 after := afterRange.SliceBytes(src)
123 fmt.Fprintf(
124 w.wr, "%4d: %s%s%s%s%s\n",
125 lineRange.Start.Line,
126 before,
127 highlightCode, highlighted, resetCode,
128 after,
129 )
130 }
131
132 }
133
134 w.wr.Write([]byte{'\n'})
135 }
136 }
137
138 if diag.Detail != "" {
139 detail := diag.Detail
140 if w.width != 0 {
141 detail = wordwrap.WrapString(detail, w.width)
142 }
143 fmt.Fprintf(w.wr, "%s\n\n", detail)
144 }
145
146 return nil
147}
148
149func (w *diagnosticTextWriter) WriteDiagnostics(diags Diagnostics) error {
150 for _, diag := range diags {
151 err := w.WriteDiagnostic(diag)
152 if err != nil {
153 return err
154 }
155 }
156 return nil
157}
158
159func contextString(file *File, offset int) string {
160 type contextStringer interface {
161 ContextString(offset int) string
162 }
163
164 if cser, ok := file.Nav.(contextStringer); ok {
165 return cser.ContextString(offset)
166 }
167 return ""
168}
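
One plausible way to wire the writer up, assuming the companion hclparse package: parser.Files() supplies the source map so snippets can be printed, and the width and color arguments are arbitrary choices for the example.

    package main

    import (
        "os"

        "github.com/hashicorp/hcl2/hcl"
        "github.com/hashicorp/hcl2/hclparse"
    )

    func main() {
        parser := hclparse.NewParser()
        // Deliberately invalid source so that some diagnostics are produced.
        _, diags := parser.ParseHCL([]byte("count = \n"), "broken.hcl")

        wr := hcl.NewDiagnosticTextWriter(os.Stderr, parser.Files(), 78, true)
        wr.WriteDiagnostics(diags)
    }
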
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/didyoumean.go b/vendor/github.com/hashicorp/hcl2/hcl/didyoumean.go
new file mode 100644
index 0000000..c128334
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/didyoumean.go
@@ -0,0 +1,24 @@
1package hcl
2
3import (
4 "github.com/agext/levenshtein"
5)
6
7// nameSuggestion tries to find a name from the given slice of suggested names
8// that is close to the given name and returns it if found. If no suggestion
9// is close enough, returns the empty string.
10//
11// The suggestions are tried in order, so earlier suggestions take precedence
12// if the given string is similar to two or more suggestions.
13//
14// This function is intended to be used with a relatively-small number of
15// suggestions. It's not optimized for hundreds or thousands of them.
16func nameSuggestion(given string, suggestions []string) string {
17 for _, suggestion := range suggestions {
18 dist := levenshtein.Distance(given, suggestion, nil)
19 if dist < 3 { // threshold determined experimentally
20 return suggestion
21 }
22 }
23 return ""
24}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/doc.go b/vendor/github.com/hashicorp/hcl2/hcl/doc.go
new file mode 100644
index 0000000..01318c9
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/doc.go
@@ -0,0 +1 @@
package hcl
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/eval_context.go b/vendor/github.com/hashicorp/hcl2/hcl/eval_context.go
new file mode 100644
index 0000000..915910a
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/eval_context.go
@@ -0,0 +1,25 @@
1package hcl
2
3import (
4 "github.com/zclconf/go-cty/cty"
5 "github.com/zclconf/go-cty/cty/function"
6)
7
8// An EvalContext provides the variables and functions that should be used
9// to evaluate an expression.
10type EvalContext struct {
11 Variables map[string]cty.Value
12 Functions map[string]function.Function
13 parent *EvalContext
14}
15
16// NewChild returns a new EvalContext that is a child of the receiver.
17func (ctx *EvalContext) NewChild() *EvalContext {
18 return &EvalContext{parent: ctx}
19}
20
21// Parent returns the parent of the receiver, or nil if the receiver has
22// no parent.
23func (ctx *EvalContext) Parent() *EvalContext {
24 return ctx.parent
25}
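
A minimal sketch of supplying variables through an EvalContext while decoding; the variable name, template and struct are illustrative, and the companion gohcl and hclparse packages are assumed.

    package main

    import (
        "fmt"

        "github.com/hashicorp/hcl2/gohcl"
        "github.com/hashicorp/hcl2/hcl"
        "github.com/hashicorp/hcl2/hclparse"
        "github.com/zclconf/go-cty/cty"
    )

    func main() {
        parser := hclparse.NewParser()
        f, diags := parser.ParseHCL([]byte(`greeting = "Hello, ${name}!"`), "example.hcl")
        if diags.HasErrors() {
            panic(diags)
        }

        // Variables (and, if needed, Functions) made available to expressions.
        ctx := &hcl.EvalContext{
            Variables: map[string]cty.Value{
                "name": cty.StringVal("world"),
            },
        }

        var out struct {
            Greeting string `hcl:"greeting,attr"`
        }
        if diags := gohcl.DecodeBody(f.Body, ctx, &out); diags.HasErrors() {
            panic(diags)
        }
        fmt.Println(out.Greeting) // Hello, world!
    }
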
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/expr_call.go b/vendor/github.com/hashicorp/hcl2/hcl/expr_call.go
new file mode 100644
index 0000000..6963fba
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/expr_call.go
@@ -0,0 +1,46 @@
1package hcl
2
3// ExprCall tests if the given expression is a function call and,
4// if so, extracts the function name and the expressions that represent
5// the arguments. If the given expression is not statically a function call,
6// error diagnostics are returned.
7//
8// A particular Expression implementation can support this function by
9// offering a method called ExprCall that takes no arguments and returns
10// *StaticCall. This method should return nil if a static call cannot
11// be extracted. Alternatively, an implementation can support
12// UnwrapExpression to delegate handling of this function to a wrapped
13// Expression object.
14func ExprCall(expr Expression) (*StaticCall, Diagnostics) {
15 type exprCall interface {
16 ExprCall() *StaticCall
17 }
18
19 physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool {
20 _, supported := expr.(exprCall)
21 return supported
22 })
23
24 if exC, supported := physExpr.(exprCall); supported {
25 if call := exC.ExprCall(); call != nil {
26 return call, nil
27 }
28 }
29 return nil, Diagnostics{
30 &Diagnostic{
31 Severity: DiagError,
32 Summary: "Invalid expression",
33 Detail: "A static function call is required.",
34 Subject: expr.StartRange().Ptr(),
35 },
36 }
37}
38
39// StaticCall represents a function call that was extracted statically from
40// an expression using ExprCall.
41type StaticCall struct {
42 Name string
43 NameRange Range
44 Arguments []Expression
45 ArgsRange Range
46}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/expr_list.go b/vendor/github.com/hashicorp/hcl2/hcl/expr_list.go
new file mode 100644
index 0000000..d05cca0
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/expr_list.go
@@ -0,0 +1,37 @@
1package hcl
2
3// ExprList tests if the given expression is a static list construct and,
4// if so, extracts the expressions that represent the list elements.
5// If the given expression is not a static list, error diagnostics are
6// returned.
7//
8// A particular Expression implementation can support this function by
9// offering a method called ExprList that takes no arguments and returns
10// []Expression. This method should return nil if a static list cannot
11// be extracted. Alternatively, an implementation can support
12// UnwrapExpression to delegate handling of this function to a wrapped
13// Expression object.
14func ExprList(expr Expression) ([]Expression, Diagnostics) {
15 type exprList interface {
16 ExprList() []Expression
17 }
18
19 physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool {
20 _, supported := expr.(exprList)
21 return supported
22 })
23
24 if exL, supported := physExpr.(exprList); supported {
25 if list := exL.ExprList(); list != nil {
26 return list, nil
27 }
28 }
29 return nil, Diagnostics{
30 &Diagnostic{
31 Severity: DiagError,
32 Summary: "Invalid expression",
33 Detail: "A static list expression is required.",
34 Subject: expr.StartRange().Ptr(),
35 },
36 }
37}
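
A quick sketch of the calling pattern (the configuration and names are invented, hclparse assumed):

    package main

    import (
        "fmt"

        "github.com/hashicorp/hcl2/hcl"
        "github.com/hashicorp/hcl2/hclparse"
    )

    func main() {
        parser := hclparse.NewParser()
        f, diags := parser.ParseHCL([]byte(`ports = [80, 443, 8080]`), "example.hcl")
        if diags.HasErrors() {
            panic(diags)
        }

        attrs, diags := f.Body.JustAttributes()
        if diags.HasErrors() {
            panic(diags)
        }

        // ExprList succeeds only when the expression is syntactically a list or
        // tuple constructor; each element comes back undecoded so the caller can
        // evaluate or statically analyze it individually.
        exprs, diags := hcl.ExprList(attrs["ports"].Expr)
        fmt.Println(len(exprs), diags.HasErrors()) // 3 false
    }
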
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/expr_map.go b/vendor/github.com/hashicorp/hcl2/hcl/expr_map.go
new file mode 100644
index 0000000..96d1ce4
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/expr_map.go
@@ -0,0 +1,44 @@
1package hcl
2
3// ExprMap tests if the given expression is a static map construct and,
4// if so, extracts the expressions that represent the map elements.
5// If the given expression is not a static map, error diagnostics are
6// returned.
7//
8// A particular Expression implementation can support this function by
9// offering a method called ExprMap that takes no arguments and returns
10// []KeyValuePair. This method should return nil if a static map cannot
11// be extracted. Alternatively, an implementation can support
12// UnwrapExpression to delegate handling of this function to a wrapped
13// Expression object.
14func ExprMap(expr Expression) ([]KeyValuePair, Diagnostics) {
15 type exprMap interface {
16 ExprMap() []KeyValuePair
17 }
18
19 physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool {
20 _, supported := expr.(exprMap)
21 return supported
22 })
23
24 if exM, supported := physExpr.(exprMap); supported {
25 if pairs := exM.ExprMap(); pairs != nil {
26 return pairs, nil
27 }
28 }
29 return nil, Diagnostics{
30 &Diagnostic{
31 Severity: DiagError,
32 Summary: "Invalid expression",
33 Detail: "A static map expression is required.",
34 Subject: expr.StartRange().Ptr(),
35 },
36 }
37}
38
39// KeyValuePair represents a pair of expressions that serve as a single item
40// within a map or object definition construct.
41type KeyValuePair struct {
42 Key Expression
43 Value Expression
44}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/expr_unwrap.go b/vendor/github.com/hashicorp/hcl2/hcl/expr_unwrap.go
new file mode 100644
index 0000000..6d5d205
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/expr_unwrap.go
@@ -0,0 +1,68 @@
1package hcl
2
3type unwrapExpression interface {
4 UnwrapExpression() Expression
5}
6
7// UnwrapExpression removes any "wrapper" expressions from the given expression,
8// to recover the representation of the physical expression given in source
9// code.
10//
11// Sometimes wrapping expressions are used to modify expression behavior, e.g.
12// in extensions that need to make some local variables available to certain
13// sub-trees of the configuration. This can make it difficult to reliably
14// type-assert on the physical AST types used by the underlying syntax.
15//
16// Unwrapping an expression may modify its behavior by stripping away any
17// additional constraints or capabilities being applied to the Value and
18// Variables methods, so this function should generally only be used prior
19// to operations that concern themselves with the static syntax of the input
20// configuration, and not with the effective value of the expression.
21//
22// Wrapper expression types must support unwrapping by implementing a method
23// called UnwrapExpression that takes no arguments and returns the embedded
24// Expression. Implementations of this method should peel away only one level
25// of wrapping, if multiple are present. This method may return nil to
26// indicate _dynamically_ that no wrapped expression is available, for
27// expression types that might only behave as wrappers in certain cases.
28func UnwrapExpression(expr Expression) Expression {
29 for {
30 unwrap, wrapped := expr.(unwrapExpression)
31 if !wrapped {
32 return expr
33 }
34 innerExpr := unwrap.UnwrapExpression()
35 if innerExpr == nil {
36 return expr
37 }
38 expr = innerExpr
39 }
40}
41
42// UnwrapExpressionUntil is similar to UnwrapExpression except it gives the
43// caller an opportunity to test each level of unwrapping to see whether a
44// particular expression is accepted.
45//
46// This could be used, for example, to unwrap until a particular other
47// interface is satisfied, regardless of what wrapping level it is satisfied
48// at.
49//
50// The given callback function must return false to continue unwrapping, or
51// true to accept and return the proposed expression given. If the callback
52// function rejects even the final, physical expression then the result of
53// this function is nil.
54func UnwrapExpressionUntil(expr Expression, until func(Expression) bool) Expression {
55 for {
56 if until(expr) {
57 return expr
58 }
59 unwrap, wrapped := expr.(unwrapExpression)
60 if !wrapped {
61 return nil
62 }
63 expr = unwrap.UnwrapExpression()
64 if expr == nil {
65 return nil
66 }
67 }
68}
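
A sketch of the wrapper contract using a hypothetical annotatedExpr type; the wrapper, its fields, and the sample configuration are invented for illustration (error handling elided).

    package main

    import (
        "fmt"

        "github.com/hashicorp/hcl2/hcl"
        "github.com/hashicorp/hcl2/hclparse"
    )

    // annotatedExpr stands in for the kind of wrapper an extension might use.
    // Embedding hcl.Expression keeps the full interface; UnwrapExpression peels
    // away exactly one level, as the contract above requires.
    type annotatedExpr struct {
        hcl.Expression
        note string
    }

    func (e annotatedExpr) UnwrapExpression() hcl.Expression {
        return e.Expression
    }

    func main() {
        parser := hclparse.NewParser()
        f, _ := parser.ParseHCL([]byte(`name = var.name`), "example.hcl")
        attrs, _ := f.Body.JustAttributes()

        wrapped := annotatedExpr{Expression: attrs["name"].Expr, note: "example"}

        // Recover the physical, syntax-level expression behind the wrapper.
        phys := hcl.UnwrapExpression(wrapped)
        fmt.Printf("%T\n", phys)
    }
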
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/didyoumean.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/didyoumean.go
new file mode 100644
index 0000000..ccc1c0a
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/didyoumean.go
@@ -0,0 +1,24 @@
1package hclsyntax
2
3import (
4 "github.com/agext/levenshtein"
5)
6
7// nameSuggestion tries to find a name from the given slice of suggested names
8// that is close to the given name and returns it if found. If no suggestion
9// is close enough, returns the empty string.
10//
11// The suggestions are tried in order, so earlier suggestions take precedence
12// if the given string is similar to two or more suggestions.
13//
14// This function is intended to be used with a relatively-small number of
15// suggestions. It's not optimized for hundreds or thousands of them.
16func nameSuggestion(given string, suggestions []string) string {
17 for _, suggestion := range suggestions {
18 dist := levenshtein.Distance(given, suggestion, nil)
19 if dist < 3 { // threshold determined experimentally
20 return suggestion
21 }
22 }
23 return ""
24}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/doc.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/doc.go
new file mode 100644
index 0000000..617bc29
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/doc.go
@@ -0,0 +1,7 @@
1// Package hclsyntax contains the parser, AST, etc for HCL's native language,
2// as opposed to the JSON variant.
3//
4// In normal use applications should rarely depend on this package directly,
5// instead preferring the higher-level interface of the main hcl package and
6// its companion package hclparse.
7package hclsyntax
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression.go
new file mode 100644
index 0000000..cfc7cd9
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression.go
@@ -0,0 +1,1275 @@
1package hclsyntax
2
3import (
4 "fmt"
5
6 "github.com/hashicorp/hcl2/hcl"
7 "github.com/zclconf/go-cty/cty"
8 "github.com/zclconf/go-cty/cty/convert"
9 "github.com/zclconf/go-cty/cty/function"
10)
11
12// Expression is the abstract type for nodes that behave as HCL expressions.
13type Expression interface {
14 Node
15
16 // The hcl.Expression methods are duplicated here, rather than simply
17 // embedded, because both Node and hcl.Expression have a Range method
18 // and so they conflict.
19
20 Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics)
21 Variables() []hcl.Traversal
22 StartRange() hcl.Range
23}
24
25// Assert that Expression implements hcl.Expression
26var assertExprImplExpr hcl.Expression = Expression(nil)
27
28// LiteralValueExpr is an expression that just always returns a given value.
29type LiteralValueExpr struct {
30 Val cty.Value
31 SrcRange hcl.Range
32}
33
34func (e *LiteralValueExpr) walkChildNodes(w internalWalkFunc) {
35 // Literal values have no child nodes
36}
37
38func (e *LiteralValueExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
39 return e.Val, nil
40}
41
42func (e *LiteralValueExpr) Range() hcl.Range {
43 return e.SrcRange
44}
45
46func (e *LiteralValueExpr) StartRange() hcl.Range {
47 return e.SrcRange
48}
49
50// Implementation for hcl.AbsTraversalForExpr.
51func (e *LiteralValueExpr) AsTraversal() hcl.Traversal {
52 // This one's a little weird: the contract for AsTraversal is to interpret
53 // an expression as if it were traversal syntax, and traversal syntax
54 // doesn't have the special keywords "null", "true", and "false" so these
55 // are expected to be treated like variables in that case.
56 // Since our parser already turned them into LiteralValueExpr by the time
57 // we get here, we need to undo this and infer the name that would've
58 // originally led to our value.
59 // We don't do anything for any other values, since they don't overlap
60 // with traversal roots.
61
62 if e.Val.IsNull() {
63 // In practice the parser only generates null values of the dynamic
64 // pseudo-type for literals, so we can safely assume that any null
65 // was originally the keyword "null".
66 return hcl.Traversal{
67 hcl.TraverseRoot{
68 Name: "null",
69 SrcRange: e.SrcRange,
70 },
71 }
72 }
73
74 switch e.Val {
75 case cty.True:
76 return hcl.Traversal{
77 hcl.TraverseRoot{
78 Name: "true",
79 SrcRange: e.SrcRange,
80 },
81 }
82 case cty.False:
83 return hcl.Traversal{
84 hcl.TraverseRoot{
85 Name: "false",
86 SrcRange: e.SrcRange,
87 },
88 }
89 default:
90 // No traversal is possible for any other value.
91 return nil
92 }
93}
94
95// ScopeTraversalExpr is an Expression that retrieves a value from the scope
96// using a traversal.
97type ScopeTraversalExpr struct {
98 Traversal hcl.Traversal
99 SrcRange hcl.Range
100}
101
102func (e *ScopeTraversalExpr) walkChildNodes(w internalWalkFunc) {
103 // Scope traversals have no child nodes
104}
105
106func (e *ScopeTraversalExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
107 return e.Traversal.TraverseAbs(ctx)
108}
109
110func (e *ScopeTraversalExpr) Range() hcl.Range {
111 return e.SrcRange
112}
113
114func (e *ScopeTraversalExpr) StartRange() hcl.Range {
115 return e.SrcRange
116}
117
118// Implementation for hcl.AbsTraversalForExpr.
119func (e *ScopeTraversalExpr) AsTraversal() hcl.Traversal {
120 return e.Traversal
121}
122
123// RelativeTraversalExpr is an Expression that retrieves a value from another
124// value using a _relative_ traversal.
125type RelativeTraversalExpr struct {
126 Source Expression
127 Traversal hcl.Traversal
128 SrcRange hcl.Range
129}
130
131func (e *RelativeTraversalExpr) walkChildNodes(w internalWalkFunc) {
132 // Scope traversals have no child nodes
133}
134
135func (e *RelativeTraversalExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
136 src, diags := e.Source.Value(ctx)
137 ret, travDiags := e.Traversal.TraverseRel(src)
138 diags = append(diags, travDiags...)
139 return ret, diags
140}
141
142func (e *RelativeTraversalExpr) Range() hcl.Range {
143 return e.SrcRange
144}
145
146func (e *RelativeTraversalExpr) StartRange() hcl.Range {
147 return e.SrcRange
148}
149
150// Implementation for hcl.AbsTraversalForExpr.
151func (e *RelativeTraversalExpr) AsTraversal() hcl.Traversal {
152 // We can produce a traversal only if our source can.
153 st, diags := hcl.AbsTraversalForExpr(e.Source)
154 if diags.HasErrors() {
155 return nil
156 }
157
158 ret := make(hcl.Traversal, len(st)+len(e.Traversal))
159 copy(ret, st)
160 copy(ret[len(st):], e.Traversal)
161 return ret
162}
163
164// FunctionCallExpr is an Expression that calls a function from the EvalContext
165// and returns its result.
166type FunctionCallExpr struct {
167 Name string
168 Args []Expression
169
170 // If true, the final argument should be a tuple, list or set which will
171 // expand to be one argument per element.
172 ExpandFinal bool
173
174 NameRange hcl.Range
175 OpenParenRange hcl.Range
176 CloseParenRange hcl.Range
177}
178
179func (e *FunctionCallExpr) walkChildNodes(w internalWalkFunc) {
180 for i, arg := range e.Args {
181 e.Args[i] = w(arg).(Expression)
182 }
183}
184
185func (e *FunctionCallExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
186 var diags hcl.Diagnostics
187
188 var f function.Function
189 exists := false
190 hasNonNilMap := false
191 thisCtx := ctx
192 for thisCtx != nil {
193 if thisCtx.Functions == nil {
194 thisCtx = thisCtx.Parent()
195 continue
196 }
197 hasNonNilMap = true
198 f, exists = thisCtx.Functions[e.Name]
199 if exists {
200 break
201 }
202 thisCtx = thisCtx.Parent()
203 }
204
205 if !exists {
206 if !hasNonNilMap {
207 return cty.DynamicVal, hcl.Diagnostics{
208 {
209 Severity: hcl.DiagError,
210 Summary: "Function calls not allowed",
211 Detail: "Functions may not be called here.",
212 Subject: e.Range().Ptr(),
213 },
214 }
215 }
216
217 avail := make([]string, 0, len(ctx.Functions))
218 for name := range ctx.Functions {
219 avail = append(avail, name)
220 }
221 suggestion := nameSuggestion(e.Name, avail)
222 if suggestion != "" {
223 suggestion = fmt.Sprintf(" Did you mean %q?", suggestion)
224 }
225
226 return cty.DynamicVal, hcl.Diagnostics{
227 {
228 Severity: hcl.DiagError,
229 Summary: "Call to unknown function",
230 Detail: fmt.Sprintf("There is no function named %q.%s", e.Name, suggestion),
231 Subject: &e.NameRange,
232 Context: e.Range().Ptr(),
233 },
234 }
235 }
236
237 params := f.Params()
238 varParam := f.VarParam()
239
240 args := e.Args
241 if e.ExpandFinal {
242 if len(args) < 1 {
243 // should never happen if the parser is behaving
244 panic("ExpandFinal set on function call with no arguments")
245 }
246 expandExpr := args[len(args)-1]
247 expandVal, expandDiags := expandExpr.Value(ctx)
248 diags = append(diags, expandDiags...)
249 if expandDiags.HasErrors() {
250 return cty.DynamicVal, diags
251 }
252
253 switch {
254 case expandVal.Type().IsTupleType() || expandVal.Type().IsListType() || expandVal.Type().IsSetType():
255 if expandVal.IsNull() {
256 diags = append(diags, &hcl.Diagnostic{
257 Severity: hcl.DiagError,
258 Summary: "Invalid expanding argument value",
259 Detail: "The expanding argument (indicated by ...) must not be null.",
260 Context: expandExpr.Range().Ptr(),
261 Subject: e.Range().Ptr(),
262 })
263 return cty.DynamicVal, diags
264 }
265 if !expandVal.IsKnown() {
266 return cty.DynamicVal, diags
267 }
268
269 newArgs := make([]Expression, 0, (len(args)-1)+expandVal.LengthInt())
270 newArgs = append(newArgs, args[:len(args)-1]...)
271 it := expandVal.ElementIterator()
272 for it.Next() {
273 _, val := it.Element()
274 newArgs = append(newArgs, &LiteralValueExpr{
275 Val: val,
276 SrcRange: expandExpr.Range(),
277 })
278 }
279 args = newArgs
280 default:
281 diags = append(diags, &hcl.Diagnostic{
282 Severity: hcl.DiagError,
283 Summary: "Invalid expanding argument value",
284 Detail: "The expanding argument (indicated by ...) must be of a tuple, list, or set type.",
285 Context: expandExpr.Range().Ptr(),
286 Subject: e.Range().Ptr(),
287 })
288 return cty.DynamicVal, diags
289 }
290 }
291
292 if len(args) < len(params) {
293 missing := params[len(args)]
294 qual := ""
295 if varParam != nil {
296 qual = " at least"
297 }
298 return cty.DynamicVal, hcl.Diagnostics{
299 {
300 Severity: hcl.DiagError,
301 Summary: "Not enough function arguments",
302 Detail: fmt.Sprintf(
303 "Function %q expects%s %d argument(s). Missing value for %q.",
304 e.Name, qual, len(params), missing.Name,
305 ),
306 Subject: &e.CloseParenRange,
307 Context: e.Range().Ptr(),
308 },
309 }
310 }
311
312 if varParam == nil && len(args) > len(params) {
313 return cty.DynamicVal, hcl.Diagnostics{
314 {
315 Severity: hcl.DiagError,
316 Summary: "Too many function arguments",
317 Detail: fmt.Sprintf(
318 "Function %q expects only %d argument(s).",
319 e.Name, len(params),
320 ),
321 Subject: args[len(params)].StartRange().Ptr(),
322 Context: e.Range().Ptr(),
323 },
324 }
325 }
326
327 argVals := make([]cty.Value, len(args))
328
329 for i, argExpr := range args {
330 var param *function.Parameter
331 if i < len(params) {
332 param = &params[i]
333 } else {
334 param = varParam
335 }
336
337 val, argDiags := argExpr.Value(ctx)
338 if len(argDiags) > 0 {
339 diags = append(diags, argDiags...)
340 }
341
342 // Try to convert our value to the parameter type
343 val, err := convert.Convert(val, param.Type)
344 if err != nil {
345 diags = append(diags, &hcl.Diagnostic{
346 Severity: hcl.DiagError,
347 Summary: "Invalid function argument",
348 Detail: fmt.Sprintf(
349 "Invalid value for %q parameter: %s.",
350 param.Name, err,
351 ),
352 Subject: argExpr.StartRange().Ptr(),
353 Context: e.Range().Ptr(),
354 })
355 }
356
357 argVals[i] = val
358 }
359
360 if diags.HasErrors() {
361 // Don't try to execute the function if we already have errors with
362 // the arguments, because the result will probably be a confusing
363 // error message.
364 return cty.DynamicVal, diags
365 }
366
367 resultVal, err := f.Call(argVals)
368 if err != nil {
369 switch terr := err.(type) {
370 case function.ArgError:
371 i := terr.Index
372 var param *function.Parameter
373 if i < len(params) {
374 param = &params[i]
375 } else {
376 param = varParam
377 }
378 argExpr := e.Args[i]
379
380 // TODO: we should also unpick a PathError here and show the
381 // path to the deep value where the error was detected.
382 diags = append(diags, &hcl.Diagnostic{
383 Severity: hcl.DiagError,
384 Summary: "Invalid function argument",
385 Detail: fmt.Sprintf(
386 "Invalid value for %q parameter: %s.",
387 param.Name, err,
388 ),
389 Subject: argExpr.StartRange().Ptr(),
390 Context: e.Range().Ptr(),
391 })
392
393 default:
394 diags = append(diags, &hcl.Diagnostic{
395 Severity: hcl.DiagError,
396 Summary: "Error in function call",
397 Detail: fmt.Sprintf(
398 "Call to function %q failed: %s.",
399 e.Name, err,
400 ),
401 Subject: e.StartRange().Ptr(),
402 Context: e.Range().Ptr(),
403 })
404 }
405
406 return cty.DynamicVal, diags
407 }
408
409 return resultVal, diags
410}
411
412func (e *FunctionCallExpr) Range() hcl.Range {
413 return hcl.RangeBetween(e.NameRange, e.CloseParenRange)
414}
415
416func (e *FunctionCallExpr) StartRange() hcl.Range {
417 return hcl.RangeBetween(e.NameRange, e.OpenParenRange)
418}
419
420// Implementation for hcl.ExprCall.
421func (e *FunctionCallExpr) ExprCall() *hcl.StaticCall {
422 ret := &hcl.StaticCall{
423 Name: e.Name,
424 NameRange: e.NameRange,
425 Arguments: make([]hcl.Expression, len(e.Args)),
426 ArgsRange: hcl.RangeBetween(e.OpenParenRange, e.CloseParenRange),
427 }
428 // Need to convert our own Expression objects into hcl.Expression.
429 for i, arg := range e.Args {
430 ret.Arguments[i] = arg
431 }
432 return ret
433}
434
435type ConditionalExpr struct {
436 Condition Expression
437 TrueResult Expression
438 FalseResult Expression
439
440 SrcRange hcl.Range
441}
442
443func (e *ConditionalExpr) walkChildNodes(w internalWalkFunc) {
444 e.Condition = w(e.Condition).(Expression)
445 e.TrueResult = w(e.TrueResult).(Expression)
446 e.FalseResult = w(e.FalseResult).(Expression)
447}
448
449func (e *ConditionalExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
450 trueResult, trueDiags := e.TrueResult.Value(ctx)
451 falseResult, falseDiags := e.FalseResult.Value(ctx)
452 var diags hcl.Diagnostics
453
454 // Try to find a type that both results can be converted to.
455 resultType, convs := convert.UnifyUnsafe([]cty.Type{trueResult.Type(), falseResult.Type()})
456 if resultType == cty.NilType {
457 return cty.DynamicVal, hcl.Diagnostics{
458 {
459 Severity: hcl.DiagError,
460 Summary: "Inconsistent conditional result types",
461 Detail: fmt.Sprintf(
462 // FIXME: Need a helper function for showing natural-language type diffs,
463 // since this will generate some useless messages in some cases, like
464 // "These expressions are object and object respectively" if the
465 // object types don't exactly match.
466 "The true and false result expressions must have consistent types. The given expressions are %s and %s, respectively.",
467 trueResult.Type(), falseResult.Type(),
468 ),
469 Subject: hcl.RangeBetween(e.TrueResult.Range(), e.FalseResult.Range()).Ptr(),
470 Context: &e.SrcRange,
471 },
472 }
473 }
474
475 condResult, condDiags := e.Condition.Value(ctx)
476 diags = append(diags, condDiags...)
477 if condResult.IsNull() {
478 diags = append(diags, &hcl.Diagnostic{
479 Severity: hcl.DiagError,
480 Summary: "Null condition",
481 Detail: "The condition value is null. Conditions must either be true or false.",
482 Subject: e.Condition.Range().Ptr(),
483 Context: &e.SrcRange,
484 })
485 return cty.UnknownVal(resultType), diags
486 }
487 if !condResult.IsKnown() {
488 return cty.UnknownVal(resultType), diags
489 }
490 condResult, err := convert.Convert(condResult, cty.Bool)
491 if err != nil {
492 diags = append(diags, &hcl.Diagnostic{
493 Severity: hcl.DiagError,
494 Summary: "Incorrect condition type",
495 Detail: fmt.Sprintf("The condition expression must be of type bool."),
496 Subject: e.Condition.Range().Ptr(),
497 Context: &e.SrcRange,
498 })
499 return cty.UnknownVal(resultType), diags
500 }
501
502 if condResult.True() {
503 diags = append(diags, trueDiags...)
504 if convs[0] != nil {
505 var err error
506 trueResult, err = convs[0](trueResult)
507 if err != nil {
508 // Unsafe conversion failed with the concrete result value
509 diags = append(diags, &hcl.Diagnostic{
510 Severity: hcl.DiagError,
511 Summary: "Inconsistent conditional result types",
512 Detail: fmt.Sprintf(
513 "The true result value has the wrong type: %s.",
514 err.Error(),
515 ),
516 Subject: e.TrueResult.Range().Ptr(),
517 Context: &e.SrcRange,
518 })
519 trueResult = cty.UnknownVal(resultType)
520 }
521 }
522 return trueResult, diags
523 } else {
524 diags = append(diags, falseDiags...)
525 if convs[1] != nil {
526 var err error
527 falseResult, err = convs[1](falseResult)
528 if err != nil {
529 // Unsafe conversion failed with the concrete result value
530 diags = append(diags, &hcl.Diagnostic{
531 Severity: hcl.DiagError,
532 Summary: "Inconsistent conditional result types",
533 Detail: fmt.Sprintf(
534 "The false result value has the wrong type: %s.",
535 err.Error(),
536 ),
537 Subject: e.FalseResult.Range().Ptr(),
538 Context: &e.SrcRange,
539 })
540 falseResult = cty.UnknownVal(resultType)
541 }
542 }
543 return falseResult, diags
544 }
545}
546
547func (e *ConditionalExpr) Range() hcl.Range {
548 return e.SrcRange
549}
550
551func (e *ConditionalExpr) StartRange() hcl.Range {
552 return e.Condition.StartRange()
553}
554
555type IndexExpr struct {
556 Collection Expression
557 Key Expression
558
559 SrcRange hcl.Range
560 OpenRange hcl.Range
561}
562
563func (e *IndexExpr) walkChildNodes(w internalWalkFunc) {
564 e.Collection = w(e.Collection).(Expression)
565 e.Key = w(e.Key).(Expression)
566}
567
568func (e *IndexExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
569 var diags hcl.Diagnostics
570 coll, collDiags := e.Collection.Value(ctx)
571 key, keyDiags := e.Key.Value(ctx)
572 diags = append(diags, collDiags...)
573 diags = append(diags, keyDiags...)
574
575 return hcl.Index(coll, key, &e.SrcRange)
576}
577
578func (e *IndexExpr) Range() hcl.Range {
579 return e.SrcRange
580}
581
582func (e *IndexExpr) StartRange() hcl.Range {
583 return e.OpenRange
584}
585
586type TupleConsExpr struct {
587 Exprs []Expression
588
589 SrcRange hcl.Range
590 OpenRange hcl.Range
591}
592
593func (e *TupleConsExpr) walkChildNodes(w internalWalkFunc) {
594 for i, expr := range e.Exprs {
595 e.Exprs[i] = w(expr).(Expression)
596 }
597}
598
599func (e *TupleConsExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
600 var vals []cty.Value
601 var diags hcl.Diagnostics
602
603 vals = make([]cty.Value, len(e.Exprs))
604 for i, expr := range e.Exprs {
605 val, valDiags := expr.Value(ctx)
606 vals[i] = val
607 diags = append(diags, valDiags...)
608 }
609
610 return cty.TupleVal(vals), diags
611}
612
613func (e *TupleConsExpr) Range() hcl.Range {
614 return e.SrcRange
615}
616
617func (e *TupleConsExpr) StartRange() hcl.Range {
618 return e.OpenRange
619}
620
621// Implementation for hcl.ExprList
622func (e *TupleConsExpr) ExprList() []hcl.Expression {
623 ret := make([]hcl.Expression, len(e.Exprs))
624 for i, expr := range e.Exprs {
625 ret[i] = expr
626 }
627 return ret
628}
629
630type ObjectConsExpr struct {
631 Items []ObjectConsItem
632
633 SrcRange hcl.Range
634 OpenRange hcl.Range
635}
636
637type ObjectConsItem struct {
638 KeyExpr Expression
639 ValueExpr Expression
640}
641
642func (e *ObjectConsExpr) walkChildNodes(w internalWalkFunc) {
643 for i, item := range e.Items {
644 e.Items[i].KeyExpr = w(item.KeyExpr).(Expression)
645 e.Items[i].ValueExpr = w(item.ValueExpr).(Expression)
646 }
647}
648
649func (e *ObjectConsExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
650 var vals map[string]cty.Value
651 var diags hcl.Diagnostics
652
653 // This will get set to true if we fail to produce any of our keys,
654 // either because they are actually unknown or if the evaluation produces
655 // errors. In all of these cases we must return DynamicPseudoType because
656 // we're unable to know the full set of keys our object has, and thus
657 // we can't produce a complete value of the intended type.
658 //
659 // We still evaluate all of the item keys and values to make sure that we
660 // get as complete as possible a set of diagnostics.
661 known := true
662
663 vals = make(map[string]cty.Value, len(e.Items))
664 for _, item := range e.Items {
665 key, keyDiags := item.KeyExpr.Value(ctx)
666 diags = append(diags, keyDiags...)
667
668 val, valDiags := item.ValueExpr.Value(ctx)
669 diags = append(diags, valDiags...)
670
671 if keyDiags.HasErrors() {
672 known = false
673 continue
674 }
675
676 if key.IsNull() {
677 diags = append(diags, &hcl.Diagnostic{
678 Severity: hcl.DiagError,
679 Summary: "Null value as key",
680 Detail: "Can't use a null value as a key.",
681 Subject: item.ValueExpr.Range().Ptr(),
682 })
683 known = false
684 continue
685 }
686
687 var err error
688 key, err = convert.Convert(key, cty.String)
689 if err != nil {
690 diags = append(diags, &hcl.Diagnostic{
691 Severity: hcl.DiagError,
692 Summary: "Incorrect key type",
693 Detail: fmt.Sprintf("Can't use this value as a key: %s.", err.Error()),
694 Subject: item.ValueExpr.Range().Ptr(),
695 })
696 known = false
697 continue
698 }
699
700 if !key.IsKnown() {
701 known = false
702 continue
703 }
704
705 keyStr := key.AsString()
706
707 vals[keyStr] = val
708 }
709
710 if !known {
711 return cty.DynamicVal, diags
712 }
713
714 return cty.ObjectVal(vals), diags
715}
716
717func (e *ObjectConsExpr) Range() hcl.Range {
718 return e.SrcRange
719}
720
721func (e *ObjectConsExpr) StartRange() hcl.Range {
722 return e.OpenRange
723}
724
725// Implementation for hcl.ExprMap
726func (e *ObjectConsExpr) ExprMap() []hcl.KeyValuePair {
727 ret := make([]hcl.KeyValuePair, len(e.Items))
728 for i, item := range e.Items {
729 ret[i] = hcl.KeyValuePair{
730 Key: item.KeyExpr,
731 Value: item.ValueExpr,
732 }
733 }
734 return ret
735}
736
737// ObjectConsKeyExpr is a special wrapper used only for ObjectConsExpr keys,
738// which deals with the special case that a naked identifier in that position
739// must be interpreted as a literal string rather than evaluated directly.
740type ObjectConsKeyExpr struct {
741 Wrapped Expression
742}
743
744func (e *ObjectConsKeyExpr) literalName() string {
745 // This is our logic for deciding whether to behave like a literal string.
746 // We lean on our AbsTraversalForExpr implementation here, which already
747 // deals with some awkward cases like the expression being the result
748 // of the keywords "null", "true" and "false" which we'd want to interpret
749 // as keys here too.
750 return hcl.ExprAsKeyword(e.Wrapped)
751}
752
753func (e *ObjectConsKeyExpr) walkChildNodes(w internalWalkFunc) {
754 // We only treat our wrapped expression as a real expression if we're
755 // not going to interpret it as a literal.
756 if e.literalName() == "" {
757 e.Wrapped = w(e.Wrapped).(Expression)
758 }
759}
760
761func (e *ObjectConsKeyExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
762 if ln := e.literalName(); ln != "" {
763 return cty.StringVal(ln), nil
764 }
765 return e.Wrapped.Value(ctx)
766}
767
768func (e *ObjectConsKeyExpr) Range() hcl.Range {
769 return e.Wrapped.Range()
770}
771
772func (e *ObjectConsKeyExpr) StartRange() hcl.Range {
773 return e.Wrapped.StartRange()
774}
775
776// Implementation for hcl.AbsTraversalForExpr.
777func (e *ObjectConsKeyExpr) AsTraversal() hcl.Traversal {
778 // We can produce a traversal only if our wrappee can.
779 st, diags := hcl.AbsTraversalForExpr(e.Wrapped)
780 if diags.HasErrors() {
781 return nil
782 }
783
784 return st
785}
786
787func (e *ObjectConsKeyExpr) UnwrapExpression() Expression {
788 return e.Wrapped
789}
790
791// ForExpr represents iteration constructs:
792//
793// tuple = [for i, v in list: upper(v) if i > 2]
794// object = {for k, v in map: k => upper(v)}
795// object_of_tuples = {for v in list: v.key: v...}
796type ForExpr struct {
797 KeyVar string // empty if ignoring the key
798 ValVar string
799
800 CollExpr Expression
801
802 KeyExpr Expression // nil when producing a tuple
803 ValExpr Expression
804 CondExpr Expression // nil if no "if" clause is present
805
806 Group bool // set if the ellipsis is used on the value in an object for
807
808 SrcRange hcl.Range
809 OpenRange hcl.Range
810 CloseRange hcl.Range
811}
812
813func (e *ForExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
814 var diags hcl.Diagnostics
815
816 collVal, collDiags := e.CollExpr.Value(ctx)
817 diags = append(diags, collDiags...)
818
819 if collVal.IsNull() {
820 diags = append(diags, &hcl.Diagnostic{
821 Severity: hcl.DiagError,
822 Summary: "Iteration over null value",
823 Detail: "A null value cannot be used as the collection in a 'for' expression.",
824 Subject: e.CollExpr.Range().Ptr(),
825 Context: &e.SrcRange,
826 })
827 return cty.DynamicVal, diags
828 }
829 if collVal.Type() == cty.DynamicPseudoType {
830 return cty.DynamicVal, diags
831 }
832 if !collVal.CanIterateElements() {
833 diags = append(diags, &hcl.Diagnostic{
834 Severity: hcl.DiagError,
835 Summary: "Iteration over non-iterable value",
836 Detail: fmt.Sprintf(
837 "A value of type %s cannot be used as the collection in a 'for' expression.",
838 collVal.Type().FriendlyName(),
839 ),
840 Subject: e.CollExpr.Range().Ptr(),
841 Context: &e.SrcRange,
842 })
843 return cty.DynamicVal, diags
844 }
845 if !collVal.IsKnown() {
846 return cty.DynamicVal, diags
847 }
848
849 childCtx := ctx.NewChild()
850 childCtx.Variables = map[string]cty.Value{}
851
852 // Before we start we'll do an early check to see if any CondExpr we've
853 // been given is of the wrong type. This isn't 100% reliable (it may
854 // be DynamicVal until real values are given) but it should catch some
855 // straightforward cases and prevent a barrage of repeated errors.
856 if e.CondExpr != nil {
857 if e.KeyVar != "" {
858 childCtx.Variables[e.KeyVar] = cty.DynamicVal
859 }
860 childCtx.Variables[e.ValVar] = cty.DynamicVal
861
862 result, condDiags := e.CondExpr.Value(childCtx)
863 diags = append(diags, condDiags...)
864 if result.IsNull() {
865 diags = append(diags, &hcl.Diagnostic{
866 Severity: hcl.DiagError,
867 Summary: "Condition is null",
868 Detail: "The value of the 'if' clause must not be null.",
869 Subject: e.CondExpr.Range().Ptr(),
870 Context: &e.SrcRange,
871 })
872 return cty.DynamicVal, diags
873 }
874 _, err := convert.Convert(result, cty.Bool)
875 if err != nil {
876 diags = append(diags, &hcl.Diagnostic{
877 Severity: hcl.DiagError,
878 Summary: "Invalid 'for' condition",
879 Detail: fmt.Sprintf("The 'if' clause value is invalid: %s.", err.Error()),
880 Subject: e.CondExpr.Range().Ptr(),
881 Context: &e.SrcRange,
882 })
883 return cty.DynamicVal, diags
884 }
885 if condDiags.HasErrors() {
886 return cty.DynamicVal, diags
887 }
888 }
889
890 if e.KeyExpr != nil {
891 // Producing an object
892 var vals map[string]cty.Value
893 var groupVals map[string][]cty.Value
894 if e.Group {
895 groupVals = map[string][]cty.Value{}
896 } else {
897 vals = map[string]cty.Value{}
898 }
899
900 it := collVal.ElementIterator()
901
902 known := true
903 for it.Next() {
904 k, v := it.Element()
905 if e.KeyVar != "" {
906 childCtx.Variables[e.KeyVar] = k
907 }
908 childCtx.Variables[e.ValVar] = v
909
910 if e.CondExpr != nil {
911 includeRaw, condDiags := e.CondExpr.Value(childCtx)
912 diags = append(diags, condDiags...)
913 if includeRaw.IsNull() {
914 if known {
915 diags = append(diags, &hcl.Diagnostic{
916 Severity: hcl.DiagError,
917 Summary: "Condition is null",
918 Detail: "The value of the 'if' clause must not be null.",
919 Subject: e.CondExpr.Range().Ptr(),
920 Context: &e.SrcRange,
921 })
922 }
923 known = false
924 continue
925 }
926 include, err := convert.Convert(includeRaw, cty.Bool)
927 if err != nil {
928 if known {
929 diags = append(diags, &hcl.Diagnostic{
930 Severity: hcl.DiagError,
931 Summary: "Invalid 'for' condition",
932 Detail: fmt.Sprintf("The 'if' clause value is invalid: %s.", err.Error()),
933 Subject: e.CondExpr.Range().Ptr(),
934 Context: &e.SrcRange,
935 })
936 }
937 known = false
938 continue
939 }
940 if !include.IsKnown() {
941 known = false
942 continue
943 }
944
945 if include.False() {
946 // Skip this element
947 continue
948 }
949 }
950
951 keyRaw, keyDiags := e.KeyExpr.Value(childCtx)
952 diags = append(diags, keyDiags...)
953 if keyRaw.IsNull() {
954 if known {
955 diags = append(diags, &hcl.Diagnostic{
956 Severity: hcl.DiagError,
957 Summary: "Invalid object key",
958 Detail: "Key expression in 'for' expression must not produce a null value.",
959 Subject: e.KeyExpr.Range().Ptr(),
960 Context: &e.SrcRange,
961 })
962 }
963 known = false
964 continue
965 }
966 if !keyRaw.IsKnown() {
967 known = false
968 continue
969 }
970
971 key, err := convert.Convert(keyRaw, cty.String)
972 if err != nil {
973 if known {
974 diags = append(diags, &hcl.Diagnostic{
975 Severity: hcl.DiagError,
976 Summary: "Invalid object key",
977 Detail: fmt.Sprintf("The key expression produced an invalid result: %s.", err.Error()),
978 Subject: e.KeyExpr.Range().Ptr(),
979 Context: &e.SrcRange,
980 })
981 }
982 known = false
983 continue
984 }
985
986 val, valDiags := e.ValExpr.Value(childCtx)
987 diags = append(diags, valDiags...)
988
989 if e.Group {
990 k := key.AsString()
991 groupVals[k] = append(groupVals[k], val)
992 } else {
993 k := key.AsString()
994 if _, exists := vals[k]; exists {
995 diags = append(diags, &hcl.Diagnostic{
996 Severity: hcl.DiagError,
997 Summary: "Duplicate object key",
998 Detail: fmt.Sprintf(
999 "Two different items produced the key %q in this for expression. If duplicates are expected, use the ellipsis (...) after the value expression to enable grouping by key.",
1000 k,
1001 ),
1002 Subject: e.KeyExpr.Range().Ptr(),
1003 Context: &e.SrcRange,
1004 })
1005 } else {
1006 vals[key.AsString()] = val
1007 }
1008 }
1009 }
1010
1011 if !known {
1012 return cty.DynamicVal, diags
1013 }
1014
1015 if e.Group {
1016 vals = map[string]cty.Value{}
1017 for k, gvs := range groupVals {
1018 vals[k] = cty.TupleVal(gvs)
1019 }
1020 }
1021
1022 return cty.ObjectVal(vals), diags
1023
1024 } else {
1025 // Producing a tuple
1026 vals := []cty.Value{}
1027
1028 it := collVal.ElementIterator()
1029
1030 known := true
1031 for it.Next() {
1032 k, v := it.Element()
1033 if e.KeyVar != "" {
1034 childCtx.Variables[e.KeyVar] = k
1035 }
1036 childCtx.Variables[e.ValVar] = v
1037
1038 if e.CondExpr != nil {
1039 includeRaw, condDiags := e.CondExpr.Value(childCtx)
1040 diags = append(diags, condDiags...)
1041 if includeRaw.IsNull() {
1042 if known {
1043 diags = append(diags, &hcl.Diagnostic{
1044 Severity: hcl.DiagError,
1045 Summary: "Condition is null",
1046 Detail: "The value of the 'if' clause must not be null.",
1047 Subject: e.CondExpr.Range().Ptr(),
1048 Context: &e.SrcRange,
1049 })
1050 }
1051 known = false
1052 continue
1053 }
1054 if !includeRaw.IsKnown() {
1055 // We will eventually return DynamicVal, but we'll continue
1056 // iterating in case there are other diagnostics to gather
1057 // for later elements.
1058 known = false
1059 continue
1060 }
1061
1062 include, err := convert.Convert(includeRaw, cty.Bool)
1063 if err != nil {
1064 if known {
1065 diags = append(diags, &hcl.Diagnostic{
1066 Severity: hcl.DiagError,
1067 Summary: "Invalid 'for' condition",
1068 Detail: fmt.Sprintf("The 'if' clause value is invalid: %s.", err.Error()),
1069 Subject: e.CondExpr.Range().Ptr(),
1070 Context: &e.SrcRange,
1071 })
1072 }
1073 known = false
1074 continue
1075 }
1076
1077 if include.False() {
1078 // Skip this element
1079 continue
1080 }
1081 }
1082
1083 val, valDiags := e.ValExpr.Value(childCtx)
1084 diags = append(diags, valDiags...)
1085 vals = append(vals, val)
1086 }
1087
1088 if !known {
1089 return cty.DynamicVal, diags
1090 }
1091
1092 return cty.TupleVal(vals), diags
1093 }
1094}
1095
1096func (e *ForExpr) walkChildNodes(w internalWalkFunc) {
1097 e.CollExpr = w(e.CollExpr).(Expression)
1098
1099 scopeNames := map[string]struct{}{}
1100 if e.KeyVar != "" {
1101 scopeNames[e.KeyVar] = struct{}{}
1102 }
1103 if e.ValVar != "" {
1104 scopeNames[e.ValVar] = struct{}{}
1105 }
1106
1107 if e.KeyExpr != nil {
1108 w(ChildScope{
1109 LocalNames: scopeNames,
1110 Expr: &e.KeyExpr,
1111 })
1112 }
1113 w(ChildScope{
1114 LocalNames: scopeNames,
1115 Expr: &e.ValExpr,
1116 })
1117 if e.CondExpr != nil {
1118 w(ChildScope{
1119 LocalNames: scopeNames,
1120 Expr: &e.CondExpr,
1121 })
1122 }
1123}
1124
1125func (e *ForExpr) Range() hcl.Range {
1126 return e.SrcRange
1127}
1128
1129func (e *ForExpr) StartRange() hcl.Range {
1130 return e.OpenRange
1131}
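As a quick illustration of the grouping mode handled above, the sketch below parses and evaluates an object-producing for expression whose value expression is followed by the ellipsis. It assumes the package's public ParseExpression helper (declared elsewhere in this package, not part of this diff) and the module paths as vendored here; treat it as a minimal sketch rather than part of the vendored sources.

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	// The "..." after the value expression turns on grouping mode, so items
	// that produce the same key are gathered into a tuple per key.
	expr, diags := hclsyntax.ParseExpression(
		[]byte(`{for w in words: w => w...}`),
		"example.hcl", hcl.Pos{Line: 1, Column: 1},
	)
	if diags.HasErrors() {
		panic(diags.Error())
	}

	ctx := &hcl.EvalContext{
		Variables: map[string]cty.Value{
			"words": cty.TupleVal([]cty.Value{
				cty.StringVal("ox"), cty.StringVal("cat"), cty.StringVal("ox"),
			}),
		},
	}

	val, diags := expr.Value(ctx)
	if diags.HasErrors() {
		panic(diags.Error())
	}
	fmt.Println(val.GoString()) // object: "cat" => ["cat"], "ox" => ["ox", "ox"]
}

Without the trailing ellipsis the same input would instead trigger the "Duplicate object key" diagnostic constructed above.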
1132
1133type SplatExpr struct {
1134 Source Expression
1135 Each Expression
1136 Item *AnonSymbolExpr
1137
1138 SrcRange hcl.Range
1139 MarkerRange hcl.Range
1140}
1141
1142func (e *SplatExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
1143 sourceVal, diags := e.Source.Value(ctx)
1144 if diags.HasErrors() {
1145 // We'll evaluate our "Each" expression here just to see if it
1146 // produces any more diagnostics we can report. Since we're not
1147 // assigning a value to our AnonSymbolExpr here it will return
1148 // DynamicVal, which should short-circuit any use of it.
1149 _, itemDiags := e.Item.Value(ctx)
1150 diags = append(diags, itemDiags...)
1151 return cty.DynamicVal, diags
1152 }
1153
1154 if sourceVal.IsNull() {
1155 diags = append(diags, &hcl.Diagnostic{
1156 Severity: hcl.DiagError,
1157 Summary: "Splat of null value",
1158 Detail: "Splat expressions (with the * symbol) cannot be applied to null values.",
1159 Subject: e.Source.Range().Ptr(),
1160 Context: hcl.RangeBetween(e.Source.Range(), e.MarkerRange).Ptr(),
1161 })
1162 return cty.DynamicVal, diags
1163 }
1164 if !sourceVal.IsKnown() {
1165 return cty.DynamicVal, diags
1166 }
1167
1168 // A "special power" of splat expressions is that they can be applied
1169 // both to tuples/lists and to other values, and in the latter case
1170 // the value will be treated as an implicit single-value list. We'll
1171 // deal with that here first.
1172 if !(sourceVal.Type().IsTupleType() || sourceVal.Type().IsListType()) {
1173 sourceVal = cty.ListVal([]cty.Value{sourceVal})
1174 }
1175
1176 vals := make([]cty.Value, 0, sourceVal.LengthInt())
1177 it := sourceVal.ElementIterator()
1178 if ctx == nil {
1179 // we need a context to use our AnonSymbolExpr, so we'll just
1180 // make an empty one here to use as a placeholder.
1181 ctx = ctx.NewChild()
1182 }
1183 isKnown := true
1184 for it.Next() {
1185 _, sourceItem := it.Element()
1186 e.Item.setValue(ctx, sourceItem)
1187 newItem, itemDiags := e.Each.Value(ctx)
1188 diags = append(diags, itemDiags...)
1189 if itemDiags.HasErrors() {
1190 isKnown = false
1191 }
1192 vals = append(vals, newItem)
1193 }
1194 e.Item.clearValue(ctx) // clean up our temporary value
1195
1196 if !isKnown {
1197 return cty.DynamicVal, diags
1198 }
1199
1200 return cty.TupleVal(vals), diags
1201}
1202
1203func (e *SplatExpr) walkChildNodes(w internalWalkFunc) {
1204 e.Source = w(e.Source).(Expression)
1205 e.Each = w(e.Each).(Expression)
1206}
1207
1208func (e *SplatExpr) Range() hcl.Range {
1209 return e.SrcRange
1210}
1211
1212func (e *SplatExpr) StartRange() hcl.Range {
1213 return e.MarkerRange
1214}
1215
1216// AnonSymbolExpr is used as a placeholder for a value in an expression that
1217// can be applied dynamically to any value at runtime.
1218//
1219// This is a rather odd, synthetic expression. It is used as part of the
1220// representation of splat expressions as a placeholder for the current item
1221// being visited in the splat evaluation.
1222//
1223// AnonSymbolExpr cannot be evaluated in isolation. If its Value is called
1224// directly then cty.DynamicVal will be returned. Instead, it is evaluated
1225// in terms of another node (i.e. a splat expression) which temporarily
1226// assigns it a value.
1227type AnonSymbolExpr struct {
1228 SrcRange hcl.Range
1229 values map[*hcl.EvalContext]cty.Value
1230}
1231
1232func (e *AnonSymbolExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
1233 if ctx == nil {
1234 return cty.DynamicVal, nil
1235 }
1236 val, exists := e.values[ctx]
1237 if !exists {
1238 return cty.DynamicVal, nil
1239 }
1240 return val, nil
1241}
1242
1243// setValue sets a temporary local value for the expression when evaluated
1244// in the given context, which must be non-nil.
1245func (e *AnonSymbolExpr) setValue(ctx *hcl.EvalContext, val cty.Value) {
1246 if e.values == nil {
1247 e.values = make(map[*hcl.EvalContext]cty.Value)
1248 }
1249 if ctx == nil {
1250 panic("can't setValue for a nil EvalContext")
1251 }
1252 e.values[ctx] = val
1253}
1254
1255func (e *AnonSymbolExpr) clearValue(ctx *hcl.EvalContext) {
1256 if e.values == nil {
1257 return
1258 }
1259 if ctx == nil {
1260 panic("can't clearValue for a nil EvalContext")
1261 }
1262 delete(e.values, ctx)
1263}
1264
1265func (e *AnonSymbolExpr) walkChildNodes(w internalWalkFunc) {
1266 // AnonSymbolExpr is a leaf node in the tree
1267}
1268
1269func (e *AnonSymbolExpr) Range() hcl.Range {
1270 return e.SrcRange
1271}
1272
1273func (e *AnonSymbolExpr) StartRange() hcl.Range {
1274 return e.SrcRange
1275}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_ops.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_ops.go
new file mode 100644
index 0000000..9a5da04
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_ops.go
@@ -0,0 +1,258 @@
1package hclsyntax
2
3import (
4 "fmt"
5
6 "github.com/hashicorp/hcl2/hcl"
7 "github.com/zclconf/go-cty/cty"
8 "github.com/zclconf/go-cty/cty/convert"
9 "github.com/zclconf/go-cty/cty/function"
10 "github.com/zclconf/go-cty/cty/function/stdlib"
11)
12
13type Operation struct {
14 Impl function.Function
15 Type cty.Type
16}
17
18var (
19 OpLogicalOr = &Operation{
20 Impl: stdlib.OrFunc,
21 Type: cty.Bool,
22 }
23 OpLogicalAnd = &Operation{
24 Impl: stdlib.AndFunc,
25 Type: cty.Bool,
26 }
27 OpLogicalNot = &Operation{
28 Impl: stdlib.NotFunc,
29 Type: cty.Bool,
30 }
31
32 OpEqual = &Operation{
33 Impl: stdlib.EqualFunc,
34 Type: cty.Bool,
35 }
36 OpNotEqual = &Operation{
37 Impl: stdlib.NotEqualFunc,
38 Type: cty.Bool,
39 }
40
41 OpGreaterThan = &Operation{
42 Impl: stdlib.GreaterThanFunc,
43 Type: cty.Bool,
44 }
45 OpGreaterThanOrEqual = &Operation{
46 Impl: stdlib.GreaterThanOrEqualToFunc,
47 Type: cty.Bool,
48 }
49 OpLessThan = &Operation{
50 Impl: stdlib.LessThanFunc,
51 Type: cty.Bool,
52 }
53 OpLessThanOrEqual = &Operation{
54 Impl: stdlib.LessThanOrEqualToFunc,
55 Type: cty.Bool,
56 }
57
58 OpAdd = &Operation{
59 Impl: stdlib.AddFunc,
60 Type: cty.Number,
61 }
62 OpSubtract = &Operation{
63 Impl: stdlib.SubtractFunc,
64 Type: cty.Number,
65 }
66 OpMultiply = &Operation{
67 Impl: stdlib.MultiplyFunc,
68 Type: cty.Number,
69 }
70 OpDivide = &Operation{
71 Impl: stdlib.DivideFunc,
72 Type: cty.Number,
73 }
74 OpModulo = &Operation{
75 Impl: stdlib.ModuloFunc,
76 Type: cty.Number,
77 }
78 OpNegate = &Operation{
79 Impl: stdlib.NegateFunc,
80 Type: cty.Number,
81 }
82)
83
84var binaryOps []map[TokenType]*Operation
85
86func init() {
87 // This operation table maps from the operator's token type
88 // to the AST operation type. All expressions produced from
89 // binary operators are BinaryOp nodes.
90 //
91 // Binary operator groups are listed in order of precedence, with
92 // the *lowest* precedence first. Operators within the same group
93 // have left-to-right associativity.
94 binaryOps = []map[TokenType]*Operation{
95 {
96 TokenOr: OpLogicalOr,
97 },
98 {
99 TokenAnd: OpLogicalAnd,
100 },
101 {
102 TokenEqualOp: OpEqual,
103 TokenNotEqual: OpNotEqual,
104 },
105 {
106 TokenGreaterThan: OpGreaterThan,
107 TokenGreaterThanEq: OpGreaterThanOrEqual,
108 TokenLessThan: OpLessThan,
109 TokenLessThanEq: OpLessThanOrEqual,
110 },
111 {
112 TokenPlus: OpAdd,
113 TokenMinus: OpSubtract,
114 },
115 {
116 TokenStar: OpMultiply,
117 TokenSlash: OpDivide,
118 TokenPercent: OpModulo,
119 },
120 }
121}
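The precedence behaviour encoded by this table is easiest to see with a short evaluation sketch, assuming the package's public ParseExpression helper (not shown in this diff):

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	// The multiplication group is listed after the addition group in
	// binaryOps, so "*" binds tighter and this parses as 1 + (2 * 3).
	expr, diags := hclsyntax.ParseExpression(
		[]byte(`1 + 2 * 3`), "example.hcl", hcl.Pos{Line: 1, Column: 1},
	)
	if diags.HasErrors() {
		panic(diags.Error())
	}

	val, diags := expr.Value(nil) // literals only, so no EvalContext needed
	if diags.HasErrors() {
		panic(diags.Error())
	}
	f, _ := val.AsBigFloat().Float64()
	fmt.Println(f) // 7, not 9
}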
122
123type BinaryOpExpr struct {
124 LHS Expression
125 Op *Operation
126 RHS Expression
127
128 SrcRange hcl.Range
129}
130
131func (e *BinaryOpExpr) walkChildNodes(w internalWalkFunc) {
132 e.LHS = w(e.LHS).(Expression)
133 e.RHS = w(e.RHS).(Expression)
134}
135
136func (e *BinaryOpExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
137 impl := e.Op.Impl // assumed to be a function taking exactly two arguments
138 params := impl.Params()
139 lhsParam := params[0]
140 rhsParam := params[1]
141
142 var diags hcl.Diagnostics
143
144 givenLHSVal, lhsDiags := e.LHS.Value(ctx)
145 givenRHSVal, rhsDiags := e.RHS.Value(ctx)
146 diags = append(diags, lhsDiags...)
147 diags = append(diags, rhsDiags...)
148
149 lhsVal, err := convert.Convert(givenLHSVal, lhsParam.Type)
150 if err != nil {
151 diags = append(diags, &hcl.Diagnostic{
152 Severity: hcl.DiagError,
153 Summary: "Invalid operand",
154 Detail: fmt.Sprintf("Unsuitable value for left operand: %s.", err),
155 Subject: e.LHS.Range().Ptr(),
156 Context: &e.SrcRange,
157 })
158 }
159 rhsVal, err := convert.Convert(givenRHSVal, rhsParam.Type)
160 if err != nil {
161 diags = append(diags, &hcl.Diagnostic{
162 Severity: hcl.DiagError,
163 Summary: "Invalid operand",
164 Detail: fmt.Sprintf("Unsuitable value for right operand: %s.", err),
165 Subject: e.RHS.Range().Ptr(),
166 Context: &e.SrcRange,
167 })
168 }
169
170 if diags.HasErrors() {
171 // Don't actually try the call if we have errors already, since
172 // this will probably just produce a confusing duplicative diagnostic.
173 return cty.UnknownVal(e.Op.Type), diags
174 }
175
176 args := []cty.Value{lhsVal, rhsVal}
177 result, err := impl.Call(args)
178 if err != nil {
179 diags = append(diags, &hcl.Diagnostic{
180 // FIXME: This diagnostic is useless.
181 Severity: hcl.DiagError,
182 Summary: "Operation failed",
183 Detail: fmt.Sprintf("Error during operation: %s.", err),
184 Subject: &e.SrcRange,
185 })
186 return cty.UnknownVal(e.Op.Type), diags
187 }
188
189 return result, diags
190}
191
192func (e *BinaryOpExpr) Range() hcl.Range {
193 return e.SrcRange
194}
195
196func (e *BinaryOpExpr) StartRange() hcl.Range {
197 return e.LHS.StartRange()
198}
199
200type UnaryOpExpr struct {
201 Op *Operation
202 Val Expression
203
204 SrcRange hcl.Range
205 SymbolRange hcl.Range
206}
207
208func (e *UnaryOpExpr) walkChildNodes(w internalWalkFunc) {
209 e.Val = w(e.Val).(Expression)
210}
211
212func (e *UnaryOpExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
213 impl := e.Op.Impl // assumed to be a function taking exactly one argument
214 params := impl.Params()
215 param := params[0]
216
217 givenVal, diags := e.Val.Value(ctx)
218
219 val, err := convert.Convert(givenVal, param.Type)
220 if err != nil {
221 diags = append(diags, &hcl.Diagnostic{
222 Severity: hcl.DiagError,
223 Summary: "Invalid operand",
224 Detail: fmt.Sprintf("Unsuitable value for unary operand: %s.", err),
225 Subject: e.Val.Range().Ptr(),
226 Context: &e.SrcRange,
227 })
228 }
229
230 if diags.HasErrors() {
231 // Don't actually try the call if we have errors already, since
232 // this will probably just produce a confusing duplicative diagnostic.
233 return cty.UnknownVal(e.Op.Type), diags
234 }
235
236 args := []cty.Value{val}
237 result, err := impl.Call(args)
238 if err != nil {
239 diags = append(diags, &hcl.Diagnostic{
240 // FIXME: This diagnostic is useless.
241 Severity: hcl.DiagError,
242 Summary: "Operation failed",
243 Detail: fmt.Sprintf("Error during operation: %s.", err),
244 Subject: &e.SrcRange,
245 })
246 return cty.UnknownVal(e.Op.Type), diags
247 }
248
249 return result, diags
250}
251
252func (e *UnaryOpExpr) Range() hcl.Range {
253 return e.SrcRange
254}
255
256func (e *UnaryOpExpr) StartRange() hcl.Range {
257 return e.SymbolRange
258}
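Since each Operation simply wraps a go-cty stdlib function, the Call made inside BinaryOpExpr.Value and UnaryOpExpr.Value can be reproduced directly. A minimal sketch using the vendored go-cty packages:

package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/function/stdlib"
)

func main() {
	// OpAdd.Impl is stdlib.AddFunc, so evaluating "2 + 40" ultimately
	// reduces to this single call after operand conversion to cty.Number.
	sum, err := stdlib.AddFunc.Call([]cty.Value{
		cty.NumberIntVal(2),
		cty.NumberIntVal(40),
	})
	if err != nil {
		panic(err)
	}
	fmt.Println(sum.AsBigFloat().String()) // 42
}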
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_template.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_template.go
new file mode 100644
index 0000000..a1c4727
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_template.go
@@ -0,0 +1,192 @@
1package hclsyntax
2
3import (
4 "bytes"
5 "fmt"
6
7 "github.com/hashicorp/hcl2/hcl"
8 "github.com/zclconf/go-cty/cty"
9 "github.com/zclconf/go-cty/cty/convert"
10)
11
12type TemplateExpr struct {
13 Parts []Expression
14
15 SrcRange hcl.Range
16}
17
18func (e *TemplateExpr) walkChildNodes(w internalWalkFunc) {
19 for i, part := range e.Parts {
20 e.Parts[i] = w(part).(Expression)
21 }
22}
23
24func (e *TemplateExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
25 buf := &bytes.Buffer{}
26 var diags hcl.Diagnostics
27 isKnown := true
28
29 for _, part := range e.Parts {
30 partVal, partDiags := part.Value(ctx)
31 diags = append(diags, partDiags...)
32
33 if partVal.IsNull() {
34 diags = append(diags, &hcl.Diagnostic{
35 Severity: hcl.DiagError,
36 Summary: "Invalid template interpolation value",
37 Detail: fmt.Sprintf(
38 "The expression result is null. Cannot include a null value in a string template.",
39 ),
40 Subject: part.Range().Ptr(),
41 Context: &e.SrcRange,
42 })
43 continue
44 }
45
46 if !partVal.IsKnown() {
47 // If any part is unknown then the result as a whole must be
48 // unknown too. We'll keep on processing the rest of the parts
49 // anyway, because we want to still emit any diagnostics resulting
50 // from evaluating those.
51 isKnown = false
52 continue
53 }
54
55 strVal, err := convert.Convert(partVal, cty.String)
56 if err != nil {
57 diags = append(diags, &hcl.Diagnostic{
58 Severity: hcl.DiagError,
59 Summary: "Invalid template interpolation value",
60 Detail: fmt.Sprintf(
61 "Cannot include the given value in a string template: %s.",
62 err.Error(),
63 ),
64 Subject: part.Range().Ptr(),
65 Context: &e.SrcRange,
66 })
67 continue
68 }
69
70 buf.WriteString(strVal.AsString())
71 }
72
73 if !isKnown {
74 return cty.UnknownVal(cty.String), diags
75 }
76
77 return cty.StringVal(buf.String()), diags
78}
79
80func (e *TemplateExpr) Range() hcl.Range {
81 return e.SrcRange
82}
83
84func (e *TemplateExpr) StartRange() hcl.Range {
85 return e.Parts[0].StartRange()
86}
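TemplateExpr concatenation can be demonstrated with a short sketch, again assuming the package's public ParseExpression helper (declared outside this diff):

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	// The quoted template has literal parts and one interpolation; each part
	// is converted to string and appended to the buffer in order.
	expr, diags := hclsyntax.ParseExpression(
		[]byte(`"Hello, ${name}!"`), "example.hcl", hcl.Pos{Line: 1, Column: 1},
	)
	if diags.HasErrors() {
		panic(diags.Error())
	}

	ctx := &hcl.EvalContext{
		Variables: map[string]cty.Value{"name": cty.StringVal("world")},
	}
	val, diags := expr.Value(ctx)
	if diags.HasErrors() {
		panic(diags.Error())
	}
	fmt.Println(val.AsString()) // Hello, world!
}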
87
88// TemplateJoinExpr is used to convert tuples of strings produced by template
89// constructs (i.e. for loops) into flat strings, by converting the values
90 // to strings and joining them. This AST node is not used directly; it's
91// produced as part of the AST of a "for" loop in a template.
92type TemplateJoinExpr struct {
93 Tuple Expression
94}
95
96func (e *TemplateJoinExpr) walkChildNodes(w internalWalkFunc) {
97 e.Tuple = w(e.Tuple).(Expression)
98}
99
100func (e *TemplateJoinExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
101 tuple, diags := e.Tuple.Value(ctx)
102
103 if tuple.IsNull() {
104 // This indicates a bug in the code that constructed the AST.
105 panic("TemplateJoinExpr got null tuple")
106 }
107 if tuple.Type() == cty.DynamicPseudoType {
108 return cty.UnknownVal(cty.String), diags
109 }
110 if !tuple.Type().IsTupleType() {
111 // This indicates a bug in the code that constructed the AST.
112 panic("TemplateJoinExpr got non-tuple tuple")
113 }
114 if !tuple.IsKnown() {
115 return cty.UnknownVal(cty.String), diags
116 }
117
118 buf := &bytes.Buffer{}
119 it := tuple.ElementIterator()
120 for it.Next() {
121 _, val := it.Element()
122
123 if val.IsNull() {
124 diags = append(diags, &hcl.Diagnostic{
125 Severity: hcl.DiagError,
126 Summary: "Invalid template interpolation value",
127 Detail: fmt.Sprintf(
128 "An iteration result is null. Cannot include a null value in a string template.",
129 ),
130 Subject: e.Range().Ptr(),
131 })
132 continue
133 }
134 if val.Type() == cty.DynamicPseudoType {
135 return cty.UnknownVal(cty.String), diags
136 }
137 strVal, err := convert.Convert(val, cty.String)
138 if err != nil {
139 diags = append(diags, &hcl.Diagnostic{
140 Severity: hcl.DiagError,
141 Summary: "Invalid template interpolation value",
142 Detail: fmt.Sprintf(
143 "Cannot include one of the interpolation results into the string template: %s.",
144 err.Error(),
145 ),
146 Subject: e.Range().Ptr(),
147 })
148 continue
149 }
150 if !val.IsKnown() {
151 return cty.UnknownVal(cty.String), diags
152 }
153
154 buf.WriteString(strVal.AsString())
155 }
156
157 return cty.StringVal(buf.String()), diags
158}
159
160func (e *TemplateJoinExpr) Range() hcl.Range {
161 return e.Tuple.Range()
162}
163
164func (e *TemplateJoinExpr) StartRange() hcl.Range {
165 return e.Tuple.StartRange()
166}
167
168// TemplateWrapExpr is used instead of a TemplateExpr when a template
169// consists _only_ of a single interpolation sequence. In that case, the
170// template's result is the single interpolation's result, verbatim with
171// no type conversions.
172type TemplateWrapExpr struct {
173 Wrapped Expression
174
175 SrcRange hcl.Range
176}
177
178func (e *TemplateWrapExpr) walkChildNodes(w internalWalkFunc) {
179 e.Wrapped = w(e.Wrapped).(Expression)
180}
181
182func (e *TemplateWrapExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
183 return e.Wrapped.Value(ctx)
184}
185
186func (e *TemplateWrapExpr) Range() hcl.Range {
187 return e.SrcRange
188}
189
190func (e *TemplateWrapExpr) StartRange() hcl.Range {
191 return e.SrcRange
192}
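The difference between TemplateExpr and TemplateWrapExpr is visible from the result type: a template that is exactly one interpolation passes its value through without conversion to string. A sketch, under the same ParseExpression assumption as above:

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	// "${count}" is a single interpolation with no literal parts, so it
	// parses as a TemplateWrapExpr and evaluates to the number itself.
	expr, diags := hclsyntax.ParseExpression(
		[]byte(`"${count}"`), "example.hcl", hcl.Pos{Line: 1, Column: 1},
	)
	if diags.HasErrors() {
		panic(diags.Error())
	}

	ctx := &hcl.EvalContext{
		Variables: map[string]cty.Value{"count": cty.NumberIntVal(5)},
	}
	val, diags := expr.Value(ctx)
	if diags.HasErrors() {
		panic(diags.Error())
	}
	fmt.Println(val.Type().FriendlyName()) // number, not string
}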
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_vars.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_vars.go
new file mode 100644
index 0000000..9177092
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_vars.go
@@ -0,0 +1,76 @@
1package hclsyntax
2
3// Generated by expression_vars_gen.go. DO NOT EDIT.
4// Run 'go generate' on this package to update the set of functions here.
5
6import (
7 "github.com/hashicorp/hcl2/hcl"
8)
9
10func (e *AnonSymbolExpr) Variables() []hcl.Traversal {
11 return Variables(e)
12}
13
14func (e *BinaryOpExpr) Variables() []hcl.Traversal {
15 return Variables(e)
16}
17
18func (e *ConditionalExpr) Variables() []hcl.Traversal {
19 return Variables(e)
20}
21
22func (e *ForExpr) Variables() []hcl.Traversal {
23 return Variables(e)
24}
25
26func (e *FunctionCallExpr) Variables() []hcl.Traversal {
27 return Variables(e)
28}
29
30func (e *IndexExpr) Variables() []hcl.Traversal {
31 return Variables(e)
32}
33
34func (e *LiteralValueExpr) Variables() []hcl.Traversal {
35 return Variables(e)
36}
37
38func (e *ObjectConsExpr) Variables() []hcl.Traversal {
39 return Variables(e)
40}
41
42func (e *ObjectConsKeyExpr) Variables() []hcl.Traversal {
43 return Variables(e)
44}
45
46func (e *RelativeTraversalExpr) Variables() []hcl.Traversal {
47 return Variables(e)
48}
49
50func (e *ScopeTraversalExpr) Variables() []hcl.Traversal {
51 return Variables(e)
52}
53
54func (e *SplatExpr) Variables() []hcl.Traversal {
55 return Variables(e)
56}
57
58func (e *TemplateExpr) Variables() []hcl.Traversal {
59 return Variables(e)
60}
61
62func (e *TemplateJoinExpr) Variables() []hcl.Traversal {
63 return Variables(e)
64}
65
66func (e *TemplateWrapExpr) Variables() []hcl.Traversal {
67 return Variables(e)
68}
69
70func (e *TupleConsExpr) Variables() []hcl.Traversal {
71 return Variables(e)
72}
73
74func (e *UnaryOpExpr) Variables() []hcl.Traversal {
75 return Variables(e)
76}
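All of the generated methods above delegate to the package-level Variables function, which walks the AST and collects the root traversals an expression refers to. A minimal usage sketch, assuming ParseExpression from this package's public API:

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	expr, diags := hclsyntax.ParseExpression(
		[]byte(`a + b[0] + foo.bar`), "example.hcl", hcl.Pos{Line: 1, Column: 1},
	)
	if diags.HasErrors() {
		panic(diags.Error())
	}

	// Each traversal reports the variable it starts from.
	for _, traversal := range expr.Variables() {
		fmt.Println(traversal.RootName()) // a, b, foo
	}
}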
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_vars_gen.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_vars_gen.go
new file mode 100644
index 0000000..88f1980
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_vars_gen.go
@@ -0,0 +1,99 @@
1// This is a 'go generate'-oriented program for producing the "Variables"
2// method on every Expression implementation found within this package.
3// All expressions share the same implementation for this method, which
4// just wraps the package-level function "Variables" and uses an AST walk
5// to do its work.
6
7// +build ignore
8
9package main
10
11import (
12 "fmt"
13 "go/ast"
14 "go/parser"
15 "go/token"
16 "os"
17 "sort"
18)
19
20func main() {
21 fs := token.NewFileSet()
22 pkgs, err := parser.ParseDir(fs, ".", nil, 0)
23 if err != nil {
24 fmt.Fprintf(os.Stderr, "error while parsing: %s\n", err)
25 os.Exit(1)
26 }
27 pkg := pkgs["hclsyntax"]
28
29 // Walk all the files and collect the receivers of any "Value" methods
30 // that look like they are trying to implement Expression.
31 var recvs []string
32 for _, f := range pkg.Files {
33 for _, decl := range f.Decls {
34 fd, ok := decl.(*ast.FuncDecl)
35 if !ok {
36 continue
37 }
38 if fd.Name.Name != "Value" {
39 continue
40 }
41 results := fd.Type.Results.List
42 if len(results) != 2 {
43 continue
44 }
45 valResult := fd.Type.Results.List[0].Type.(*ast.SelectorExpr).X.(*ast.Ident)
46 diagsResult := fd.Type.Results.List[1].Type.(*ast.SelectorExpr).X.(*ast.Ident)
47
48 if valResult.Name != "cty" && diagsResult.Name != "hcl" {
49 continue
50 }
51
52 // If we have a method called Value and it returns something in
53 // "cty" followed by something in "hcl" then that's specific enough
54 // for now, even though this is not 100% exact as a correct
55 // implementation of Value.
56
57 recvTy := fd.Recv.List[0].Type
58
59 switch rtt := recvTy.(type) {
60 case *ast.StarExpr:
61 name := rtt.X.(*ast.Ident).Name
62 recvs = append(recvs, fmt.Sprintf("*%s", name))
63 default:
64 fmt.Fprintf(os.Stderr, "don't know what to do with a %T receiver\n", recvTy)
65 }
66
67 }
68 }
69
70 sort.Strings(recvs)
71
72 of, err := os.OpenFile("expression_vars.go", os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.ModePerm)
73 if err != nil {
74 fmt.Fprintf(os.Stderr, "failed to open output file: %s\n", err)
75 os.Exit(1)
76 }
77
78 fmt.Fprint(of, outputPreamble)
79 for _, recv := range recvs {
80 fmt.Fprintf(of, outputMethodFmt, recv)
81 }
82 fmt.Fprint(of, "\n")
83
84}
85
86const outputPreamble = `package hclsyntax
87
88// Generated by expression_vars_gen.go. DO NOT EDIT.
89// Run 'go generate' on this package to update the set of functions here.
90
91import (
92 "github.com/hashicorp/hcl2/hcl"
93)`
94
95const outputMethodFmt = `
96
97func (e %s) Variables() []hcl.Traversal {
98 return Variables(e)
99}`
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/file.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/file.go
new file mode 100644
index 0000000..490c025
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/file.go
@@ -0,0 +1,20 @@
1package hclsyntax
2
3import (
4 "github.com/hashicorp/hcl2/hcl"
5)
6
7// File is the top-level object resulting from parsing a configuration file.
8type File struct {
9 Body *Body
10 Bytes []byte
11}
12
13func (f *File) AsHCLFile() *hcl.File {
14 return &hcl.File{
15 Body: f.Body,
16 Bytes: f.Bytes,
17
18 // TODO: The Nav object, once we have an implementation of it
19 }
20}
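A File is normally produced by the package's ParseConfig entry point (declared elsewhere, not in this diff), which hands back the hcl.File built by AsHCLFile. A minimal sketch of that round trip:

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	src := []byte("name = \"example\"\n")

	file, diags := hclsyntax.ParseConfig(src, "example.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags.Error())
	}

	// The generic hcl.Body can be asserted back to this package's Body to
	// reach the syntax-specific fields.
	if body, ok := file.Body.(*hclsyntax.Body); ok {
		fmt.Println(len(body.Attributes)) // 1
	}
}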
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/generate.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/generate.go
new file mode 100644
index 0000000..841656a
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/generate.go
@@ -0,0 +1,9 @@
1package hclsyntax
2
3//go:generate go run expression_vars_gen.go
4//go:generate ruby unicode2ragel.rb --url=http://www.unicode.org/Public/9.0.0/ucd/DerivedCoreProperties.txt -m UnicodeDerived -p ID_Start,ID_Continue -o unicode_derived.rl
5//go:generate ragel -Z scan_tokens.rl
6//go:generate gofmt -w scan_tokens.go
7//go:generate ragel -Z scan_string_lit.rl
8//go:generate gofmt -w scan_string_lit.go
9//go:generate stringer -type TokenType -output token_type_string.go
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/keywords.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/keywords.go
new file mode 100644
index 0000000..eef8b96
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/keywords.go
@@ -0,0 +1,21 @@
1package hclsyntax
2
3import (
4 "bytes"
5)
6
7type Keyword []byte
8
9var forKeyword = Keyword([]byte{'f', 'o', 'r'})
10var inKeyword = Keyword([]byte{'i', 'n'})
11var ifKeyword = Keyword([]byte{'i', 'f'})
12var elseKeyword = Keyword([]byte{'e', 'l', 's', 'e'})
13var endifKeyword = Keyword([]byte{'e', 'n', 'd', 'i', 'f'})
14var endforKeyword = Keyword([]byte{'e', 'n', 'd', 'f', 'o', 'r'})
15
16func (kw Keyword) TokenMatches(token Token) bool {
17 if token.Type != TokenIdent {
18 return false
19 }
20 return bytes.Equal([]byte(kw), token.Bytes)
21}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/navigation.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/navigation.go
new file mode 100644
index 0000000..4d41b6b
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/navigation.go
@@ -0,0 +1,41 @@
1package hclsyntax
2
3import (
4 "bytes"
5 "fmt"
6)
7
8type navigation struct {
9 root *Body
10}
11
12// Implementation of hcled.ContextString
13func (n navigation) ContextString(offset int) string {
14 // We will walk our top-level blocks until we find one that contains
15 // the given offset, and then construct a representation of the header
16 // of the block.
17
18 var block *Block
19 for _, candidate := range n.root.Blocks {
20 if candidate.Range().ContainsOffset(offset) {
21 block = candidate
22 break
23 }
24 }
25
26 if block == nil {
27 return ""
28 }
29
30 if len(block.Labels) == 0 {
31 // Easy case!
32 return block.Type
33 }
34
35 buf := &bytes.Buffer{}
36 buf.WriteString(block.Type)
37 for _, label := range block.Labels {
38 fmt.Fprintf(buf, " %q", label)
39 }
40 return buf.String()
41}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/node.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/node.go
new file mode 100644
index 0000000..fd426d4
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/node.go
@@ -0,0 +1,22 @@
1package hclsyntax
2
3import (
4 "github.com/hashicorp/hcl2/hcl"
5)
6
7// Node is the abstract type that every AST node implements.
8//
9// This is a closed interface, so it cannot be implemented from outside of
10// this package.
11type Node interface {
12 // This is the mechanism by which the public-facing walk functions
13 // are implemented. Implementations should call the given function
14 // for each child node and then replace that node with its return value.
15 // The return value might just be the same node, for non-transforming
16 // walks.
17 walkChildNodes(w internalWalkFunc)
18
19 Range() hcl.Range
20}
21
22type internalWalkFunc func(Node) Node
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser.go
new file mode 100644
index 0000000..002858f
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser.go
@@ -0,0 +1,1836 @@
1package hclsyntax
2
3import (
4 "bytes"
5 "fmt"
6 "strconv"
7 "unicode/utf8"
8
9 "github.com/apparentlymart/go-textseg/textseg"
10 "github.com/hashicorp/hcl2/hcl"
11 "github.com/zclconf/go-cty/cty"
12 "github.com/zclconf/go-cty/cty/convert"
13)
14
15type parser struct {
16 *peeker
17
18 // set to true if any recovery is attempted. The parser can use this
19 // to attempt to reduce error noise by suppressing "bad token" errors
20 // in recovery mode, assuming that the recovery heuristics have failed
21 // in this case and left the peeker in a wrong place.
22 recovery bool
23}
24
25func (p *parser) ParseBody(end TokenType) (*Body, hcl.Diagnostics) {
26 attrs := Attributes{}
27 blocks := Blocks{}
28 var diags hcl.Diagnostics
29
30 startRange := p.PrevRange()
31 var endRange hcl.Range
32
33Token:
34 for {
35 next := p.Peek()
36 if next.Type == end {
37 endRange = p.NextRange()
38 p.Read()
39 break Token
40 }
41
42 switch next.Type {
43 case TokenNewline:
44 p.Read()
45 continue
46 case TokenIdent:
47 item, itemDiags := p.ParseBodyItem()
48 diags = append(diags, itemDiags...)
49 switch titem := item.(type) {
50 case *Block:
51 blocks = append(blocks, titem)
52 case *Attribute:
53 if existing, exists := attrs[titem.Name]; exists {
54 diags = append(diags, &hcl.Diagnostic{
55 Severity: hcl.DiagError,
56 Summary: "Attribute redefined",
57 Detail: fmt.Sprintf(
58 "The attribute %q was already defined at %s. Each attribute may be defined only once.",
59 titem.Name, existing.NameRange.String(),
60 ),
61 Subject: &titem.NameRange,
62 })
63 } else {
64 attrs[titem.Name] = titem
65 }
66 default:
67 // This should never happen for valid input, but may if a
68 // syntax error was detected in ParseBodyItem that prevented
69 // it from even producing a partially-broken item. In that
70 // case, it would've left at least one error in the diagnostics
71 // slice we already dealt with above.
72 //
73 // We'll assume ParseBodyItem attempted recovery to leave
74 // us in a reasonable position to try parsing the next item.
75 continue
76 }
77 default:
78 bad := p.Read()
79 if !p.recovery {
80 if bad.Type == TokenOQuote {
81 diags = append(diags, &hcl.Diagnostic{
82 Severity: hcl.DiagError,
83 Summary: "Invalid attribute name",
84 Detail: "Attribute names must not be quoted.",
85 Subject: &bad.Range,
86 })
87 } else {
88 diags = append(diags, &hcl.Diagnostic{
89 Severity: hcl.DiagError,
90 Summary: "Attribute or block definition required",
91 Detail: "An attribute or block definition is required here.",
92 Subject: &bad.Range,
93 })
94 }
95 }
96 endRange = p.PrevRange() // arbitrary, but somewhere inside the body means better diagnostics
97
98 p.recover(end) // attempt to recover to the token after the end of this body
99 break Token
100 }
101 }
102
103 return &Body{
104 Attributes: attrs,
105 Blocks: blocks,
106
107 SrcRange: hcl.RangeBetween(startRange, endRange),
108 EndRange: hcl.Range{
109 Filename: endRange.Filename,
110 Start: endRange.End,
111 End: endRange.End,
112 },
113 }, diags
114}
115
116func (p *parser) ParseBodyItem() (Node, hcl.Diagnostics) {
117 ident := p.Read()
118 if ident.Type != TokenIdent {
119 p.recoverAfterBodyItem()
120 return nil, hcl.Diagnostics{
121 {
122 Severity: hcl.DiagError,
123 Summary: "Attribute or block definition required",
124 Detail: "An attribute or block definition is required here.",
125 Subject: &ident.Range,
126 },
127 }
128 }
129
130 next := p.Peek()
131
132 switch next.Type {
133 case TokenEqual:
134 return p.finishParsingBodyAttribute(ident)
135 case TokenOQuote, TokenOBrace, TokenIdent:
136 return p.finishParsingBodyBlock(ident)
137 default:
138 p.recoverAfterBodyItem()
139 return nil, hcl.Diagnostics{
140 {
141 Severity: hcl.DiagError,
142 Summary: "Attribute or block definition required",
143 Detail: "An attribute or block definition is required here. To define an attribute, use the equals sign \"=\" to introduce the attribute value.",
144 Subject: &ident.Range,
145 },
146 }
147 }
148
149 return nil, nil
150}
151
152func (p *parser) finishParsingBodyAttribute(ident Token) (Node, hcl.Diagnostics) {
153 eqTok := p.Read() // eat equals token
154 if eqTok.Type != TokenEqual {
155 // should never happen if caller behaves
156 panic("finishParsingBodyAttribute called with next not equals")
157 }
158
159 var endRange hcl.Range
160
161 expr, diags := p.ParseExpression()
162 if p.recovery && diags.HasErrors() {
163 // recovery within expressions tends to be tricky, so we've probably
164 // landed somewhere weird. We'll try to reset to the start of a body
165 // item so parsing can continue.
166 endRange = p.PrevRange()
167 p.recoverAfterBodyItem()
168 } else {
169 end := p.Peek()
170 if end.Type != TokenNewline && end.Type != TokenEOF {
171 if !p.recovery {
172 diags = append(diags, &hcl.Diagnostic{
173 Severity: hcl.DiagError,
174 Summary: "Missing newline after attribute definition",
175 Detail: "An attribute definition must end with a newline.",
176 Subject: &end.Range,
177 Context: hcl.RangeBetween(ident.Range, end.Range).Ptr(),
178 })
179 }
180 endRange = p.PrevRange()
181 p.recoverAfterBodyItem()
182 } else {
183 endRange = p.PrevRange()
184 p.Read() // eat newline
185 }
186 }
187
188 return &Attribute{
189 Name: string(ident.Bytes),
190 Expr: expr,
191
192 SrcRange: hcl.RangeBetween(ident.Range, endRange),
193 NameRange: ident.Range,
194 EqualsRange: eqTok.Range,
195 }, diags
196}
197
198func (p *parser) finishParsingBodyBlock(ident Token) (Node, hcl.Diagnostics) {
199 var blockType = string(ident.Bytes)
200 var diags hcl.Diagnostics
201 var labels []string
202 var labelRanges []hcl.Range
203
204 var oBrace Token
205
206Token:
207 for {
208 tok := p.Peek()
209
210 switch tok.Type {
211
212 case TokenOBrace:
213 oBrace = p.Read()
214 break Token
215
216 case TokenOQuote:
217 label, labelRange, labelDiags := p.parseQuotedStringLiteral()
218 diags = append(diags, labelDiags...)
219 labels = append(labels, label)
220 labelRanges = append(labelRanges, labelRange)
221 if labelDiags.HasErrors() {
222 p.recoverAfterBodyItem()
223 return &Block{
224 Type: blockType,
225 Labels: labels,
226 Body: nil,
227
228 TypeRange: ident.Range,
229 LabelRanges: labelRanges,
230 OpenBraceRange: ident.Range, // placeholder
231 CloseBraceRange: ident.Range, // placeholder
232 }, diags
233 }
234
235 case TokenIdent:
236 tok = p.Read() // eat token
237 label, labelRange := string(tok.Bytes), tok.Range
238 labels = append(labels, label)
239 labelRanges = append(labelRanges, labelRange)
240
241 default:
242 switch tok.Type {
243 case TokenEqual:
244 diags = append(diags, &hcl.Diagnostic{
245 Severity: hcl.DiagError,
246 Summary: "Invalid block definition",
247 Detail: "The equals sign \"=\" indicates an attribute definition, and must not be used when defining a block.",
248 Subject: &tok.Range,
249 Context: hcl.RangeBetween(ident.Range, tok.Range).Ptr(),
250 })
251 case TokenNewline:
252 diags = append(diags, &hcl.Diagnostic{
253 Severity: hcl.DiagError,
254 Summary: "Invalid block definition",
255 Detail: "A block definition must have block content delimited by \"{\" and \"}\", starting on the same line as the block header.",
256 Subject: &tok.Range,
257 Context: hcl.RangeBetween(ident.Range, tok.Range).Ptr(),
258 })
259 default:
260 if !p.recovery {
261 diags = append(diags, &hcl.Diagnostic{
262 Severity: hcl.DiagError,
263 Summary: "Invalid block definition",
264 Detail: "Either a quoted string block label or an opening brace (\"{\") is expected here.",
265 Subject: &tok.Range,
266 Context: hcl.RangeBetween(ident.Range, tok.Range).Ptr(),
267 })
268 }
269 }
270
271 p.recoverAfterBodyItem()
272
273 return &Block{
274 Type: blockType,
275 Labels: labels,
276 Body: nil,
277
278 TypeRange: ident.Range,
279 LabelRanges: labelRanges,
280 OpenBraceRange: ident.Range, // placeholder
281 CloseBraceRange: ident.Range, // placeholder
282 }, diags
283 }
284 }
285
286 // Once we fall out here, the peeker is pointed just after our opening
287 // brace, so we can begin our nested body parsing.
288 body, bodyDiags := p.ParseBody(TokenCBrace)
289 diags = append(diags, bodyDiags...)
290 cBraceRange := p.PrevRange()
291
292 eol := p.Peek()
293 if eol.Type == TokenNewline || eol.Type == TokenEOF {
294 p.Read() // eat newline
295 } else {
296 if !p.recovery {
297 diags = append(diags, &hcl.Diagnostic{
298 Severity: hcl.DiagError,
299 Summary: "Missing newline after block definition",
300 Detail: "A block definition must end with a newline.",
301 Subject: &eol.Range,
302 Context: hcl.RangeBetween(ident.Range, eol.Range).Ptr(),
303 })
304 }
305 p.recoverAfterBodyItem()
306 }
307
308 return &Block{
309 Type: blockType,
310 Labels: labels,
311 Body: body,
312
313 TypeRange: ident.Range,
314 LabelRanges: labelRanges,
315 OpenBraceRange: oBrace.Range,
316 CloseBraceRange: cBraceRange,
317 }, diags
318}
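finishParsingBodyBlock accepts both quoted-string and bare-identifier labels, as the sketch below shows (assuming the package's public ParseConfig helper, which is not part of this diff):

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	// One quoted label and one identifier label on the same block header.
	src := []byte("resource \"aws_instance\" web {\n  count = 2\n}\n")

	file, diags := hclsyntax.ParseConfig(src, "example.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags.Error())
	}

	body := file.Body.(*hclsyntax.Body)
	block := body.Blocks[0]
	fmt.Println(block.Type, block.Labels) // resource [aws_instance web]
}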
319
320func (p *parser) ParseExpression() (Expression, hcl.Diagnostics) {
321 return p.parseTernaryConditional()
322}
323
324func (p *parser) parseTernaryConditional() (Expression, hcl.Diagnostics) {
325 // The ternary conditional operator (.. ? .. : ..) behaves somewhat
326 // like a binary operator except that the "symbol" is itself
327 // an expression enclosed in two punctuation characters.
328 // The middle expression is parsed as if the ? and : symbols
329 // were parentheses. The "rhs" (the "false expression") is then
330 // treated right-associatively so it behaves similarly to the
331 // middle in terms of precedence.
332
333 startRange := p.NextRange()
334 var condExpr, trueExpr, falseExpr Expression
335 var diags hcl.Diagnostics
336
337 condExpr, condDiags := p.parseBinaryOps(binaryOps)
338 diags = append(diags, condDiags...)
339 if p.recovery && condDiags.HasErrors() {
340 return condExpr, diags
341 }
342
343 questionMark := p.Peek()
344 if questionMark.Type != TokenQuestion {
345 return condExpr, diags
346 }
347
348 p.Read() // eat question mark
349
350 trueExpr, trueDiags := p.ParseExpression()
351 diags = append(diags, trueDiags...)
352 if p.recovery && trueDiags.HasErrors() {
353 return condExpr, diags
354 }
355
356 colon := p.Peek()
357 if colon.Type != TokenColon {
358 diags = append(diags, &hcl.Diagnostic{
359 Severity: hcl.DiagError,
360 Summary: "Missing false expression in conditional",
361 Detail: "The conditional operator (...?...:...) requires a false expression, delimited by a colon.",
362 Subject: &colon.Range,
363 Context: hcl.RangeBetween(startRange, colon.Range).Ptr(),
364 })
365 return condExpr, diags
366 }
367
368 p.Read() // eat colon
369
370 falseExpr, falseDiags := p.ParseExpression()
371 diags = append(diags, falseDiags...)
372 if p.recovery && falseDiags.HasErrors() {
373 return condExpr, diags
374 }
375
376 return &ConditionalExpr{
377 Condition: condExpr,
378 TrueResult: trueExpr,
379 FalseResult: falseExpr,
380
381 SrcRange: hcl.RangeBetween(startRange, falseExpr.Range()),
382 }, diags
383}
384
385// parseBinaryOps calls itself recursively to work through all of the
386// operator precedence groups, and then eventually calls parseExpressionTerm
387// for each operand.
388func (p *parser) parseBinaryOps(ops []map[TokenType]*Operation) (Expression, hcl.Diagnostics) {
389 if len(ops) == 0 {
390 // We've run out of operators, so now we'll just try to parse a term.
391 return p.parseExpressionWithTraversals()
392 }
393
394 thisLevel := ops[0]
395 remaining := ops[1:]
396
397 var lhs, rhs Expression
398 var operation *Operation
399 var diags hcl.Diagnostics
400
401 // Parse a term that might be the first operand of a binary
402 // operation or it might just be a standalone term.
403 // We won't know until we've parsed it and can look ahead
404 // to see if there's an operator token for this level.
405 lhs, lhsDiags := p.parseBinaryOps(remaining)
406 diags = append(diags, lhsDiags...)
407 if p.recovery && lhsDiags.HasErrors() {
408 return lhs, diags
409 }
410
411 // We'll keep eating up operators until we run out, so that operators
412 // with the same precedence will combine in a left-associative manner:
413 // a+b+c => (a+b)+c, not a+(b+c)
414 //
415 // Should we later want to have right-associative operators, a way
416 // to achieve that would be to call back up to ParseExpression here
417 // instead of iteratively parsing only the remaining operators.
418 for {
419 next := p.Peek()
420 var newOp *Operation
421 var ok bool
422 if newOp, ok = thisLevel[next.Type]; !ok {
423 break
424 }
425
426 // Are we extending an expression started on the previous iteration?
427 if operation != nil {
428 lhs = &BinaryOpExpr{
429 LHS: lhs,
430 Op: operation,
431 RHS: rhs,
432
433 SrcRange: hcl.RangeBetween(lhs.Range(), rhs.Range()),
434 }
435 }
436
437 operation = newOp
438 p.Read() // eat operator token
439 var rhsDiags hcl.Diagnostics
440 rhs, rhsDiags = p.parseBinaryOps(remaining)
441 diags = append(diags, rhsDiags...)
442 if p.recovery && rhsDiags.HasErrors() {
443 return lhs, diags
444 }
445 }
446
447 if operation == nil {
448 return lhs, diags
449 }
450
451 return &BinaryOpExpr{
452 LHS: lhs,
453 Op: operation,
454 RHS: rhs,
455
456 SrcRange: hcl.RangeBetween(lhs.Range(), rhs.Range()),
457 }, diags
458}
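The left-to-right combining described in the comment above is observable directly; a sketch under the same ParseExpression assumption:

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	// Operators in the same precedence group associate to the left, so this
	// parses as (10 - 3) - 2 rather than 10 - (3 - 2).
	expr, diags := hclsyntax.ParseExpression(
		[]byte(`10 - 3 - 2`), "example.hcl", hcl.Pos{Line: 1, Column: 1},
	)
	if diags.HasErrors() {
		panic(diags.Error())
	}

	val, diags := expr.Value(nil)
	if diags.HasErrors() {
		panic(diags.Error())
	}
	f, _ := val.AsBigFloat().Float64()
	fmt.Println(f) // 5, not 9
}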
459
460func (p *parser) parseExpressionWithTraversals() (Expression, hcl.Diagnostics) {
461 term, diags := p.parseExpressionTerm()
462 ret := term
463
464Traversal:
465 for {
466 next := p.Peek()
467
468 switch next.Type {
469 case TokenDot:
470 // Attribute access or splat
471 dot := p.Read()
472 attrTok := p.Peek()
473
474 switch attrTok.Type {
475 case TokenIdent:
476 attrTok = p.Read() // eat token
477 name := string(attrTok.Bytes)
478 rng := hcl.RangeBetween(dot.Range, attrTok.Range)
479 step := hcl.TraverseAttr{
480 Name: name,
481 SrcRange: rng,
482 }
483
484 ret = makeRelativeTraversal(ret, step, rng)
485
486 case TokenNumberLit:
487 // This is a weird form we inherited from HIL, allowing numbers
488 // to be used as attributes as a weird way of writing [n].
489 // This was never actually a first-class thing in HIL, but
490 // HIL tolerated sequences like .0. in its variable names and
491 // calling applications like Terraform exploited that to
492 // introduce indexing syntax where none existed.
493 numTok := p.Read() // eat token
494 attrTok = numTok
495
496 // This syntax is ambiguous if multiple indices are used in
497 // succession, like foo.0.1.baz: that actually parses as
498 // a fractional number 0.1. Since we're only supporting this
499 // syntax for compatibility with legacy Terraform
500 // configurations, and Terraform does not tend to have lists
501 // of lists, we'll choose to reject that here with a helpful
502 // error message, rather than failing later because the index
503 // isn't a whole number.
504 if dotIdx := bytes.IndexByte(numTok.Bytes, '.'); dotIdx >= 0 {
505 first := numTok.Bytes[:dotIdx]
506 second := numTok.Bytes[dotIdx+1:]
507 diags = append(diags, &hcl.Diagnostic{
508 Severity: hcl.DiagError,
509 Summary: "Invalid legacy index syntax",
510 Detail: fmt.Sprintf("When using the legacy index syntax, chaining two indexes together is not permitted. Use the proper index syntax instead, like [%s][%s].", first, second),
511 Subject: &attrTok.Range,
512 })
513 rng := hcl.RangeBetween(dot.Range, numTok.Range)
514 step := hcl.TraverseIndex{
515 Key: cty.DynamicVal,
516 SrcRange: rng,
517 }
518 ret = makeRelativeTraversal(ret, step, rng)
519 break
520 }
521
522 numVal, numDiags := p.numberLitValue(numTok)
523 diags = append(diags, numDiags...)
524
525 rng := hcl.RangeBetween(dot.Range, numTok.Range)
526 step := hcl.TraverseIndex{
527 Key: numVal,
528 SrcRange: rng,
529 }
530
531 ret = makeRelativeTraversal(ret, step, rng)
532
533 case TokenStar:
534 // "Attribute-only" splat expression.
535 // (This is a kinda weird construct inherited from HIL, which
536 // behaves a bit like a [*] splat except that it is only able
537 // to do attribute traversals into each of its elements,
538 // whereas foo[*] can support _any_ traversal.)
539 marker := p.Read() // eat star
540 trav := make(hcl.Traversal, 0, 1)
541 var firstRange, lastRange hcl.Range
542 firstRange = p.NextRange()
543 for p.Peek().Type == TokenDot {
544 dot := p.Read()
545
546 if p.Peek().Type == TokenNumberLit {
547 // Continuing the "weird stuff inherited from HIL"
548 // theme, we also allow numbers as attribute names
549 // inside splats and interpret them as indexing
550 // into a list, for expressions like:
551 // foo.bar.*.baz.0.foo
552 numTok := p.Read()
553
554 // Weird special case if the user writes something
555 // like foo.bar.*.baz.0.0.foo, where 0.0 parses
556 // as a number.
557 if dotIdx := bytes.IndexByte(numTok.Bytes, '.'); dotIdx >= 0 {
558 first := numTok.Bytes[:dotIdx]
559 second := numTok.Bytes[dotIdx+1:]
560 diags = append(diags, &hcl.Diagnostic{
561 Severity: hcl.DiagError,
562 Summary: "Invalid legacy index syntax",
563 Detail: fmt.Sprintf("When using the legacy index syntax, chaining two indexes together is not permitted. Use the proper index syntax with a full splat expression [*] instead, like [%s][%s].", first, second),
564 Subject: &attrTok.Range,
565 })
566 trav = append(trav, hcl.TraverseIndex{
567 Key: cty.DynamicVal,
568 SrcRange: hcl.RangeBetween(dot.Range, numTok.Range),
569 })
570 lastRange = numTok.Range
571 continue
572 }
573
574 numVal, numDiags := p.numberLitValue(numTok)
575 diags = append(diags, numDiags...)
576 trav = append(trav, hcl.TraverseIndex{
577 Key: numVal,
578 SrcRange: hcl.RangeBetween(dot.Range, numTok.Range),
579 })
580 lastRange = numTok.Range
581 continue
582 }
583
584 if p.Peek().Type != TokenIdent {
585 if !p.recovery {
586 if p.Peek().Type == TokenStar {
587 diags = append(diags, &hcl.Diagnostic{
588 Severity: hcl.DiagError,
589 Summary: "Nested splat expression not allowed",
590 Detail: "A splat expression (*) cannot be used inside another attribute-only splat expression.",
591 Subject: p.Peek().Range.Ptr(),
592 })
593 } else {
594 diags = append(diags, &hcl.Diagnostic{
595 Severity: hcl.DiagError,
596 Summary: "Invalid attribute name",
597 Detail: "An attribute name is required after a dot.",
598 Subject: &attrTok.Range,
599 })
600 }
601 }
602 p.setRecovery()
603 continue Traversal
604 }
605
606 attrTok := p.Read()
607 trav = append(trav, hcl.TraverseAttr{
608 Name: string(attrTok.Bytes),
609 SrcRange: hcl.RangeBetween(dot.Range, attrTok.Range),
610 })
611 lastRange = attrTok.Range
612 }
613
614 itemExpr := &AnonSymbolExpr{
615 SrcRange: hcl.RangeBetween(dot.Range, marker.Range),
616 }
617 var travExpr Expression
618 if len(trav) == 0 {
619 travExpr = itemExpr
620 } else {
621 travExpr = &RelativeTraversalExpr{
622 Source: itemExpr,
623 Traversal: trav,
624 SrcRange: hcl.RangeBetween(firstRange, lastRange),
625 }
626 }
627
628 ret = &SplatExpr{
629 Source: ret,
630 Each: travExpr,
631 Item: itemExpr,
632
633 SrcRange: hcl.RangeBetween(dot.Range, lastRange),
634 MarkerRange: hcl.RangeBetween(dot.Range, marker.Range),
635 }
636
637 default:
638 diags = append(diags, &hcl.Diagnostic{
639 Severity: hcl.DiagError,
640 Summary: "Invalid attribute name",
641 Detail: "An attribute name is required after a dot.",
642 Subject: &attrTok.Range,
643 })
644 // This leaves the peeker in a bad place, so following items
645 // will probably be misparsed until we hit something that
646 // allows us to re-sync.
647 //
648 // We will probably need to do something better here eventually
649 // in order to support autocomplete triggered by typing a
650 // period.
651 p.setRecovery()
652 }
653
654 case TokenOBrack:
655 // Indexing of a collection.
656 // This may or may not be a hcl.Traverser, depending on whether
657 // the key value is something constant.
658
659 open := p.Read()
660 // TODO: If we have a TokenStar inside our brackets, parse as
661 // a Splat expression: foo[*].baz[0].
662 var close Token
663 p.PushIncludeNewlines(false) // arbitrary newlines allowed in brackets
664 keyExpr, keyDiags := p.ParseExpression()
665 diags = append(diags, keyDiags...)
666 if p.recovery && keyDiags.HasErrors() {
667 close = p.recover(TokenCBrack)
668 } else {
669 close = p.Read()
670 if close.Type != TokenCBrack && !p.recovery {
671 diags = append(diags, &hcl.Diagnostic{
672 Severity: hcl.DiagError,
673 Summary: "Missing close bracket on index",
674 Detail: "The index operator must end with a closing bracket (\"]\").",
675 Subject: &close.Range,
676 })
677 close = p.recover(TokenCBrack)
678 }
679 }
680 p.PopIncludeNewlines()
681
682 if lit, isLit := keyExpr.(*LiteralValueExpr); isLit {
683 litKey, _ := lit.Value(nil)
684 rng := hcl.RangeBetween(open.Range, close.Range)
685 step := hcl.TraverseIndex{
686 Key: litKey,
687 SrcRange: rng,
688 }
689 ret = makeRelativeTraversal(ret, step, rng)
690 } else {
691 rng := hcl.RangeBetween(open.Range, close.Range)
692 ret = &IndexExpr{
693 Collection: ret,
694 Key: keyExpr,
695
696 SrcRange: rng,
697 OpenRange: open.Range,
698 }
699 }
700
701 default:
702 break Traversal
703 }
704 }
705
706 return ret, diags
707}
708
709// makeRelativeTraversal takes an expression and a traverser and returns
710// a traversal expression that combines the two. If the given expression
711// is already a traversal, it is extended in place (mutating it) and
712// returned. If it isn't, a new RelativeTraversalExpr is created and returned.
713func makeRelativeTraversal(expr Expression, next hcl.Traverser, rng hcl.Range) Expression {
714 switch texpr := expr.(type) {
715 case *ScopeTraversalExpr:
716 texpr.Traversal = append(texpr.Traversal, next)
717 texpr.SrcRange = hcl.RangeBetween(texpr.SrcRange, rng)
718 return texpr
719 case *RelativeTraversalExpr:
720 texpr.Traversal = append(texpr.Traversal, next)
721 texpr.SrcRange = hcl.RangeBetween(texpr.SrcRange, rng)
722 return texpr
723 default:
724 return &RelativeTraversalExpr{
725 Source: expr,
726 Traversal: hcl.Traversal{next},
727 SrcRange: rng,
728 }
729 }
730}
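One consequence of makeRelativeTraversal extending an existing traversal in place is that a chain of constant steps collapses into a single ScopeTraversalExpr. A sketch, assuming the public ParseExpression helper:

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	// The attribute steps and the constant index all fold into the traversal
	// that started at the root symbol "foo".
	expr, diags := hclsyntax.ParseExpression(
		[]byte(`foo.bar[0].baz`), "example.hcl", hcl.Pos{Line: 1, Column: 1},
	)
	if diags.HasErrors() {
		panic(diags.Error())
	}

	if st, ok := expr.(*hclsyntax.ScopeTraversalExpr); ok {
		fmt.Println(len(st.Traversal)) // 4: root, attr, index, attr
	}
}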
731
732func (p *parser) parseExpressionTerm() (Expression, hcl.Diagnostics) {
733 start := p.Peek()
734
735 switch start.Type {
736 case TokenOParen:
737 p.Read() // eat open paren
738
739 p.PushIncludeNewlines(false)
740
741 expr, diags := p.ParseExpression()
742 if diags.HasErrors() {
743 // attempt to place the peeker after our closing paren
744 // before we return, so that the next parser has some
745 // chance of finding a valid expression.
746 p.recover(TokenCParen)
747 p.PopIncludeNewlines()
748 return expr, diags
749 }
750
751 close := p.Peek()
752 if close.Type != TokenCParen {
753 diags = append(diags, &hcl.Diagnostic{
754 Severity: hcl.DiagError,
755 Summary: "Unbalanced parentheses",
756 Detail: "Expected a closing parenthesis to terminate the expression.",
757 Subject: &close.Range,
758 Context: hcl.RangeBetween(start.Range, close.Range).Ptr(),
759 })
760 p.setRecovery()
761 }
762
763 p.Read() // eat closing paren
764 p.PopIncludeNewlines()
765
766 return expr, diags
767
768 case TokenNumberLit:
769 tok := p.Read() // eat number token
770
771 numVal, diags := p.numberLitValue(tok)
772 return &LiteralValueExpr{
773 Val: numVal,
774 SrcRange: tok.Range,
775 }, diags
776
777 case TokenIdent:
778 tok := p.Read() // eat identifier token
779
780 if p.Peek().Type == TokenOParen {
781 return p.finishParsingFunctionCall(tok)
782 }
783
784 name := string(tok.Bytes)
785 switch name {
786 case "true":
787 return &LiteralValueExpr{
788 Val: cty.True,
789 SrcRange: tok.Range,
790 }, nil
791 case "false":
792 return &LiteralValueExpr{
793 Val: cty.False,
794 SrcRange: tok.Range,
795 }, nil
796 case "null":
797 return &LiteralValueExpr{
798 Val: cty.NullVal(cty.DynamicPseudoType),
799 SrcRange: tok.Range,
800 }, nil
801 default:
802 return &ScopeTraversalExpr{
803 Traversal: hcl.Traversal{
804 hcl.TraverseRoot{
805 Name: name,
806 SrcRange: tok.Range,
807 },
808 },
809 SrcRange: tok.Range,
810 }, nil
811 }
812
813 case TokenOQuote, TokenOHeredoc:
814 open := p.Read() // eat opening marker
815 closer := p.oppositeBracket(open.Type)
816 exprs, passthru, _, diags := p.parseTemplateInner(closer)
817
818 closeRange := p.PrevRange()
819
820 if passthru {
821 if len(exprs) != 1 {
822 panic("passthru set with len(exprs) != 1")
823 }
824 return &TemplateWrapExpr{
825 Wrapped: exprs[0],
826 SrcRange: hcl.RangeBetween(open.Range, closeRange),
827 }, diags
828 }
829
830 return &TemplateExpr{
831 Parts: exprs,
832 SrcRange: hcl.RangeBetween(open.Range, closeRange),
833 }, diags
834
835 case TokenMinus:
836 tok := p.Read() // eat minus token
837
838 // Important to use parseExpressionWithTraversals rather than parseExpression
839 // here, otherwise we can capture a following binary expression into
840 // our negation.
841 // e.g. -46+5 should parse as (-46)+5, not -(46+5)
842 operand, diags := p.parseExpressionWithTraversals()
843 return &UnaryOpExpr{
844 Op: OpNegate,
845 Val: operand,
846
847 SrcRange: hcl.RangeBetween(tok.Range, operand.Range()),
848 SymbolRange: tok.Range,
849 }, diags
850
851 case TokenBang:
852 tok := p.Read() // eat bang token
853
854 // Important to use parseExpressionWithTraversals rather than parseExpression
855 // here, otherwise we can capture a following binary expression into
856 // our negation.
857 operand, diags := p.parseExpressionWithTraversals()
858 return &UnaryOpExpr{
859 Op: OpLogicalNot,
860 Val: operand,
861
862 SrcRange: hcl.RangeBetween(tok.Range, operand.Range()),
863 SymbolRange: tok.Range,
864 }, diags
865
866 case TokenOBrack:
867 return p.parseTupleCons()
868
869 case TokenOBrace:
870 return p.parseObjectCons()
871
872 default:
873 var diags hcl.Diagnostics
874 if !p.recovery {
875 diags = append(diags, &hcl.Diagnostic{
876 Severity: hcl.DiagError,
877 Summary: "Invalid expression",
878 Detail: "Expected the start of an expression, but found an invalid expression token.",
879 Subject: &start.Range,
880 })
881 }
882 p.setRecovery()
883
884 // Return a placeholder so that the AST is still structurally sound
885 // even in the presence of parse errors.
886 return &LiteralValueExpr{
887 Val: cty.DynamicVal,
888 SrcRange: start.Range,
889 }, diags
890 }
891}
892
893func (p *parser) numberLitValue(tok Token) (cty.Value, hcl.Diagnostics) {
894 // We'll lean on the cty converter to do the conversion, to ensure that
895 // the behavior is the same as what would happen if converting a
896 // non-literal string to a number.
897 numStrVal := cty.StringVal(string(tok.Bytes))
898 numVal, err := convert.Convert(numStrVal, cty.Number)
899 if err != nil {
900 ret := cty.UnknownVal(cty.Number)
901 return ret, hcl.Diagnostics{
902 {
903 Severity: hcl.DiagError,
904 Summary: "Invalid number literal",
905 // FIXME: not a very good error message, but convert only
906 // gives us "a number is required", so not much help either.
907 Detail: "Failed to recognize the value of this number literal.",
908 Subject: &tok.Range,
909 },
910 }
911 }
912 return numVal, nil
913}
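Because numberLitValue defers to the cty converter, a literal is accepted or rejected exactly as the equivalent string-to-number conversion would be. A direct sketch of that conversion using the vendored go-cty packages:

package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/convert"
)

func main() {
	// The same call numberLitValue makes for the token bytes "1e3".
	v, err := convert.Convert(cty.StringVal("1e3"), cty.Number)
	if err != nil {
		panic(err)
	}
	fmt.Println(v.AsBigFloat().String()) // 1000
}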
914
915// finishParsingFunctionCall parses a function call assuming that the function
916// name was already read, and so the peeker should be pointing at the opening
917// parenthesis after the name.
918func (p *parser) finishParsingFunctionCall(name Token) (Expression, hcl.Diagnostics) {
919 openTok := p.Read()
920 if openTok.Type != TokenOParen {
921 // should never happen if callers behave
922 panic("finishParsingFunctionCall called with non-parenthesis as next token")
923 }
924
925 var args []Expression
926 var diags hcl.Diagnostics
927 var expandFinal bool
928 var closeTok Token
929
930 // Arbitrary newlines are allowed inside the function call parentheses.
931 p.PushIncludeNewlines(false)
932
933Token:
934 for {
935 tok := p.Peek()
936
937 if tok.Type == TokenCParen {
938 closeTok = p.Read() // eat closing paren
939 break Token
940 }
941
942 arg, argDiags := p.ParseExpression()
943 args = append(args, arg)
944 diags = append(diags, argDiags...)
945 if p.recovery && argDiags.HasErrors() {
946 // if there was a parse error in the argument then we've
947 // probably been left in a weird place in the token stream,
948 // so we'll bail out with a partial argument list.
949 p.recover(TokenCParen)
950 break Token
951 }
952
953 sep := p.Read()
954 if sep.Type == TokenCParen {
955 closeTok = sep
956 break Token
957 }
958
959 if sep.Type == TokenEllipsis {
960 expandFinal = true
961
962 if p.Peek().Type != TokenCParen {
963 if !p.recovery {
964 diags = append(diags, &hcl.Diagnostic{
965 Severity: hcl.DiagError,
966 Summary: "Missing closing parenthesis",
967 Detail: "An expanded function argument (with ...) must be immediately followed by closing parentheses.",
968 Subject: &sep.Range,
969 Context: hcl.RangeBetween(name.Range, sep.Range).Ptr(),
970 })
971 }
972 closeTok = p.recover(TokenCParen)
973 } else {
974 closeTok = p.Read() // eat closing paren
975 }
976 break Token
977 }
978
979 if sep.Type != TokenComma {
980 diags = append(diags, &hcl.Diagnostic{
981 Severity: hcl.DiagError,
982 Summary: "Missing argument separator",
983 Detail: "A comma is required to separate each function argument from the next.",
984 Subject: &sep.Range,
985 Context: hcl.RangeBetween(name.Range, sep.Range).Ptr(),
986 })
987 closeTok = p.recover(TokenCParen)
988 break Token
989 }
990
991 if p.Peek().Type == TokenCParen {
992 // A trailing comma after the last argument gets us in here.
993 closeTok = p.Read() // eat closing paren
994 break Token
995 }
996
997 }
998
999 p.PopIncludeNewlines()
1000
1001 return &FunctionCallExpr{
1002 Name: string(name.Bytes),
1003 Args: args,
1004
1005 ExpandFinal: expandFinal,
1006
1007 NameRange: name.Range,
1008 OpenParenRange: openTok.Range,
1009 CloseParenRange: closeTok.Range,
1010 }, diags
1011}
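// Editor's aside, not part of the vendored source: a hedged sketch of source
// text that reaches the ExpandFinal branch above, parsed via the package-level
// ParseExpression helper defined in public.go. The function and variable names
// in the input are hypothetical.
func exampleExpandFinalCall() (Expression, hcl.Diagnostics) {
	// The trailing "..." after the final argument sets ExpandFinal on the
	// resulting *FunctionCallExpr.
	return ParseExpression([]byte(`max(numbers...)`), "example.hcl", hcl.Pos{Line: 1, Column: 1})
}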
1012
1013func (p *parser) parseTupleCons() (Expression, hcl.Diagnostics) {
1014 open := p.Read()
1015 if open.Type != TokenOBrack {
1016 // Should never happen if callers are behaving
1017 panic("parseTupleCons called without peeker pointing to open bracket")
1018 }
1019
1020 p.PushIncludeNewlines(false)
1021 defer p.PopIncludeNewlines()
1022
1023 if forKeyword.TokenMatches(p.Peek()) {
1024 return p.finishParsingForExpr(open)
1025 }
1026
1027 var close Token
1028
1029 var diags hcl.Diagnostics
1030 var exprs []Expression
1031
1032 for {
1033 next := p.Peek()
1034 if next.Type == TokenCBrack {
1035 close = p.Read() // eat closer
1036 break
1037 }
1038
1039 expr, exprDiags := p.ParseExpression()
1040 exprs = append(exprs, expr)
1041 diags = append(diags, exprDiags...)
1042
1043 if p.recovery && exprDiags.HasErrors() {
1044 // If expression parsing failed then we are probably in a strange
1045 // place in the token stream, so we'll bail out and try to reset
1046 // to after our closing bracket to allow parsing to continue.
1047 close = p.recover(TokenCBrack)
1048 break
1049 }
1050
1051 next = p.Peek()
1052 if next.Type == TokenCBrack {
1053 close = p.Read() // eat closer
1054 break
1055 }
1056
1057 if next.Type != TokenComma {
1058 if !p.recovery {
1059 diags = append(diags, &hcl.Diagnostic{
1060 Severity: hcl.DiagError,
1061 Summary: "Missing item separator",
1062 Detail: "Expected a comma to mark the beginning of the next item.",
1063 Subject: &next.Range,
1064 Context: hcl.RangeBetween(open.Range, next.Range).Ptr(),
1065 })
1066 }
1067 close = p.recover(TokenCBrack)
1068 break
1069 }
1070
1071 p.Read() // eat comma
1072
1073 }
1074
1075 return &TupleConsExpr{
1076 Exprs: exprs,
1077
1078 SrcRange: hcl.RangeBetween(open.Range, close.Range),
1079 OpenRange: open.Range,
1080 }, diags
1081}
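// Editor's aside, not part of the vendored source: two hypothetical inputs
// illustrating the dispatch above. A plain bracketed list produces a
// *TupleConsExpr, while a leading "for" keyword hands off to
// finishParsingForExpr instead.
func exampleTupleInputs() {
	ParseExpression([]byte(`[1, 2, 3]`), "tuple.hcl", hcl.Pos{Line: 1, Column: 1})
	ParseExpression([]byte(`[for s in items : upper(s)]`), "for.hcl", hcl.Pos{Line: 1, Column: 1})
}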
1082
1083func (p *parser) parseObjectCons() (Expression, hcl.Diagnostics) {
1084 open := p.Read()
1085 if open.Type != TokenOBrace {
1086 // Should never happen if callers are behaving
1087 panic("parseObjectCons called without peeker pointing to open brace")
1088 }
1089
1090 p.PushIncludeNewlines(true)
1091 defer p.PopIncludeNewlines()
1092
1093 if forKeyword.TokenMatches(p.Peek()) {
1094 return p.finishParsingForExpr(open)
1095 }
1096
1097 var close Token
1098
1099 var diags hcl.Diagnostics
1100 var items []ObjectConsItem
1101
1102 for {
1103 next := p.Peek()
1104 if next.Type == TokenNewline {
1105 p.Read() // eat newline
1106 continue
1107 }
1108
1109 if next.Type == TokenCBrace {
1110 close = p.Read() // eat closer
1111 break
1112 }
1113
1114 var key Expression
1115 var keyDiags hcl.Diagnostics
1116 key, keyDiags = p.ParseExpression()
1117 diags = append(diags, keyDiags...)
1118
1119 if p.recovery && keyDiags.HasErrors() {
1120 // If expression parsing failed then we are probably in a strange
1121 // place in the token stream, so we'll bail out and try to reset
1122 // to after our closing brace to allow parsing to continue.
1123 close = p.recover(TokenCBrace)
1124 break
1125 }
1126
 1127 // We wrap the key expression in a special wrapper that handles
 1128 // our special case: naked identifiers used as object keys are
 1129 // interpreted as literal strings.
1130 key = &ObjectConsKeyExpr{Wrapped: key}
1131
1132 next = p.Peek()
1133 if next.Type != TokenEqual && next.Type != TokenColon {
1134 if !p.recovery {
1135 if next.Type == TokenNewline || next.Type == TokenComma {
1136 diags = append(diags, &hcl.Diagnostic{
1137 Severity: hcl.DiagError,
1138 Summary: "Missing item value",
1139 Detail: "Expected an item value, introduced by an equals sign (\"=\").",
1140 Subject: &next.Range,
1141 Context: hcl.RangeBetween(open.Range, next.Range).Ptr(),
1142 })
1143 } else {
1144 diags = append(diags, &hcl.Diagnostic{
1145 Severity: hcl.DiagError,
1146 Summary: "Missing key/value separator",
1147 Detail: "Expected an equals sign (\"=\") to mark the beginning of the item value.",
1148 Subject: &next.Range,
1149 Context: hcl.RangeBetween(open.Range, next.Range).Ptr(),
1150 })
1151 }
1152 }
1153 close = p.recover(TokenCBrace)
1154 break
1155 }
1156
1157 p.Read() // eat equals sign or colon
1158
1159 value, valueDiags := p.ParseExpression()
1160 diags = append(diags, valueDiags...)
1161
1162 if p.recovery && valueDiags.HasErrors() {
1163 // If expression parsing failed then we are probably in a strange
1164 // place in the token stream, so we'll bail out and try to reset
1165 // to after our closing brace to allow parsing to continue.
1166 close = p.recover(TokenCBrace)
1167 break
1168 }
1169
1170 items = append(items, ObjectConsItem{
1171 KeyExpr: key,
1172 ValueExpr: value,
1173 })
1174
1175 next = p.Peek()
1176 if next.Type == TokenCBrace {
1177 close = p.Read() // eat closer
1178 break
1179 }
1180
1181 if next.Type != TokenComma && next.Type != TokenNewline {
1182 if !p.recovery {
1183 diags = append(diags, &hcl.Diagnostic{
1184 Severity: hcl.DiagError,
1185 Summary: "Missing item separator",
1186 Detail: "Expected a newline or comma to mark the beginning of the next item.",
1187 Subject: &next.Range,
1188 Context: hcl.RangeBetween(open.Range, next.Range).Ptr(),
1189 })
1190 }
1191 close = p.recover(TokenCBrace)
1192 break
1193 }
1194
1195 p.Read() // eat comma or newline
1196
1197 }
1198
1199 return &ObjectConsExpr{
1200 Items: items,
1201
1202 SrcRange: hcl.RangeBetween(open.Range, close.Range),
1203 OpenRange: open.Range,
1204 }, diags
1205}
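// Editor's aside, not part of the vendored source: a hypothetical input
// covering the object forms handled above. Keys may be naked identifiers
// (wrapped via ObjectConsKeyExpr so they read as literal strings), either "="
// or ":" may separate key and value, and items are split by commas or newlines.
func exampleObjectInput() (Expression, hcl.Diagnostics) {
	return ParseExpression([]byte(`{ name = "a", "count": 2 }`), "object.hcl", hcl.Pos{Line: 1, Column: 1})
}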
1206
1207func (p *parser) finishParsingForExpr(open Token) (Expression, hcl.Diagnostics) {
1208 introducer := p.Read()
1209 if !forKeyword.TokenMatches(introducer) {
1210 // Should never happen if callers are behaving
1211 panic("finishParsingForExpr called without peeker pointing to 'for' identifier")
1212 }
1213
1214 var makeObj bool
1215 var closeType TokenType
1216 switch open.Type {
1217 case TokenOBrace:
1218 makeObj = true
1219 closeType = TokenCBrace
1220 case TokenOBrack:
1221 makeObj = false // making a tuple
1222 closeType = TokenCBrack
1223 default:
1224 // Should never happen if callers are behaving
1225 panic("finishParsingForExpr called with invalid open token")
1226 }
1227
1228 var diags hcl.Diagnostics
1229 var keyName, valName string
1230
1231 if p.Peek().Type != TokenIdent {
1232 if !p.recovery {
1233 diags = append(diags, &hcl.Diagnostic{
1234 Severity: hcl.DiagError,
1235 Summary: "Invalid 'for' expression",
1236 Detail: "For expression requires variable name after 'for'.",
1237 Subject: p.Peek().Range.Ptr(),
1238 Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(),
1239 })
1240 }
1241 close := p.recover(closeType)
1242 return &LiteralValueExpr{
1243 Val: cty.DynamicVal,
1244 SrcRange: hcl.RangeBetween(open.Range, close.Range),
1245 }, diags
1246 }
1247
1248 valName = string(p.Read().Bytes)
1249
1250 if p.Peek().Type == TokenComma {
1251 // What we just read was actually the key, then.
1252 keyName = valName
1253 p.Read() // eat comma
1254
1255 if p.Peek().Type != TokenIdent {
1256 if !p.recovery {
1257 diags = append(diags, &hcl.Diagnostic{
1258 Severity: hcl.DiagError,
1259 Summary: "Invalid 'for' expression",
1260 Detail: "For expression requires value variable name after comma.",
1261 Subject: p.Peek().Range.Ptr(),
1262 Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(),
1263 })
1264 }
1265 close := p.recover(closeType)
1266 return &LiteralValueExpr{
1267 Val: cty.DynamicVal,
1268 SrcRange: hcl.RangeBetween(open.Range, close.Range),
1269 }, diags
1270 }
1271
1272 valName = string(p.Read().Bytes)
1273 }
1274
1275 if !inKeyword.TokenMatches(p.Peek()) {
1276 if !p.recovery {
1277 diags = append(diags, &hcl.Diagnostic{
1278 Severity: hcl.DiagError,
1279 Summary: "Invalid 'for' expression",
1280 Detail: "For expression requires 'in' keyword after names.",
1281 Subject: p.Peek().Range.Ptr(),
1282 Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(),
1283 })
1284 }
1285 close := p.recover(closeType)
1286 return &LiteralValueExpr{
1287 Val: cty.DynamicVal,
1288 SrcRange: hcl.RangeBetween(open.Range, close.Range),
1289 }, diags
1290 }
1291 p.Read() // eat 'in' keyword
1292
1293 collExpr, collDiags := p.ParseExpression()
1294 diags = append(diags, collDiags...)
1295 if p.recovery && collDiags.HasErrors() {
1296 close := p.recover(closeType)
1297 return &LiteralValueExpr{
1298 Val: cty.DynamicVal,
1299 SrcRange: hcl.RangeBetween(open.Range, close.Range),
1300 }, diags
1301 }
1302
1303 if p.Peek().Type != TokenColon {
1304 if !p.recovery {
1305 diags = append(diags, &hcl.Diagnostic{
1306 Severity: hcl.DiagError,
1307 Summary: "Invalid 'for' expression",
1308 Detail: "For expression requires colon after collection expression.",
1309 Subject: p.Peek().Range.Ptr(),
1310 Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(),
1311 })
1312 }
1313 close := p.recover(closeType)
1314 return &LiteralValueExpr{
1315 Val: cty.DynamicVal,
1316 SrcRange: hcl.RangeBetween(open.Range, close.Range),
1317 }, diags
1318 }
1319 p.Read() // eat colon
1320
1321 var keyExpr, valExpr Expression
1322 var keyDiags, valDiags hcl.Diagnostics
1323 valExpr, valDiags = p.ParseExpression()
1324 if p.Peek().Type == TokenFatArrow {
1325 // What we just parsed was actually keyExpr
1326 p.Read() // eat the fat arrow
1327 keyExpr, keyDiags = valExpr, valDiags
1328
1329 valExpr, valDiags = p.ParseExpression()
1330 }
1331 diags = append(diags, keyDiags...)
1332 diags = append(diags, valDiags...)
1333 if p.recovery && (keyDiags.HasErrors() || valDiags.HasErrors()) {
1334 close := p.recover(closeType)
1335 return &LiteralValueExpr{
1336 Val: cty.DynamicVal,
1337 SrcRange: hcl.RangeBetween(open.Range, close.Range),
1338 }, diags
1339 }
1340
1341 group := false
1342 var ellipsis Token
1343 if p.Peek().Type == TokenEllipsis {
1344 ellipsis = p.Read()
1345 group = true
1346 }
1347
1348 var condExpr Expression
1349 var condDiags hcl.Diagnostics
1350 if ifKeyword.TokenMatches(p.Peek()) {
1351 p.Read() // eat "if"
1352 condExpr, condDiags = p.ParseExpression()
1353 diags = append(diags, condDiags...)
1354 if p.recovery && condDiags.HasErrors() {
1355 close := p.recover(p.oppositeBracket(open.Type))
1356 return &LiteralValueExpr{
1357 Val: cty.DynamicVal,
1358 SrcRange: hcl.RangeBetween(open.Range, close.Range),
1359 }, diags
1360 }
1361 }
1362
1363 var close Token
1364 if p.Peek().Type == closeType {
1365 close = p.Read()
1366 } else {
1367 if !p.recovery {
1368 diags = append(diags, &hcl.Diagnostic{
1369 Severity: hcl.DiagError,
1370 Summary: "Invalid 'for' expression",
1371 Detail: "Extra characters after the end of the 'for' expression.",
1372 Subject: p.Peek().Range.Ptr(),
1373 Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(),
1374 })
1375 }
1376 close = p.recover(closeType)
1377 }
1378
1379 if !makeObj {
1380 if keyExpr != nil {
1381 diags = append(diags, &hcl.Diagnostic{
1382 Severity: hcl.DiagError,
1383 Summary: "Invalid 'for' expression",
1384 Detail: "Key expression is not valid when building a tuple.",
1385 Subject: keyExpr.Range().Ptr(),
1386 Context: hcl.RangeBetween(open.Range, close.Range).Ptr(),
1387 })
1388 }
1389
1390 if group {
1391 diags = append(diags, &hcl.Diagnostic{
1392 Severity: hcl.DiagError,
1393 Summary: "Invalid 'for' expression",
1394 Detail: "Grouping ellipsis (...) cannot be used when building a tuple.",
1395 Subject: &ellipsis.Range,
1396 Context: hcl.RangeBetween(open.Range, close.Range).Ptr(),
1397 })
1398 }
1399 } else {
1400 if keyExpr == nil {
1401 diags = append(diags, &hcl.Diagnostic{
1402 Severity: hcl.DiagError,
1403 Summary: "Invalid 'for' expression",
1404 Detail: "Key expression is required when building an object.",
1405 Subject: valExpr.Range().Ptr(),
1406 Context: hcl.RangeBetween(open.Range, close.Range).Ptr(),
1407 })
1408 }
1409 }
1410
1411 return &ForExpr{
1412 KeyVar: keyName,
1413 ValVar: valName,
1414 CollExpr: collExpr,
1415 KeyExpr: keyExpr,
1416 ValExpr: valExpr,
1417 CondExpr: condExpr,
1418 Group: group,
1419
1420 SrcRange: hcl.RangeBetween(open.Range, close.Range),
1421 OpenRange: open.Range,
1422 CloseRange: close.Range,
1423 }, diags
1424}
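// Editor's aside, not part of the vendored source: hypothetical inputs for the
// two 'for' expression shapes parsed above. The bracketed form builds a tuple
// (no key expression allowed), while the braced form builds an object and may
// use the grouping ellipsis after its value expression.
func exampleForExprInputs() {
	ParseExpression([]byte(`[for v in coll : upper(v) if v != ""]`), "for_tuple.hcl", hcl.Pos{Line: 1, Column: 1})
	ParseExpression([]byte(`{for k, v in coll : k => v...}`), "for_object.hcl", hcl.Pos{Line: 1, Column: 1})
}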
1425
1426// parseQuotedStringLiteral is a helper for parsing quoted strings that
1427// aren't allowed to contain any interpolations, such as block labels.
1428func (p *parser) parseQuotedStringLiteral() (string, hcl.Range, hcl.Diagnostics) {
1429 oQuote := p.Read()
1430 if oQuote.Type != TokenOQuote {
1431 return "", oQuote.Range, hcl.Diagnostics{
1432 {
1433 Severity: hcl.DiagError,
1434 Summary: "Invalid string literal",
1435 Detail: "A quoted string is required here.",
1436 Subject: &oQuote.Range,
1437 },
1438 }
1439 }
1440
1441 var diags hcl.Diagnostics
1442 ret := &bytes.Buffer{}
1443 var cQuote Token
1444
1445Token:
1446 for {
1447 tok := p.Read()
1448 switch tok.Type {
1449
1450 case TokenCQuote:
1451 cQuote = tok
1452 break Token
1453
1454 case TokenQuotedLit:
1455 s, sDiags := p.decodeStringLit(tok)
1456 diags = append(diags, sDiags...)
1457 ret.WriteString(s)
1458
1459 case TokenTemplateControl, TokenTemplateInterp:
1460 which := "$"
1461 if tok.Type == TokenTemplateControl {
 1462 which = "%"
1463 }
1464
1465 diags = append(diags, &hcl.Diagnostic{
1466 Severity: hcl.DiagError,
1467 Summary: "Invalid string literal",
1468 Detail: fmt.Sprintf(
1469 "Template sequences are not allowed in this string. To include a literal %q, double it (as \"%s%s\") to escape it.",
1470 which, which, which,
1471 ),
1472 Subject: &tok.Range,
1473 Context: hcl.RangeBetween(oQuote.Range, tok.Range).Ptr(),
1474 })
1475 p.recover(TokenTemplateSeqEnd)
1476
1477 case TokenEOF:
1478 diags = append(diags, &hcl.Diagnostic{
1479 Severity: hcl.DiagError,
1480 Summary: "Unterminated string literal",
1481 Detail: "Unable to find the closing quote mark before the end of the file.",
1482 Subject: &tok.Range,
1483 Context: hcl.RangeBetween(oQuote.Range, tok.Range).Ptr(),
1484 })
1485 break Token
1486
1487 default:
1488 // Should never happen, as long as the scanner is behaving itself
1489 diags = append(diags, &hcl.Diagnostic{
1490 Severity: hcl.DiagError,
1491 Summary: "Invalid string literal",
1492 Detail: "This item is not valid in a string literal.",
1493 Subject: &tok.Range,
1494 Context: hcl.RangeBetween(oQuote.Range, tok.Range).Ptr(),
1495 })
1496 p.recover(TokenOQuote)
1497 break Token
1498
1499 }
1500
1501 }
1502
1503 return ret.String(), hcl.RangeBetween(oQuote.Range, cQuote.Range), diags
1504}
1505
1506// decodeStringLit processes the given token, which must be either a
1507// TokenQuotedLit or a TokenStringLit, returning the string resulting from
1508// resolving any escape sequences.
1509//
1510// If any error diagnostics are returned, the returned string may be incomplete
1511// or otherwise invalid.
1512func (p *parser) decodeStringLit(tok Token) (string, hcl.Diagnostics) {
1513 var quoted bool
1514 switch tok.Type {
1515 case TokenQuotedLit:
1516 quoted = true
1517 case TokenStringLit:
1518 quoted = false
1519 default:
 1520 panic("decodeStringLit can only be used with TokenStringLit and TokenQuotedLit tokens")
1521 }
1522 var diags hcl.Diagnostics
1523
1524 ret := make([]byte, 0, len(tok.Bytes))
1525 slices := scanStringLit(tok.Bytes, quoted)
1526
1527 // We will mutate rng constantly as we walk through our token slices below.
1528 // Any diagnostics must take a copy of this rng rather than simply pointing
1529 // to it, e.g. by using rng.Ptr() rather than &rng.
1530 rng := tok.Range
1531 rng.End = rng.Start
1532
1533Slices:
1534 for _, slice := range slices {
1535 if len(slice) == 0 {
1536 continue
1537 }
1538
1539 // Advance the start of our range to where the previous token ended
1540 rng.Start = rng.End
1541
1542 // Advance the end of our range to after our token.
1543 b := slice
1544 for len(b) > 0 {
1545 adv, ch, _ := textseg.ScanGraphemeClusters(b, true)
1546 rng.End.Byte += adv
1547 switch ch[0] {
1548 case '\r', '\n':
1549 rng.End.Line++
1550 rng.End.Column = 1
1551 default:
1552 rng.End.Column++
1553 }
1554 b = b[adv:]
1555 }
1556
1557 TokenType:
1558 switch slice[0] {
1559 case '\\':
1560 if !quoted {
1561 // If we're not in quoted mode then just treat this token as
1562 // normal. (Slices can still start with backslash even if we're
1563 // not specifically looking for backslash sequences.)
1564 break TokenType
1565 }
1566 if len(slice) < 2 {
1567 diags = append(diags, &hcl.Diagnostic{
1568 Severity: hcl.DiagError,
1569 Summary: "Invalid escape sequence",
1570 Detail: "Backslash must be followed by an escape sequence selector character.",
1571 Subject: rng.Ptr(),
1572 })
1573 break TokenType
1574 }
1575
1576 switch slice[1] {
1577
1578 case 'n':
1579 ret = append(ret, '\n')
1580 continue Slices
1581 case 'r':
1582 ret = append(ret, '\r')
1583 continue Slices
1584 case 't':
1585 ret = append(ret, '\t')
1586 continue Slices
1587 case '"':
1588 ret = append(ret, '"')
1589 continue Slices
1590 case '\\':
1591 ret = append(ret, '\\')
1592 continue Slices
1593 case 'u', 'U':
1594 if slice[1] == 'u' && len(slice) != 6 {
1595 diags = append(diags, &hcl.Diagnostic{
1596 Severity: hcl.DiagError,
1597 Summary: "Invalid escape sequence",
1598 Detail: "The \\u escape sequence must be followed by four hexadecimal digits.",
1599 Subject: rng.Ptr(),
1600 })
1601 break TokenType
1602 } else if slice[1] == 'U' && len(slice) != 10 {
1603 diags = append(diags, &hcl.Diagnostic{
1604 Severity: hcl.DiagError,
1605 Summary: "Invalid escape sequence",
1606 Detail: "The \\U escape sequence must be followed by eight hexadecimal digits.",
1607 Subject: rng.Ptr(),
1608 })
1609 break TokenType
1610 }
1611
1612 numHex := string(slice[2:])
1613 num, err := strconv.ParseUint(numHex, 16, 32)
1614 if err != nil {
1615 // Should never happen because the scanner won't match
1616 // a sequence of digits that isn't valid.
1617 panic(err)
1618 }
1619
1620 r := rune(num)
1621 l := utf8.RuneLen(r)
1622 if l == -1 {
1623 diags = append(diags, &hcl.Diagnostic{
1624 Severity: hcl.DiagError,
1625 Summary: "Invalid escape sequence",
1626 Detail: fmt.Sprintf("Cannot encode character U+%04x in UTF-8.", num),
1627 Subject: rng.Ptr(),
1628 })
1629 break TokenType
1630 }
1631 for i := 0; i < l; i++ {
1632 ret = append(ret, 0)
1633 }
1634 rb := ret[len(ret)-l:]
1635 utf8.EncodeRune(rb, r)
1636
1637 continue Slices
1638
1639 default:
1640 diags = append(diags, &hcl.Diagnostic{
1641 Severity: hcl.DiagError,
1642 Summary: "Invalid escape sequence",
1643 Detail: fmt.Sprintf("The symbol %q is not a valid escape sequence selector.", slice[1:]),
1644 Subject: rng.Ptr(),
1645 })
1646 ret = append(ret, slice[1:]...)
1647 continue Slices
1648 }
1649
1650 case '$', '%':
1651 if len(slice) != 3 {
1652 // Not long enough to be our escape sequence, so it's literal.
1653 break TokenType
1654 }
1655
1656 if slice[1] == slice[0] && slice[2] == '{' {
1657 ret = append(ret, slice[0])
1658 ret = append(ret, '{')
1659 continue Slices
1660 }
1661
1662 break TokenType
1663 }
1664
1665 // If we fall out here or break out of here from the switch above
1666 // then this slice is just a literal.
1667 ret = append(ret, slice...)
1668 }
1669
1670 return string(ret), diags
1671}
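// Editor's aside, not part of the vendored source: a hypothetical quoted
// string exercising the escape handling above. "\n" and "\u00e9" are decoded
// into their characters, and the doubled "$${" collapses to a literal "${"
// rather than opening an interpolation.
func exampleEscapedString() (Expression, hcl.Diagnostics) {
	return ParseExpression([]byte(`"line one\ncaf\u00e9 costs $${price}"`), "escapes.hcl", hcl.Pos{Line: 1, Column: 1})
}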
1672
1673// setRecovery turns on recovery mode without actually doing any recovery.
1674// This can be used when a parser knowingly leaves the peeker in a useless
1675// place and wants to suppress errors that might result from that decision.
1676func (p *parser) setRecovery() {
1677 p.recovery = true
1678}
1679
1680// recover seeks forward in the token stream until it finds TokenType "end",
1681// then returns with the peeker pointed at the following token.
1682//
1683// If the given token type is a bracketer, this function will additionally
1684// count nested instances of the brackets to try to leave the peeker at
1685// the end of the _current_ instance of that bracketer, skipping over any
1686// nested instances. This is a best-effort operation and may have
1687// unpredictable results on input with bad bracketer nesting.
1688func (p *parser) recover(end TokenType) Token {
1689 start := p.oppositeBracket(end)
1690 p.recovery = true
1691
1692 nest := 0
1693 for {
1694 tok := p.Read()
1695 ty := tok.Type
1696 if end == TokenTemplateSeqEnd && ty == TokenTemplateControl {
1697 // normalize so that our matching behavior can work, since
1698 // TokenTemplateControl/TokenTemplateInterp are asymmetrical
1699 // with TokenTemplateSeqEnd and thus we need to count both
1700 // openers if that's the closer we're looking for.
1701 ty = TokenTemplateInterp
1702 }
1703
1704 switch ty {
1705 case start:
1706 nest++
1707 case end:
1708 if nest < 1 {
1709 return tok
1710 }
1711
1712 nest--
1713 case TokenEOF:
1714 return tok
1715 }
1716 }
1717}
1718
1719// recoverOver seeks forward in the token stream until it finds a block
1720// starting with TokenType "start", then finds the corresponding end token,
1721// leaving the peeker pointed at the token after that end token.
1722//
1723// The given token type _must_ be a bracketer. For example, if the given
1724// start token is TokenOBrace then the parser will be left at the _end_ of
1725// the next brace-delimited block encountered, or at EOF if no such block
1726// is found or it is unclosed.
1727func (p *parser) recoverOver(start TokenType) {
1728 end := p.oppositeBracket(start)
1729
1730 // find the opening bracket first
1731Token:
1732 for {
1733 tok := p.Read()
1734 switch tok.Type {
1735 case start, TokenEOF:
1736 break Token
1737 }
1738 }
1739
1740 // Now use our existing recover function to locate the _end_ of the
1741 // container we've found.
1742 p.recover(end)
1743}
1744
1745func (p *parser) recoverAfterBodyItem() {
1746 p.recovery = true
1747 var open []TokenType
1748
1749Token:
1750 for {
1751 tok := p.Read()
1752
1753 switch tok.Type {
1754
1755 case TokenNewline:
1756 if len(open) == 0 {
1757 break Token
1758 }
1759
1760 case TokenEOF:
1761 break Token
1762
1763 case TokenOBrace, TokenOBrack, TokenOParen, TokenOQuote, TokenOHeredoc, TokenTemplateInterp, TokenTemplateControl:
1764 open = append(open, tok.Type)
1765
1766 case TokenCBrace, TokenCBrack, TokenCParen, TokenCQuote, TokenCHeredoc:
1767 opener := p.oppositeBracket(tok.Type)
1768 for len(open) > 0 && open[len(open)-1] != opener {
1769 open = open[:len(open)-1]
1770 }
1771 if len(open) > 0 {
1772 open = open[:len(open)-1]
1773 }
1774
1775 case TokenTemplateSeqEnd:
1776 for len(open) > 0 && open[len(open)-1] != TokenTemplateInterp && open[len(open)-1] != TokenTemplateControl {
1777 open = open[:len(open)-1]
1778 }
1779 if len(open) > 0 {
1780 open = open[:len(open)-1]
1781 }
1782
1783 }
1784 }
1785}
1786
1787// oppositeBracket finds the bracket that opposes the given bracketer, or
 1788 // TokenNil if the given token isn't a bracketer.
1789//
1790// "Bracketer", for the sake of this function, is one end of a matching
1791// open/close set of tokens that establish a bracketing context.
1792func (p *parser) oppositeBracket(ty TokenType) TokenType {
1793 switch ty {
1794
1795 case TokenOBrace:
1796 return TokenCBrace
1797 case TokenOBrack:
1798 return TokenCBrack
1799 case TokenOParen:
1800 return TokenCParen
1801 case TokenOQuote:
1802 return TokenCQuote
1803 case TokenOHeredoc:
1804 return TokenCHeredoc
1805
1806 case TokenCBrace:
1807 return TokenOBrace
1808 case TokenCBrack:
1809 return TokenOBrack
1810 case TokenCParen:
1811 return TokenOParen
1812 case TokenCQuote:
1813 return TokenOQuote
1814 case TokenCHeredoc:
1815 return TokenOHeredoc
1816
1817 case TokenTemplateControl:
1818 return TokenTemplateSeqEnd
1819 case TokenTemplateInterp:
1820 return TokenTemplateSeqEnd
1821 case TokenTemplateSeqEnd:
 1822 // This is ambiguous, but we return Interp here because that's
1823 // what's assumed by the "recover" method.
1824 return TokenTemplateInterp
1825
1826 default:
1827 return TokenNil
1828 }
1829}
1830
1831func errPlaceholderExpr(rng hcl.Range) Expression {
1832 return &LiteralValueExpr{
1833 Val: cty.DynamicVal,
1834 SrcRange: rng,
1835 }
1836}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser_template.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser_template.go
new file mode 100644
index 0000000..3711067
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser_template.go
@@ -0,0 +1,728 @@
1package hclsyntax
2
3import (
4 "fmt"
5 "strings"
6 "unicode"
7
8 "github.com/hashicorp/hcl2/hcl"
9 "github.com/zclconf/go-cty/cty"
10)
11
12func (p *parser) ParseTemplate() (Expression, hcl.Diagnostics) {
13 return p.parseTemplate(TokenEOF)
14}
15
16func (p *parser) parseTemplate(end TokenType) (Expression, hcl.Diagnostics) {
17 exprs, passthru, rng, diags := p.parseTemplateInner(end)
18
19 if passthru {
20 if len(exprs) != 1 {
21 panic("passthru set with len(exprs) != 1")
22 }
23 return &TemplateWrapExpr{
24 Wrapped: exprs[0],
25 SrcRange: rng,
26 }, diags
27 }
28
29 return &TemplateExpr{
30 Parts: exprs,
31 SrcRange: rng,
32 }, diags
33}
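// Editor's aside, not part of the vendored source: a hypothetical standalone
// template, parsed via ParseTemplate in public.go, whose %{ if } / %{ else } /
// %{ endif } directives are assembled into a ConditionalExpr by the
// templateParser below.
func exampleTemplateInput() (Expression, hcl.Diagnostics) {
	src := []byte(`Hello, %{ if name != "" }${name}%{ else }stranger%{ endif }!`)
	return ParseTemplate(src, "greeting.tmpl", hcl.Pos{Line: 1, Column: 1})
}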
34
35func (p *parser) parseTemplateInner(end TokenType) ([]Expression, bool, hcl.Range, hcl.Diagnostics) {
36 parts, diags := p.parseTemplateParts(end)
37 tp := templateParser{
38 Tokens: parts.Tokens,
39 SrcRange: parts.SrcRange,
40 }
41 exprs, exprsDiags := tp.parseRoot()
42 diags = append(diags, exprsDiags...)
43
44 passthru := false
45 if len(parts.Tokens) == 2 { // one real token and one synthetic "end" token
46 if _, isInterp := parts.Tokens[0].(*templateInterpToken); isInterp {
47 passthru = true
48 }
49 }
50
51 return exprs, passthru, parts.SrcRange, diags
52}
53
54type templateParser struct {
55 Tokens []templateToken
56 SrcRange hcl.Range
57
58 pos int
59}
60
61func (p *templateParser) parseRoot() ([]Expression, hcl.Diagnostics) {
62 var exprs []Expression
63 var diags hcl.Diagnostics
64
65 for {
66 next := p.Peek()
67 if _, isEnd := next.(*templateEndToken); isEnd {
68 break
69 }
70
71 expr, exprDiags := p.parseExpr()
72 diags = append(diags, exprDiags...)
73 exprs = append(exprs, expr)
74 }
75
76 return exprs, diags
77}
78
79func (p *templateParser) parseExpr() (Expression, hcl.Diagnostics) {
80 next := p.Peek()
81 switch tok := next.(type) {
82
83 case *templateLiteralToken:
84 p.Read() // eat literal
85 return &LiteralValueExpr{
86 Val: cty.StringVal(tok.Val),
87 SrcRange: tok.SrcRange,
88 }, nil
89
90 case *templateInterpToken:
91 p.Read() // eat interp
92 return tok.Expr, nil
93
94 case *templateIfToken:
95 return p.parseIf()
96
97 case *templateForToken:
98 return p.parseFor()
99
100 case *templateEndToken:
101 p.Read() // eat erroneous token
102 return errPlaceholderExpr(tok.SrcRange), hcl.Diagnostics{
103 {
104 // This is a particularly unhelpful diagnostic, so callers
105 // should attempt to pre-empt it and produce a more helpful
106 // diagnostic that is context-aware.
107 Severity: hcl.DiagError,
108 Summary: "Unexpected end of template",
109 Detail: "The control directives within this template are unbalanced.",
110 Subject: &tok.SrcRange,
111 },
112 }
113
114 case *templateEndCtrlToken:
115 p.Read() // eat erroneous token
116 return errPlaceholderExpr(tok.SrcRange), hcl.Diagnostics{
117 {
118 Severity: hcl.DiagError,
119 Summary: fmt.Sprintf("Unexpected %s directive", tok.Name()),
120 Detail: "The control directives within this template are unbalanced.",
121 Subject: &tok.SrcRange,
122 },
123 }
124
125 default:
126 // should never happen, because above should be exhaustive
127 panic(fmt.Sprintf("unhandled template token type %T", next))
128 }
129}
130
131func (p *templateParser) parseIf() (Expression, hcl.Diagnostics) {
132 open := p.Read()
133 openIf, isIf := open.(*templateIfToken)
134 if !isIf {
135 // should never happen if caller is behaving
136 panic("parseIf called with peeker not pointing at if token")
137 }
138
139 var ifExprs, elseExprs []Expression
140 var diags hcl.Diagnostics
141 var endifRange hcl.Range
142
143 currentExprs := &ifExprs
144Token:
145 for {
146 next := p.Peek()
147 if end, isEnd := next.(*templateEndToken); isEnd {
148 diags = append(diags, &hcl.Diagnostic{
149 Severity: hcl.DiagError,
150 Summary: "Unexpected end of template",
151 Detail: fmt.Sprintf(
152 "The if directive at %s is missing its corresponding endif directive.",
153 openIf.SrcRange,
154 ),
155 Subject: &end.SrcRange,
156 })
157 return errPlaceholderExpr(end.SrcRange), diags
158 }
159 if end, isCtrlEnd := next.(*templateEndCtrlToken); isCtrlEnd {
160 p.Read() // eat end directive
161
162 switch end.Type {
163
164 case templateElse:
165 if currentExprs == &ifExprs {
166 currentExprs = &elseExprs
167 continue Token
168 }
169
170 diags = append(diags, &hcl.Diagnostic{
171 Severity: hcl.DiagError,
172 Summary: "Unexpected else directive",
173 Detail: fmt.Sprintf(
174 "Already in the else clause for the if started at %s.",
175 openIf.SrcRange,
176 ),
177 Subject: &end.SrcRange,
178 })
179
180 case templateEndIf:
181 endifRange = end.SrcRange
182 break Token
183
184 default:
185 diags = append(diags, &hcl.Diagnostic{
186 Severity: hcl.DiagError,
187 Summary: fmt.Sprintf("Unexpected %s directive", end.Name()),
188 Detail: fmt.Sprintf(
189 "Expecting an endif directive for the if started at %s.",
190 openIf.SrcRange,
191 ),
192 Subject: &end.SrcRange,
193 })
194 }
195
196 return errPlaceholderExpr(end.SrcRange), diags
197 }
198
199 expr, exprDiags := p.parseExpr()
200 diags = append(diags, exprDiags...)
201 *currentExprs = append(*currentExprs, expr)
202 }
203
204 if len(ifExprs) == 0 {
205 ifExprs = append(ifExprs, &LiteralValueExpr{
206 Val: cty.StringVal(""),
207 SrcRange: hcl.Range{
208 Filename: openIf.SrcRange.Filename,
209 Start: openIf.SrcRange.End,
210 End: openIf.SrcRange.End,
211 },
212 })
213 }
214 if len(elseExprs) == 0 {
215 elseExprs = append(elseExprs, &LiteralValueExpr{
216 Val: cty.StringVal(""),
217 SrcRange: hcl.Range{
218 Filename: endifRange.Filename,
219 Start: endifRange.Start,
220 End: endifRange.Start,
221 },
222 })
223 }
224
225 trueExpr := &TemplateExpr{
226 Parts: ifExprs,
227 SrcRange: hcl.RangeBetween(ifExprs[0].Range(), ifExprs[len(ifExprs)-1].Range()),
228 }
229 falseExpr := &TemplateExpr{
230 Parts: elseExprs,
231 SrcRange: hcl.RangeBetween(elseExprs[0].Range(), elseExprs[len(elseExprs)-1].Range()),
232 }
233
234 return &ConditionalExpr{
235 Condition: openIf.CondExpr,
236 TrueResult: trueExpr,
237 FalseResult: falseExpr,
238
239 SrcRange: hcl.RangeBetween(openIf.SrcRange, endifRange),
240 }, diags
241}
242
243func (p *templateParser) parseFor() (Expression, hcl.Diagnostics) {
244 open := p.Read()
245 openFor, isFor := open.(*templateForToken)
246 if !isFor {
247 // should never happen if caller is behaving
248 panic("parseFor called with peeker not pointing at for token")
249 }
250
251 var contentExprs []Expression
252 var diags hcl.Diagnostics
253 var endforRange hcl.Range
254
255Token:
256 for {
257 next := p.Peek()
258 if end, isEnd := next.(*templateEndToken); isEnd {
259 diags = append(diags, &hcl.Diagnostic{
260 Severity: hcl.DiagError,
261 Summary: "Unexpected end of template",
262 Detail: fmt.Sprintf(
263 "The for directive at %s is missing its corresponding endfor directive.",
264 openFor.SrcRange,
265 ),
266 Subject: &end.SrcRange,
267 })
268 return errPlaceholderExpr(end.SrcRange), diags
269 }
270 if end, isCtrlEnd := next.(*templateEndCtrlToken); isCtrlEnd {
271 p.Read() // eat end directive
272
273 switch end.Type {
274
275 case templateElse:
276 diags = append(diags, &hcl.Diagnostic{
277 Severity: hcl.DiagError,
278 Summary: "Unexpected else directive",
279 Detail: "An else clause is not expected for a for directive.",
280 Subject: &end.SrcRange,
281 })
282
283 case templateEndFor:
284 endforRange = end.SrcRange
285 break Token
286
287 default:
288 diags = append(diags, &hcl.Diagnostic{
289 Severity: hcl.DiagError,
290 Summary: fmt.Sprintf("Unexpected %s directive", end.Name()),
291 Detail: fmt.Sprintf(
292 "Expecting an endfor directive corresponding to the for directive at %s.",
293 openFor.SrcRange,
294 ),
295 Subject: &end.SrcRange,
296 })
297 }
298
299 return errPlaceholderExpr(end.SrcRange), diags
300 }
301
302 expr, exprDiags := p.parseExpr()
303 diags = append(diags, exprDiags...)
304 contentExprs = append(contentExprs, expr)
305 }
306
307 if len(contentExprs) == 0 {
308 contentExprs = append(contentExprs, &LiteralValueExpr{
309 Val: cty.StringVal(""),
310 SrcRange: hcl.Range{
311 Filename: openFor.SrcRange.Filename,
312 Start: openFor.SrcRange.End,
313 End: openFor.SrcRange.End,
314 },
315 })
316 }
317
318 contentExpr := &TemplateExpr{
319 Parts: contentExprs,
320 SrcRange: hcl.RangeBetween(contentExprs[0].Range(), contentExprs[len(contentExprs)-1].Range()),
321 }
322
323 forExpr := &ForExpr{
324 KeyVar: openFor.KeyVar,
325 ValVar: openFor.ValVar,
326
327 CollExpr: openFor.CollExpr,
328 ValExpr: contentExpr,
329
330 SrcRange: hcl.RangeBetween(openFor.SrcRange, endforRange),
331 OpenRange: openFor.SrcRange,
332 CloseRange: endforRange,
333 }
334
335 return &TemplateJoinExpr{
336 Tuple: forExpr,
337 }, diags
338}
339
340func (p *templateParser) Peek() templateToken {
341 return p.Tokens[p.pos]
342}
343
344func (p *templateParser) Read() templateToken {
345 ret := p.Peek()
346 if _, end := ret.(*templateEndToken); !end {
347 p.pos++
348 }
349 return ret
350}
351
352// parseTemplateParts produces a flat sequence of "template tokens", which are
353// either literal values (with any "trimming" already applied), interpolation
354// sequences, or control flow markers.
355//
356// A further pass is required on the result to turn it into an AST.
357func (p *parser) parseTemplateParts(end TokenType) (*templateParts, hcl.Diagnostics) {
358 var parts []templateToken
359 var diags hcl.Diagnostics
360
361 startRange := p.NextRange()
362 ltrimNext := false
363 nextCanTrimPrev := false
364 var endRange hcl.Range
365
366Token:
367 for {
368 next := p.Read()
369 if next.Type == end {
370 // all done!
371 endRange = next.Range
372 break
373 }
374
375 ltrim := ltrimNext
376 ltrimNext = false
377 canTrimPrev := nextCanTrimPrev
378 nextCanTrimPrev = false
379
380 switch next.Type {
381 case TokenStringLit, TokenQuotedLit:
382 str, strDiags := p.decodeStringLit(next)
383 diags = append(diags, strDiags...)
384
385 if ltrim {
386 str = strings.TrimLeftFunc(str, unicode.IsSpace)
387 }
388
389 parts = append(parts, &templateLiteralToken{
390 Val: str,
391 SrcRange: next.Range,
392 })
393 nextCanTrimPrev = true
394
395 case TokenTemplateInterp:
396 // if the opener is ${~ then we want to eat any trailing whitespace
397 // in the preceding literal token, assuming it is indeed a literal
398 // token.
399 if canTrimPrev && len(next.Bytes) == 3 && next.Bytes[2] == '~' && len(parts) > 0 {
400 prevExpr := parts[len(parts)-1]
401 if lexpr, ok := prevExpr.(*templateLiteralToken); ok {
402 lexpr.Val = strings.TrimRightFunc(lexpr.Val, unicode.IsSpace)
403 }
404 }
405
406 p.PushIncludeNewlines(false)
407 expr, exprDiags := p.ParseExpression()
408 diags = append(diags, exprDiags...)
409 close := p.Peek()
410 if close.Type != TokenTemplateSeqEnd {
411 if !p.recovery {
412 diags = append(diags, &hcl.Diagnostic{
413 Severity: hcl.DiagError,
414 Summary: "Extra characters after interpolation expression",
415 Detail: "Expected a closing brace to end the interpolation expression, but found extra characters.",
416 Subject: &close.Range,
417 Context: hcl.RangeBetween(startRange, close.Range).Ptr(),
418 })
419 }
420 p.recover(TokenTemplateSeqEnd)
421 } else {
422 p.Read() // eat closing brace
423
424 // If the closer is ~} then we want to eat any leading
425 // whitespace on the next token, if it turns out to be a
426 // literal token.
427 if len(close.Bytes) == 2 && close.Bytes[0] == '~' {
428 ltrimNext = true
429 }
430 }
431 p.PopIncludeNewlines()
432 parts = append(parts, &templateInterpToken{
433 Expr: expr,
434 SrcRange: hcl.RangeBetween(next.Range, close.Range),
435 })
436
437 case TokenTemplateControl:
438 // if the opener is %{~ then we want to eat any trailing whitespace
439 // in the preceding literal token, assuming it is indeed a literal
440 // token.
441 if canTrimPrev && len(next.Bytes) == 3 && next.Bytes[2] == '~' && len(parts) > 0 {
442 prevExpr := parts[len(parts)-1]
443 if lexpr, ok := prevExpr.(*templateLiteralToken); ok {
444 lexpr.Val = strings.TrimRightFunc(lexpr.Val, unicode.IsSpace)
445 }
446 }
447 p.PushIncludeNewlines(false)
448
449 kw := p.Peek()
450 if kw.Type != TokenIdent {
451 if !p.recovery {
452 diags = append(diags, &hcl.Diagnostic{
453 Severity: hcl.DiagError,
454 Summary: "Invalid template directive",
455 Detail: "A template directive keyword (\"if\", \"for\", etc) is expected at the beginning of a %{ sequence.",
456 Subject: &kw.Range,
457 Context: hcl.RangeBetween(next.Range, kw.Range).Ptr(),
458 })
459 }
460 p.recover(TokenTemplateSeqEnd)
461 p.PopIncludeNewlines()
462 continue Token
463 }
464 p.Read() // eat keyword token
465
466 switch {
467
468 case ifKeyword.TokenMatches(kw):
469 condExpr, exprDiags := p.ParseExpression()
470 diags = append(diags, exprDiags...)
471 parts = append(parts, &templateIfToken{
472 CondExpr: condExpr,
473 SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
474 })
475
476 case elseKeyword.TokenMatches(kw):
477 parts = append(parts, &templateEndCtrlToken{
478 Type: templateElse,
479 SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
480 })
481
482 case endifKeyword.TokenMatches(kw):
483 parts = append(parts, &templateEndCtrlToken{
484 Type: templateEndIf,
485 SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
486 })
487
488 case forKeyword.TokenMatches(kw):
489 var keyName, valName string
490 if p.Peek().Type != TokenIdent {
491 if !p.recovery {
492 diags = append(diags, &hcl.Diagnostic{
493 Severity: hcl.DiagError,
494 Summary: "Invalid 'for' directive",
495 Detail: "For directive requires variable name after 'for'.",
496 Subject: p.Peek().Range.Ptr(),
497 })
498 }
499 p.recover(TokenTemplateSeqEnd)
500 p.PopIncludeNewlines()
501 continue Token
502 }
503
504 valName = string(p.Read().Bytes)
505
506 if p.Peek().Type == TokenComma {
507 // What we just read was actually the key, then.
508 keyName = valName
509 p.Read() // eat comma
510
511 if p.Peek().Type != TokenIdent {
512 if !p.recovery {
513 diags = append(diags, &hcl.Diagnostic{
514 Severity: hcl.DiagError,
515 Summary: "Invalid 'for' directive",
516 Detail: "For directive requires value variable name after comma.",
517 Subject: p.Peek().Range.Ptr(),
518 })
519 }
520 p.recover(TokenTemplateSeqEnd)
521 p.PopIncludeNewlines()
522 continue Token
523 }
524
525 valName = string(p.Read().Bytes)
526 }
527
528 if !inKeyword.TokenMatches(p.Peek()) {
529 if !p.recovery {
530 diags = append(diags, &hcl.Diagnostic{
531 Severity: hcl.DiagError,
532 Summary: "Invalid 'for' directive",
533 Detail: "For directive requires 'in' keyword after names.",
534 Subject: p.Peek().Range.Ptr(),
535 })
536 }
537 p.recover(TokenTemplateSeqEnd)
538 p.PopIncludeNewlines()
539 continue Token
540 }
541 p.Read() // eat 'in' keyword
542
543 collExpr, collDiags := p.ParseExpression()
544 diags = append(diags, collDiags...)
545 parts = append(parts, &templateForToken{
546 KeyVar: keyName,
547 ValVar: valName,
548 CollExpr: collExpr,
549
550 SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
551 })
552
553 case endforKeyword.TokenMatches(kw):
554 parts = append(parts, &templateEndCtrlToken{
555 Type: templateEndFor,
556 SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
557 })
558
559 default:
560 if !p.recovery {
561 suggestions := []string{"if", "for", "else", "endif", "endfor"}
562 given := string(kw.Bytes)
563 suggestion := nameSuggestion(given, suggestions)
564 if suggestion != "" {
565 suggestion = fmt.Sprintf(" Did you mean %q?", suggestion)
566 }
567
568 diags = append(diags, &hcl.Diagnostic{
569 Severity: hcl.DiagError,
570 Summary: "Invalid template control keyword",
571 Detail: fmt.Sprintf("%q is not a valid template control keyword.%s", given, suggestion),
572 Subject: &kw.Range,
573 Context: hcl.RangeBetween(next.Range, kw.Range).Ptr(),
574 })
575 }
576 p.recover(TokenTemplateSeqEnd)
577 p.PopIncludeNewlines()
578 continue Token
579
580 }
581
582 close := p.Peek()
583 if close.Type != TokenTemplateSeqEnd {
584 if !p.recovery {
585 diags = append(diags, &hcl.Diagnostic{
586 Severity: hcl.DiagError,
587 Summary: fmt.Sprintf("Extra characters in %s marker", kw.Bytes),
588 Detail: "Expected a closing brace to end the sequence, but found extra characters.",
589 Subject: &close.Range,
590 Context: hcl.RangeBetween(startRange, close.Range).Ptr(),
591 })
592 }
593 p.recover(TokenTemplateSeqEnd)
594 } else {
595 p.Read() // eat closing brace
596
597 // If the closer is ~} then we want to eat any leading
598 // whitespace on the next token, if it turns out to be a
599 // literal token.
600 if len(close.Bytes) == 2 && close.Bytes[0] == '~' {
601 ltrimNext = true
602 }
603 }
604 p.PopIncludeNewlines()
605
606 default:
607 if !p.recovery {
608 diags = append(diags, &hcl.Diagnostic{
609 Severity: hcl.DiagError,
610 Summary: "Unterminated template string",
611 Detail: "No closing marker was found for the string.",
612 Subject: &next.Range,
613 Context: hcl.RangeBetween(startRange, next.Range).Ptr(),
614 })
615 }
616 final := p.recover(end)
617 endRange = final.Range
618 break Token
619 }
620 }
621
622 if len(parts) == 0 {
623 // If a sequence has no content, we'll treat it as if it had an
624 // empty string in it because that's what the user probably means
625 // if they write "" in configuration.
626 parts = append(parts, &templateLiteralToken{
627 Val: "",
628 SrcRange: hcl.Range{
629 // Range is the zero-character span immediately after the
630 // opening quote.
631 Filename: startRange.Filename,
632 Start: startRange.End,
633 End: startRange.End,
634 },
635 })
636 }
637
638 // Always end with an end token, so the parser can produce diagnostics
639 // about unclosed items with proper position information.
640 parts = append(parts, &templateEndToken{
641 SrcRange: endRange,
642 })
643
644 ret := &templateParts{
645 Tokens: parts,
646 SrcRange: hcl.RangeBetween(startRange, endRange),
647 }
648
649 return ret, diags
650}
651
652type templateParts struct {
653 Tokens []templateToken
654 SrcRange hcl.Range
655}
656
657// templateToken is a higher-level token that represents a single atom within
658// the template language. Our template parsing first raises the raw token
659// stream to a sequence of templateToken, and then transforms the result into
660// an expression tree.
661type templateToken interface {
662 templateToken() templateToken
663}
664
665type templateLiteralToken struct {
666 Val string
667 SrcRange hcl.Range
668 isTemplateToken
669}
670
671type templateInterpToken struct {
672 Expr Expression
673 SrcRange hcl.Range
674 isTemplateToken
675}
676
677type templateIfToken struct {
678 CondExpr Expression
679 SrcRange hcl.Range
680 isTemplateToken
681}
682
683type templateForToken struct {
684 KeyVar string // empty if ignoring key
685 ValVar string
686 CollExpr Expression
687 SrcRange hcl.Range
688 isTemplateToken
689}
690
691type templateEndCtrlType int
692
693const (
694 templateEndIf templateEndCtrlType = iota
695 templateElse
696 templateEndFor
697)
698
699type templateEndCtrlToken struct {
700 Type templateEndCtrlType
701 SrcRange hcl.Range
702 isTemplateToken
703}
704
705func (t *templateEndCtrlToken) Name() string {
706 switch t.Type {
707 case templateEndIf:
708 return "endif"
709 case templateElse:
710 return "else"
711 case templateEndFor:
712 return "endfor"
713 default:
714 // should never happen
715 panic("invalid templateEndCtrlType")
716 }
717}
718
719type templateEndToken struct {
720 SrcRange hcl.Range
721 isTemplateToken
722}
723
724type isTemplateToken [0]int
725
726func (t isTemplateToken) templateToken() templateToken {
727 return t
728}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser_traversal.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser_traversal.go
new file mode 100644
index 0000000..2ff3ed6
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser_traversal.go
@@ -0,0 +1,159 @@
1package hclsyntax
2
3import (
4 "github.com/hashicorp/hcl2/hcl"
5 "github.com/zclconf/go-cty/cty"
6)
7
8// ParseTraversalAbs parses an absolute traversal that is assumed to consume
9// all of the remaining tokens in the peeker. The usual parser recovery
10// behavior is not supported here because traversals are not expected to
11// be parsed as part of a larger program.
12func (p *parser) ParseTraversalAbs() (hcl.Traversal, hcl.Diagnostics) {
13 var ret hcl.Traversal
14 var diags hcl.Diagnostics
15
16 // Absolute traversal must always begin with a variable name
17 varTok := p.Read()
18 if varTok.Type != TokenIdent {
19 diags = append(diags, &hcl.Diagnostic{
20 Severity: hcl.DiagError,
21 Summary: "Variable name required",
22 Detail: "Must begin with a variable name.",
23 Subject: &varTok.Range,
24 })
25 return ret, diags
26 }
27
28 varName := string(varTok.Bytes)
29 ret = append(ret, hcl.TraverseRoot{
30 Name: varName,
31 SrcRange: varTok.Range,
32 })
33
34 for {
35 next := p.Peek()
36
37 if next.Type == TokenEOF {
38 return ret, diags
39 }
40
41 switch next.Type {
42 case TokenDot:
43 // Attribute access
44 dot := p.Read() // eat dot
45 nameTok := p.Read()
46 if nameTok.Type != TokenIdent {
47 if nameTok.Type == TokenStar {
48 diags = append(diags, &hcl.Diagnostic{
49 Severity: hcl.DiagError,
50 Summary: "Attribute name required",
51 Detail: "Splat expressions (.*) may not be used here.",
52 Subject: &nameTok.Range,
53 Context: hcl.RangeBetween(varTok.Range, nameTok.Range).Ptr(),
54 })
55 } else {
56 diags = append(diags, &hcl.Diagnostic{
57 Severity: hcl.DiagError,
58 Summary: "Attribute name required",
59 Detail: "Dot must be followed by attribute name.",
60 Subject: &nameTok.Range,
61 Context: hcl.RangeBetween(varTok.Range, nameTok.Range).Ptr(),
62 })
63 }
64 return ret, diags
65 }
66
67 attrName := string(nameTok.Bytes)
68 ret = append(ret, hcl.TraverseAttr{
69 Name: attrName,
70 SrcRange: hcl.RangeBetween(dot.Range, nameTok.Range),
71 })
72 case TokenOBrack:
73 // Index
74 open := p.Read() // eat open bracket
75 next := p.Peek()
76
77 switch next.Type {
78 case TokenNumberLit:
79 tok := p.Read() // eat number
80 numVal, numDiags := p.numberLitValue(tok)
81 diags = append(diags, numDiags...)
82
83 close := p.Read()
84 if close.Type != TokenCBrack {
85 diags = append(diags, &hcl.Diagnostic{
86 Severity: hcl.DiagError,
87 Summary: "Unclosed index brackets",
88 Detail: "Index key must be followed by a closing bracket.",
89 Subject: &close.Range,
90 Context: hcl.RangeBetween(open.Range, close.Range).Ptr(),
91 })
92 }
93
94 ret = append(ret, hcl.TraverseIndex{
95 Key: numVal,
96 SrcRange: hcl.RangeBetween(open.Range, close.Range),
97 })
98
99 if diags.HasErrors() {
100 return ret, diags
101 }
102
103 case TokenOQuote:
104 str, _, strDiags := p.parseQuotedStringLiteral()
105 diags = append(diags, strDiags...)
106
107 close := p.Read()
108 if close.Type != TokenCBrack {
109 diags = append(diags, &hcl.Diagnostic{
110 Severity: hcl.DiagError,
111 Summary: "Unclosed index brackets",
112 Detail: "Index key must be followed by a closing bracket.",
113 Subject: &close.Range,
114 Context: hcl.RangeBetween(open.Range, close.Range).Ptr(),
115 })
116 }
117
118 ret = append(ret, hcl.TraverseIndex{
119 Key: cty.StringVal(str),
120 SrcRange: hcl.RangeBetween(open.Range, close.Range),
121 })
122
123 if diags.HasErrors() {
124 return ret, diags
125 }
126
127 default:
128 if next.Type == TokenStar {
129 diags = append(diags, &hcl.Diagnostic{
130 Severity: hcl.DiagError,
131 Summary: "Attribute name required",
132 Detail: "Splat expressions ([*]) may not be used here.",
133 Subject: &next.Range,
134 Context: hcl.RangeBetween(varTok.Range, next.Range).Ptr(),
135 })
136 } else {
137 diags = append(diags, &hcl.Diagnostic{
138 Severity: hcl.DiagError,
139 Summary: "Index value required",
140 Detail: "Index brackets must contain either a literal number or a literal string.",
141 Subject: &next.Range,
142 Context: hcl.RangeBetween(varTok.Range, next.Range).Ptr(),
143 })
144 }
145 return ret, diags
146 }
147
148 default:
149 diags = append(diags, &hcl.Diagnostic{
150 Severity: hcl.DiagError,
151 Summary: "Invalid character",
152 Detail: "Expected an attribute access or an index operator.",
153 Subject: &next.Range,
154 Context: hcl.RangeBetween(varTok.Range, next.Range).Ptr(),
155 })
156 return ret, diags
157 }
158 }
159}
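// Editor's aside, not part of the vendored source: a hypothetical traversal
// accepted by the parser above, invoked through the package-level
// ParseTraversalAbs helper in public.go. Only attribute access and literal
// number or string index keys are allowed.
func exampleTraversalInput() (hcl.Traversal, hcl.Diagnostics) {
	return ParseTraversalAbs([]byte(`module.servers[0]["name"]`), "traversal.hcl", hcl.Pos{Line: 1, Column: 1})
}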
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/peeker.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/peeker.go
new file mode 100644
index 0000000..5a4b50e
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/peeker.go
@@ -0,0 +1,212 @@
1package hclsyntax
2
3import (
4 "bytes"
5 "fmt"
6 "path/filepath"
7 "runtime"
8 "strings"
9
10 "github.com/hashicorp/hcl2/hcl"
11)
12
13// This is set to true at init() time in tests, to enable more useful output
14// if a stack discipline error is detected. It should not be enabled in
15// normal mode since there is a performance penalty from accessing the
16// runtime stack to produce the traces, but could be temporarily set to
17// true for debugging if desired.
18var tracePeekerNewlinesStack = false
19
20type peeker struct {
21 Tokens Tokens
22 NextIndex int
23
24 IncludeComments bool
25 IncludeNewlinesStack []bool
26
27 // used only when tracePeekerNewlinesStack is set
28 newlineStackChanges []peekerNewlineStackChange
29}
30
31// for use in debugging the stack usage only
32type peekerNewlineStackChange struct {
33 Pushing bool // if false, then popping
34 Frame runtime.Frame
35 Include bool
36}
37
38func newPeeker(tokens Tokens, includeComments bool) *peeker {
39 return &peeker{
40 Tokens: tokens,
41 IncludeComments: includeComments,
42
43 IncludeNewlinesStack: []bool{true},
44 }
45}
46
47func (p *peeker) Peek() Token {
48 ret, _ := p.nextToken()
49 return ret
50}
51
52func (p *peeker) Read() Token {
53 ret, nextIdx := p.nextToken()
54 p.NextIndex = nextIdx
55 return ret
56}
57
58func (p *peeker) NextRange() hcl.Range {
59 return p.Peek().Range
60}
61
62func (p *peeker) PrevRange() hcl.Range {
63 if p.NextIndex == 0 {
64 return p.NextRange()
65 }
66
67 return p.Tokens[p.NextIndex-1].Range
68}
69
70func (p *peeker) nextToken() (Token, int) {
71 for i := p.NextIndex; i < len(p.Tokens); i++ {
72 tok := p.Tokens[i]
73 switch tok.Type {
74 case TokenComment:
75 if !p.IncludeComments {
76 // Single-line comment tokens, starting with # or //, absorb
77 // the trailing newline that terminates them as part of their
78 // bytes. When we're filtering out comments, we must as a
79 // special case transform these to newline tokens in order
80 // to properly parse newline-terminated block items.
81
82 if p.includingNewlines() {
83 if len(tok.Bytes) > 0 && tok.Bytes[len(tok.Bytes)-1] == '\n' {
84 fakeNewline := Token{
85 Type: TokenNewline,
86 Bytes: tok.Bytes[len(tok.Bytes)-1 : len(tok.Bytes)],
87
88 // We use the whole token range as the newline
89 // range, even though that's a little... weird,
90 // because otherwise we'd need to go count
91 // characters again in order to figure out the
92 // column of the newline, and that complexity
93 // isn't justified when ranges of newlines are
94 // so rarely printed anyway.
95 Range: tok.Range,
96 }
97 return fakeNewline, i + 1
98 }
99 }
100
101 continue
102 }
103 case TokenNewline:
104 if !p.includingNewlines() {
105 continue
106 }
107 }
108
109 return tok, i + 1
110 }
111
112 // if we fall out here then we'll return the EOF token, and leave
113 // our index pointed off the end of the array so we'll keep
114 // returning EOF in future too.
115 return p.Tokens[len(p.Tokens)-1], len(p.Tokens)
116}
117
118func (p *peeker) includingNewlines() bool {
119 return p.IncludeNewlinesStack[len(p.IncludeNewlinesStack)-1]
120}
121
122func (p *peeker) PushIncludeNewlines(include bool) {
123 if tracePeekerNewlinesStack {
124 // Record who called us so that we can more easily track down any
125 // mismanagement of the stack in the parser.
126 callers := []uintptr{0}
127 runtime.Callers(2, callers)
128 frames := runtime.CallersFrames(callers)
129 frame, _ := frames.Next()
130 p.newlineStackChanges = append(p.newlineStackChanges, peekerNewlineStackChange{
131 true, frame, include,
132 })
133 }
134
135 p.IncludeNewlinesStack = append(p.IncludeNewlinesStack, include)
136}
137
138func (p *peeker) PopIncludeNewlines() bool {
139 stack := p.IncludeNewlinesStack
140 remain, ret := stack[:len(stack)-1], stack[len(stack)-1]
141 p.IncludeNewlinesStack = remain
142
143 if tracePeekerNewlinesStack {
144 // Record who called us so that we can more easily track down any
145 // mismanagement of the stack in the parser.
146 callers := []uintptr{0}
147 runtime.Callers(2, callers)
148 frames := runtime.CallersFrames(callers)
149 frame, _ := frames.Next()
150 p.newlineStackChanges = append(p.newlineStackChanges, peekerNewlineStackChange{
151 false, frame, ret,
152 })
153 }
154
155 return ret
156}
157
158// AssertEmptyIncludeNewlinesStack checks if the IncludeNewlinesStack is empty,
159// panicking if it is not. This can be used to catch stack mismanagement that
160// might otherwise just cause confusing downstream errors.
161//
162// This function is a no-op if the stack is empty when called.
163//
164// If newlines stack tracing is enabled by setting the global variable
165// tracePeekerNewlinesStack at init time, a full log of all of the push/pop
166// calls will be produced to help identify which caller in the parser is
167// misbehaving.
168func (p *peeker) AssertEmptyIncludeNewlinesStack() {
169 if len(p.IncludeNewlinesStack) != 1 {
170 // Should never happen; indicates mismanagement of the stack inside
171 // the parser.
172 if p.newlineStackChanges != nil { // only set if tracePeekerNewlinesStack is enabled above
173 panic(fmt.Errorf(
174 "non-empty IncludeNewlinesStack after parse with %d calls unaccounted for:\n%s",
175 len(p.IncludeNewlinesStack)-1,
176 formatPeekerNewlineStackChanges(p.newlineStackChanges),
177 ))
178 } else {
179 panic(fmt.Errorf("non-empty IncludeNewlinesStack after parse: %#v", p.IncludeNewlinesStack))
180 }
181 }
182}
183
184func formatPeekerNewlineStackChanges(changes []peekerNewlineStackChange) string {
185 indent := 0
186 var buf bytes.Buffer
187 for _, change := range changes {
188 funcName := change.Frame.Function
189 if idx := strings.LastIndexByte(funcName, '.'); idx != -1 {
190 funcName = funcName[idx+1:]
191 }
192 filename := change.Frame.File
193 if idx := strings.LastIndexByte(filename, filepath.Separator); idx != -1 {
194 filename = filename[idx+1:]
195 }
196
197 switch change.Pushing {
198
199 case true:
200 buf.WriteString(strings.Repeat(" ", indent))
201 fmt.Fprintf(&buf, "PUSH %#v (%s at %s:%d)\n", change.Include, funcName, filename, change.Frame.Line)
202 indent++
203
204 case false:
205 indent--
206 buf.WriteString(strings.Repeat(" ", indent))
207 fmt.Fprintf(&buf, "POP %#v (%s at %s:%d)\n", change.Include, funcName, filename, change.Frame.Line)
208
209 }
210 }
211 return buf.String()
212}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/public.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/public.go
new file mode 100644
index 0000000..cf0ee29
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/public.go
@@ -0,0 +1,171 @@
1package hclsyntax
2
3import (
4 "github.com/hashicorp/hcl2/hcl"
5)
6
7// ParseConfig parses the given buffer as a whole HCL config file, returning
8// a *hcl.File representing its contents. If HasErrors called on the returned
9// diagnostics returns true, the returned body is likely to be incomplete
10// and should therefore be used with care.
11//
12// The body in the returned file has dynamic type *hclsyntax.Body, so callers
13// may freely type-assert this to get access to the full hclsyntax API in
14// situations where detailed access is required. However, most common use-cases
15// should be served using the hcl.Body interface to ensure compatibility with
16// other configuration syntaxes, such as JSON.
17func ParseConfig(src []byte, filename string, start hcl.Pos) (*hcl.File, hcl.Diagnostics) {
18 tokens, diags := LexConfig(src, filename, start)
19 peeker := newPeeker(tokens, false)
20 parser := &parser{peeker: peeker}
21 body, parseDiags := parser.ParseBody(TokenEOF)
22 diags = append(diags, parseDiags...)
23
24 // Panic if the parser uses incorrect stack discipline with the peeker's
25 // newlines stack, since otherwise it will produce confusing downstream
26 // errors.
27 peeker.AssertEmptyIncludeNewlinesStack()
28
29 return &hcl.File{
30 Body: body,
31 Bytes: src,
32
33 Nav: navigation{
34 root: body,
35 },
36 }, diags
37}
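
// exampleParseConfig is an illustrative sketch (not part of the upstream API)
// of the usage described above: parse a buffer, check the diagnostics, and
// type-assert the body to reach the full hclsyntax API. The filename and
// start position here are hypothetical.
func exampleParseConfig(src []byte) (*Body, hcl.Diagnostics) {
	f, diags := ParseConfig(src, "example.hcl", hcl.Pos{Line: 1, Column: 1, Byte: 0})
	if diags.HasErrors() {
		// The returned body is likely incomplete, so don't rely on it.
		return nil, diags
	}
	// External callers would write f.Body.(*hclsyntax.Body); inside this
	// package the concrete type is simply *Body.
	return f.Body.(*Body), diags
}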
38
39// ParseExpression parses the given buffer as a standalone HCL expression,
40// returning it as an instance of Expression.
41func ParseExpression(src []byte, filename string, start hcl.Pos) (Expression, hcl.Diagnostics) {
42 tokens, diags := LexExpression(src, filename, start)
43 peeker := newPeeker(tokens, false)
44 parser := &parser{peeker: peeker}
45
46 // Bare expressions are always parsed in "ignore newlines" mode, as if
47 // they were wrapped in parentheses.
48 parser.PushIncludeNewlines(false)
49
50 expr, parseDiags := parser.ParseExpression()
51 diags = append(diags, parseDiags...)
52
53 next := parser.Peek()
54 if next.Type != TokenEOF && !parser.recovery {
55 diags = append(diags, &hcl.Diagnostic{
56 Severity: hcl.DiagError,
57 Summary: "Extra characters after expression",
58 Detail: "An expression was successfully parsed, but extra characters were found after it.",
59 Subject: &next.Range,
60 })
61 }
62
63 parser.PopIncludeNewlines()
64
65 // Panic if the parser uses incorrect stack discipline with the peeker's
66 // newlines stack, since otherwise it will produce confusing downstream
67 // errors.
68 peeker.AssertEmptyIncludeNewlinesStack()
69
70 return expr, diags
71}
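
// exampleParseExpression is an illustrative sketch (not part of the upstream
// API): the expression source is hypothetical, and the result is inspected
// through the hcl.Expression interface it implements.
func exampleParseExpression() (Expression, hcl.Diagnostics) {
	expr, diags := ParseExpression([]byte("upper(name)"), "example.hcl", hcl.Pos{Line: 1, Column: 1, Byte: 0})
	if diags.HasErrors() {
		return nil, diags
	}
	// Variables reports the traversals the expression refers to, which a
	// caller can use to build an evaluation context before calling Value.
	for _, traversal := range expr.Variables() {
		_ = traversal.RootName() // "name" for the source above
	}
	return expr, diags
}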
72
73// ParseTemplate parses the given buffer as a standalone HCL template,
74// returning it as an instance of Expression.
75func ParseTemplate(src []byte, filename string, start hcl.Pos) (Expression, hcl.Diagnostics) {
76 tokens, diags := LexTemplate(src, filename, start)
77 peeker := newPeeker(tokens, false)
78 parser := &parser{peeker: peeker}
79 expr, parseDiags := parser.ParseTemplate()
80 diags = append(diags, parseDiags...)
81
82 // Panic if the parser uses incorrect stack discipline with the peeker's
83 // newlines stack, since otherwise it will produce confusing downstream
84 // errors.
85 peeker.AssertEmptyIncludeNewlinesStack()
86
87 return expr, diags
88}
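
// exampleParseTemplate is an illustrative sketch (not part of the upstream
// API): the template source is hypothetical, with ${...} sequences becoming
// variable references on the resulting expression.
func exampleParseTemplate() (Expression, hcl.Diagnostics) {
	return ParseTemplate([]byte("Hello, ${name}!"), "example.tmpl", hcl.Pos{Line: 1, Column: 1, Byte: 0})
}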
89
90// ParseTraversalAbs parses the given buffer as a standalone absolute traversal.
91//
92// Parsing as a traversal is more limited than parsing as an expression since
93// it allows only attribute and indexing operations on variables. Traversals
94// are useful as a syntax for referring to objects without necessarily
95// evaluating them.
96func ParseTraversalAbs(src []byte, filename string, start hcl.Pos) (hcl.Traversal, hcl.Diagnostics) {
97 tokens, diags := LexExpression(src, filename, start)
98 peeker := newPeeker(tokens, false)
99 parser := &parser{peeker: peeker}
100
101	// Bare traversals are always parsed in "ignore newlines" mode, as if
102 // they were wrapped in parentheses.
103 parser.PushIncludeNewlines(false)
104
105 expr, parseDiags := parser.ParseTraversalAbs()
106 diags = append(diags, parseDiags...)
107
108 parser.PopIncludeNewlines()
109
110 // Panic if the parser uses incorrect stack discipline with the peeker's
111 // newlines stack, since otherwise it will produce confusing downstream
112 // errors.
113 peeker.AssertEmptyIncludeNewlinesStack()
114
115 return expr, diags
116}
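
// exampleParseTraversalAbs is an illustrative sketch (not part of the upstream
// API) of parsing a reference to an object without evaluating it; the source
// string here is hypothetical.
func exampleParseTraversalAbs() (hcl.Traversal, hcl.Diagnostics) {
	traversal, diags := ParseTraversalAbs([]byte("aws_instance.web[0].id"), "ref", hcl.Pos{Line: 1, Column: 1, Byte: 0})
	if diags.HasErrors() {
		return nil, diags
	}
	_ = traversal.RootName() // "aws_instance" for the source above
	return traversal, diags
}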
117
118// LexConfig performs lexical analysis on the given buffer, treating it as a
119// whole HCL config file, and returns the resulting tokens.
120//
121// Only minimal validation is done during lexical analysis, so the returned
122// diagnostics may include errors about lexical issues such as bad character
123// encodings or unrecognized characters, but full parsing is required to
124// detect _all_ syntax errors.
125func LexConfig(src []byte, filename string, start hcl.Pos) (Tokens, hcl.Diagnostics) {
126 tokens := scanTokens(src, filename, start, scanNormal)
127 diags := checkInvalidTokens(tokens)
128 return tokens, diags
129}
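
// exampleLexConfig is an illustrative sketch (not part of the upstream API):
// it lexes a hypothetical one-attribute config. Only minimal validation
// happens at this stage, so a clean diagnostics result does not imply that
// the buffer would also parse successfully.
func exampleLexConfig() (Tokens, hcl.Diagnostics) {
	// The returned slice ends with a TokenEOF entry, so even an empty buffer
	// yields at least one token.
	return LexConfig([]byte("a = 1\n"), "example.hcl", hcl.Pos{Line: 1, Column: 1, Byte: 0})
}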
130
131// LexExpression performs lexical analysis on the given buffer, treating it as
132// a standalone HCL expression, and returns the resulting tokens.
133//
134// Only minimal validation is done during lexical analysis, so the returned
135// diagnostics may include errors about lexical issues such as bad character
136// encodings or unrecognized characters, but full parsing is required to
137// detect _all_ syntax errors.
138func LexExpression(src []byte, filename string, start hcl.Pos) (Tokens, hcl.Diagnostics) {
139 // This is actually just the same thing as LexConfig, since configs
140 // and expressions lex in the same way.
141 tokens := scanTokens(src, filename, start, scanNormal)
142 diags := checkInvalidTokens(tokens)
143 return tokens, diags
144}
145
146// LexTemplate performs lexical analysis on the given buffer, treating it as a
147// standalone HCL template, and returns the resulting tokens.
148//
149// Only minimal validation is done during lexical analysis, so the returned
150// diagnostics may include errors about lexical issues such as bad character
151// encodings or unrecognized characters, but full parsing is required to
152// detect _all_ syntax errors.
153func LexTemplate(src []byte, filename string, start hcl.Pos) (Tokens, hcl.Diagnostics) {
154 tokens := scanTokens(src, filename, start, scanTemplate)
155 diags := checkInvalidTokens(tokens)
156 return tokens, diags
157}
158
159// ValidIdentifier tests if the given string could be a valid identifier in
160// a native syntax expression.
161//
162// This is useful when accepting names from the user that will be used as
163// variable or attribute names in the scope, to ensure that any name chosen
164// will be traversable using the variable or attribute traversal syntax.
165func ValidIdentifier(s string) bool {
166 // This is a kinda-expensive way to do something pretty simple, but it
167 // is easiest to do with our existing scanner-related infrastructure here
168 // and nobody should be validating identifiers in a tight loop.
169 tokens := scanTokens([]byte(s), "", hcl.Pos{}, scanIdentOnly)
170 return len(tokens) == 2 && tokens[0].Type == TokenIdent && tokens[1].Type == TokenEOF
171}
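
// exampleValidIdentifier is an illustrative sketch (not part of the upstream
// API) of screening user-supplied names, per the comment above, before they
// are used as variable or attribute names in a scope.
func exampleValidIdentifier(names []string) []string {
	valid := make([]string, 0, len(names))
	for _, name := range names {
		// e.g. "instance_count" is accepted; "0count" and "two words" are not.
		if ValidIdentifier(name) {
			valid = append(valid, name)
		}
	}
	return valid
}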
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_string_lit.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_string_lit.go
new file mode 100644
index 0000000..de1f524
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_string_lit.go
@@ -0,0 +1,301 @@
1// line 1 "scan_string_lit.rl"
2
3package hclsyntax
4
5// This file is generated from scan_string_lit.rl. DO NOT EDIT.
6
7// line 9 "scan_string_lit.go"
8var _hclstrtok_actions []byte = []byte{
9 0, 1, 0, 1, 1, 2, 1, 0,
10}
11
12var _hclstrtok_key_offsets []byte = []byte{
13 0, 0, 2, 4, 6, 10, 14, 18,
14 22, 27, 31, 36, 41, 46, 51, 57,
15 62, 74, 85, 96, 107, 118, 129, 140,
16 151,
17}
18
19var _hclstrtok_trans_keys []byte = []byte{
20 128, 191, 128, 191, 128, 191, 10, 13,
21 36, 37, 10, 13, 36, 37, 10, 13,
22 36, 37, 10, 13, 36, 37, 10, 13,
23 36, 37, 123, 10, 13, 36, 37, 10,
24 13, 36, 37, 92, 10, 13, 36, 37,
25 92, 10, 13, 36, 37, 92, 10, 13,
26 36, 37, 92, 10, 13, 36, 37, 92,
27 123, 10, 13, 36, 37, 92, 85, 117,
28 128, 191, 192, 223, 224, 239, 240, 247,
29 248, 255, 10, 13, 36, 37, 92, 48,
30 57, 65, 70, 97, 102, 10, 13, 36,
31 37, 92, 48, 57, 65, 70, 97, 102,
32 10, 13, 36, 37, 92, 48, 57, 65,
33 70, 97, 102, 10, 13, 36, 37, 92,
34 48, 57, 65, 70, 97, 102, 10, 13,
35 36, 37, 92, 48, 57, 65, 70, 97,
36 102, 10, 13, 36, 37, 92, 48, 57,
37 65, 70, 97, 102, 10, 13, 36, 37,
38 92, 48, 57, 65, 70, 97, 102, 10,
39 13, 36, 37, 92, 48, 57, 65, 70,
40 97, 102,
41}
42
43var _hclstrtok_single_lengths []byte = []byte{
44 0, 0, 0, 0, 4, 4, 4, 4,
45 5, 4, 5, 5, 5, 5, 6, 5,
46 2, 5, 5, 5, 5, 5, 5, 5,
47 5,
48}
49
50var _hclstrtok_range_lengths []byte = []byte{
51 0, 1, 1, 1, 0, 0, 0, 0,
52 0, 0, 0, 0, 0, 0, 0, 0,
53 5, 3, 3, 3, 3, 3, 3, 3,
54 3,
55}
56
57var _hclstrtok_index_offsets []byte = []byte{
58 0, 0, 2, 4, 6, 11, 16, 21,
59 26, 32, 37, 43, 49, 55, 61, 68,
60 74, 82, 91, 100, 109, 118, 127, 136,
61 145,
62}
63
64var _hclstrtok_indicies []byte = []byte{
65 0, 1, 2, 1, 3, 1, 5, 6,
66 7, 8, 4, 10, 11, 12, 13, 9,
67 14, 11, 12, 13, 9, 10, 11, 15,
68 13, 9, 10, 11, 12, 13, 14, 9,
69 10, 11, 12, 15, 9, 17, 18, 19,
70 20, 21, 16, 23, 24, 25, 26, 27,
71 22, 0, 24, 25, 26, 27, 22, 23,
72 24, 28, 26, 27, 22, 23, 24, 25,
73 26, 27, 0, 22, 23, 24, 25, 28,
74 27, 22, 29, 30, 22, 2, 3, 31,
75 22, 0, 23, 24, 25, 26, 27, 32,
76 32, 32, 22, 23, 24, 25, 26, 27,
77 33, 33, 33, 22, 23, 24, 25, 26,
78 27, 34, 34, 34, 22, 23, 24, 25,
79 26, 27, 30, 30, 30, 22, 23, 24,
80 25, 26, 27, 35, 35, 35, 22, 23,
81 24, 25, 26, 27, 36, 36, 36, 22,
82 23, 24, 25, 26, 27, 37, 37, 37,
83 22, 23, 24, 25, 26, 27, 0, 0,
84 0, 22,
85}
86
87var _hclstrtok_trans_targs []byte = []byte{
88 11, 0, 1, 2, 4, 5, 6, 7,
89 9, 4, 5, 6, 7, 9, 5, 8,
90 10, 11, 12, 13, 15, 16, 10, 11,
91 12, 13, 15, 16, 14, 17, 21, 3,
92 18, 19, 20, 22, 23, 24,
93}
94
95var _hclstrtok_trans_actions []byte = []byte{
96 0, 0, 0, 0, 0, 1, 1, 1,
97 1, 3, 5, 5, 5, 5, 0, 0,
98 0, 1, 1, 1, 1, 1, 3, 5,
99 5, 5, 5, 5, 0, 0, 0, 0,
100 0, 0, 0, 0, 0, 0,
101}
102
103var _hclstrtok_eof_actions []byte = []byte{
104 0, 0, 0, 0, 0, 3, 3, 3,
105 3, 3, 0, 3, 3, 3, 3, 3,
106 3, 3, 3, 3, 3, 3, 3, 3,
107 3,
108}
109
110const hclstrtok_start int = 4
111const hclstrtok_first_final int = 4
112const hclstrtok_error int = 0
113
114const hclstrtok_en_quoted int = 10
115const hclstrtok_en_unquoted int = 4
116
117// line 10 "scan_string_lit.rl"
118
119func scanStringLit(data []byte, quoted bool) [][]byte {
120 var ret [][]byte
121
122 // line 61 "scan_string_lit.rl"
123
124 // Ragel state
125 p := 0 // "Pointer" into data
126 pe := len(data) // End-of-data "pointer"
127 ts := 0
128 te := 0
129 eof := pe
130
131 var cs int // current state
132 switch {
133 case quoted:
134 cs = hclstrtok_en_quoted
135 default:
136 cs = hclstrtok_en_unquoted
137 }
138
139 // Make Go compiler happy
140 _ = ts
141 _ = eof
142
143 /*token := func () {
144 ret = append(ret, data[ts:te])
145 }*/
146
147 // line 154 "scan_string_lit.go"
148 {
149 }
150
151 // line 158 "scan_string_lit.go"
152 {
153 var _klen int
154 var _trans int
155 var _acts int
156 var _nacts uint
157 var _keys int
158 if p == pe {
159 goto _test_eof
160 }
161 if cs == 0 {
162 goto _out
163 }
164 _resume:
165 _keys = int(_hclstrtok_key_offsets[cs])
166 _trans = int(_hclstrtok_index_offsets[cs])
167
168 _klen = int(_hclstrtok_single_lengths[cs])
169 if _klen > 0 {
170 _lower := int(_keys)
171 var _mid int
172 _upper := int(_keys + _klen - 1)
173 for {
174 if _upper < _lower {
175 break
176 }
177
178 _mid = _lower + ((_upper - _lower) >> 1)
179 switch {
180 case data[p] < _hclstrtok_trans_keys[_mid]:
181 _upper = _mid - 1
182 case data[p] > _hclstrtok_trans_keys[_mid]:
183 _lower = _mid + 1
184 default:
185 _trans += int(_mid - int(_keys))
186 goto _match
187 }
188 }
189 _keys += _klen
190 _trans += _klen
191 }
192
193 _klen = int(_hclstrtok_range_lengths[cs])
194 if _klen > 0 {
195 _lower := int(_keys)
196 var _mid int
197 _upper := int(_keys + (_klen << 1) - 2)
198 for {
199 if _upper < _lower {
200 break
201 }
202
203 _mid = _lower + (((_upper - _lower) >> 1) & ^1)
204 switch {
205 case data[p] < _hclstrtok_trans_keys[_mid]:
206 _upper = _mid - 2
207 case data[p] > _hclstrtok_trans_keys[_mid+1]:
208 _lower = _mid + 2
209 default:
210 _trans += int((_mid - int(_keys)) >> 1)
211 goto _match
212 }
213 }
214 _trans += _klen
215 }
216
217 _match:
218 _trans = int(_hclstrtok_indicies[_trans])
219 cs = int(_hclstrtok_trans_targs[_trans])
220
221 if _hclstrtok_trans_actions[_trans] == 0 {
222 goto _again
223 }
224
225 _acts = int(_hclstrtok_trans_actions[_trans])
226 _nacts = uint(_hclstrtok_actions[_acts])
227 _acts++
228 for ; _nacts > 0; _nacts-- {
229 _acts++
230 switch _hclstrtok_actions[_acts-1] {
231 case 0:
232 // line 40 "scan_string_lit.rl"
233
234 // If te is behind p then we've skipped over some literal
235 // characters which we must now return.
236 if te < p {
237 ret = append(ret, data[te:p])
238 }
239 ts = p
240
241 case 1:
242 // line 48 "scan_string_lit.rl"
243
244 te = p
245 ret = append(ret, data[ts:te])
246
247 // line 255 "scan_string_lit.go"
248 }
249 }
250
251 _again:
252 if cs == 0 {
253 goto _out
254 }
255 p++
256 if p != pe {
257 goto _resume
258 }
259 _test_eof:
260 {
261 }
262 if p == eof {
263 __acts := _hclstrtok_eof_actions[cs]
264 __nacts := uint(_hclstrtok_actions[__acts])
265 __acts++
266 for ; __nacts > 0; __nacts-- {
267 __acts++
268 switch _hclstrtok_actions[__acts-1] {
269 case 1:
270 // line 48 "scan_string_lit.rl"
271
272 te = p
273 ret = append(ret, data[ts:te])
274
275 // line 281 "scan_string_lit.go"
276 }
277 }
278 }
279
280 _out:
281 {
282 }
283 }
284
285 // line 89 "scan_string_lit.rl"
286
287 if te < p {
288 // Collect any leftover literal characters at the end of the input
289 ret = append(ret, data[te:p])
290 }
291
292 // If we fall out here without being in a final state then we've
293 // encountered something that the scanner can't match, which should
294 // be impossible (the scanner matches all bytes _somehow_) but we'll
295 // tolerate it and let the caller deal with it.
296 if cs < hclstrtok_first_final {
297 ret = append(ret, data[p:len(data)])
298 }
299
300 return ret
301}
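
// exampleScanStringLit is an illustrative sketch (not part of the upstream
// API): for a hypothetical quoted string body it shows scanStringLit slicing
// the input into literal runs and escape/template sequences for the caller to
// interpret and validate.
func exampleScanStringLit() [][]byte {
	// For the raw bytes `Hello\nWorld` in quoted mode the expected result is
	// three slices: the literal "Hello", the escape sequence `\n`, and the
	// trailing literal "World".
	return scanStringLit([]byte(`Hello\nWorld`), true)
}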
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_string_lit.rl b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_string_lit.rl
new file mode 100644
index 0000000..f8ac117
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_string_lit.rl
@@ -0,0 +1,105 @@
1
2package hclsyntax
3
4// This file is generated from scan_string_lit.rl. DO NOT EDIT.
5%%{
6 # (except you are actually in scan_string_lit.rl here, so edit away!)
7
8 machine hclstrtok;
9 write data;
10}%%
11
12func scanStringLit(data []byte, quoted bool) [][]byte {
13 var ret [][]byte
14
15 %%{
16 include UnicodeDerived "unicode_derived.rl";
17
18 UTF8Cont = 0x80 .. 0xBF;
19 AnyUTF8 = (
20 0x00..0x7F |
21 0xC0..0xDF . UTF8Cont |
22 0xE0..0xEF . UTF8Cont . UTF8Cont |
23 0xF0..0xF7 . UTF8Cont . UTF8Cont . UTF8Cont
24 );
25 BadUTF8 = any - AnyUTF8;
26
27 Hex = ('0'..'9' | 'a'..'f' | 'A'..'F');
28
29		# Our goal with these patterns is to capture user intent as best as
30 # possible, even if the input is invalid. The caller will then verify
31 # whether each token is valid and generate suitable error messages
32 # if not.
33 UnicodeEscapeShort = "\\u" . Hex{0,4};
34 UnicodeEscapeLong = "\\U" . Hex{0,8};
35 UnicodeEscape = (UnicodeEscapeShort | UnicodeEscapeLong);
36 SimpleEscape = "\\" . (AnyUTF8 - ('U'|'u'))?;
37 TemplateEscape = ("$" . ("$" . ("{"?))?) | ("%" . ("%" . ("{"?))?);
38 Newline = ("\r\n" | "\r" | "\n");
39
40 action Begin {
41 // If te is behind p then we've skipped over some literal
42 // characters which we must now return.
43 if te < p {
44 ret = append(ret, data[te:p])
45 }
46 ts = p;
47 }
48 action End {
49 te = p;
50 ret = append(ret, data[ts:te]);
51 }
52
53 QuotedToken = (UnicodeEscape | SimpleEscape | TemplateEscape | Newline) >Begin %End;
54 UnquotedToken = (TemplateEscape | Newline) >Begin %End;
55 QuotedLiteral = (any - ("\\" | "$" | "%" | "\r" | "\n"));
56 UnquotedLiteral = (any - ("$" | "%" | "\r" | "\n"));
57
58 quoted := (QuotedToken | QuotedLiteral)**;
59 unquoted := (UnquotedToken | UnquotedLiteral)**;
60
61 }%%
62
63 // Ragel state
64 p := 0 // "Pointer" into data
65 pe := len(data) // End-of-data "pointer"
66 ts := 0
67 te := 0
68 eof := pe
69
70 var cs int // current state
71 switch {
72 case quoted:
73 cs = hclstrtok_en_quoted
74 default:
75 cs = hclstrtok_en_unquoted
76 }
77
78 // Make Go compiler happy
79 _ = ts
80 _ = eof
81
82 /*token := func () {
83 ret = append(ret, data[ts:te])
84 }*/
85
86 %%{
87 write init nocs;
88 write exec;
89 }%%
90
91 if te < p {
92 // Collect any leftover literal characters at the end of the input
93 ret = append(ret, data[te:p])
94 }
95
96 // If we fall out here without being in a final state then we've
97 // encountered something that the scanner can't match, which should
98 // be impossible (the scanner matches all bytes _somehow_) but we'll
99 // tolerate it and let the caller deal with it.
100 if cs < hclstrtok_first_final {
101 ret = append(ret, data[p:len(data)])
102 }
103
104 return ret
105}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_tokens.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_tokens.go
new file mode 100644
index 0000000..395e9c1
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_tokens.go
@@ -0,0 +1,5443 @@
1// line 1 "scan_tokens.rl"
2
3package hclsyntax
4
5import (
6 "bytes"
7
8 "github.com/hashicorp/hcl2/hcl"
9)
10
11// This file is generated from scan_tokens.rl. DO NOT EDIT.
12
13// line 15 "scan_tokens.go"
14var _hcltok_actions []byte = []byte{
15 0, 1, 0, 1, 1, 1, 2, 1, 3,
16 1, 4, 1, 6, 1, 7, 1, 8,
17 1, 9, 1, 10, 1, 11, 1, 12,
18 1, 13, 1, 14, 1, 15, 1, 16,
19 1, 17, 1, 18, 1, 19, 1, 22,
20 1, 23, 1, 24, 1, 25, 1, 26,
21 1, 27, 1, 28, 1, 29, 1, 30,
22 1, 31, 1, 34, 1, 35, 1, 36,
23 1, 37, 1, 38, 1, 39, 1, 40,
24 1, 41, 1, 42, 1, 43, 1, 46,
25 1, 47, 1, 48, 1, 49, 1, 50,
26 1, 51, 1, 52, 1, 58, 1, 59,
27 1, 60, 1, 61, 1, 62, 1, 63,
28 1, 64, 1, 65, 1, 66, 1, 67,
29 1, 68, 1, 69, 1, 70, 1, 71,
30 1, 72, 1, 73, 1, 74, 1, 75,
31 1, 76, 1, 77, 1, 78, 1, 79,
32 1, 80, 1, 81, 1, 82, 1, 83,
33 1, 84, 1, 85, 1, 86, 1, 87,
34 2, 0, 15, 2, 1, 15, 2, 2,
35 24, 2, 2, 28, 2, 3, 24, 2,
36 3, 28, 2, 4, 5, 2, 7, 0,
37 2, 7, 1, 2, 7, 20, 2, 7,
38 21, 2, 7, 32, 2, 7, 33, 2,
39 7, 44, 2, 7, 45, 2, 7, 53,
40 2, 7, 54, 2, 7, 55, 2, 7,
41 56, 2, 7, 57, 3, 7, 2, 20,
42 3, 7, 3, 20,
43}
44
45var _hcltok_key_offsets []int16 = []int16{
46 0, 0, 1, 2, 3, 5, 10, 14,
47 16, 58, 99, 145, 146, 150, 156, 156,
48 158, 160, 169, 175, 182, 183, 186, 187,
49 191, 196, 205, 209, 213, 221, 223, 225,
50 227, 230, 262, 264, 266, 270, 274, 277,
51 288, 301, 320, 333, 349, 361, 377, 392,
52 413, 423, 435, 446, 460, 475, 485, 497,
53 506, 518, 520, 524, 545, 554, 564, 570,
54 576, 577, 626, 628, 632, 634, 640, 647,
55 655, 662, 665, 671, 675, 679, 681, 685,
56 689, 693, 699, 707, 715, 721, 723, 727,
57 729, 735, 739, 743, 747, 751, 756, 763,
58 769, 771, 773, 777, 779, 785, 789, 793,
59 803, 808, 822, 837, 839, 847, 849, 854,
60 868, 873, 875, 879, 880, 884, 890, 896,
61 906, 916, 927, 935, 938, 941, 945, 949,
62 951, 954, 954, 957, 959, 989, 991, 993,
63 997, 1002, 1006, 1011, 1013, 1015, 1017, 1026,
64 1030, 1034, 1040, 1042, 1050, 1058, 1070, 1073,
65 1079, 1083, 1085, 1089, 1109, 1111, 1113, 1124,
66 1130, 1132, 1134, 1136, 1140, 1146, 1152, 1154,
67 1159, 1163, 1165, 1173, 1191, 1231, 1241, 1245,
68 1247, 1249, 1250, 1254, 1258, 1262, 1266, 1270,
69 1275, 1279, 1283, 1287, 1289, 1291, 1295, 1305,
70 1309, 1311, 1315, 1319, 1323, 1336, 1338, 1340,
71 1344, 1346, 1350, 1352, 1354, 1384, 1388, 1392,
72 1396, 1399, 1406, 1411, 1422, 1426, 1442, 1456,
73 1460, 1465, 1469, 1473, 1479, 1481, 1487, 1489,
74 1493, 1495, 1501, 1506, 1511, 1521, 1523, 1525,
75 1529, 1533, 1535, 1548, 1550, 1554, 1558, 1566,
76 1568, 1572, 1574, 1575, 1578, 1583, 1585, 1587,
77 1591, 1593, 1597, 1603, 1623, 1629, 1635, 1637,
78 1638, 1648, 1649, 1657, 1664, 1666, 1669, 1671,
79 1673, 1675, 1680, 1684, 1688, 1693, 1703, 1713,
80 1717, 1721, 1735, 1761, 1771, 1773, 1775, 1778,
81 1780, 1783, 1785, 1789, 1791, 1792, 1796, 1798,
82 1801, 1808, 1816, 1818, 1820, 1824, 1826, 1832,
83 1843, 1846, 1848, 1852, 1857, 1887, 1892, 1894,
84 1897, 1902, 1916, 1923, 1937, 1942, 1955, 1959,
85 1972, 1977, 1995, 1996, 2005, 2009, 2021, 2026,
86 2033, 2040, 2047, 2049, 2053, 2075, 2080, 2081,
87 2085, 2087, 2137, 2140, 2151, 2155, 2157, 2163,
88 2169, 2171, 2176, 2178, 2182, 2184, 2185, 2187,
89 2189, 2195, 2197, 2199, 2203, 2209, 2222, 2224,
90 2230, 2234, 2242, 2253, 2261, 2264, 2294, 2300,
91 2303, 2308, 2310, 2314, 2318, 2322, 2324, 2331,
92 2333, 2342, 2349, 2357, 2359, 2379, 2391, 2395,
93 2397, 2415, 2454, 2456, 2460, 2462, 2469, 2473,
94 2501, 2503, 2505, 2507, 2509, 2512, 2514, 2518,
95 2522, 2524, 2527, 2529, 2531, 2534, 2536, 2538,
96 2539, 2541, 2543, 2547, 2551, 2554, 2567, 2569,
97 2575, 2579, 2581, 2585, 2589, 2603, 2606, 2615,
98 2617, 2621, 2627, 2627, 2629, 2631, 2640, 2646,
99 2653, 2654, 2657, 2658, 2662, 2667, 2676, 2680,
100 2684, 2692, 2694, 2696, 2698, 2701, 2733, 2735,
101 2737, 2741, 2745, 2748, 2759, 2772, 2791, 2804,
102 2820, 2832, 2848, 2863, 2884, 2894, 2906, 2917,
103 2931, 2946, 2956, 2968, 2977, 2989, 2991, 2995,
104 3016, 3025, 3035, 3041, 3047, 3048, 3097, 3099,
105 3103, 3105, 3111, 3118, 3126, 3133, 3136, 3142,
106 3146, 3150, 3152, 3156, 3160, 3164, 3170, 3178,
107 3186, 3192, 3194, 3198, 3200, 3206, 3210, 3214,
108 3218, 3222, 3227, 3234, 3240, 3242, 3244, 3248,
109 3250, 3256, 3260, 3264, 3274, 3279, 3293, 3308,
110 3310, 3318, 3320, 3325, 3339, 3344, 3346, 3350,
111 3351, 3355, 3361, 3367, 3377, 3387, 3398, 3406,
112 3409, 3412, 3416, 3420, 3422, 3425, 3425, 3428,
113 3430, 3460, 3462, 3464, 3468, 3473, 3477, 3482,
114 3484, 3486, 3488, 3497, 3501, 3505, 3511, 3513,
115 3521, 3529, 3541, 3544, 3550, 3554, 3556, 3560,
116 3580, 3582, 3584, 3595, 3601, 3603, 3605, 3607,
117 3611, 3617, 3623, 3625, 3630, 3634, 3636, 3644,
118 3662, 3702, 3712, 3716, 3718, 3720, 3721, 3725,
119 3729, 3733, 3737, 3741, 3746, 3750, 3754, 3758,
120 3760, 3762, 3766, 3776, 3780, 3782, 3786, 3790,
121 3794, 3807, 3809, 3811, 3815, 3817, 3821, 3823,
122 3825, 3855, 3859, 3863, 3867, 3870, 3877, 3882,
123 3893, 3897, 3913, 3927, 3931, 3936, 3940, 3944,
124 3950, 3952, 3958, 3960, 3964, 3966, 3972, 3977,
125 3982, 3992, 3994, 3996, 4000, 4004, 4006, 4019,
126 4021, 4025, 4029, 4037, 4039, 4043, 4045, 4046,
127 4049, 4054, 4056, 4058, 4062, 4064, 4068, 4074,
128 4094, 4100, 4106, 4108, 4109, 4119, 4120, 4128,
129 4135, 4137, 4140, 4142, 4144, 4146, 4151, 4155,
130 4159, 4164, 4174, 4184, 4188, 4192, 4206, 4232,
131 4242, 4244, 4246, 4249, 4251, 4254, 4256, 4260,
132 4262, 4263, 4267, 4269, 4271, 4278, 4282, 4289,
133 4296, 4305, 4321, 4333, 4351, 4362, 4374, 4382,
134 4400, 4408, 4438, 4441, 4451, 4461, 4473, 4484,
135 4493, 4506, 4518, 4522, 4528, 4555, 4564, 4567,
136 4572, 4578, 4583, 4604, 4608, 4614, 4614, 4621,
137 4630, 4638, 4641, 4645, 4651, 4657, 4660, 4664,
138 4671, 4677, 4686, 4695, 4699, 4703, 4707, 4711,
139 4718, 4722, 4726, 4736, 4742, 4746, 4752, 4756,
140 4759, 4765, 4771, 4783, 4787, 4791, 4801, 4805,
141 4816, 4818, 4820, 4824, 4836, 4841, 4865, 4869,
142 4875, 4897, 4906, 4910, 4913, 4914, 4922, 4930,
143 4936, 4946, 4953, 4971, 4974, 4977, 4985, 4991,
144 4995, 4999, 5003, 5009, 5017, 5022, 5028, 5032,
145 5040, 5047, 5051, 5058, 5064, 5072, 5080, 5086,
146 5092, 5103, 5107, 5119, 5128, 5145, 5162, 5165,
147 5169, 5171, 5177, 5179, 5183, 5198, 5202, 5206,
148 5210, 5214, 5218, 5220, 5226, 5231, 5235, 5241,
149 5248, 5251, 5269, 5271, 5316, 5322, 5328, 5332,
150 5336, 5342, 5346, 5352, 5358, 5365, 5367, 5373,
151 5379, 5383, 5387, 5395, 5408, 5414, 5421, 5429,
152 5435, 5444, 5450, 5454, 5459, 5463, 5471, 5475,
153 5479, 5509, 5515, 5521, 5527, 5533, 5540, 5546,
154 5553, 5558, 5568, 5572, 5579, 5585, 5589, 5596,
155 5600, 5606, 5609, 5613, 5617, 5621, 5625, 5630,
156 5635, 5639, 5650, 5654, 5658, 5664, 5672, 5676,
157 5693, 5697, 5703, 5713, 5719, 5725, 5728, 5733,
158 5742, 5746, 5750, 5756, 5760, 5766, 5774, 5792,
159 5793, 5803, 5804, 5813, 5821, 5823, 5826, 5828,
160 5830, 5832, 5837, 5850, 5854, 5869, 5898, 5909,
161 5911, 5915, 5919, 5924, 5928, 5930, 5937, 5941,
162 5949, 5953, 5954, 5955, 5957, 5959, 5961, 5963,
163 5965, 5966, 5967, 5968, 5970, 5972, 5974, 5975,
164 5976, 5977, 5978, 5980, 5982, 5984, 5985, 5986,
165 5990, 5996, 5996, 5998, 6000, 6009, 6015, 6022,
166 6023, 6026, 6027, 6031, 6036, 6045, 6049, 6053,
167 6061, 6063, 6065, 6067, 6070, 6102, 6104, 6106,
168 6110, 6114, 6117, 6128, 6141, 6160, 6173, 6189,
169 6201, 6217, 6232, 6253, 6263, 6275, 6286, 6300,
170 6315, 6325, 6337, 6346, 6358, 6360, 6364, 6385,
171 6394, 6404, 6410, 6416, 6417, 6466, 6468, 6472,
172 6474, 6480, 6487, 6495, 6502, 6505, 6511, 6515,
173 6519, 6521, 6525, 6529, 6533, 6539, 6547, 6555,
174 6561, 6563, 6567, 6569, 6575, 6579, 6583, 6587,
175 6591, 6596, 6603, 6609, 6611, 6613, 6617, 6619,
176 6625, 6629, 6633, 6643, 6648, 6662, 6677, 6679,
177 6687, 6689, 6694, 6708, 6713, 6715, 6719, 6720,
178 6724, 6730, 6736, 6746, 6756, 6767, 6775, 6778,
179 6781, 6785, 6789, 6791, 6794, 6794, 6797, 6799,
180 6829, 6831, 6833, 6837, 6842, 6846, 6851, 6853,
181 6855, 6857, 6866, 6870, 6874, 6880, 6882, 6890,
182 6898, 6910, 6913, 6919, 6923, 6925, 6929, 6949,
183 6951, 6953, 6964, 6970, 6972, 6974, 6976, 6980,
184 6986, 6992, 6994, 6999, 7003, 7005, 7013, 7031,
185 7071, 7081, 7085, 7087, 7089, 7090, 7094, 7098,
186 7102, 7106, 7110, 7115, 7119, 7123, 7127, 7129,
187 7131, 7135, 7145, 7149, 7151, 7155, 7159, 7163,
188 7176, 7178, 7180, 7184, 7186, 7190, 7192, 7194,
189 7224, 7228, 7232, 7236, 7239, 7246, 7251, 7262,
190 7266, 7282, 7296, 7300, 7305, 7309, 7313, 7319,
191 7321, 7327, 7329, 7333, 7335, 7341, 7346, 7351,
192 7361, 7363, 7365, 7369, 7373, 7375, 7388, 7390,
193 7394, 7398, 7406, 7408, 7412, 7414, 7415, 7418,
194 7423, 7425, 7427, 7431, 7433, 7437, 7443, 7463,
195 7469, 7475, 7477, 7478, 7488, 7489, 7497, 7504,
196 7506, 7509, 7511, 7513, 7515, 7520, 7524, 7528,
197 7533, 7543, 7553, 7557, 7561, 7575, 7601, 7611,
198 7613, 7615, 7618, 7620, 7623, 7625, 7629, 7631,
199 7632, 7636, 7638, 7640, 7647, 7651, 7658, 7665,
200 7674, 7690, 7702, 7720, 7731, 7743, 7751, 7769,
201 7777, 7807, 7810, 7820, 7830, 7842, 7853, 7862,
202 7875, 7887, 7891, 7897, 7924, 7933, 7936, 7941,
203 7947, 7952, 7973, 7977, 7983, 7983, 7990, 7999,
204 8007, 8010, 8014, 8020, 8026, 8029, 8033, 8040,
205 8046, 8055, 8064, 8068, 8072, 8076, 8080, 8087,
206 8091, 8095, 8105, 8111, 8115, 8121, 8125, 8128,
207 8134, 8140, 8152, 8156, 8160, 8170, 8174, 8185,
208 8187, 8189, 8193, 8205, 8210, 8234, 8238, 8244,
209 8266, 8275, 8279, 8282, 8283, 8291, 8299, 8305,
210 8315, 8322, 8340, 8343, 8346, 8354, 8360, 8364,
211 8368, 8372, 8378, 8386, 8391, 8397, 8401, 8409,
212 8416, 8420, 8427, 8433, 8441, 8449, 8455, 8461,
213 8472, 8476, 8488, 8497, 8514, 8531, 8534, 8538,
214 8540, 8546, 8548, 8552, 8567, 8571, 8575, 8579,
215 8583, 8587, 8589, 8595, 8600, 8604, 8610, 8617,
216 8620, 8638, 8640, 8685, 8691, 8697, 8701, 8705,
217 8711, 8715, 8721, 8727, 8734, 8736, 8742, 8748,
218 8752, 8756, 8764, 8777, 8783, 8790, 8798, 8804,
219 8813, 8819, 8823, 8828, 8832, 8840, 8844, 8848,
220 8878, 8884, 8890, 8896, 8902, 8909, 8915, 8922,
221 8927, 8937, 8941, 8948, 8954, 8958, 8965, 8969,
222 8975, 8978, 8982, 8986, 8990, 8994, 8999, 9004,
223 9008, 9019, 9023, 9027, 9033, 9041, 9045, 9062,
224 9066, 9072, 9082, 9088, 9094, 9097, 9102, 9111,
225 9115, 9119, 9125, 9129, 9135, 9143, 9161, 9162,
226 9172, 9173, 9182, 9190, 9192, 9195, 9197, 9199,
227 9201, 9206, 9219, 9223, 9238, 9267, 9278, 9280,
228 9284, 9288, 9293, 9297, 9299, 9306, 9310, 9318,
229 9322, 9398, 9400, 9401, 9402, 9403, 9404, 9405,
230 9407, 9408, 9413, 9415, 9417, 9418, 9462, 9463,
231 9464, 9466, 9471, 9475, 9475, 9477, 9479, 9490,
232 9500, 9508, 9509, 9511, 9512, 9516, 9520, 9530,
233 9534, 9541, 9552, 9559, 9563, 9569, 9580, 9612,
234 9661, 9676, 9691, 9696, 9698, 9703, 9735, 9743,
235 9745, 9767, 9789, 9791, 9807, 9823, 9839, 9855,
236 9870, 9880, 9897, 9914, 9931, 9947, 9957, 9974,
237 9990, 10006, 10022, 10038, 10054, 10070, 10086, 10087,
238 10088, 10089, 10090, 10092, 10094, 10096, 10110, 10124,
239 10138, 10152, 10153, 10154, 10156, 10158, 10160, 10174,
240 10188, 10189, 10190, 10192, 10194, 10196, 10245, 10289,
241 10291, 10296, 10300, 10300, 10302, 10304, 10315, 10325,
242 10333, 10334, 10336, 10337, 10341, 10345, 10355, 10359,
243 10366, 10377, 10384, 10388, 10394, 10405, 10437, 10486,
244 10501, 10516, 10521, 10523, 10528, 10560, 10568, 10570,
245 10592, 10614,
246}
247
248var _hcltok_trans_keys []byte = []byte{
249 10, 46, 42, 42, 47, 46, 69, 101,
250 48, 57, 43, 45, 48, 57, 48, 57,
251 45, 95, 194, 195, 198, 199, 203, 205,
252 206, 207, 210, 212, 213, 214, 215, 216,
253 217, 219, 220, 221, 222, 223, 224, 225,
254 226, 227, 228, 233, 234, 237, 239, 240,
255 65, 90, 97, 122, 196, 202, 208, 218,
256 229, 236, 95, 194, 195, 198, 199, 203,
257 205, 206, 207, 210, 212, 213, 214, 215,
258 216, 217, 219, 220, 221, 222, 223, 224,
259 225, 226, 227, 228, 233, 234, 237, 239,
260 240, 65, 90, 97, 122, 196, 202, 208,
261 218, 229, 236, 10, 13, 45, 95, 194,
262 195, 198, 199, 203, 204, 205, 206, 207,
263 210, 212, 213, 214, 215, 216, 217, 219,
264 220, 221, 222, 223, 224, 225, 226, 227,
265 228, 233, 234, 237, 239, 240, 243, 48,
266 57, 65, 90, 97, 122, 196, 218, 229,
267 236, 10, 170, 181, 183, 186, 128, 150,
268 152, 182, 184, 255, 192, 255, 0, 127,
269 173, 130, 133, 146, 159, 165, 171, 175,
270 255, 181, 190, 184, 185, 192, 255, 140,
271 134, 138, 142, 161, 163, 255, 182, 130,
272 136, 137, 176, 151, 152, 154, 160, 190,
273 136, 144, 192, 255, 135, 129, 130, 132,
274 133, 144, 170, 176, 178, 144, 154, 160,
275 191, 128, 169, 174, 255, 148, 169, 157,
276 158, 189, 190, 192, 255, 144, 255, 139,
277 140, 178, 255, 186, 128, 181, 160, 161,
278 162, 163, 164, 165, 166, 167, 168, 169,
279 170, 171, 172, 173, 174, 175, 176, 177,
280 178, 179, 180, 181, 182, 183, 184, 185,
281 186, 187, 188, 189, 190, 191, 128, 173,
282 128, 155, 160, 180, 182, 189, 148, 161,
283 163, 255, 176, 164, 165, 132, 169, 177,
284 141, 142, 145, 146, 179, 181, 186, 187,
285 158, 133, 134, 137, 138, 143, 150, 152,
286 155, 164, 165, 178, 255, 188, 129, 131,
287 133, 138, 143, 144, 147, 168, 170, 176,
288 178, 179, 181, 182, 184, 185, 190, 255,
289 157, 131, 134, 137, 138, 142, 144, 146,
290 152, 159, 165, 182, 255, 129, 131, 133,
291 141, 143, 145, 147, 168, 170, 176, 178,
292 179, 181, 185, 188, 255, 134, 138, 142,
293 143, 145, 159, 164, 165, 176, 184, 186,
294 255, 129, 131, 133, 140, 143, 144, 147,
295 168, 170, 176, 178, 179, 181, 185, 188,
296 191, 177, 128, 132, 135, 136, 139, 141,
297 150, 151, 156, 157, 159, 163, 166, 175,
298 156, 130, 131, 133, 138, 142, 144, 146,
299 149, 153, 154, 158, 159, 163, 164, 168,
300 170, 174, 185, 190, 191, 144, 151, 128,
301 130, 134, 136, 138, 141, 166, 175, 128,
302 131, 133, 140, 142, 144, 146, 168, 170,
303 185, 189, 255, 133, 137, 151, 142, 148,
304 155, 159, 164, 165, 176, 255, 128, 131,
305 133, 140, 142, 144, 146, 168, 170, 179,
306 181, 185, 188, 191, 158, 128, 132, 134,
307 136, 138, 141, 149, 150, 160, 163, 166,
308 175, 177, 178, 129, 131, 133, 140, 142,
309 144, 146, 186, 189, 255, 133, 137, 143,
310 147, 152, 158, 164, 165, 176, 185, 192,
311 255, 189, 130, 131, 133, 150, 154, 177,
312 179, 187, 138, 150, 128, 134, 143, 148,
313 152, 159, 166, 175, 178, 179, 129, 186,
314 128, 142, 144, 153, 132, 138, 141, 165,
315 167, 129, 130, 135, 136, 148, 151, 153,
316 159, 161, 163, 170, 171, 173, 185, 187,
317 189, 134, 128, 132, 136, 141, 144, 153,
318 156, 159, 128, 181, 183, 185, 152, 153,
319 160, 169, 190, 191, 128, 135, 137, 172,
320 177, 191, 128, 132, 134, 151, 153, 188,
321 134, 128, 129, 130, 131, 137, 138, 139,
322 140, 141, 142, 143, 144, 153, 154, 155,
323 156, 157, 158, 159, 160, 161, 162, 163,
324 164, 165, 166, 167, 168, 169, 170, 173,
325 175, 176, 177, 178, 179, 181, 182, 183,
326 188, 189, 190, 191, 132, 152, 172, 184,
327 185, 187, 128, 191, 128, 137, 144, 255,
328 158, 159, 134, 187, 136, 140, 142, 143,
329 137, 151, 153, 142, 143, 158, 159, 137,
330 177, 142, 143, 182, 183, 191, 255, 128,
331 130, 133, 136, 150, 152, 255, 145, 150,
332 151, 155, 156, 160, 168, 178, 255, 128,
333 143, 160, 255, 182, 183, 190, 255, 129,
334 255, 173, 174, 192, 255, 129, 154, 160,
335 255, 171, 173, 185, 255, 128, 140, 142,
336 148, 160, 180, 128, 147, 160, 172, 174,
337 176, 178, 179, 148, 150, 152, 155, 158,
338 159, 170, 255, 139, 141, 144, 153, 160,
339 255, 184, 255, 128, 170, 176, 255, 182,
340 255, 128, 158, 160, 171, 176, 187, 134,
341 173, 176, 180, 128, 171, 176, 255, 138,
342 143, 155, 255, 128, 155, 160, 255, 159,
343 189, 190, 192, 255, 167, 128, 137, 144,
344 153, 176, 189, 140, 143, 154, 170, 180,
345 255, 180, 255, 128, 183, 128, 137, 141,
346 189, 128, 136, 144, 146, 148, 182, 184,
347 185, 128, 181, 187, 191, 150, 151, 158,
348 159, 152, 154, 156, 158, 134, 135, 142,
349 143, 190, 255, 190, 128, 180, 182, 188,
350 130, 132, 134, 140, 144, 147, 150, 155,
351 160, 172, 178, 180, 182, 188, 128, 129,
352 130, 131, 132, 133, 134, 176, 177, 178,
353 179, 180, 181, 182, 183, 191, 255, 129,
354 147, 149, 176, 178, 190, 192, 255, 144,
355 156, 161, 144, 156, 165, 176, 130, 135,
356 149, 164, 166, 168, 138, 147, 152, 157,
357 170, 185, 188, 191, 142, 133, 137, 160,
358 255, 137, 255, 128, 174, 176, 255, 159,
359 165, 170, 180, 255, 167, 173, 128, 165,
360 176, 255, 168, 174, 176, 190, 192, 255,
361 128, 150, 160, 166, 168, 174, 176, 182,
362 184, 190, 128, 134, 136, 142, 144, 150,
363 152, 158, 160, 191, 128, 129, 130, 131,
364 132, 133, 134, 135, 144, 145, 255, 133,
365 135, 161, 175, 177, 181, 184, 188, 160,
366 151, 152, 187, 192, 255, 133, 173, 177,
367 255, 143, 159, 187, 255, 176, 191, 182,
368 183, 184, 191, 192, 255, 150, 255, 128,
369 146, 147, 148, 152, 153, 154, 155, 156,
370 158, 159, 160, 161, 162, 163, 164, 165,
371 166, 167, 168, 169, 170, 171, 172, 173,
372 174, 175, 176, 129, 255, 141, 255, 144,
373 189, 141, 143, 172, 255, 191, 128, 175,
374 180, 189, 151, 159, 162, 255, 175, 137,
375 138, 184, 255, 183, 255, 168, 255, 128,
376 179, 188, 134, 143, 154, 159, 184, 186,
377 190, 255, 128, 173, 176, 255, 148, 159,
378 189, 255, 129, 142, 154, 159, 191, 255,
379 128, 182, 128, 141, 144, 153, 160, 182,
380 186, 255, 128, 130, 155, 157, 160, 175,
381 178, 182, 129, 134, 137, 142, 145, 150,
382 160, 166, 168, 174, 176, 255, 155, 166,
383 175, 128, 170, 172, 173, 176, 185, 158,
384 159, 160, 255, 164, 175, 135, 138, 188,
385 255, 164, 169, 171, 172, 173, 174, 175,
386 180, 181, 182, 183, 184, 185, 187, 188,
387 189, 190, 191, 165, 186, 174, 175, 154,
388 255, 190, 128, 134, 147, 151, 157, 168,
389 170, 182, 184, 188, 128, 129, 131, 132,
390 134, 255, 147, 255, 190, 255, 144, 145,
391 136, 175, 188, 255, 128, 143, 160, 175,
392 179, 180, 141, 143, 176, 180, 182, 255,
393 189, 255, 191, 144, 153, 161, 186, 129,
394 154, 166, 255, 191, 255, 130, 135, 138,
395 143, 146, 151, 154, 156, 144, 145, 146,
396 147, 148, 150, 151, 152, 155, 157, 158,
397 160, 170, 171, 172, 175, 161, 169, 128,
398 129, 130, 131, 133, 135, 138, 139, 140,
399 141, 142, 143, 144, 145, 146, 147, 148,
400 149, 152, 156, 157, 160, 161, 162, 163,
401 164, 166, 168, 169, 170, 171, 172, 173,
402 174, 176, 177, 153, 155, 178, 179, 128,
403 139, 141, 166, 168, 186, 188, 189, 191,
404 255, 142, 143, 158, 255, 187, 255, 128,
405 180, 189, 128, 156, 160, 255, 145, 159,
406 161, 255, 128, 159, 176, 255, 139, 143,
407 187, 255, 128, 157, 160, 255, 144, 132,
408 135, 150, 255, 158, 159, 170, 175, 148,
409 151, 188, 255, 128, 167, 176, 255, 164,
410 255, 183, 255, 128, 149, 160, 167, 136,
411 188, 128, 133, 138, 181, 183, 184, 191,
412 255, 150, 159, 183, 255, 128, 158, 160,
413 178, 180, 181, 128, 149, 160, 185, 128,
414 183, 190, 191, 191, 128, 131, 133, 134,
415 140, 147, 149, 151, 153, 179, 184, 186,
416 160, 188, 128, 156, 128, 135, 137, 166,
417 128, 181, 128, 149, 160, 178, 128, 145,
418 128, 178, 129, 130, 131, 132, 133, 135,
419 136, 138, 139, 140, 141, 144, 145, 146,
420 147, 150, 151, 152, 153, 154, 155, 156,
421 162, 163, 171, 176, 177, 178, 128, 134,
422 135, 165, 176, 190, 144, 168, 176, 185,
423 128, 180, 182, 191, 182, 144, 179, 155,
424 133, 137, 141, 143, 157, 255, 190, 128,
425 145, 147, 183, 136, 128, 134, 138, 141,
426 143, 157, 159, 168, 176, 255, 171, 175,
427 186, 255, 128, 131, 133, 140, 143, 144,
428 147, 168, 170, 176, 178, 179, 181, 185,
429 188, 191, 144, 151, 128, 132, 135, 136,
430 139, 141, 157, 163, 166, 172, 176, 180,
431 128, 138, 144, 153, 134, 136, 143, 154,
432 255, 128, 181, 184, 255, 129, 151, 158,
433 255, 129, 131, 133, 143, 154, 255, 128,
434 137, 128, 153, 157, 171, 176, 185, 160,
435 255, 170, 190, 192, 255, 128, 184, 128,
436 136, 138, 182, 184, 191, 128, 144, 153,
437 178, 255, 168, 144, 145, 183, 255, 128,
438 142, 145, 149, 129, 141, 144, 146, 147,
439 148, 175, 255, 132, 255, 128, 144, 129,
440 143, 144, 153, 145, 152, 135, 255, 160,
441 168, 169, 171, 172, 173, 174, 188, 189,
442 190, 191, 161, 167, 185, 255, 128, 158,
443 160, 169, 144, 173, 176, 180, 128, 131,
444 144, 153, 163, 183, 189, 255, 144, 255,
445 133, 143, 191, 255, 143, 159, 160, 128,
446 129, 255, 159, 160, 171, 172, 255, 173,
447 255, 179, 255, 128, 176, 177, 178, 128,
448 129, 171, 175, 189, 255, 128, 136, 144,
449 153, 157, 158, 133, 134, 137, 144, 145,
450 146, 147, 148, 149, 154, 155, 156, 157,
451 158, 159, 168, 169, 170, 150, 153, 165,
452 169, 173, 178, 187, 255, 131, 132, 140,
453 169, 174, 255, 130, 132, 149, 157, 173,
454 186, 188, 160, 161, 163, 164, 167, 168,
455 132, 134, 149, 157, 186, 139, 140, 191,
456 255, 134, 128, 132, 138, 144, 146, 255,
457 166, 167, 129, 155, 187, 149, 181, 143,
458 175, 137, 169, 131, 140, 141, 192, 255,
459 128, 182, 187, 255, 173, 180, 182, 255,
460 132, 155, 159, 161, 175, 128, 160, 163,
461 164, 165, 184, 185, 186, 161, 162, 128,
462 134, 136, 152, 155, 161, 163, 164, 166,
463 170, 133, 143, 151, 255, 139, 143, 154,
464 255, 164, 167, 185, 187, 128, 131, 133,
465 159, 161, 162, 169, 178, 180, 183, 130,
466 135, 137, 139, 148, 151, 153, 155, 157,
467 159, 164, 190, 141, 143, 145, 146, 161,
468 162, 167, 170, 172, 178, 180, 183, 185,
469 188, 128, 137, 139, 155, 161, 163, 165,
470 169, 171, 187, 155, 156, 151, 255, 156,
471 157, 160, 181, 255, 186, 187, 255, 162,
472 255, 160, 168, 161, 167, 158, 255, 160,
473 132, 135, 133, 134, 176, 255, 170, 181,
474 186, 191, 176, 180, 182, 183, 186, 189,
475 134, 140, 136, 138, 142, 161, 163, 255,
476 130, 137, 136, 255, 144, 170, 176, 178,
477 160, 191, 128, 138, 174, 175, 177, 255,
478 148, 150, 164, 167, 173, 176, 185, 189,
479 190, 192, 255, 144, 146, 175, 141, 255,
480 166, 176, 178, 255, 186, 138, 170, 180,
481 181, 160, 161, 162, 164, 165, 166, 167,
482 168, 169, 170, 171, 172, 173, 174, 175,
483 176, 177, 178, 179, 180, 181, 182, 184,
484 186, 187, 188, 189, 190, 183, 185, 154,
485 164, 168, 128, 149, 128, 152, 189, 132,
486 185, 144, 152, 161, 177, 255, 169, 177,
487 129, 132, 141, 142, 145, 146, 179, 181,
488 186, 188, 190, 255, 142, 156, 157, 159,
489 161, 176, 177, 133, 138, 143, 144, 147,
490 168, 170, 176, 178, 179, 181, 182, 184,
491 185, 158, 153, 156, 178, 180, 189, 133,
492 141, 143, 145, 147, 168, 170, 176, 178,
493 179, 181, 185, 144, 185, 160, 161, 189,
494 133, 140, 143, 144, 147, 168, 170, 176,
495 178, 179, 181, 185, 177, 156, 157, 159,
496 161, 131, 156, 133, 138, 142, 144, 146,
497 149, 153, 154, 158, 159, 163, 164, 168,
498 170, 174, 185, 144, 189, 133, 140, 142,
499 144, 146, 168, 170, 185, 152, 154, 160,
500 161, 128, 189, 133, 140, 142, 144, 146,
501 168, 170, 179, 181, 185, 158, 160, 161,
502 177, 178, 189, 133, 140, 142, 144, 146,
503 186, 142, 148, 150, 159, 161, 186, 191,
504 189, 133, 150, 154, 177, 179, 187, 128,
505 134, 129, 176, 178, 179, 132, 138, 141,
506 165, 167, 189, 129, 130, 135, 136, 148,
507 151, 153, 159, 161, 163, 170, 171, 173,
508 176, 178, 179, 134, 128, 132, 156, 159,
509 128, 128, 135, 137, 172, 136, 140, 128,
510 129, 130, 131, 137, 138, 139, 140, 141,
511 142, 143, 144, 153, 154, 155, 156, 157,
512 158, 159, 160, 161, 162, 163, 164, 165,
513 166, 167, 168, 169, 170, 172, 173, 174,
514 175, 176, 177, 178, 179, 180, 181, 182,
515 184, 188, 189, 190, 191, 132, 152, 185,
516 187, 191, 128, 170, 161, 144, 149, 154,
517 157, 165, 166, 174, 176, 181, 255, 130,
518 141, 143, 159, 155, 255, 128, 140, 142,
519 145, 160, 177, 128, 145, 160, 172, 174,
520 176, 151, 156, 170, 128, 168, 176, 255,
521 138, 255, 128, 150, 160, 255, 149, 255,
522 167, 133, 179, 133, 139, 131, 160, 174,
523 175, 186, 255, 166, 255, 128, 163, 141,
524 143, 154, 189, 169, 172, 174, 177, 181,
525 182, 129, 130, 132, 133, 134, 176, 177,
526 178, 179, 180, 181, 182, 183, 177, 191,
527 165, 170, 175, 177, 180, 255, 168, 174,
528 176, 255, 128, 134, 136, 142, 144, 150,
529 152, 158, 128, 129, 130, 131, 132, 133,
530 134, 135, 144, 145, 255, 133, 135, 161,
531 169, 177, 181, 184, 188, 160, 151, 154,
532 128, 146, 147, 148, 152, 153, 154, 155,
533 156, 158, 159, 160, 161, 162, 163, 164,
534 165, 166, 167, 168, 169, 170, 171, 172,
535 173, 174, 175, 176, 129, 255, 141, 143,
536 160, 169, 172, 255, 191, 128, 174, 130,
537 134, 139, 163, 255, 130, 179, 187, 189,
538 178, 183, 138, 165, 176, 255, 135, 159,
539 189, 255, 132, 178, 143, 160, 164, 166,
540 175, 186, 190, 128, 168, 186, 128, 130,
541 132, 139, 160, 182, 190, 255, 176, 178,
542 180, 183, 184, 190, 255, 128, 130, 155,
543 157, 160, 170, 178, 180, 128, 162, 164,
544 169, 171, 172, 173, 174, 175, 180, 181,
545 182, 183, 185, 186, 187, 188, 189, 190,
546 191, 165, 179, 157, 190, 128, 134, 147,
547 151, 159, 168, 170, 182, 184, 188, 176,
548 180, 182, 255, 161, 186, 144, 145, 146,
549 147, 148, 150, 151, 152, 155, 157, 158,
550 160, 170, 171, 172, 175, 161, 169, 128,
551 129, 130, 131, 133, 138, 139, 140, 141,
552 142, 143, 144, 145, 146, 147, 148, 149,
553 152, 156, 157, 160, 161, 162, 163, 164,
554 166, 168, 169, 170, 171, 172, 173, 174,
555 176, 177, 153, 155, 178, 179, 145, 255,
556 139, 143, 182, 255, 158, 175, 128, 144,
557 147, 149, 151, 153, 179, 128, 135, 137,
558 164, 128, 130, 131, 132, 133, 134, 135,
559 136, 138, 139, 140, 141, 144, 145, 146,
560 147, 150, 151, 152, 153, 154, 156, 162,
561 163, 171, 176, 177, 178, 131, 183, 131,
562 175, 144, 168, 131, 166, 182, 144, 178,
563 131, 178, 154, 156, 129, 132, 128, 145,
564 147, 171, 159, 255, 144, 157, 161, 135,
565 138, 128, 175, 135, 132, 133, 128, 174,
566 152, 155, 132, 128, 170, 128, 153, 160,
567 190, 192, 255, 128, 136, 138, 174, 128,
568 178, 255, 160, 168, 169, 171, 172, 173,
569 174, 188, 189, 190, 191, 161, 167, 144,
570 173, 128, 131, 163, 183, 189, 255, 133,
571 143, 145, 255, 147, 159, 128, 176, 177,
572 178, 128, 136, 144, 153, 144, 145, 146,
573 147, 148, 149, 154, 155, 156, 157, 158,
574 159, 150, 153, 131, 140, 255, 160, 163,
575 164, 165, 184, 185, 186, 161, 162, 133,
576 255, 170, 181, 183, 186, 128, 150, 152,
577 182, 184, 255, 192, 255, 128, 255, 173,
578 130, 133, 146, 159, 165, 171, 175, 255,
579 181, 190, 184, 185, 192, 255, 140, 134,
580 138, 142, 161, 163, 255, 182, 130, 136,
581 137, 176, 151, 152, 154, 160, 190, 136,
582 144, 192, 255, 135, 129, 130, 132, 133,
583 144, 170, 176, 178, 144, 154, 160, 191,
584 128, 169, 174, 255, 148, 169, 157, 158,
585 189, 190, 192, 255, 144, 255, 139, 140,
586 178, 255, 186, 128, 181, 160, 161, 162,
587 163, 164, 165, 166, 167, 168, 169, 170,
588 171, 172, 173, 174, 175, 176, 177, 178,
589 179, 180, 181, 182, 183, 184, 185, 186,
590 187, 188, 189, 190, 191, 128, 173, 128,
591 155, 160, 180, 182, 189, 148, 161, 163,
592 255, 176, 164, 165, 132, 169, 177, 141,
593 142, 145, 146, 179, 181, 186, 187, 158,
594 133, 134, 137, 138, 143, 150, 152, 155,
595 164, 165, 178, 255, 188, 129, 131, 133,
596 138, 143, 144, 147, 168, 170, 176, 178,
597 179, 181, 182, 184, 185, 190, 255, 157,
598 131, 134, 137, 138, 142, 144, 146, 152,
599 159, 165, 182, 255, 129, 131, 133, 141,
600 143, 145, 147, 168, 170, 176, 178, 179,
601 181, 185, 188, 255, 134, 138, 142, 143,
602 145, 159, 164, 165, 176, 184, 186, 255,
603 129, 131, 133, 140, 143, 144, 147, 168,
604 170, 176, 178, 179, 181, 185, 188, 191,
605 177, 128, 132, 135, 136, 139, 141, 150,
606 151, 156, 157, 159, 163, 166, 175, 156,
607 130, 131, 133, 138, 142, 144, 146, 149,
608 153, 154, 158, 159, 163, 164, 168, 170,
609 174, 185, 190, 191, 144, 151, 128, 130,
610 134, 136, 138, 141, 166, 175, 128, 131,
611 133, 140, 142, 144, 146, 168, 170, 185,
612 189, 255, 133, 137, 151, 142, 148, 155,
613 159, 164, 165, 176, 255, 128, 131, 133,
614 140, 142, 144, 146, 168, 170, 179, 181,
615 185, 188, 191, 158, 128, 132, 134, 136,
616 138, 141, 149, 150, 160, 163, 166, 175,
617 177, 178, 129, 131, 133, 140, 142, 144,
618 146, 186, 189, 255, 133, 137, 143, 147,
619 152, 158, 164, 165, 176, 185, 192, 255,
620 189, 130, 131, 133, 150, 154, 177, 179,
621 187, 138, 150, 128, 134, 143, 148, 152,
622 159, 166, 175, 178, 179, 129, 186, 128,
623 142, 144, 153, 132, 138, 141, 165, 167,
624 129, 130, 135, 136, 148, 151, 153, 159,
625 161, 163, 170, 171, 173, 185, 187, 189,
626 134, 128, 132, 136, 141, 144, 153, 156,
627 159, 128, 181, 183, 185, 152, 153, 160,
628 169, 190, 191, 128, 135, 137, 172, 177,
629 191, 128, 132, 134, 151, 153, 188, 134,
630 128, 129, 130, 131, 137, 138, 139, 140,
631 141, 142, 143, 144, 153, 154, 155, 156,
632 157, 158, 159, 160, 161, 162, 163, 164,
633 165, 166, 167, 168, 169, 170, 173, 175,
634 176, 177, 178, 179, 181, 182, 183, 188,
635 189, 190, 191, 132, 152, 172, 184, 185,
636 187, 128, 191, 128, 137, 144, 255, 158,
637 159, 134, 187, 136, 140, 142, 143, 137,
638 151, 153, 142, 143, 158, 159, 137, 177,
639 142, 143, 182, 183, 191, 255, 128, 130,
640 133, 136, 150, 152, 255, 145, 150, 151,
641 155, 156, 160, 168, 178, 255, 128, 143,
642 160, 255, 182, 183, 190, 255, 129, 255,
643 173, 174, 192, 255, 129, 154, 160, 255,
644 171, 173, 185, 255, 128, 140, 142, 148,
645 160, 180, 128, 147, 160, 172, 174, 176,
646 178, 179, 148, 150, 152, 155, 158, 159,
647 170, 255, 139, 141, 144, 153, 160, 255,
648 184, 255, 128, 170, 176, 255, 182, 255,
649 128, 158, 160, 171, 176, 187, 134, 173,
650 176, 180, 128, 171, 176, 255, 138, 143,
651 155, 255, 128, 155, 160, 255, 159, 189,
652 190, 192, 255, 167, 128, 137, 144, 153,
653 176, 189, 140, 143, 154, 170, 180, 255,
654 180, 255, 128, 183, 128, 137, 141, 189,
655 128, 136, 144, 146, 148, 182, 184, 185,
656 128, 181, 187, 191, 150, 151, 158, 159,
657 152, 154, 156, 158, 134, 135, 142, 143,
658 190, 255, 190, 128, 180, 182, 188, 130,
659 132, 134, 140, 144, 147, 150, 155, 160,
660 172, 178, 180, 182, 188, 128, 129, 130,
661 131, 132, 133, 134, 176, 177, 178, 179,
662 180, 181, 182, 183, 191, 255, 129, 147,
663 149, 176, 178, 190, 192, 255, 144, 156,
664 161, 144, 156, 165, 176, 130, 135, 149,
665 164, 166, 168, 138, 147, 152, 157, 170,
666 185, 188, 191, 142, 133, 137, 160, 255,
667 137, 255, 128, 174, 176, 255, 159, 165,
668 170, 180, 255, 167, 173, 128, 165, 176,
669 255, 168, 174, 176, 190, 192, 255, 128,
670 150, 160, 166, 168, 174, 176, 182, 184,
671 190, 128, 134, 136, 142, 144, 150, 152,
672 158, 160, 191, 128, 129, 130, 131, 132,
673 133, 134, 135, 144, 145, 255, 133, 135,
674 161, 175, 177, 181, 184, 188, 160, 151,
675 152, 187, 192, 255, 133, 173, 177, 255,
676 143, 159, 187, 255, 176, 191, 182, 183,
677 184, 191, 192, 255, 150, 255, 128, 146,
678 147, 148, 152, 153, 154, 155, 156, 158,
679 159, 160, 161, 162, 163, 164, 165, 166,
680 167, 168, 169, 170, 171, 172, 173, 174,
681 175, 176, 129, 255, 141, 255, 144, 189,
682 141, 143, 172, 255, 191, 128, 175, 180,
683 189, 151, 159, 162, 255, 175, 137, 138,
684 184, 255, 183, 255, 168, 255, 128, 179,
685 188, 134, 143, 154, 159, 184, 186, 190,
686 255, 128, 173, 176, 255, 148, 159, 189,
687 255, 129, 142, 154, 159, 191, 255, 128,
688 182, 128, 141, 144, 153, 160, 182, 186,
689 255, 128, 130, 155, 157, 160, 175, 178,
690 182, 129, 134, 137, 142, 145, 150, 160,
691 166, 168, 174, 176, 255, 155, 166, 175,
692 128, 170, 172, 173, 176, 185, 158, 159,
693 160, 255, 164, 175, 135, 138, 188, 255,
694 164, 169, 171, 172, 173, 174, 175, 180,
695 181, 182, 183, 184, 185, 187, 188, 189,
696 190, 191, 165, 186, 174, 175, 154, 255,
697 190, 128, 134, 147, 151, 157, 168, 170,
698 182, 184, 188, 128, 129, 131, 132, 134,
699 255, 147, 255, 190, 255, 144, 145, 136,
700 175, 188, 255, 128, 143, 160, 175, 179,
701 180, 141, 143, 176, 180, 182, 255, 189,
702 255, 191, 144, 153, 161, 186, 129, 154,
703 166, 255, 191, 255, 130, 135, 138, 143,
704 146, 151, 154, 156, 144, 145, 146, 147,
705 148, 150, 151, 152, 155, 157, 158, 160,
706 170, 171, 172, 175, 161, 169, 128, 129,
707 130, 131, 133, 135, 138, 139, 140, 141,
708 142, 143, 144, 145, 146, 147, 148, 149,
709 152, 156, 157, 160, 161, 162, 163, 164,
710 166, 168, 169, 170, 171, 172, 173, 174,
711 176, 177, 153, 155, 178, 179, 128, 139,
712 141, 166, 168, 186, 188, 189, 191, 255,
713 142, 143, 158, 255, 187, 255, 128, 180,
714 189, 128, 156, 160, 255, 145, 159, 161,
715 255, 128, 159, 176, 255, 139, 143, 187,
716 255, 128, 157, 160, 255, 144, 132, 135,
717 150, 255, 158, 159, 170, 175, 148, 151,
718 188, 255, 128, 167, 176, 255, 164, 255,
719 183, 255, 128, 149, 160, 167, 136, 188,
720 128, 133, 138, 181, 183, 184, 191, 255,
721 150, 159, 183, 255, 128, 158, 160, 178,
722 180, 181, 128, 149, 160, 185, 128, 183,
723 190, 191, 191, 128, 131, 133, 134, 140,
724 147, 149, 151, 153, 179, 184, 186, 160,
725 188, 128, 156, 128, 135, 137, 166, 128,
726 181, 128, 149, 160, 178, 128, 145, 128,
727 178, 129, 130, 131, 132, 133, 135, 136,
728 138, 139, 140, 141, 144, 145, 146, 147,
729 150, 151, 152, 153, 154, 155, 156, 162,
730 163, 171, 176, 177, 178, 128, 134, 135,
731 165, 176, 190, 144, 168, 176, 185, 128,
732 180, 182, 191, 182, 144, 179, 155, 133,
733 137, 141, 143, 157, 255, 190, 128, 145,
734 147, 183, 136, 128, 134, 138, 141, 143,
735 157, 159, 168, 176, 255, 171, 175, 186,
736 255, 128, 131, 133, 140, 143, 144, 147,
737 168, 170, 176, 178, 179, 181, 185, 188,
738 191, 144, 151, 128, 132, 135, 136, 139,
739 141, 157, 163, 166, 172, 176, 180, 128,
740 138, 144, 153, 134, 136, 143, 154, 255,
741 128, 181, 184, 255, 129, 151, 158, 255,
742 129, 131, 133, 143, 154, 255, 128, 137,
743 128, 153, 157, 171, 176, 185, 160, 255,
744 170, 190, 192, 255, 128, 184, 128, 136,
745 138, 182, 184, 191, 128, 144, 153, 178,
746 255, 168, 144, 145, 183, 255, 128, 142,
747 145, 149, 129, 141, 144, 146, 147, 148,
748 175, 255, 132, 255, 128, 144, 129, 143,
749 144, 153, 145, 152, 135, 255, 160, 168,
750 169, 171, 172, 173, 174, 188, 189, 190,
751 191, 161, 167, 185, 255, 128, 158, 160,
752 169, 144, 173, 176, 180, 128, 131, 144,
753 153, 163, 183, 189, 255, 144, 255, 133,
754 143, 191, 255, 143, 159, 160, 128, 129,
755 255, 159, 160, 171, 172, 255, 173, 255,
756 179, 255, 128, 176, 177, 178, 128, 129,
757 171, 175, 189, 255, 128, 136, 144, 153,
758 157, 158, 133, 134, 137, 144, 145, 146,
759 147, 148, 149, 154, 155, 156, 157, 158,
760 159, 168, 169, 170, 150, 153, 165, 169,
761 173, 178, 187, 255, 131, 132, 140, 169,
762 174, 255, 130, 132, 149, 157, 173, 186,
763 188, 160, 161, 163, 164, 167, 168, 132,
764 134, 149, 157, 186, 139, 140, 191, 255,
765 134, 128, 132, 138, 144, 146, 255, 166,
766 167, 129, 155, 187, 149, 181, 143, 175,
767 137, 169, 131, 140, 141, 192, 255, 128,
768 182, 187, 255, 173, 180, 182, 255, 132,
769 155, 159, 161, 175, 128, 160, 163, 164,
770 165, 184, 185, 186, 161, 162, 128, 134,
771 136, 152, 155, 161, 163, 164, 166, 170,
772 133, 143, 151, 255, 139, 143, 154, 255,
773 164, 167, 185, 187, 128, 131, 133, 159,
774 161, 162, 169, 178, 180, 183, 130, 135,
775 137, 139, 148, 151, 153, 155, 157, 159,
776 164, 190, 141, 143, 145, 146, 161, 162,
777 167, 170, 172, 178, 180, 183, 185, 188,
778 128, 137, 139, 155, 161, 163, 165, 169,
779 171, 187, 155, 156, 151, 255, 156, 157,
780 160, 181, 255, 186, 187, 255, 162, 255,
781 160, 168, 161, 167, 158, 255, 160, 132,
782 135, 133, 134, 176, 255, 128, 191, 154,
783 164, 168, 128, 149, 150, 191, 128, 152,
784 153, 191, 181, 128, 159, 160, 189, 190,
785 191, 189, 128, 131, 132, 185, 186, 191,
786 144, 128, 151, 152, 161, 162, 176, 177,
787 255, 169, 177, 129, 132, 141, 142, 145,
788 146, 179, 181, 186, 188, 190, 191, 192,
789 255, 142, 158, 128, 155, 156, 161, 162,
790 175, 176, 177, 178, 191, 169, 177, 180,
791 183, 128, 132, 133, 138, 139, 142, 143,
792 144, 145, 146, 147, 185, 186, 191, 157,
793 128, 152, 153, 158, 159, 177, 178, 180,
794 181, 191, 142, 146, 169, 177, 180, 189,
795 128, 132, 133, 185, 186, 191, 144, 185,
796 128, 159, 160, 161, 162, 191, 169, 177,
797 180, 189, 128, 132, 133, 140, 141, 142,
798 143, 144, 145, 146, 147, 185, 186, 191,
799 158, 177, 128, 155, 156, 161, 162, 191,
800 131, 145, 155, 157, 128, 132, 133, 138,
801 139, 141, 142, 149, 150, 152, 153, 159,
802 160, 162, 163, 164, 165, 167, 168, 170,
803 171, 173, 174, 185, 186, 191, 144, 128,
804 191, 141, 145, 169, 189, 128, 132, 133,
805 185, 186, 191, 128, 151, 152, 154, 155,
806 159, 160, 161, 162, 191, 128, 141, 145,
807 169, 180, 189, 129, 132, 133, 185, 186,
808 191, 158, 128, 159, 160, 161, 162, 176,
809 177, 178, 179, 191, 141, 145, 189, 128,
810 132, 133, 186, 187, 191, 142, 128, 147,
811 148, 150, 151, 158, 159, 161, 162, 185,
812 186, 191, 178, 188, 128, 132, 133, 150,
813 151, 153, 154, 189, 190, 191, 128, 134,
814 135, 191, 128, 177, 129, 179, 180, 191,
815 128, 131, 137, 141, 152, 160, 164, 166,
816 172, 177, 189, 129, 132, 133, 134, 135,
817 138, 139, 147, 148, 167, 168, 169, 170,
818 179, 180, 191, 133, 128, 134, 135, 155,
819 156, 159, 160, 191, 128, 129, 191, 136,
820 128, 172, 173, 191, 128, 135, 136, 140,
821 141, 191, 191, 128, 170, 171, 190, 161,
822 128, 143, 144, 149, 150, 153, 154, 157,
823 158, 164, 165, 166, 167, 173, 174, 176,
824 177, 180, 181, 255, 130, 141, 143, 159,
825 134, 187, 136, 140, 142, 143, 137, 151,
826 153, 142, 143, 158, 159, 137, 177, 191,
827 142, 143, 182, 183, 192, 255, 129, 151,
828 128, 133, 134, 135, 136, 255, 145, 150,
829 151, 155, 191, 192, 255, 128, 143, 144,
830 159, 160, 255, 182, 183, 190, 191, 192,
831 255, 128, 129, 255, 173, 174, 192, 255,
832 128, 129, 154, 155, 159, 160, 255, 171,
833 173, 185, 191, 192, 255, 141, 128, 145,
834 146, 159, 160, 177, 178, 191, 173, 128,
835 145, 146, 159, 160, 176, 177, 191, 128,
836 179, 180, 191, 151, 156, 128, 191, 128,
837 159, 160, 255, 184, 191, 192, 255, 169,
838 128, 170, 171, 175, 176, 255, 182, 191,
839 192, 255, 128, 158, 159, 191, 128, 143,
840 144, 173, 174, 175, 176, 180, 181, 191,
841 128, 171, 172, 175, 176, 255, 138, 191,
842 192, 255, 128, 150, 151, 159, 160, 255,
843 149, 191, 192, 255, 167, 128, 191, 128,
844 132, 133, 179, 180, 191, 128, 132, 133,
845 139, 140, 191, 128, 130, 131, 160, 161,
846 173, 174, 175, 176, 185, 186, 255, 166,
847 191, 192, 255, 128, 163, 164, 191, 128,
848 140, 141, 143, 144, 153, 154, 189, 190,
849 191, 128, 136, 137, 191, 173, 128, 168,
850 169, 177, 178, 180, 181, 182, 183, 191,
851 0, 127, 192, 255, 150, 151, 158, 159,
852 152, 154, 156, 158, 134, 135, 142, 143,
853 190, 191, 192, 255, 181, 189, 191, 128,
854 190, 133, 181, 128, 129, 130, 140, 141,
855 143, 144, 147, 148, 149, 150, 155, 156,
856 159, 160, 172, 173, 177, 178, 188, 189,
857 191, 177, 191, 128, 190, 128, 143, 144,
858 156, 157, 191, 130, 135, 148, 164, 166,
859 168, 128, 137, 138, 149, 150, 151, 152,
860 157, 158, 169, 170, 185, 186, 187, 188,
861 191, 142, 128, 132, 133, 137, 138, 159,
862 160, 255, 137, 191, 192, 255, 175, 128,
863 255, 159, 165, 170, 175, 177, 180, 191,
864 192, 255, 166, 173, 128, 167, 168, 175,
865 176, 255, 168, 174, 176, 191, 192, 255,
866 167, 175, 183, 191, 128, 150, 151, 159,
867 160, 190, 135, 143, 151, 128, 158, 159,
868 191, 128, 132, 133, 135, 136, 160, 161,
869 169, 170, 176, 177, 181, 182, 183, 184,
870 188, 189, 191, 160, 151, 154, 187, 192,
871 255, 128, 132, 133, 173, 174, 176, 177,
872 255, 143, 159, 187, 191, 192, 255, 128,
873 175, 176, 191, 150, 191, 192, 255, 141,
874 191, 192, 255, 128, 143, 144, 189, 190,
875 191, 141, 143, 160, 169, 172, 191, 192,
876 255, 191, 128, 174, 175, 190, 128, 157,
877 158, 159, 160, 255, 176, 191, 192, 255,
878 128, 150, 151, 159, 160, 161, 162, 255,
879 175, 137, 138, 184, 191, 192, 255, 128,
880 182, 183, 255, 130, 134, 139, 163, 191,
881 192, 255, 128, 129, 130, 179, 180, 191,
882 187, 189, 128, 177, 178, 183, 184, 191,
883 128, 137, 138, 165, 166, 175, 176, 255,
884 135, 159, 189, 191, 192, 255, 128, 131,
885 132, 178, 179, 191, 143, 165, 191, 128,
886 159, 160, 175, 176, 185, 186, 190, 128,
887 168, 169, 191, 131, 186, 128, 139, 140,
888 159, 160, 182, 183, 189, 190, 255, 176,
889 178, 180, 183, 184, 190, 191, 192, 255,
890 129, 128, 130, 131, 154, 155, 157, 158,
891 159, 160, 170, 171, 177, 178, 180, 181,
892 191, 128, 167, 175, 129, 134, 135, 136,
893 137, 142, 143, 144, 145, 150, 151, 159,
894 160, 255, 155, 166, 175, 128, 162, 163,
895 191, 164, 175, 135, 138, 188, 191, 192,
896 255, 174, 175, 154, 191, 192, 255, 157,
897 169, 183, 189, 191, 128, 134, 135, 146,
898 147, 151, 152, 158, 159, 190, 130, 133,
899 128, 255, 178, 191, 192, 255, 128, 146,
900 147, 255, 190, 191, 192, 255, 128, 143,
901 144, 255, 144, 145, 136, 175, 188, 191,
902 192, 255, 181, 128, 175, 176, 255, 189,
903 191, 192, 255, 128, 160, 161, 186, 187,
904 191, 128, 129, 154, 155, 165, 166, 255,
905 191, 192, 255, 128, 129, 130, 135, 136,
906 137, 138, 143, 144, 145, 146, 151, 152,
907 153, 154, 156, 157, 191, 128, 191, 128,
908 129, 130, 131, 133, 138, 139, 140, 141,
909 142, 143, 144, 145, 146, 147, 148, 149,
910 152, 156, 157, 160, 161, 162, 163, 164,
911 166, 168, 169, 170, 171, 172, 173, 174,
912 176, 177, 132, 151, 153, 155, 158, 175,
913 178, 179, 180, 191, 140, 167, 187, 190,
914 128, 255, 142, 143, 158, 191, 192, 255,
915 187, 191, 192, 255, 128, 180, 181, 191,
916 128, 156, 157, 159, 160, 255, 145, 191,
917 192, 255, 128, 159, 160, 175, 176, 255,
918 139, 143, 182, 191, 192, 255, 144, 132,
919 135, 150, 191, 192, 255, 158, 175, 148,
920 151, 188, 191, 192, 255, 128, 167, 168,
921 175, 176, 255, 164, 191, 192, 255, 183,
922 191, 192, 255, 128, 149, 150, 159, 160,
923 167, 168, 191, 136, 182, 188, 128, 133,
924 134, 137, 138, 184, 185, 190, 191, 255,
925 150, 159, 183, 191, 192, 255, 179, 128,
926 159, 160, 181, 182, 191, 128, 149, 150,
927 159, 160, 185, 186, 191, 128, 183, 184,
928 189, 190, 191, 128, 148, 152, 129, 143,
929 144, 179, 180, 191, 128, 159, 160, 188,
930 189, 191, 128, 156, 157, 191, 136, 128,
931 164, 165, 191, 128, 181, 182, 191, 128,
932 149, 150, 159, 160, 178, 179, 191, 128,
933 145, 146, 191, 128, 178, 179, 191, 128,
934 130, 131, 132, 133, 134, 135, 136, 138,
935 139, 140, 141, 144, 145, 146, 147, 150,
936 151, 152, 153, 154, 156, 162, 163, 171,
937 176, 177, 178, 129, 191, 128, 130, 131,
938 183, 184, 191, 128, 130, 131, 175, 176,
939 191, 128, 143, 144, 168, 169, 191, 128,
940 130, 131, 166, 167, 191, 182, 128, 143,
941 144, 178, 179, 191, 128, 130, 131, 178,
942 179, 191, 128, 154, 156, 129, 132, 133,
943 191, 146, 128, 171, 172, 191, 135, 137,
944 142, 158, 128, 168, 169, 175, 176, 255,
945 159, 191, 192, 255, 144, 128, 156, 157,
946 161, 162, 191, 128, 134, 135, 138, 139,
947 191, 128, 175, 176, 191, 134, 128, 131,
948 132, 135, 136, 191, 128, 174, 175, 191,
949 128, 151, 152, 155, 156, 191, 132, 128,
950 191, 128, 170, 171, 191, 128, 153, 154,
951 191, 160, 190, 192, 255, 128, 184, 185,
952 191, 137, 128, 174, 175, 191, 128, 129,
953 177, 178, 255, 144, 191, 192, 255, 128,
954 142, 143, 144, 145, 146, 149, 129, 148,
955 150, 191, 175, 191, 192, 255, 132, 191,
956 192, 255, 128, 144, 129, 143, 145, 191,
957 144, 153, 128, 143, 145, 152, 154, 191,
958 135, 191, 192, 255, 160, 168, 169, 171,
959 172, 173, 174, 188, 189, 190, 191, 128,
960 159, 161, 167, 170, 187, 185, 191, 192,
961 255, 128, 143, 144, 173, 174, 191, 128,
962 131, 132, 162, 163, 183, 184, 188, 189,
963 255, 133, 143, 145, 191, 192, 255, 128,
964 146, 147, 159, 160, 191, 160, 128, 191,
965 128, 129, 191, 192, 255, 159, 160, 171,
966 128, 170, 172, 191, 192, 255, 173, 191,
967 192, 255, 179, 191, 192, 255, 128, 176,
968 177, 178, 129, 191, 128, 129, 130, 191,
969 171, 175, 189, 191, 192, 255, 128, 136,
970 137, 143, 144, 153, 154, 191, 144, 145,
971 146, 147, 148, 149, 154, 155, 156, 157,
972 158, 159, 128, 143, 150, 153, 160, 191,
973 149, 157, 173, 186, 188, 160, 161, 163,
974 164, 167, 168, 132, 134, 149, 157, 186,
975 191, 139, 140, 192, 255, 133, 145, 128,
976 134, 135, 137, 138, 255, 166, 167, 129,
977 155, 187, 149, 181, 143, 175, 137, 169,
978 131, 140, 191, 192, 255, 160, 163, 164,
979 165, 184, 185, 186, 128, 159, 161, 162,
980 166, 191, 133, 191, 192, 255, 132, 160,
981 163, 167, 179, 184, 186, 128, 164, 165,
982 168, 169, 187, 188, 191, 130, 135, 137,
983 139, 144, 147, 151, 153, 155, 157, 159,
984 163, 171, 179, 184, 189, 191, 128, 140,
985 141, 148, 149, 160, 161, 164, 165, 166,
986 167, 190, 138, 164, 170, 128, 155, 156,
987 160, 161, 187, 188, 191, 128, 191, 155,
988 156, 128, 191, 151, 191, 192, 255, 156,
989 157, 160, 128, 191, 181, 191, 192, 255,
990 158, 159, 186, 128, 185, 187, 191, 192,
991 255, 162, 191, 192, 255, 160, 168, 128,
992 159, 161, 167, 169, 191, 158, 191, 192,
993 255, 123, 123, 128, 191, 128, 191, 128,
994 191, 128, 191, 128, 191, 10, 123, 123,
995 128, 191, 128, 191, 128, 191, 123, 123,
996 10, 123, 128, 191, 128, 191, 128, 191,
997 123, 123, 170, 181, 183, 186, 128, 150,
998 152, 182, 184, 255, 192, 255, 128, 255,
999 173, 130, 133, 146, 159, 165, 171, 175,
1000 255, 181, 190, 184, 185, 192, 255, 140,
1001 134, 138, 142, 161, 163, 255, 182, 130,
1002 136, 137, 176, 151, 152, 154, 160, 190,
1003 136, 144, 192, 255, 135, 129, 130, 132,
1004 133, 144, 170, 176, 178, 144, 154, 160,
1005 191, 128, 169, 174, 255, 148, 169, 157,
1006 158, 189, 190, 192, 255, 144, 255, 139,
1007 140, 178, 255, 186, 128, 181, 160, 161,
1008 162, 163, 164, 165, 166, 167, 168, 169,
1009 170, 171, 172, 173, 174, 175, 176, 177,
1010 178, 179, 180, 181, 182, 183, 184, 185,
1011 186, 187, 188, 189, 190, 191, 128, 173,
1012 128, 155, 160, 180, 182, 189, 148, 161,
1013 163, 255, 176, 164, 165, 132, 169, 177,
1014 141, 142, 145, 146, 179, 181, 186, 187,
1015 158, 133, 134, 137, 138, 143, 150, 152,
1016 155, 164, 165, 178, 255, 188, 129, 131,
1017 133, 138, 143, 144, 147, 168, 170, 176,
1018 178, 179, 181, 182, 184, 185, 190, 255,
1019 157, 131, 134, 137, 138, 142, 144, 146,
1020 152, 159, 165, 182, 255, 129, 131, 133,
1021 141, 143, 145, 147, 168, 170, 176, 178,
1022 179, 181, 185, 188, 255, 134, 138, 142,
1023 143, 145, 159, 164, 165, 176, 184, 186,
1024 255, 129, 131, 133, 140, 143, 144, 147,
1025 168, 170, 176, 178, 179, 181, 185, 188,
1026 191, 177, 128, 132, 135, 136, 139, 141,
1027 150, 151, 156, 157, 159, 163, 166, 175,
1028 156, 130, 131, 133, 138, 142, 144, 146,
1029 149, 153, 154, 158, 159, 163, 164, 168,
1030 170, 174, 185, 190, 191, 144, 151, 128,
1031 130, 134, 136, 138, 141, 166, 175, 128,
1032 131, 133, 140, 142, 144, 146, 168, 170,
1033 185, 189, 255, 133, 137, 151, 142, 148,
1034 155, 159, 164, 165, 176, 255, 128, 131,
1035 133, 140, 142, 144, 146, 168, 170, 179,
1036 181, 185, 188, 191, 158, 128, 132, 134,
1037 136, 138, 141, 149, 150, 160, 163, 166,
1038 175, 177, 178, 129, 131, 133, 140, 142,
1039 144, 146, 186, 189, 255, 133, 137, 143,
1040 147, 152, 158, 164, 165, 176, 185, 192,
1041 255, 189, 130, 131, 133, 150, 154, 177,
1042 179, 187, 138, 150, 128, 134, 143, 148,
1043 152, 159, 166, 175, 178, 179, 129, 186,
1044 128, 142, 144, 153, 132, 138, 141, 165,
1045 167, 129, 130, 135, 136, 148, 151, 153,
1046 159, 161, 163, 170, 171, 173, 185, 187,
1047 189, 134, 128, 132, 136, 141, 144, 153,
1048 156, 159, 128, 181, 183, 185, 152, 153,
1049 160, 169, 190, 191, 128, 135, 137, 172,
1050 177, 191, 128, 132, 134, 151, 153, 188,
1051 134, 128, 129, 130, 131, 137, 138, 139,
1052 140, 141, 142, 143, 144, 153, 154, 155,
1053 156, 157, 158, 159, 160, 161, 162, 163,
1054 164, 165, 166, 167, 168, 169, 170, 173,
1055 175, 176, 177, 178, 179, 181, 182, 183,
1056 188, 189, 190, 191, 132, 152, 172, 184,
1057 185, 187, 128, 191, 128, 137, 144, 255,
1058 158, 159, 134, 187, 136, 140, 142, 143,
1059 137, 151, 153, 142, 143, 158, 159, 137,
1060 177, 142, 143, 182, 183, 191, 255, 128,
1061 130, 133, 136, 150, 152, 255, 145, 150,
1062 151, 155, 156, 160, 168, 178, 255, 128,
1063 143, 160, 255, 182, 183, 190, 255, 129,
1064 255, 173, 174, 192, 255, 129, 154, 160,
1065 255, 171, 173, 185, 255, 128, 140, 142,
1066 148, 160, 180, 128, 147, 160, 172, 174,
1067 176, 178, 179, 148, 150, 152, 155, 158,
1068 159, 170, 255, 139, 141, 144, 153, 160,
1069 255, 184, 255, 128, 170, 176, 255, 182,
1070 255, 128, 158, 160, 171, 176, 187, 134,
1071 173, 176, 180, 128, 171, 176, 255, 138,
1072 143, 155, 255, 128, 155, 160, 255, 159,
1073 189, 190, 192, 255, 167, 128, 137, 144,
1074 153, 176, 189, 140, 143, 154, 170, 180,
1075 255, 180, 255, 128, 183, 128, 137, 141,
1076 189, 128, 136, 144, 146, 148, 182, 184,
1077 185, 128, 181, 187, 191, 150, 151, 158,
1078 159, 152, 154, 156, 158, 134, 135, 142,
1079 143, 190, 255, 190, 128, 180, 182, 188,
1080 130, 132, 134, 140, 144, 147, 150, 155,
1081 160, 172, 178, 180, 182, 188, 128, 129,
1082 130, 131, 132, 133, 134, 176, 177, 178,
1083 179, 180, 181, 182, 183, 191, 255, 129,
1084 147, 149, 176, 178, 190, 192, 255, 144,
1085 156, 161, 144, 156, 165, 176, 130, 135,
1086 149, 164, 166, 168, 138, 147, 152, 157,
1087 170, 185, 188, 191, 142, 133, 137, 160,
1088 255, 137, 255, 128, 174, 176, 255, 159,
1089 165, 170, 180, 255, 167, 173, 128, 165,
1090 176, 255, 168, 174, 176, 190, 192, 255,
1091 128, 150, 160, 166, 168, 174, 176, 182,
1092 184, 190, 128, 134, 136, 142, 144, 150,
1093 152, 158, 160, 191, 128, 129, 130, 131,
1094 132, 133, 134, 135, 144, 145, 255, 133,
1095 135, 161, 175, 177, 181, 184, 188, 160,
1096 151, 152, 187, 192, 255, 133, 173, 177,
1097 255, 143, 159, 187, 255, 176, 191, 182,
1098 183, 184, 191, 192, 255, 150, 255, 128,
1099 146, 147, 148, 152, 153, 154, 155, 156,
1100 158, 159, 160, 161, 162, 163, 164, 165,
1101 166, 167, 168, 169, 170, 171, 172, 173,
1102 174, 175, 176, 129, 255, 141, 255, 144,
1103 189, 141, 143, 172, 255, 191, 128, 175,
1104 180, 189, 151, 159, 162, 255, 175, 137,
1105 138, 184, 255, 183, 255, 168, 255, 128,
1106 179, 188, 134, 143, 154, 159, 184, 186,
1107 190, 255, 128, 173, 176, 255, 148, 159,
1108 189, 255, 129, 142, 154, 159, 191, 255,
1109 128, 182, 128, 141, 144, 153, 160, 182,
1110 186, 255, 128, 130, 155, 157, 160, 175,
1111 178, 182, 129, 134, 137, 142, 145, 150,
1112 160, 166, 168, 174, 176, 255, 155, 166,
1113 175, 128, 170, 172, 173, 176, 185, 158,
1114 159, 160, 255, 164, 175, 135, 138, 188,
1115 255, 164, 169, 171, 172, 173, 174, 175,
1116 180, 181, 182, 183, 184, 185, 187, 188,
1117 189, 190, 191, 165, 186, 174, 175, 154,
1118 255, 190, 128, 134, 147, 151, 157, 168,
1119 170, 182, 184, 188, 128, 129, 131, 132,
1120 134, 255, 147, 255, 190, 255, 144, 145,
1121 136, 175, 188, 255, 128, 143, 160, 175,
1122 179, 180, 141, 143, 176, 180, 182, 255,
1123 189, 255, 191, 144, 153, 161, 186, 129,
1124 154, 166, 255, 191, 255, 130, 135, 138,
1125 143, 146, 151, 154, 156, 144, 145, 146,
1126 147, 148, 150, 151, 152, 155, 157, 158,
1127 160, 170, 171, 172, 175, 161, 169, 128,
1128 129, 130, 131, 133, 135, 138, 139, 140,
1129 141, 142, 143, 144, 145, 146, 147, 148,
1130 149, 152, 156, 157, 160, 161, 162, 163,
1131 164, 166, 168, 169, 170, 171, 172, 173,
1132 174, 176, 177, 153, 155, 178, 179, 128,
1133 139, 141, 166, 168, 186, 188, 189, 191,
1134 255, 142, 143, 158, 255, 187, 255, 128,
1135 180, 189, 128, 156, 160, 255, 145, 159,
1136 161, 255, 128, 159, 176, 255, 139, 143,
1137 187, 255, 128, 157, 160, 255, 144, 132,
1138 135, 150, 255, 158, 159, 170, 175, 148,
1139 151, 188, 255, 128, 167, 176, 255, 164,
1140 255, 183, 255, 128, 149, 160, 167, 136,
1141 188, 128, 133, 138, 181, 183, 184, 191,
1142 255, 150, 159, 183, 255, 128, 158, 160,
1143 178, 180, 181, 128, 149, 160, 185, 128,
1144 183, 190, 191, 191, 128, 131, 133, 134,
1145 140, 147, 149, 151, 153, 179, 184, 186,
1146 160, 188, 128, 156, 128, 135, 137, 166,
1147 128, 181, 128, 149, 160, 178, 128, 145,
1148 128, 178, 129, 130, 131, 132, 133, 135,
1149 136, 138, 139, 140, 141, 144, 145, 146,
1150 147, 150, 151, 152, 153, 154, 155, 156,
1151 162, 163, 171, 176, 177, 178, 128, 134,
1152 135, 165, 176, 190, 144, 168, 176, 185,
1153 128, 180, 182, 191, 182, 144, 179, 155,
1154 133, 137, 141, 143, 157, 255, 190, 128,
1155 145, 147, 183, 136, 128, 134, 138, 141,
1156 143, 157, 159, 168, 176, 255, 171, 175,
1157 186, 255, 128, 131, 133, 140, 143, 144,
1158 147, 168, 170, 176, 178, 179, 181, 185,
1159 188, 191, 144, 151, 128, 132, 135, 136,
1160 139, 141, 157, 163, 166, 172, 176, 180,
1161 128, 138, 144, 153, 134, 136, 143, 154,
1162 255, 128, 181, 184, 255, 129, 151, 158,
1163 255, 129, 131, 133, 143, 154, 255, 128,
1164 137, 128, 153, 157, 171, 176, 185, 160,
1165 255, 170, 190, 192, 255, 128, 184, 128,
1166 136, 138, 182, 184, 191, 128, 144, 153,
1167 178, 255, 168, 144, 145, 183, 255, 128,
1168 142, 145, 149, 129, 141, 144, 146, 147,
1169 148, 175, 255, 132, 255, 128, 144, 129,
1170 143, 144, 153, 145, 152, 135, 255, 160,
1171 168, 169, 171, 172, 173, 174, 188, 189,
1172 190, 191, 161, 167, 185, 255, 128, 158,
1173 160, 169, 144, 173, 176, 180, 128, 131,
1174 144, 153, 163, 183, 189, 255, 144, 255,
1175 133, 143, 191, 255, 143, 159, 160, 128,
1176 129, 255, 159, 160, 171, 172, 255, 173,
1177 255, 179, 255, 128, 176, 177, 178, 128,
1178 129, 171, 175, 189, 255, 128, 136, 144,
1179 153, 157, 158, 133, 134, 137, 144, 145,
1180 146, 147, 148, 149, 154, 155, 156, 157,
1181 158, 159, 168, 169, 170, 150, 153, 165,
1182 169, 173, 178, 187, 255, 131, 132, 140,
1183 169, 174, 255, 130, 132, 149, 157, 173,
1184 186, 188, 160, 161, 163, 164, 167, 168,
1185 132, 134, 149, 157, 186, 139, 140, 191,
1186 255, 134, 128, 132, 138, 144, 146, 255,
1187 166, 167, 129, 155, 187, 149, 181, 143,
1188 175, 137, 169, 131, 140, 141, 192, 255,
1189 128, 182, 187, 255, 173, 180, 182, 255,
1190 132, 155, 159, 161, 175, 128, 160, 163,
1191 164, 165, 184, 185, 186, 161, 162, 128,
1192 134, 136, 152, 155, 161, 163, 164, 166,
1193 170, 133, 143, 151, 255, 139, 143, 154,
1194 255, 164, 167, 185, 187, 128, 131, 133,
1195 159, 161, 162, 169, 178, 180, 183, 130,
1196 135, 137, 139, 148, 151, 153, 155, 157,
1197 159, 164, 190, 141, 143, 145, 146, 161,
1198 162, 167, 170, 172, 178, 180, 183, 185,
1199 188, 128, 137, 139, 155, 161, 163, 165,
1200 169, 171, 187, 155, 156, 151, 255, 156,
1201 157, 160, 181, 255, 186, 187, 255, 162,
1202 255, 160, 168, 161, 167, 158, 255, 160,
1203 132, 135, 133, 134, 176, 255, 128, 191,
1204 154, 164, 168, 128, 149, 150, 191, 128,
1205 152, 153, 191, 181, 128, 159, 160, 189,
1206 190, 191, 189, 128, 131, 132, 185, 186,
1207 191, 144, 128, 151, 152, 161, 162, 176,
1208 177, 255, 169, 177, 129, 132, 141, 142,
1209 145, 146, 179, 181, 186, 188, 190, 191,
1210 192, 255, 142, 158, 128, 155, 156, 161,
1211 162, 175, 176, 177, 178, 191, 169, 177,
1212 180, 183, 128, 132, 133, 138, 139, 142,
1213 143, 144, 145, 146, 147, 185, 186, 191,
1214 157, 128, 152, 153, 158, 159, 177, 178,
1215 180, 181, 191, 142, 146, 169, 177, 180,
1216 189, 128, 132, 133, 185, 186, 191, 144,
1217 185, 128, 159, 160, 161, 162, 191, 169,
1218 177, 180, 189, 128, 132, 133, 140, 141,
1219 142, 143, 144, 145, 146, 147, 185, 186,
1220 191, 158, 177, 128, 155, 156, 161, 162,
1221 191, 131, 145, 155, 157, 128, 132, 133,
1222 138, 139, 141, 142, 149, 150, 152, 153,
1223 159, 160, 162, 163, 164, 165, 167, 168,
1224 170, 171, 173, 174, 185, 186, 191, 144,
1225 128, 191, 141, 145, 169, 189, 128, 132,
1226 133, 185, 186, 191, 128, 151, 152, 154,
1227 155, 159, 160, 161, 162, 191, 128, 141,
1228 145, 169, 180, 189, 129, 132, 133, 185,
1229 186, 191, 158, 128, 159, 160, 161, 162,
1230 176, 177, 178, 179, 191, 141, 145, 189,
1231 128, 132, 133, 186, 187, 191, 142, 128,
1232 147, 148, 150, 151, 158, 159, 161, 162,
1233 185, 186, 191, 178, 188, 128, 132, 133,
1234 150, 151, 153, 154, 189, 190, 191, 128,
1235 134, 135, 191, 128, 177, 129, 179, 180,
1236 191, 128, 131, 137, 141, 152, 160, 164,
1237 166, 172, 177, 189, 129, 132, 133, 134,
1238 135, 138, 139, 147, 148, 167, 168, 169,
1239 170, 179, 180, 191, 133, 128, 134, 135,
1240 155, 156, 159, 160, 191, 128, 129, 191,
1241 136, 128, 172, 173, 191, 128, 135, 136,
1242 140, 141, 191, 191, 128, 170, 171, 190,
1243 161, 128, 143, 144, 149, 150, 153, 154,
1244 157, 158, 164, 165, 166, 167, 173, 174,
1245 176, 177, 180, 181, 255, 130, 141, 143,
1246 159, 134, 187, 136, 140, 142, 143, 137,
1247 151, 153, 142, 143, 158, 159, 137, 177,
1248 191, 142, 143, 182, 183, 192, 255, 129,
1249 151, 128, 133, 134, 135, 136, 255, 145,
1250 150, 151, 155, 191, 192, 255, 128, 143,
1251 144, 159, 160, 255, 182, 183, 190, 191,
1252 192, 255, 128, 129, 255, 173, 174, 192,
1253 255, 128, 129, 154, 155, 159, 160, 255,
1254 171, 173, 185, 191, 192, 255, 141, 128,
1255 145, 146, 159, 160, 177, 178, 191, 173,
1256 128, 145, 146, 159, 160, 176, 177, 191,
1257 128, 179, 180, 191, 151, 156, 128, 191,
1258 128, 159, 160, 255, 184, 191, 192, 255,
1259 169, 128, 170, 171, 175, 176, 255, 182,
1260 191, 192, 255, 128, 158, 159, 191, 128,
1261 143, 144, 173, 174, 175, 176, 180, 181,
1262 191, 128, 171, 172, 175, 176, 255, 138,
1263 191, 192, 255, 128, 150, 151, 159, 160,
1264 255, 149, 191, 192, 255, 167, 128, 191,
1265 128, 132, 133, 179, 180, 191, 128, 132,
1266 133, 139, 140, 191, 128, 130, 131, 160,
1267 161, 173, 174, 175, 176, 185, 186, 255,
1268 166, 191, 192, 255, 128, 163, 164, 191,
1269 128, 140, 141, 143, 144, 153, 154, 189,
1270 190, 191, 128, 136, 137, 191, 173, 128,
1271 168, 169, 177, 178, 180, 181, 182, 183,
1272 191, 0, 127, 192, 255, 150, 151, 158,
1273 159, 152, 154, 156, 158, 134, 135, 142,
1274 143, 190, 191, 192, 255, 181, 189, 191,
1275 128, 190, 133, 181, 128, 129, 130, 140,
1276 141, 143, 144, 147, 148, 149, 150, 155,
1277 156, 159, 160, 172, 173, 177, 178, 188,
1278 189, 191, 177, 191, 128, 190, 128, 143,
1279 144, 156, 157, 191, 130, 135, 148, 164,
1280 166, 168, 128, 137, 138, 149, 150, 151,
1281 152, 157, 158, 169, 170, 185, 186, 187,
1282 188, 191, 142, 128, 132, 133, 137, 138,
1283 159, 160, 255, 137, 191, 192, 255, 175,
1284 128, 255, 159, 165, 170, 175, 177, 180,
1285 191, 192, 255, 166, 173, 128, 167, 168,
1286 175, 176, 255, 168, 174, 176, 191, 192,
1287 255, 167, 175, 183, 191, 128, 150, 151,
1288 159, 160, 190, 135, 143, 151, 128, 158,
1289 159, 191, 128, 132, 133, 135, 136, 160,
1290 161, 169, 170, 176, 177, 181, 182, 183,
1291 184, 188, 189, 191, 160, 151, 154, 187,
1292 192, 255, 128, 132, 133, 173, 174, 176,
1293 177, 255, 143, 159, 187, 191, 192, 255,
1294 128, 175, 176, 191, 150, 191, 192, 255,
1295 141, 191, 192, 255, 128, 143, 144, 189,
1296 190, 191, 141, 143, 160, 169, 172, 191,
1297 192, 255, 191, 128, 174, 175, 190, 128,
1298 157, 158, 159, 160, 255, 176, 191, 192,
1299 255, 128, 150, 151, 159, 160, 161, 162,
1300 255, 175, 137, 138, 184, 191, 192, 255,
1301 128, 182, 183, 255, 130, 134, 139, 163,
1302 191, 192, 255, 128, 129, 130, 179, 180,
1303 191, 187, 189, 128, 177, 178, 183, 184,
1304 191, 128, 137, 138, 165, 166, 175, 176,
1305 255, 135, 159, 189, 191, 192, 255, 128,
1306 131, 132, 178, 179, 191, 143, 165, 191,
1307 128, 159, 160, 175, 176, 185, 186, 190,
1308 128, 168, 169, 191, 131, 186, 128, 139,
1309 140, 159, 160, 182, 183, 189, 190, 255,
1310 176, 178, 180, 183, 184, 190, 191, 192,
1311 255, 129, 128, 130, 131, 154, 155, 157,
1312 158, 159, 160, 170, 171, 177, 178, 180,
1313 181, 191, 128, 167, 175, 129, 134, 135,
1314 136, 137, 142, 143, 144, 145, 150, 151,
1315 159, 160, 255, 155, 166, 175, 128, 162,
1316 163, 191, 164, 175, 135, 138, 188, 191,
1317 192, 255, 174, 175, 154, 191, 192, 255,
1318 157, 169, 183, 189, 191, 128, 134, 135,
1319 146, 147, 151, 152, 158, 159, 190, 130,
1320 133, 128, 255, 178, 191, 192, 255, 128,
1321 146, 147, 255, 190, 191, 192, 255, 128,
1322 143, 144, 255, 144, 145, 136, 175, 188,
1323 191, 192, 255, 181, 128, 175, 176, 255,
1324 189, 191, 192, 255, 128, 160, 161, 186,
1325 187, 191, 128, 129, 154, 155, 165, 166,
1326 255, 191, 192, 255, 128, 129, 130, 135,
1327 136, 137, 138, 143, 144, 145, 146, 151,
1328 152, 153, 154, 156, 157, 191, 128, 191,
1329 128, 129, 130, 131, 133, 138, 139, 140,
1330 141, 142, 143, 144, 145, 146, 147, 148,
1331 149, 152, 156, 157, 160, 161, 162, 163,
1332 164, 166, 168, 169, 170, 171, 172, 173,
1333 174, 176, 177, 132, 151, 153, 155, 158,
1334 175, 178, 179, 180, 191, 140, 167, 187,
1335 190, 128, 255, 142, 143, 158, 191, 192,
1336 255, 187, 191, 192, 255, 128, 180, 181,
1337 191, 128, 156, 157, 159, 160, 255, 145,
1338 191, 192, 255, 128, 159, 160, 175, 176,
1339 255, 139, 143, 182, 191, 192, 255, 144,
1340 132, 135, 150, 191, 192, 255, 158, 175,
1341 148, 151, 188, 191, 192, 255, 128, 167,
1342 168, 175, 176, 255, 164, 191, 192, 255,
1343 183, 191, 192, 255, 128, 149, 150, 159,
1344 160, 167, 168, 191, 136, 182, 188, 128,
1345 133, 134, 137, 138, 184, 185, 190, 191,
1346 255, 150, 159, 183, 191, 192, 255, 179,
1347 128, 159, 160, 181, 182, 191, 128, 149,
1348 150, 159, 160, 185, 186, 191, 128, 183,
1349 184, 189, 190, 191, 128, 148, 152, 129,
1350 143, 144, 179, 180, 191, 128, 159, 160,
1351 188, 189, 191, 128, 156, 157, 191, 136,
1352 128, 164, 165, 191, 128, 181, 182, 191,
1353 128, 149, 150, 159, 160, 178, 179, 191,
1354 128, 145, 146, 191, 128, 178, 179, 191,
1355 128, 130, 131, 132, 133, 134, 135, 136,
1356 138, 139, 140, 141, 144, 145, 146, 147,
1357 150, 151, 152, 153, 154, 156, 162, 163,
1358 171, 176, 177, 178, 129, 191, 128, 130,
1359 131, 183, 184, 191, 128, 130, 131, 175,
1360 176, 191, 128, 143, 144, 168, 169, 191,
1361 128, 130, 131, 166, 167, 191, 182, 128,
1362 143, 144, 178, 179, 191, 128, 130, 131,
1363 178, 179, 191, 128, 154, 156, 129, 132,
1364 133, 191, 146, 128, 171, 172, 191, 135,
1365 137, 142, 158, 128, 168, 169, 175, 176,
1366 255, 159, 191, 192, 255, 144, 128, 156,
1367 157, 161, 162, 191, 128, 134, 135, 138,
1368 139, 191, 128, 175, 176, 191, 134, 128,
1369 131, 132, 135, 136, 191, 128, 174, 175,
1370 191, 128, 151, 152, 155, 156, 191, 132,
1371 128, 191, 128, 170, 171, 191, 128, 153,
1372 154, 191, 160, 190, 192, 255, 128, 184,
1373 185, 191, 137, 128, 174, 175, 191, 128,
1374 129, 177, 178, 255, 144, 191, 192, 255,
1375 128, 142, 143, 144, 145, 146, 149, 129,
1376 148, 150, 191, 175, 191, 192, 255, 132,
1377 191, 192, 255, 128, 144, 129, 143, 145,
1378 191, 144, 153, 128, 143, 145, 152, 154,
1379 191, 135, 191, 192, 255, 160, 168, 169,
1380 171, 172, 173, 174, 188, 189, 190, 191,
1381 128, 159, 161, 167, 170, 187, 185, 191,
1382 192, 255, 128, 143, 144, 173, 174, 191,
1383 128, 131, 132, 162, 163, 183, 184, 188,
1384 189, 255, 133, 143, 145, 191, 192, 255,
1385 128, 146, 147, 159, 160, 191, 160, 128,
1386 191, 128, 129, 191, 192, 255, 159, 160,
1387 171, 128, 170, 172, 191, 192, 255, 173,
1388 191, 192, 255, 179, 191, 192, 255, 128,
1389 176, 177, 178, 129, 191, 128, 129, 130,
1390 191, 171, 175, 189, 191, 192, 255, 128,
1391 136, 137, 143, 144, 153, 154, 191, 144,
1392 145, 146, 147, 148, 149, 154, 155, 156,
1393 157, 158, 159, 128, 143, 150, 153, 160,
1394 191, 149, 157, 173, 186, 188, 160, 161,
1395 163, 164, 167, 168, 132, 134, 149, 157,
1396 186, 191, 139, 140, 192, 255, 133, 145,
1397 128, 134, 135, 137, 138, 255, 166, 167,
1398 129, 155, 187, 149, 181, 143, 175, 137,
1399 169, 131, 140, 191, 192, 255, 160, 163,
1400 164, 165, 184, 185, 186, 128, 159, 161,
1401 162, 166, 191, 133, 191, 192, 255, 132,
1402 160, 163, 167, 179, 184, 186, 128, 164,
1403 165, 168, 169, 187, 188, 191, 130, 135,
1404 137, 139, 144, 147, 151, 153, 155, 157,
1405 159, 163, 171, 179, 184, 189, 191, 128,
1406 140, 141, 148, 149, 160, 161, 164, 165,
1407 166, 167, 190, 138, 164, 170, 128, 155,
1408 156, 160, 161, 187, 188, 191, 128, 191,
1409 155, 156, 128, 191, 151, 191, 192, 255,
1410 156, 157, 160, 128, 191, 181, 191, 192,
1411 255, 158, 159, 186, 128, 185, 187, 191,
1412 192, 255, 162, 191, 192, 255, 160, 168,
1413 128, 159, 161, 167, 169, 191, 158, 191,
1414 192, 255, 9, 10, 13, 32, 33, 34,
1415 35, 37, 38, 46, 47, 60, 61, 62,
1416 64, 92, 95, 123, 124, 125, 126, 127,
1417 194, 195, 198, 199, 203, 204, 205, 206,
1418 207, 210, 212, 213, 214, 215, 216, 217,
1419 219, 220, 221, 222, 223, 224, 225, 226,
1420 227, 228, 233, 234, 237, 238, 239, 240,
1421 0, 39, 40, 45, 48, 57, 58, 63,
1422 65, 90, 91, 96, 97, 122, 192, 193,
1423 196, 218, 229, 236, 241, 247, 9, 32,
1424 10, 61, 10, 38, 46, 42, 47, 42,
1425 46, 69, 101, 48, 57, 60, 61, 61,
1426 62, 61, 45, 95, 194, 195, 198, 199,
1427 203, 204, 205, 206, 207, 210, 212, 213,
1428 214, 215, 216, 217, 219, 220, 221, 222,
1429 223, 224, 225, 226, 227, 228, 233, 234,
1430 237, 239, 240, 243, 48, 57, 65, 90,
1431 97, 122, 196, 218, 229, 236, 124, 125,
1432 128, 191, 170, 181, 186, 128, 191, 151,
1433 183, 128, 255, 192, 255, 0, 127, 173,
1434 130, 133, 146, 159, 165, 171, 175, 191,
1435 192, 255, 181, 190, 128, 175, 176, 183,
1436 184, 185, 186, 191, 134, 139, 141, 162,
1437 128, 135, 136, 255, 182, 130, 137, 176,
1438 151, 152, 154, 160, 136, 191, 192, 255,
1439 128, 143, 144, 170, 171, 175, 176, 178,
1440 179, 191, 128, 159, 160, 191, 176, 128,
1441 138, 139, 173, 174, 255, 148, 150, 164,
1442 167, 173, 176, 185, 189, 190, 192, 255,
1443 144, 128, 145, 146, 175, 176, 191, 128,
1444 140, 141, 255, 166, 176, 178, 191, 192,
1445 255, 186, 128, 137, 138, 170, 171, 179,
1446 180, 181, 182, 191, 160, 161, 162, 164,
1447 165, 166, 167, 168, 169, 170, 171, 172,
1448 173, 174, 175, 176, 177, 178, 179, 180,
1449 181, 182, 183, 184, 185, 186, 187, 188,
1450 189, 190, 128, 191, 128, 129, 130, 131,
1451 137, 138, 139, 140, 141, 142, 143, 144,
1452 153, 154, 155, 156, 157, 158, 159, 160,
1453 161, 162, 163, 164, 165, 166, 167, 168,
1454 169, 170, 171, 172, 173, 174, 175, 176,
1455 177, 178, 179, 180, 182, 183, 184, 188,
1456 189, 190, 191, 132, 187, 129, 130, 132,
1457 133, 134, 176, 177, 178, 179, 180, 181,
1458 182, 183, 128, 191, 128, 129, 130, 131,
1459 132, 133, 134, 135, 144, 136, 143, 145,
1460 191, 192, 255, 182, 183, 184, 128, 191,
1461 128, 191, 191, 128, 190, 192, 255, 128,
1462 146, 147, 148, 152, 153, 154, 155, 156,
1463 158, 159, 160, 161, 162, 163, 164, 165,
1464 166, 167, 168, 169, 170, 171, 172, 173,
1465 174, 175, 176, 129, 191, 192, 255, 158,
1466 159, 128, 157, 160, 191, 192, 255, 128,
1467 191, 164, 169, 171, 172, 173, 174, 175,
1468 180, 181, 182, 183, 184, 185, 187, 188,
1469 189, 190, 191, 128, 163, 165, 186, 144,
1470 145, 146, 147, 148, 150, 151, 152, 155,
1471 157, 158, 160, 170, 171, 172, 175, 128,
1472 159, 161, 169, 173, 191, 128, 191, 10,
1473 13, 34, 36, 37, 92, 128, 191, 192,
1474 223, 224, 239, 240, 247, 248, 255, 10,
1475 13, 34, 36, 37, 92, 128, 191, 192,
1476 223, 224, 239, 240, 247, 248, 255, 10,
1477 13, 34, 36, 37, 92, 128, 191, 192,
1478 223, 224, 239, 240, 247, 248, 255, 10,
1479 13, 34, 36, 37, 92, 128, 191, 192,
1480 223, 224, 239, 240, 247, 248, 255, 10,
1481 13, 36, 37, 92, 128, 191, 192, 223,
1482 224, 239, 240, 247, 248, 255, 36, 37,
1483 92, 123, 192, 223, 224, 239, 240, 247,
1484 10, 13, 34, 36, 37, 92, 123, 128,
1485 191, 192, 223, 224, 239, 240, 247, 248,
1486 255, 10, 13, 34, 36, 37, 92, 123,
1487 128, 191, 192, 223, 224, 239, 240, 247,
1488 248, 255, 10, 13, 34, 36, 37, 92,
1489 123, 128, 191, 192, 223, 224, 239, 240,
1490 247, 248, 255, 10, 13, 34, 36, 37,
1491 92, 128, 191, 192, 223, 224, 239, 240,
1492 247, 248, 255, 36, 37, 92, 123, 192,
1493 223, 224, 239, 240, 247, 10, 13, 34,
1494 36, 37, 92, 123, 128, 191, 192, 223,
1495 224, 239, 240, 247, 248, 255, 10, 13,
1496 34, 36, 37, 92, 128, 191, 192, 223,
1497 224, 239, 240, 247, 248, 255, 10, 13,
1498 34, 36, 37, 92, 128, 191, 192, 223,
1499 224, 239, 240, 247, 248, 255, 10, 13,
1500 34, 36, 37, 92, 128, 191, 192, 223,
1501 224, 239, 240, 247, 248, 255, 10, 13,
1502 34, 36, 37, 92, 128, 191, 192, 223,
1503 224, 239, 240, 247, 248, 255, 10, 13,
1504 34, 36, 37, 92, 128, 191, 192, 223,
1505 224, 239, 240, 247, 248, 255, 10, 13,
1506 34, 36, 37, 92, 128, 191, 192, 223,
1507 224, 239, 240, 247, 248, 255, 10, 13,
1508 34, 36, 37, 92, 128, 191, 192, 223,
1509 224, 239, 240, 247, 248, 255, 123, 126,
1510 123, 126, 128, 191, 128, 191, 128, 191,
1511 10, 13, 36, 37, 128, 191, 192, 223,
1512 224, 239, 240, 247, 248, 255, 10, 13,
1513 36, 37, 128, 191, 192, 223, 224, 239,
1514 240, 247, 248, 255, 10, 13, 36, 37,
1515 128, 191, 192, 223, 224, 239, 240, 247,
1516 248, 255, 10, 13, 36, 37, 128, 191,
1517 192, 223, 224, 239, 240, 247, 248, 255,
1518 126, 126, 128, 191, 128, 191, 128, 191,
1519 10, 13, 36, 37, 128, 191, 192, 223,
1520 224, 239, 240, 247, 248, 255, 10, 13,
1521 36, 37, 128, 191, 192, 223, 224, 239,
1522 240, 247, 248, 255, 126, 126, 128, 191,
1523 128, 191, 128, 191, 95, 194, 195, 198,
1524 199, 203, 204, 205, 206, 207, 210, 212,
1525 213, 214, 215, 216, 217, 219, 220, 221,
1526 222, 223, 224, 225, 226, 227, 228, 233,
1527 234, 237, 238, 239, 240, 65, 90, 97,
1528 122, 128, 191, 192, 193, 196, 218, 229,
1529 236, 241, 247, 248, 255, 45, 95, 194,
1530 195, 198, 199, 203, 204, 205, 206, 207,
1531 210, 212, 213, 214, 215, 216, 217, 219,
1532 220, 221, 222, 223, 224, 225, 226, 227,
1533 228, 233, 234, 237, 239, 240, 243, 48,
1534 57, 65, 90, 97, 122, 196, 218, 229,
1535 236, 128, 191, 170, 181, 186, 128, 191,
1536 151, 183, 128, 255, 192, 255, 0, 127,
1537 173, 130, 133, 146, 159, 165, 171, 175,
1538 191, 192, 255, 181, 190, 128, 175, 176,
1539 183, 184, 185, 186, 191, 134, 139, 141,
1540 162, 128, 135, 136, 255, 182, 130, 137,
1541 176, 151, 152, 154, 160, 136, 191, 192,
1542 255, 128, 143, 144, 170, 171, 175, 176,
1543 178, 179, 191, 128, 159, 160, 191, 176,
1544 128, 138, 139, 173, 174, 255, 148, 150,
1545 164, 167, 173, 176, 185, 189, 190, 192,
1546 255, 144, 128, 145, 146, 175, 176, 191,
1547 128, 140, 141, 255, 166, 176, 178, 191,
1548 192, 255, 186, 128, 137, 138, 170, 171,
1549 179, 180, 181, 182, 191, 160, 161, 162,
1550 164, 165, 166, 167, 168, 169, 170, 171,
1551 172, 173, 174, 175, 176, 177, 178, 179,
1552 180, 181, 182, 183, 184, 185, 186, 187,
1553 188, 189, 190, 128, 191, 128, 129, 130,
1554 131, 137, 138, 139, 140, 141, 142, 143,
1555 144, 153, 154, 155, 156, 157, 158, 159,
1556 160, 161, 162, 163, 164, 165, 166, 167,
1557 168, 169, 170, 171, 172, 173, 174, 175,
1558 176, 177, 178, 179, 180, 182, 183, 184,
1559 188, 189, 190, 191, 132, 187, 129, 130,
1560 132, 133, 134, 176, 177, 178, 179, 180,
1561 181, 182, 183, 128, 191, 128, 129, 130,
1562 131, 132, 133, 134, 135, 144, 136, 143,
1563 145, 191, 192, 255, 182, 183, 184, 128,
1564 191, 128, 191, 191, 128, 190, 192, 255,
1565 128, 146, 147, 148, 152, 153, 154, 155,
1566 156, 158, 159, 160, 161, 162, 163, 164,
1567 165, 166, 167, 168, 169, 170, 171, 172,
1568 173, 174, 175, 176, 129, 191, 192, 255,
1569 158, 159, 128, 157, 160, 191, 192, 255,
1570 128, 191, 164, 169, 171, 172, 173, 174,
1571 175, 180, 181, 182, 183, 184, 185, 187,
1572 188, 189, 190, 191, 128, 163, 165, 186,
1573 144, 145, 146, 147, 148, 150, 151, 152,
1574 155, 157, 158, 160, 170, 171, 172, 175,
1575 128, 159, 161, 169, 173, 191, 128, 191,
1576}
1577
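// Ragel-generated scanner table (not hand-written): per-state count of
// single-character transition keys for the hcltok state machine. Table
// layout follows Ragel's standard table-driven Go output.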
1578var _hcltok_single_lengths []byte = []byte{
1579 0, 1, 1, 1, 2, 3, 2, 0,
1580 32, 31, 36, 1, 4, 0, 0, 0,
1581 0, 1, 2, 1, 1, 1, 1, 0,
1582 1, 1, 0, 0, 2, 0, 0, 0,
1583 1, 32, 0, 0, 0, 0, 1, 3,
1584 1, 1, 1, 0, 2, 0, 1, 1,
1585 2, 0, 3, 0, 1, 0, 2, 1,
1586 2, 0, 0, 5, 1, 4, 0, 0,
1587 1, 43, 0, 0, 0, 2, 3, 2,
1588 1, 1, 0, 0, 0, 0, 0, 0,
1589 0, 0, 0, 0, 0, 0, 0, 0,
1590 0, 0, 0, 0, 0, 1, 1, 0,
1591 0, 0, 0, 0, 0, 0, 0, 4,
1592 1, 0, 15, 0, 0, 0, 1, 6,
1593 1, 0, 0, 1, 0, 2, 0, 0,
1594 0, 9, 0, 1, 1, 0, 0, 0,
1595 3, 0, 1, 0, 28, 0, 0, 0,
1596 1, 0, 1, 0, 0, 0, 1, 0,
1597 0, 0, 0, 0, 0, 0, 1, 0,
1598 2, 0, 0, 18, 0, 0, 1, 0,
1599 0, 0, 0, 0, 0, 0, 0, 1,
1600 0, 0, 0, 16, 36, 0, 0, 0,
1601 0, 1, 0, 0, 0, 0, 0, 1,
1602 0, 0, 0, 0, 0, 0, 2, 0,
1603 0, 0, 0, 0, 1, 0, 0, 0,
1604 0, 0, 0, 0, 28, 0, 0, 0,
1605 1, 1, 1, 1, 0, 0, 2, 0,
1606 1, 0, 0, 0, 0, 0, 0, 0,
1607 0, 0, 1, 1, 4, 0, 0, 2,
1608 2, 0, 11, 0, 0, 0, 0, 0,
1609 0, 0, 1, 1, 3, 0, 0, 4,
1610 0, 0, 0, 18, 0, 0, 0, 1,
1611 4, 1, 4, 1, 0, 3, 2, 2,
1612 2, 1, 0, 0, 1, 8, 0, 0,
1613 0, 4, 12, 0, 2, 0, 3, 0,
1614 1, 0, 2, 0, 1, 2, 0, 3,
1615 1, 2, 0, 0, 0, 0, 0, 1,
1616 1, 0, 0, 1, 28, 3, 0, 1,
1617 1, 2, 1, 0, 1, 1, 2, 1,
1618 1, 2, 1, 1, 0, 2, 1, 1,
1619 1, 1, 0, 0, 6, 1, 1, 0,
1620 0, 46, 1, 1, 0, 0, 0, 0,
1621 2, 1, 0, 0, 0, 1, 0, 0,
1622 0, 0, 0, 0, 0, 13, 2, 0,
1623 0, 0, 9, 0, 1, 28, 0, 1,
1624 3, 0, 2, 0, 0, 0, 1, 0,
1625 1, 1, 2, 0, 18, 2, 0, 0,
1626 16, 35, 0, 0, 0, 1, 0, 28,
1627 0, 0, 0, 0, 1, 0, 2, 0,
1628 0, 1, 0, 0, 1, 0, 0, 1,
1629 0, 0, 0, 0, 1, 11, 0, 0,
1630 0, 0, 4, 0, 12, 1, 7, 0,
1631 4, 0, 0, 0, 0, 1, 2, 1,
1632 1, 1, 1, 0, 1, 1, 0, 0,
1633 2, 0, 0, 0, 1, 32, 0, 0,
1634 0, 0, 1, 3, 1, 1, 1, 0,
1635 2, 0, 1, 1, 2, 0, 3, 0,
1636 1, 0, 2, 1, 2, 0, 0, 5,
1637 1, 4, 0, 0, 1, 43, 0, 0,
1638 0, 2, 3, 2, 1, 1, 0, 0,
1639 0, 0, 0, 0, 0, 0, 0, 0,
1640 0, 0, 0, 0, 0, 0, 0, 0,
1641 0, 1, 1, 0, 0, 0, 0, 0,
1642 0, 0, 0, 4, 1, 0, 15, 0,
1643 0, 0, 1, 6, 1, 0, 0, 1,
1644 0, 2, 0, 0, 0, 9, 0, 1,
1645 1, 0, 0, 0, 3, 0, 1, 0,
1646 28, 0, 0, 0, 1, 0, 1, 0,
1647 0, 0, 1, 0, 0, 0, 0, 0,
1648 0, 0, 1, 0, 2, 0, 0, 18,
1649 0, 0, 1, 0, 0, 0, 0, 0,
1650 0, 0, 0, 1, 0, 0, 0, 16,
1651 36, 0, 0, 0, 0, 1, 0, 0,
1652 0, 0, 0, 1, 0, 0, 0, 0,
1653 0, 0, 2, 0, 0, 0, 0, 0,
1654 1, 0, 0, 0, 0, 0, 0, 0,
1655 28, 0, 0, 0, 1, 1, 1, 1,
1656 0, 0, 2, 0, 1, 0, 0, 0,
1657 0, 0, 0, 0, 0, 0, 1, 1,
1658 4, 0, 0, 2, 2, 0, 11, 0,
1659 0, 0, 0, 0, 0, 0, 1, 1,
1660 3, 0, 0, 4, 0, 0, 0, 18,
1661 0, 0, 0, 1, 4, 1, 4, 1,
1662 0, 3, 2, 2, 2, 1, 0, 0,
1663 1, 8, 0, 0, 0, 4, 12, 0,
1664 2, 0, 3, 0, 1, 0, 2, 0,
1665 1, 2, 0, 0, 3, 0, 1, 1,
1666 1, 2, 2, 4, 1, 6, 2, 4,
1667 2, 4, 1, 4, 0, 6, 1, 3,
1668 1, 2, 0, 2, 11, 1, 1, 1,
1669 0, 1, 1, 0, 2, 0, 3, 3,
1670 2, 1, 0, 0, 0, 1, 0, 1,
1671 0, 1, 1, 0, 2, 0, 0, 1,
1672 0, 0, 0, 0, 0, 0, 0, 1,
1673 0, 0, 0, 0, 0, 0, 0, 1,
1674 0, 0, 0, 4, 3, 2, 2, 0,
1675 6, 1, 0, 1, 1, 0, 2, 0,
1676 4, 3, 0, 1, 1, 0, 0, 0,
1677 0, 0, 0, 0, 1, 0, 0, 0,
1678 1, 0, 3, 0, 2, 0, 0, 0,
1679 3, 0, 2, 1, 1, 3, 1, 0,
1680 0, 0, 0, 0, 5, 2, 0, 0,
1681 0, 0, 0, 0, 1, 0, 0, 1,
1682 1, 0, 0, 35, 4, 0, 0, 0,
1683 0, 0, 0, 0, 1, 0, 0, 0,
1684 0, 0, 0, 3, 0, 1, 0, 0,
1685 3, 0, 0, 1, 0, 0, 0, 0,
1686 28, 0, 0, 0, 0, 1, 0, 3,
1687 1, 4, 0, 1, 0, 0, 1, 0,
1688 0, 1, 0, 0, 0, 0, 1, 1,
1689 0, 7, 0, 0, 2, 2, 0, 11,
1690 0, 0, 0, 0, 0, 1, 1, 3,
1691 0, 0, 4, 0, 0, 0, 12, 1,
1692 4, 1, 5, 2, 0, 3, 2, 2,
1693 2, 1, 7, 0, 7, 17, 3, 0,
1694 2, 0, 3, 0, 0, 1, 0, 2,
1695 0, 1, 1, 0, 0, 0, 0, 0,
1696 1, 1, 1, 0, 0, 0, 1, 1,
1697 1, 1, 0, 0, 0, 1, 1, 4,
1698 0, 0, 0, 0, 1, 2, 1, 1,
1699 1, 1, 0, 1, 1, 0, 0, 2,
1700 0, 0, 0, 1, 32, 0, 0, 0,
1701 0, 1, 3, 1, 1, 1, 0, 2,
1702 0, 1, 1, 2, 0, 3, 0, 1,
1703 0, 2, 1, 2, 0, 0, 5, 1,
1704 4, 0, 0, 1, 43, 0, 0, 0,
1705 2, 3, 2, 1, 1, 0, 0, 0,
1706 0, 0, 0, 0, 0, 0, 0, 0,
1707 0, 0, 0, 0, 0, 0, 0, 0,
1708 1, 1, 0, 0, 0, 0, 0, 0,
1709 0, 0, 4, 1, 0, 15, 0, 0,
1710 0, 1, 6, 1, 0, 0, 1, 0,
1711 2, 0, 0, 0, 9, 0, 1, 1,
1712 0, 0, 0, 3, 0, 1, 0, 28,
1713 0, 0, 0, 1, 0, 1, 0, 0,
1714 0, 1, 0, 0, 0, 0, 0, 0,
1715 0, 1, 0, 2, 0, 0, 18, 0,
1716 0, 1, 0, 0, 0, 0, 0, 0,
1717 0, 0, 1, 0, 0, 0, 16, 36,
1718 0, 0, 0, 0, 1, 0, 0, 0,
1719 0, 0, 1, 0, 0, 0, 0, 0,
1720 0, 2, 0, 0, 0, 0, 0, 1,
1721 0, 0, 0, 0, 0, 0, 0, 28,
1722 0, 0, 0, 1, 1, 1, 1, 0,
1723 0, 2, 0, 1, 0, 0, 0, 0,
1724 0, 0, 0, 0, 0, 1, 1, 4,
1725 0, 0, 2, 2, 0, 11, 0, 0,
1726 0, 0, 0, 0, 0, 1, 1, 3,
1727 0, 0, 4, 0, 0, 0, 18, 0,
1728 0, 0, 1, 4, 1, 4, 1, 0,
1729 3, 2, 2, 2, 1, 0, 0, 1,
1730 8, 0, 0, 0, 4, 12, 0, 2,
1731 0, 3, 0, 1, 0, 2, 0, 1,
1732 2, 0, 0, 3, 0, 1, 1, 1,
1733 2, 2, 4, 1, 6, 2, 4, 2,
1734 4, 1, 4, 0, 6, 1, 3, 1,
1735 2, 0, 2, 11, 1, 1, 1, 0,
1736 1, 1, 0, 2, 0, 3, 3, 2,
1737 1, 0, 0, 0, 1, 0, 1, 0,
1738 1, 1, 0, 2, 0, 0, 1, 0,
1739 0, 0, 0, 0, 0, 0, 1, 0,
1740 0, 0, 0, 0, 0, 0, 1, 0,
1741 0, 0, 4, 3, 2, 2, 0, 6,
1742 1, 0, 1, 1, 0, 2, 0, 4,
1743 3, 0, 1, 1, 0, 0, 0, 0,
1744 0, 0, 0, 1, 0, 0, 0, 1,
1745 0, 3, 0, 2, 0, 0, 0, 3,
1746 0, 2, 1, 1, 3, 1, 0, 0,
1747 0, 0, 0, 5, 2, 0, 0, 0,
1748 0, 0, 0, 1, 0, 0, 1, 1,
1749 0, 0, 35, 4, 0, 0, 0, 0,
1750 0, 0, 0, 1, 0, 0, 0, 0,
1751 0, 0, 3, 0, 1, 0, 0, 3,
1752 0, 0, 1, 0, 0, 0, 0, 28,
1753 0, 0, 0, 0, 1, 0, 3, 1,
1754 4, 0, 1, 0, 0, 1, 0, 0,
1755 1, 0, 0, 0, 0, 1, 1, 0,
1756 7, 0, 0, 2, 2, 0, 11, 0,
1757 0, 0, 0, 0, 1, 1, 3, 0,
1758 0, 4, 0, 0, 0, 12, 1, 4,
1759 1, 5, 2, 0, 3, 2, 2, 2,
1760 1, 7, 0, 7, 17, 3, 0, 2,
1761 0, 3, 0, 0, 1, 0, 2, 0,
1762 54, 2, 1, 1, 1, 1, 1, 2,
1763 1, 3, 2, 2, 1, 34, 1, 1,
1764 0, 3, 2, 0, 0, 0, 1, 2,
1765 4, 1, 0, 1, 0, 0, 0, 0,
1766 1, 1, 1, 0, 0, 1, 30, 47,
1767 13, 9, 3, 0, 1, 28, 2, 0,
1768 18, 16, 0, 6, 6, 6, 6, 5,
1769 4, 7, 7, 7, 6, 4, 7, 6,
1770 6, 6, 6, 6, 6, 6, 1, 1,
1771 1, 1, 0, 0, 0, 4, 4, 4,
1772 4, 1, 1, 0, 0, 0, 4, 2,
1773 1, 1, 0, 0, 0, 33, 34, 0,
1774 3, 2, 0, 0, 0, 1, 2, 4,
1775 1, 0, 1, 0, 0, 0, 0, 1,
1776 1, 1, 0, 0, 1, 30, 47, 13,
1777 9, 3, 0, 1, 28, 2, 0, 18,
1778 16, 0,
1779}
1780
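// Ragel-generated scanner table: per-state count of character-range
// transition key pairs (lo, hi) for the hcltok state machine.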
1781var _hcltok_range_lengths []byte = []byte{
1782 0, 0, 0, 0, 0, 1, 1, 1,
1783 5, 5, 5, 0, 0, 3, 0, 1,
1784 1, 4, 2, 3, 0, 1, 0, 2,
1785 2, 4, 2, 2, 3, 1, 1, 1,
1786 1, 0, 1, 1, 2, 2, 1, 4,
1787 6, 9, 6, 8, 5, 8, 7, 10,
1788 4, 6, 4, 7, 7, 5, 5, 4,
1789 5, 1, 2, 8, 4, 3, 3, 3,
1790 0, 3, 1, 2, 1, 2, 2, 3,
1791 3, 1, 3, 2, 2, 1, 2, 2,
1792 2, 3, 4, 4, 3, 1, 2, 1,
1793 3, 2, 2, 2, 2, 2, 3, 3,
1794 1, 1, 2, 1, 3, 2, 2, 3,
1795 2, 7, 0, 1, 4, 1, 2, 4,
1796 2, 1, 2, 0, 2, 2, 3, 5,
1797 5, 1, 4, 1, 1, 2, 2, 1,
1798 0, 0, 1, 1, 1, 1, 1, 2,
1799 2, 2, 2, 1, 1, 1, 4, 2,
1800 2, 3, 1, 4, 4, 6, 1, 3,
1801 1, 1, 2, 1, 1, 1, 5, 3,
1802 1, 1, 1, 2, 3, 3, 1, 2,
1803 2, 1, 4, 1, 2, 5, 2, 1,
1804 1, 0, 2, 2, 2, 2, 2, 2,
1805 2, 2, 2, 1, 1, 2, 4, 2,
1806 1, 2, 2, 2, 6, 1, 1, 2,
1807 1, 2, 1, 1, 1, 2, 2, 2,
1808 1, 3, 2, 5, 2, 8, 6, 2,
1809 2, 2, 2, 3, 1, 3, 1, 2,
1810 1, 3, 2, 2, 3, 1, 1, 1,
1811 1, 1, 1, 1, 2, 2, 4, 1,
1812 2, 1, 0, 1, 1, 1, 1, 0,
1813 1, 2, 3, 1, 3, 3, 1, 0,
1814 3, 0, 2, 3, 1, 0, 0, 0,
1815 0, 2, 2, 2, 2, 1, 5, 2,
1816 2, 5, 7, 5, 0, 1, 0, 1,
1817 1, 1, 1, 1, 0, 1, 1, 0,
1818 3, 3, 1, 1, 2, 1, 3, 5,
1819 1, 1, 2, 2, 1, 1, 1, 1,
1820 2, 6, 3, 7, 2, 6, 1, 6,
1821 2, 8, 0, 4, 2, 5, 2, 3,
1822 3, 3, 1, 2, 8, 2, 0, 2,
1823 1, 2, 1, 5, 2, 1, 3, 3,
1824 0, 2, 1, 2, 1, 0, 1, 1,
1825 3, 1, 1, 2, 3, 0, 0, 3,
1826 2, 4, 1, 4, 1, 1, 3, 1,
1827 1, 1, 1, 2, 2, 1, 3, 1,
1828 4, 3, 3, 1, 1, 5, 2, 1,
1829 1, 2, 1, 2, 1, 3, 2, 0,
1830 1, 1, 1, 1, 1, 1, 1, 2,
1831 1, 1, 1, 1, 1, 1, 1, 0,
1832 1, 1, 2, 2, 1, 1, 1, 3,
1833 2, 1, 0, 2, 1, 1, 1, 1,
1834 0, 3, 0, 1, 1, 4, 2, 3,
1835 0, 1, 0, 2, 2, 4, 2, 2,
1836 3, 1, 1, 1, 1, 0, 1, 1,
1837 2, 2, 1, 4, 6, 9, 6, 8,
1838 5, 8, 7, 10, 4, 6, 4, 7,
1839 7, 5, 5, 4, 5, 1, 2, 8,
1840 4, 3, 3, 3, 0, 3, 1, 2,
1841 1, 2, 2, 3, 3, 1, 3, 2,
1842 2, 1, 2, 2, 2, 3, 4, 4,
1843 3, 1, 2, 1, 3, 2, 2, 2,
1844 2, 2, 3, 3, 1, 1, 2, 1,
1845 3, 2, 2, 3, 2, 7, 0, 1,
1846 4, 1, 2, 4, 2, 1, 2, 0,
1847 2, 2, 3, 5, 5, 1, 4, 1,
1848 1, 2, 2, 1, 0, 0, 1, 1,
1849 1, 1, 1, 2, 2, 2, 2, 1,
1850 1, 1, 4, 2, 2, 3, 1, 4,
1851 4, 6, 1, 3, 1, 1, 2, 1,
1852 1, 1, 5, 3, 1, 1, 1, 2,
1853 3, 3, 1, 2, 2, 1, 4, 1,
1854 2, 5, 2, 1, 1, 0, 2, 2,
1855 2, 2, 2, 2, 2, 2, 2, 1,
1856 1, 2, 4, 2, 1, 2, 2, 2,
1857 6, 1, 1, 2, 1, 2, 1, 1,
1858 1, 2, 2, 2, 1, 3, 2, 5,
1859 2, 8, 6, 2, 2, 2, 2, 3,
1860 1, 3, 1, 2, 1, 3, 2, 2,
1861 3, 1, 1, 1, 1, 1, 1, 1,
1862 2, 2, 4, 1, 2, 1, 0, 1,
1863 1, 1, 1, 0, 1, 2, 3, 1,
1864 3, 3, 1, 0, 3, 0, 2, 3,
1865 1, 0, 0, 0, 0, 2, 2, 2,
1866 2, 1, 5, 2, 2, 5, 7, 5,
1867 0, 1, 0, 1, 1, 1, 1, 1,
1868 0, 1, 1, 1, 2, 2, 3, 3,
1869 4, 7, 5, 7, 5, 3, 3, 7,
1870 3, 13, 1, 3, 5, 3, 5, 3,
1871 6, 5, 2, 2, 8, 4, 1, 2,
1872 3, 2, 10, 2, 2, 0, 2, 3,
1873 3, 1, 2, 3, 3, 1, 2, 3,
1874 3, 4, 4, 2, 1, 2, 2, 3,
1875 2, 2, 5, 3, 2, 3, 2, 1,
1876 3, 3, 6, 2, 2, 5, 2, 5,
1877 1, 1, 2, 4, 1, 11, 1, 3,
1878 8, 4, 2, 1, 0, 4, 3, 3,
1879 3, 2, 9, 1, 1, 4, 3, 2,
1880 2, 2, 3, 4, 2, 3, 2, 4,
1881 3, 2, 2, 3, 3, 4, 3, 3,
1882 4, 2, 5, 4, 8, 7, 1, 2,
1883 1, 3, 1, 2, 5, 1, 2, 2,
1884 2, 2, 1, 3, 2, 2, 3, 3,
1885 1, 9, 1, 5, 1, 3, 2, 2,
1886 3, 2, 3, 3, 3, 1, 3, 3,
1887 2, 2, 4, 5, 3, 3, 4, 3,
1888 3, 3, 2, 2, 2, 4, 2, 2,
1889 1, 3, 3, 3, 3, 3, 3, 2,
1890 2, 3, 2, 3, 3, 2, 3, 2,
1891 3, 1, 2, 2, 2, 2, 2, 2,
1892 2, 2, 2, 2, 2, 3, 2, 3,
1893 2, 3, 5, 3, 3, 1, 2, 3,
1894 2, 2, 1, 2, 3, 4, 3, 0,
1895 3, 0, 2, 3, 1, 0, 0, 0,
1896 0, 2, 3, 2, 4, 6, 4, 1,
1897 1, 2, 1, 2, 1, 3, 2, 3,
1898 2, 0, 0, 1, 1, 1, 1, 1,
1899 0, 0, 0, 1, 1, 1, 0, 0,
1900 0, 0, 1, 1, 1, 0, 0, 0,
1901 3, 0, 1, 1, 4, 2, 3, 0,
1902 1, 0, 2, 2, 4, 2, 2, 3,
1903 1, 1, 1, 1, 0, 1, 1, 2,
1904 2, 1, 4, 6, 9, 6, 8, 5,
1905 8, 7, 10, 4, 6, 4, 7, 7,
1906 5, 5, 4, 5, 1, 2, 8, 4,
1907 3, 3, 3, 0, 3, 1, 2, 1,
1908 2, 2, 3, 3, 1, 3, 2, 2,
1909 1, 2, 2, 2, 3, 4, 4, 3,
1910 1, 2, 1, 3, 2, 2, 2, 2,
1911 2, 3, 3, 1, 1, 2, 1, 3,
1912 2, 2, 3, 2, 7, 0, 1, 4,
1913 1, 2, 4, 2, 1, 2, 0, 2,
1914 2, 3, 5, 5, 1, 4, 1, 1,
1915 2, 2, 1, 0, 0, 1, 1, 1,
1916 1, 1, 2, 2, 2, 2, 1, 1,
1917 1, 4, 2, 2, 3, 1, 4, 4,
1918 6, 1, 3, 1, 1, 2, 1, 1,
1919 1, 5, 3, 1, 1, 1, 2, 3,
1920 3, 1, 2, 2, 1, 4, 1, 2,
1921 5, 2, 1, 1, 0, 2, 2, 2,
1922 2, 2, 2, 2, 2, 2, 1, 1,
1923 2, 4, 2, 1, 2, 2, 2, 6,
1924 1, 1, 2, 1, 2, 1, 1, 1,
1925 2, 2, 2, 1, 3, 2, 5, 2,
1926 8, 6, 2, 2, 2, 2, 3, 1,
1927 3, 1, 2, 1, 3, 2, 2, 3,
1928 1, 1, 1, 1, 1, 1, 1, 2,
1929 2, 4, 1, 2, 1, 0, 1, 1,
1930 1, 1, 0, 1, 2, 3, 1, 3,
1931 3, 1, 0, 3, 0, 2, 3, 1,
1932 0, 0, 0, 0, 2, 2, 2, 2,
1933 1, 5, 2, 2, 5, 7, 5, 0,
1934 1, 0, 1, 1, 1, 1, 1, 0,
1935 1, 1, 1, 2, 2, 3, 3, 4,
1936 7, 5, 7, 5, 3, 3, 7, 3,
1937 13, 1, 3, 5, 3, 5, 3, 6,
1938 5, 2, 2, 8, 4, 1, 2, 3,
1939 2, 10, 2, 2, 0, 2, 3, 3,
1940 1, 2, 3, 3, 1, 2, 3, 3,
1941 4, 4, 2, 1, 2, 2, 3, 2,
1942 2, 5, 3, 2, 3, 2, 1, 3,
1943 3, 6, 2, 2, 5, 2, 5, 1,
1944 1, 2, 4, 1, 11, 1, 3, 8,
1945 4, 2, 1, 0, 4, 3, 3, 3,
1946 2, 9, 1, 1, 4, 3, 2, 2,
1947 2, 3, 4, 2, 3, 2, 4, 3,
1948 2, 2, 3, 3, 4, 3, 3, 4,
1949 2, 5, 4, 8, 7, 1, 2, 1,
1950 3, 1, 2, 5, 1, 2, 2, 2,
1951 2, 1, 3, 2, 2, 3, 3, 1,
1952 9, 1, 5, 1, 3, 2, 2, 3,
1953 2, 3, 3, 3, 1, 3, 3, 2,
1954 2, 4, 5, 3, 3, 4, 3, 3,
1955 3, 2, 2, 2, 4, 2, 2, 1,
1956 3, 3, 3, 3, 3, 3, 2, 2,
1957 3, 2, 3, 3, 2, 3, 2, 3,
1958 1, 2, 2, 2, 2, 2, 2, 2,
1959 2, 2, 2, 2, 3, 2, 3, 2,
1960 3, 5, 3, 3, 1, 2, 3, 2,
1961 2, 1, 2, 3, 4, 3, 0, 3,
1962 0, 2, 3, 1, 0, 0, 0, 0,
1963 2, 3, 2, 4, 6, 4, 1, 1,
1964 2, 1, 2, 1, 3, 2, 3, 2,
1965 11, 0, 0, 0, 0, 0, 0, 0,
1966 0, 1, 0, 0, 0, 5, 0, 0,
1967 1, 1, 1, 0, 1, 1, 5, 4,
1968 2, 0, 1, 0, 2, 2, 5, 2,
1969 3, 5, 3, 2, 3, 5, 1, 1,
1970 1, 3, 1, 1, 2, 2, 3, 1,
1971 2, 3, 1, 5, 5, 5, 5, 5,
1972 3, 5, 5, 5, 5, 3, 5, 5,
1973 5, 5, 5, 5, 5, 5, 0, 0,
1974 0, 0, 1, 1, 1, 5, 5, 5,
1975 5, 0, 0, 1, 1, 1, 5, 6,
1976 0, 0, 1, 1, 1, 8, 5, 1,
1977 1, 1, 0, 1, 1, 5, 4, 2,
1978 0, 1, 0, 2, 2, 5, 2, 3,
1979 5, 3, 2, 3, 5, 1, 1, 1,
1980 3, 1, 1, 2, 2, 3, 1, 2,
1981 3, 1,
1982}
1983
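// Ragel-generated scanner table: per-state starting offset into the
// transition-key and transition-index tables.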
1984var _hcltok_index_offsets []int16 = []int16{
1985 0, 0, 2, 4, 6, 9, 14, 18,
1986 20, 58, 95, 137, 139, 144, 148, 149,
1987 151, 153, 159, 164, 169, 171, 174, 176,
1988 179, 183, 189, 192, 195, 201, 203, 205,
1989 207, 210, 243, 245, 247, 250, 253, 256,
1990 264, 272, 283, 291, 300, 308, 317, 326,
1991 338, 345, 352, 360, 368, 377, 383, 391,
1992 397, 405, 407, 410, 424, 430, 438, 442,
1993 446, 448, 495, 497, 500, 502, 507, 513,
1994 519, 524, 527, 531, 534, 537, 539, 542,
1995 545, 548, 552, 557, 562, 566, 568, 571,
1996 573, 577, 580, 583, 586, 589, 593, 598,
1997 602, 604, 606, 609, 611, 615, 618, 621,
1998 629, 633, 641, 657, 659, 664, 666, 670,
1999 681, 685, 687, 690, 692, 695, 700, 704,
2000 710, 716, 727, 732, 735, 738, 741, 744,
2001 746, 750, 751, 754, 756, 786, 788, 790,
2002 793, 797, 800, 804, 806, 808, 810, 816,
2003 819, 822, 826, 828, 833, 838, 845, 848,
2004 852, 856, 858, 861, 881, 883, 885, 892,
2005 896, 898, 900, 902, 905, 909, 913, 915,
2006 919, 922, 924, 929, 947, 986, 992, 995,
2007 997, 999, 1001, 1004, 1007, 1010, 1013, 1016,
2008 1020, 1023, 1026, 1029, 1031, 1033, 1036, 1043,
2009 1046, 1048, 1051, 1054, 1057, 1065, 1067, 1069,
2010 1072, 1074, 1077, 1079, 1081, 1111, 1114, 1117,
2011 1120, 1123, 1128, 1132, 1139, 1142, 1151, 1160,
2012 1163, 1167, 1170, 1173, 1177, 1179, 1183, 1185,
2013 1188, 1190, 1194, 1198, 1202, 1210, 1212, 1214,
2014 1218, 1222, 1224, 1237, 1239, 1242, 1245, 1250,
2015 1252, 1255, 1257, 1259, 1262, 1267, 1269, 1271,
2016 1276, 1278, 1281, 1285, 1305, 1309, 1313, 1315,
2017 1317, 1325, 1327, 1334, 1339, 1341, 1345, 1348,
2018 1351, 1354, 1358, 1361, 1364, 1368, 1378, 1384,
2019 1387, 1390, 1400, 1420, 1426, 1429, 1431, 1435,
2020 1437, 1440, 1442, 1446, 1448, 1450, 1454, 1456,
2021 1460, 1465, 1471, 1473, 1475, 1478, 1480, 1484,
2022 1491, 1494, 1496, 1499, 1503, 1533, 1538, 1540,
2023 1543, 1547, 1556, 1561, 1569, 1573, 1581, 1585,
2024 1593, 1597, 1608, 1610, 1616, 1619, 1627, 1631,
2025 1636, 1641, 1646, 1648, 1651, 1666, 1670, 1672,
2026 1675, 1677, 1726, 1729, 1736, 1739, 1741, 1745,
2027 1749, 1752, 1756, 1758, 1761, 1763, 1765, 1767,
2028 1769, 1773, 1775, 1777, 1780, 1784, 1798, 1801,
2029 1805, 1808, 1813, 1824, 1829, 1832, 1862, 1866,
2030 1869, 1874, 1876, 1880, 1883, 1886, 1888, 1893,
2031 1895, 1901, 1906, 1912, 1914, 1934, 1942, 1945,
2032 1947, 1965, 2003, 2005, 2008, 2010, 2015, 2018,
2033 2047, 2049, 2051, 2053, 2055, 2058, 2060, 2064,
2034 2067, 2069, 2072, 2074, 2076, 2079, 2081, 2083,
2035 2085, 2087, 2089, 2092, 2095, 2098, 2111, 2113,
2036 2117, 2120, 2122, 2127, 2130, 2144, 2147, 2156,
2037 2158, 2163, 2167, 2168, 2170, 2172, 2178, 2183,
2038 2188, 2190, 2193, 2195, 2198, 2202, 2208, 2211,
2039 2214, 2220, 2222, 2224, 2226, 2229, 2262, 2264,
2040 2266, 2269, 2272, 2275, 2283, 2291, 2302, 2310,
2041 2319, 2327, 2336, 2345, 2357, 2364, 2371, 2379,
2042 2387, 2396, 2402, 2410, 2416, 2424, 2426, 2429,
2043 2443, 2449, 2457, 2461, 2465, 2467, 2514, 2516,
2044 2519, 2521, 2526, 2532, 2538, 2543, 2546, 2550,
2045 2553, 2556, 2558, 2561, 2564, 2567, 2571, 2576,
2046 2581, 2585, 2587, 2590, 2592, 2596, 2599, 2602,
2047 2605, 2608, 2612, 2617, 2621, 2623, 2625, 2628,
2048 2630, 2634, 2637, 2640, 2648, 2652, 2660, 2676,
2049 2678, 2683, 2685, 2689, 2700, 2704, 2706, 2709,
2050 2711, 2714, 2719, 2723, 2729, 2735, 2746, 2751,
2051 2754, 2757, 2760, 2763, 2765, 2769, 2770, 2773,
2052 2775, 2805, 2807, 2809, 2812, 2816, 2819, 2823,
2053 2825, 2827, 2829, 2835, 2838, 2841, 2845, 2847,
2054 2852, 2857, 2864, 2867, 2871, 2875, 2877, 2880,
2055 2900, 2902, 2904, 2911, 2915, 2917, 2919, 2921,
2056 2924, 2928, 2932, 2934, 2938, 2941, 2943, 2948,
2057 2966, 3005, 3011, 3014, 3016, 3018, 3020, 3023,
2058 3026, 3029, 3032, 3035, 3039, 3042, 3045, 3048,
2059 3050, 3052, 3055, 3062, 3065, 3067, 3070, 3073,
2060 3076, 3084, 3086, 3088, 3091, 3093, 3096, 3098,
2061 3100, 3130, 3133, 3136, 3139, 3142, 3147, 3151,
2062 3158, 3161, 3170, 3179, 3182, 3186, 3189, 3192,
2063 3196, 3198, 3202, 3204, 3207, 3209, 3213, 3217,
2064 3221, 3229, 3231, 3233, 3237, 3241, 3243, 3256,
2065 3258, 3261, 3264, 3269, 3271, 3274, 3276, 3278,
2066 3281, 3286, 3288, 3290, 3295, 3297, 3300, 3304,
2067 3324, 3328, 3332, 3334, 3336, 3344, 3346, 3353,
2068 3358, 3360, 3364, 3367, 3370, 3373, 3377, 3380,
2069 3383, 3387, 3397, 3403, 3406, 3409, 3419, 3439,
2070 3445, 3448, 3450, 3454, 3456, 3459, 3461, 3465,
2071 3467, 3469, 3473, 3475, 3477, 3483, 3486, 3491,
2072 3496, 3502, 3512, 3520, 3532, 3539, 3549, 3555,
2073 3567, 3573, 3591, 3594, 3602, 3608, 3618, 3625,
2074 3632, 3640, 3648, 3651, 3656, 3676, 3682, 3685,
2075 3689, 3693, 3697, 3709, 3712, 3717, 3718, 3724,
2076 3731, 3737, 3740, 3743, 3747, 3751, 3754, 3757,
2077 3762, 3766, 3772, 3778, 3781, 3785, 3788, 3791,
2078 3796, 3799, 3802, 3808, 3812, 3815, 3819, 3822,
2079 3825, 3829, 3833, 3840, 3843, 3846, 3852, 3855,
2080 3862, 3864, 3866, 3869, 3878, 3883, 3897, 3901,
2081 3905, 3920, 3926, 3929, 3932, 3934, 3939, 3945,
2082 3949, 3957, 3963, 3973, 3976, 3979, 3984, 3988,
2083 3991, 3994, 3997, 4001, 4006, 4010, 4014, 4017,
2084 4022, 4027, 4030, 4036, 4040, 4046, 4051, 4055,
2085 4059, 4067, 4070, 4078, 4084, 4094, 4105, 4108,
2086 4111, 4113, 4117, 4119, 4122, 4133, 4137, 4140,
2087 4143, 4146, 4149, 4151, 4155, 4159, 4162, 4166,
2088 4171, 4174, 4184, 4186, 4227, 4233, 4237, 4240,
2089 4243, 4247, 4250, 4254, 4258, 4263, 4265, 4269,
2090 4273, 4276, 4279, 4284, 4293, 4297, 4302, 4307,
2091 4311, 4318, 4322, 4325, 4329, 4332, 4337, 4340,
2092 4343, 4373, 4377, 4381, 4385, 4389, 4394, 4398,
2093 4404, 4408, 4416, 4419, 4424, 4428, 4431, 4436,
2094 4439, 4443, 4446, 4449, 4452, 4455, 4458, 4462,
2095 4466, 4469, 4479, 4482, 4485, 4490, 4496, 4499,
2096 4514, 4517, 4521, 4527, 4531, 4535, 4538, 4542,
2097 4549, 4552, 4555, 4561, 4564, 4568, 4573, 4589,
2098 4591, 4599, 4601, 4609, 4615, 4617, 4621, 4624,
2099 4627, 4630, 4634, 4645, 4648, 4660, 4684, 4692,
2100 4694, 4698, 4701, 4706, 4709, 4711, 4716, 4719,
2101 4725, 4728, 4730, 4732, 4734, 4736, 4738, 4740,
2102 4742, 4744, 4746, 4748, 4750, 4752, 4754, 4756,
2103 4758, 4760, 4762, 4764, 4766, 4768, 4770, 4772,
2104 4777, 4781, 4782, 4784, 4786, 4792, 4797, 4802,
2105 4804, 4807, 4809, 4812, 4816, 4822, 4825, 4828,
2106 4834, 4836, 4838, 4840, 4843, 4876, 4878, 4880,
2107 4883, 4886, 4889, 4897, 4905, 4916, 4924, 4933,
2108 4941, 4950, 4959, 4971, 4978, 4985, 4993, 5001,
2109 5010, 5016, 5024, 5030, 5038, 5040, 5043, 5057,
2110 5063, 5071, 5075, 5079, 5081, 5128, 5130, 5133,
2111 5135, 5140, 5146, 5152, 5157, 5160, 5164, 5167,
2112 5170, 5172, 5175, 5178, 5181, 5185, 5190, 5195,
2113 5199, 5201, 5204, 5206, 5210, 5213, 5216, 5219,
2114 5222, 5226, 5231, 5235, 5237, 5239, 5242, 5244,
2115 5248, 5251, 5254, 5262, 5266, 5274, 5290, 5292,
2116 5297, 5299, 5303, 5314, 5318, 5320, 5323, 5325,
2117 5328, 5333, 5337, 5343, 5349, 5360, 5365, 5368,
2118 5371, 5374, 5377, 5379, 5383, 5384, 5387, 5389,
2119 5419, 5421, 5423, 5426, 5430, 5433, 5437, 5439,
2120 5441, 5443, 5449, 5452, 5455, 5459, 5461, 5466,
2121 5471, 5478, 5481, 5485, 5489, 5491, 5494, 5514,
2122 5516, 5518, 5525, 5529, 5531, 5533, 5535, 5538,
2123 5542, 5546, 5548, 5552, 5555, 5557, 5562, 5580,
2124 5619, 5625, 5628, 5630, 5632, 5634, 5637, 5640,
2125 5643, 5646, 5649, 5653, 5656, 5659, 5662, 5664,
2126 5666, 5669, 5676, 5679, 5681, 5684, 5687, 5690,
2127 5698, 5700, 5702, 5705, 5707, 5710, 5712, 5714,
2128 5744, 5747, 5750, 5753, 5756, 5761, 5765, 5772,
2129 5775, 5784, 5793, 5796, 5800, 5803, 5806, 5810,
2130 5812, 5816, 5818, 5821, 5823, 5827, 5831, 5835,
2131 5843, 5845, 5847, 5851, 5855, 5857, 5870, 5872,
2132 5875, 5878, 5883, 5885, 5888, 5890, 5892, 5895,
2133 5900, 5902, 5904, 5909, 5911, 5914, 5918, 5938,
2134 5942, 5946, 5948, 5950, 5958, 5960, 5967, 5972,
2135 5974, 5978, 5981, 5984, 5987, 5991, 5994, 5997,
2136 6001, 6011, 6017, 6020, 6023, 6033, 6053, 6059,
2137 6062, 6064, 6068, 6070, 6073, 6075, 6079, 6081,
2138 6083, 6087, 6089, 6091, 6097, 6100, 6105, 6110,
2139 6116, 6126, 6134, 6146, 6153, 6163, 6169, 6181,
2140 6187, 6205, 6208, 6216, 6222, 6232, 6239, 6246,
2141 6254, 6262, 6265, 6270, 6290, 6296, 6299, 6303,
2142 6307, 6311, 6323, 6326, 6331, 6332, 6338, 6345,
2143 6351, 6354, 6357, 6361, 6365, 6368, 6371, 6376,
2144 6380, 6386, 6392, 6395, 6399, 6402, 6405, 6410,
2145 6413, 6416, 6422, 6426, 6429, 6433, 6436, 6439,
2146 6443, 6447, 6454, 6457, 6460, 6466, 6469, 6476,
2147 6478, 6480, 6483, 6492, 6497, 6511, 6515, 6519,
2148 6534, 6540, 6543, 6546, 6548, 6553, 6559, 6563,
2149 6571, 6577, 6587, 6590, 6593, 6598, 6602, 6605,
2150 6608, 6611, 6615, 6620, 6624, 6628, 6631, 6636,
2151 6641, 6644, 6650, 6654, 6660, 6665, 6669, 6673,
2152 6681, 6684, 6692, 6698, 6708, 6719, 6722, 6725,
2153 6727, 6731, 6733, 6736, 6747, 6751, 6754, 6757,
2154 6760, 6763, 6765, 6769, 6773, 6776, 6780, 6785,
2155 6788, 6798, 6800, 6841, 6847, 6851, 6854, 6857,
2156 6861, 6864, 6868, 6872, 6877, 6879, 6883, 6887,
2157 6890, 6893, 6898, 6907, 6911, 6916, 6921, 6925,
2158 6932, 6936, 6939, 6943, 6946, 6951, 6954, 6957,
2159 6987, 6991, 6995, 6999, 7003, 7008, 7012, 7018,
2160 7022, 7030, 7033, 7038, 7042, 7045, 7050, 7053,
2161 7057, 7060, 7063, 7066, 7069, 7072, 7076, 7080,
2162 7083, 7093, 7096, 7099, 7104, 7110, 7113, 7128,
2163 7131, 7135, 7141, 7145, 7149, 7152, 7156, 7163,
2164 7166, 7169, 7175, 7178, 7182, 7187, 7203, 7205,
2165 7213, 7215, 7223, 7229, 7231, 7235, 7238, 7241,
2166 7244, 7248, 7259, 7262, 7274, 7298, 7306, 7308,
2167 7312, 7315, 7320, 7323, 7325, 7330, 7333, 7339,
2168 7342, 7408, 7411, 7413, 7415, 7417, 7419, 7421,
2169 7424, 7426, 7431, 7434, 7437, 7439, 7479, 7481,
2170 7483, 7485, 7490, 7494, 7495, 7497, 7499, 7506,
2171 7513, 7520, 7522, 7524, 7526, 7529, 7532, 7538,
2172 7541, 7546, 7553, 7558, 7561, 7565, 7572, 7604,
2173 7653, 7668, 7681, 7686, 7688, 7692, 7723, 7729,
2174 7731, 7752, 7772, 7774, 7786, 7798, 7810, 7822,
2175 7833, 7841, 7854, 7867, 7880, 7892, 7900, 7913,
2176 7925, 7937, 7949, 7961, 7973, 7985, 7997, 7999,
2177 8001, 8003, 8005, 8007, 8009, 8011, 8021, 8031,
2178 8041, 8051, 8053, 8055, 8057, 8059, 8061, 8071,
2179 8080, 8082, 8084, 8086, 8088, 8090, 8132, 8172,
2180 8174, 8179, 8183, 8184, 8186, 8188, 8195, 8202,
2181 8209, 8211, 8213, 8215, 8218, 8221, 8227, 8230,
2182 8235, 8242, 8247, 8250, 8254, 8261, 8293, 8342,
2183 8357, 8370, 8375, 8377, 8381, 8412, 8418, 8420,
2184 8441, 8461,
2185}
2186
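// Ragel-generated scanner table: maps matched key positions to transition
// indices, which are then used to look up target states and actions.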
2187var _hcltok_indicies []int16 = []int16{
2188 2, 1, 4, 3, 6, 5, 6, 7,
2189 5, 9, 11, 11, 10, 8, 12, 12,
2190 10, 8, 10, 8, 13, 14, 15, 16,
2191 18, 19, 20, 21, 22, 23, 24, 25,
2192 26, 27, 28, 29, 30, 31, 32, 33,
2193 34, 35, 36, 37, 38, 39, 40, 42,
2194 43, 44, 45, 46, 14, 14, 17, 17,
2195 41, 3, 14, 15, 16, 18, 19, 20,
2196 21, 22, 23, 24, 25, 26, 27, 28,
2197 29, 30, 31, 32, 33, 34, 35, 36,
2198 37, 38, 39, 40, 42, 43, 44, 45,
2199 46, 14, 14, 17, 17, 41, 3, 47,
2200 48, 14, 14, 49, 16, 18, 19, 20,
2201 19, 50, 51, 23, 52, 25, 26, 53,
2202 54, 55, 56, 57, 58, 59, 60, 61,
2203 62, 63, 64, 65, 40, 42, 66, 44,
2204 67, 68, 69, 14, 14, 14, 17, 41,
2205 3, 47, 3, 14, 14, 14, 14, 3,
2206 14, 14, 14, 3, 14, 3, 14, 3,
2207 14, 3, 3, 3, 3, 3, 14, 3,
2208 3, 3, 3, 14, 14, 14, 14, 14,
2209 3, 3, 14, 3, 3, 14, 3, 14,
2210 3, 3, 14, 3, 3, 3, 14, 14,
2211 14, 14, 14, 14, 3, 14, 14, 3,
2212 14, 14, 3, 3, 3, 3, 3, 3,
2213 14, 14, 3, 3, 14, 3, 14, 14,
2214 14, 3, 70, 71, 72, 73, 17, 74,
2215 75, 76, 77, 78, 79, 80, 81, 82,
2216 83, 84, 85, 86, 87, 88, 89, 90,
2217 91, 92, 93, 94, 95, 96, 97, 98,
2218 99, 100, 3, 14, 3, 14, 3, 14,
2219 14, 3, 14, 14, 3, 3, 3, 14,
2220 3, 3, 3, 3, 3, 3, 3, 14,
2221 3, 3, 3, 3, 3, 3, 3, 14,
2222 14, 14, 14, 14, 14, 14, 14, 14,
2223 14, 14, 3, 3, 3, 3, 3, 3,
2224 3, 3, 14, 14, 14, 14, 14, 14,
2225 14, 14, 14, 3, 3, 3, 3, 3,
2226 3, 3, 3, 14, 14, 14, 14, 14,
2227 14, 14, 14, 14, 3, 14, 14, 14,
2228 14, 14, 14, 14, 14, 3, 14, 14,
2229 14, 14, 14, 14, 14, 14, 14, 14,
2230 14, 3, 14, 14, 14, 14, 14, 14,
2231 3, 14, 14, 14, 14, 14, 14, 3,
2232 3, 3, 3, 3, 3, 3, 3, 14,
2233 14, 14, 14, 14, 14, 14, 14, 3,
2234 14, 14, 14, 14, 14, 14, 14, 14,
2235 3, 14, 14, 14, 14, 14, 3, 3,
2236 3, 3, 3, 3, 3, 3, 14, 14,
2237 14, 14, 14, 14, 3, 14, 14, 14,
2238 14, 14, 14, 14, 3, 14, 3, 14,
2239 14, 3, 14, 14, 14, 14, 14, 14,
2240 14, 14, 14, 14, 14, 14, 14, 3,
2241 14, 14, 14, 14, 14, 3, 14, 14,
2242 14, 14, 14, 14, 14, 3, 14, 14,
2243 14, 3, 14, 14, 14, 3, 14, 3,
2244 101, 102, 103, 104, 105, 106, 107, 108,
2245 109, 110, 111, 112, 113, 114, 115, 116,
2246 117, 19, 118, 119, 120, 121, 122, 123,
2247 124, 125, 126, 127, 128, 129, 130, 131,
2248 132, 133, 134, 135, 17, 18, 136, 137,
2249 138, 139, 140, 17, 19, 17, 3, 14,
2250 3, 14, 14, 3, 3, 14, 3, 3,
2251 3, 3, 14, 3, 3, 3, 3, 3,
2252 14, 3, 3, 3, 3, 3, 14, 14,
2253 14, 14, 14, 3, 3, 3, 14, 3,
2254 3, 3, 14, 14, 14, 3, 3, 3,
2255 14, 14, 3, 3, 3, 14, 14, 14,
2256 3, 3, 3, 14, 14, 14, 14, 3,
2257 14, 14, 14, 14, 3, 3, 3, 3,
2258 3, 14, 14, 14, 14, 3, 3, 14,
2259 14, 14, 3, 3, 14, 14, 14, 14,
2260 3, 14, 14, 3, 14, 14, 3, 3,
2261 3, 14, 14, 14, 3, 3, 3, 3,
2262 14, 14, 14, 14, 14, 3, 3, 3,
2263 3, 14, 3, 14, 14, 3, 14, 14,
2264 3, 14, 3, 14, 14, 14, 3, 14,
2265 14, 3, 3, 3, 14, 3, 3, 3,
2266 3, 3, 3, 3, 14, 14, 14, 14,
2267 3, 14, 14, 14, 14, 14, 14, 14,
2268 3, 141, 142, 143, 144, 145, 146, 147,
2269 148, 149, 17, 150, 151, 152, 153, 154,
2270 3, 14, 3, 3, 3, 3, 3, 14,
2271 14, 3, 14, 14, 14, 3, 14, 14,
2272 14, 14, 14, 14, 14, 14, 14, 14,
2273 3, 14, 14, 14, 3, 3, 14, 14,
2274 14, 3, 3, 14, 3, 3, 14, 14,
2275 14, 14, 14, 3, 3, 3, 3, 14,
2276 14, 14, 14, 14, 14, 3, 14, 14,
2277 14, 14, 14, 3, 155, 112, 156, 157,
2278 158, 17, 159, 160, 19, 17, 3, 14,
2279 14, 14, 14, 3, 3, 3, 14, 3,
2280 3, 14, 14, 14, 3, 3, 3, 14,
2281 14, 3, 122, 3, 19, 17, 17, 161,
2282 3, 17, 3, 14, 19, 162, 163, 19,
2283 164, 165, 19, 60, 166, 167, 168, 169,
2284 170, 19, 171, 172, 173, 19, 174, 175,
2285 176, 18, 177, 178, 179, 18, 180, 19,
2286 17, 3, 3, 14, 14, 3, 3, 3,
2287 14, 14, 14, 14, 3, 14, 14, 3,
2288 3, 3, 3, 14, 14, 3, 3, 14,
2289 14, 3, 3, 3, 3, 3, 3, 14,
2290 14, 14, 3, 3, 3, 14, 3, 3,
2291 3, 14, 14, 3, 14, 14, 14, 14,
2292 3, 14, 14, 14, 14, 3, 14, 14,
2293 14, 14, 14, 14, 3, 3, 3, 14,
2294 14, 14, 14, 3, 181, 182, 3, 17,
2295 3, 14, 3, 3, 14, 19, 183, 184,
2296 185, 186, 60, 187, 188, 58, 189, 190,
2297 191, 192, 193, 194, 195, 196, 197, 17,
2298 3, 3, 14, 3, 14, 14, 14, 14,
2299 14, 14, 14, 3, 14, 14, 14, 3,
2300 14, 3, 3, 14, 3, 14, 3, 3,
2301 14, 14, 14, 14, 3, 14, 14, 14,
2302 3, 3, 14, 14, 14, 14, 3, 14,
2303 14, 3, 3, 14, 14, 14, 14, 14,
2304 3, 198, 199, 200, 201, 202, 203, 204,
2305 205, 206, 207, 208, 204, 209, 210, 211,
2306 212, 41, 3, 213, 214, 19, 215, 216,
2307 217, 218, 219, 220, 221, 222, 223, 19,
2308 17, 224, 225, 226, 227, 19, 228, 229,
2309 230, 231, 232, 233, 234, 235, 236, 237,
2310 238, 239, 240, 241, 242, 19, 147, 17,
2311 243, 3, 14, 14, 14, 14, 14, 3,
2312 3, 3, 14, 3, 14, 14, 3, 14,
2313 3, 14, 14, 3, 3, 3, 14, 14,
2314 14, 3, 3, 3, 14, 14, 14, 3,
2315 3, 3, 3, 14, 3, 3, 14, 3,
2316 3, 14, 14, 14, 3, 3, 14, 3,
2317 14, 14, 14, 3, 14, 14, 14, 14,
2318 14, 14, 3, 3, 3, 14, 14, 3,
2319 14, 14, 3, 14, 14, 3, 14, 14,
2320 3, 14, 14, 14, 14, 14, 14, 14,
2321 3, 14, 3, 14, 3, 14, 14, 3,
2322 14, 3, 14, 14, 3, 14, 3, 14,
2323 3, 244, 215, 245, 246, 247, 248, 249,
2324 250, 251, 252, 253, 101, 254, 19, 255,
2325 256, 257, 19, 258, 132, 259, 260, 261,
2326 262, 263, 264, 265, 266, 19, 3, 3,
2327 3, 14, 14, 14, 3, 14, 14, 3,
2328 14, 14, 3, 3, 3, 3, 3, 14,
2329 14, 14, 14, 3, 14, 14, 14, 14,
2330 14, 14, 3, 3, 3, 14, 14, 14,
2331 14, 14, 14, 14, 14, 14, 3, 14,
2332 14, 14, 14, 14, 14, 14, 14, 3,
2333 14, 14, 3, 3, 3, 3, 14, 14,
2334 14, 3, 3, 3, 14, 3, 3, 3,
2335 14, 14, 3, 14, 14, 14, 3, 14,
2336 3, 3, 3, 14, 14, 3, 14, 14,
2337 14, 3, 14, 14, 14, 3, 3, 3,
2338 3, 14, 19, 184, 267, 268, 17, 19,
2339 17, 3, 3, 14, 3, 14, 19, 267,
2340 17, 3, 19, 269, 17, 3, 3, 14,
2341 19, 270, 271, 272, 175, 273, 274, 19,
2342 275, 276, 277, 17, 3, 3, 14, 14,
2343 14, 3, 14, 14, 3, 14, 14, 14,
2344 14, 3, 3, 14, 3, 3, 14, 14,
2345 3, 14, 3, 19, 17, 3, 278, 19,
2346 279, 3, 17, 3, 14, 3, 14, 280,
2347 19, 281, 282, 3, 14, 3, 3, 3,
2348 14, 14, 14, 14, 3, 283, 284, 285,
2349 19, 286, 287, 288, 289, 290, 291, 292,
2350 293, 294, 295, 296, 297, 298, 299, 17,
2351 3, 14, 14, 14, 3, 3, 3, 3,
2352 14, 14, 3, 3, 14, 3, 3, 3,
2353 3, 3, 3, 3, 14, 3, 14, 3,
2354 3, 3, 3, 3, 3, 14, 14, 14,
2355 14, 14, 3, 3, 14, 3, 3, 3,
2356 14, 3, 3, 14, 3, 3, 14, 3,
2357 3, 14, 3, 3, 3, 14, 14, 14,
2358 3, 3, 3, 14, 14, 14, 14, 3,
2359 300, 19, 301, 19, 302, 303, 304, 305,
2360 17, 3, 14, 14, 14, 14, 14, 3,
2361 3, 3, 14, 3, 3, 14, 14, 14,
2362 14, 14, 14, 14, 14, 14, 14, 3,
2363 14, 14, 14, 14, 14, 14, 14, 14,
2364 14, 14, 14, 14, 14, 14, 14, 14,
2365 14, 14, 14, 3, 14, 14, 14, 14,
2366 14, 3, 306, 19, 17, 3, 14, 307,
2367 19, 103, 17, 3, 14, 308, 3, 17,
2368 3, 14, 19, 309, 17, 3, 3, 14,
2369 310, 3, 19, 311, 17, 3, 3, 14,
2370 14, 14, 14, 3, 14, 14, 14, 14,
2371 3, 14, 14, 14, 14, 14, 3, 3,
2372 14, 3, 14, 14, 14, 3, 14, 3,
2373 14, 14, 14, 3, 3, 3, 3, 3,
2374 3, 3, 14, 14, 14, 3, 14, 3,
2375 3, 3, 14, 14, 14, 14, 3, 312,
2376 313, 72, 314, 315, 316, 317, 318, 319,
2377 320, 321, 322, 323, 324, 325, 326, 327,
2378 328, 329, 330, 331, 332, 334, 335, 336,
2379 337, 338, 339, 333, 3, 14, 14, 14,
2380 14, 3, 14, 3, 14, 14, 3, 14,
2381 14, 14, 3, 3, 3, 3, 3, 3,
2382 3, 3, 3, 14, 14, 14, 14, 14,
2383 3, 14, 14, 14, 14, 14, 14, 14,
2384 3, 14, 14, 14, 3, 14, 14, 14,
2385 14, 14, 14, 14, 3, 14, 14, 14,
2386 3, 14, 14, 14, 14, 14, 14, 14,
2387 3, 14, 14, 14, 3, 14, 14, 14,
2388 14, 14, 14, 14, 14, 14, 14, 3,
2389 14, 3, 14, 14, 14, 14, 14, 3,
2390 14, 14, 3, 14, 14, 14, 14, 14,
2391 14, 14, 3, 14, 14, 14, 3, 14,
2392 14, 14, 14, 3, 14, 14, 14, 14,
2393 3, 14, 14, 14, 14, 3, 14, 3,
2394 14, 14, 3, 14, 14, 14, 14, 14,
2395 14, 14, 14, 14, 14, 14, 14, 14,
2396 14, 3, 14, 14, 14, 3, 14, 3,
2397 14, 14, 3, 14, 3, 340, 341, 342,
2398 104, 105, 106, 107, 108, 343, 110, 111,
2399 112, 113, 114, 115, 344, 345, 170, 346,
2400 261, 120, 347, 122, 232, 272, 125, 348,
2401 349, 350, 351, 352, 353, 354, 355, 356,
2402 357, 134, 358, 19, 17, 18, 19, 137,
2403 138, 139, 140, 17, 17, 3, 14, 14,
2404 3, 14, 14, 14, 14, 14, 14, 3,
2405 3, 3, 14, 3, 14, 14, 14, 14,
2406 3, 14, 14, 14, 3, 14, 14, 3,
2407 14, 14, 14, 3, 3, 14, 14, 14,
2408 3, 3, 14, 14, 3, 14, 3, 14,
2409 3, 14, 14, 14, 3, 3, 14, 14,
2410 3, 14, 14, 3, 14, 14, 14, 3,
2411 359, 143, 145, 146, 147, 148, 149, 17,
2412 360, 151, 361, 153, 362, 3, 14, 14,
2413 3, 3, 3, 3, 14, 3, 3, 14,
2414 14, 14, 14, 14, 3, 363, 112, 364,
2415 157, 158, 17, 159, 160, 19, 17, 3,
2416 14, 14, 14, 14, 3, 3, 3, 14,
2417 19, 162, 163, 19, 365, 366, 222, 311,
2418 166, 167, 168, 367, 170, 368, 369, 370,
2419 371, 372, 373, 374, 375, 376, 377, 178,
2420 179, 18, 378, 19, 17, 3, 3, 3,
2421 3, 14, 14, 14, 3, 3, 3, 3,
2422 3, 14, 14, 3, 14, 14, 14, 3,
2423 14, 14, 3, 3, 3, 14, 14, 3,
2424 14, 14, 14, 14, 3, 14, 3, 14,
2425 14, 14, 14, 14, 3, 3, 3, 3,
2426 3, 14, 14, 14, 14, 14, 14, 3,
2427 14, 3, 19, 183, 184, 379, 186, 60,
2428 187, 188, 58, 189, 190, 380, 17, 193,
2429 381, 195, 196, 197, 17, 3, 14, 14,
2430 14, 14, 14, 14, 14, 3, 14, 14,
2431 3, 14, 3, 382, 383, 200, 201, 202,
2432 384, 204, 205, 385, 386, 387, 204, 209,
2433 210, 211, 212, 41, 3, 213, 214, 19,
2434 215, 216, 218, 388, 220, 389, 222, 223,
2435 19, 17, 390, 225, 226, 227, 19, 228,
2436 229, 230, 231, 232, 233, 234, 235, 391,
2437 237, 238, 392, 240, 241, 242, 19, 147,
2438 17, 243, 3, 3, 14, 3, 3, 14,
2439 3, 14, 14, 14, 14, 14, 3, 14,
2440 14, 3, 393, 394, 395, 396, 397, 398,
2441 399, 400, 250, 401, 322, 402, 216, 403,
2442 404, 405, 406, 407, 404, 408, 409, 410,
2443 261, 411, 263, 412, 413, 274, 3, 14,
2444 3, 14, 3, 14, 3, 14, 3, 14,
2445 14, 3, 14, 3, 14, 14, 14, 3,
2446 14, 14, 3, 3, 14, 14, 14, 3,
2447 14, 3, 14, 3, 14, 14, 3, 14,
2448 3, 14, 3, 14, 3, 14, 3, 14,
2449 3, 3, 3, 14, 14, 14, 3, 14,
2450 14, 3, 19, 270, 232, 414, 404, 415,
2451 274, 19, 416, 417, 277, 17, 3, 14,
2452 3, 14, 14, 14, 3, 3, 3, 14,
2453 14, 3, 280, 19, 281, 418, 3, 14,
2454 14, 3, 19, 286, 287, 288, 289, 290,
2455 291, 292, 293, 294, 295, 419, 17, 3,
2456 3, 3, 14, 19, 420, 19, 268, 303,
2457 304, 305, 17, 3, 3, 14, 422, 422,
2458 422, 422, 421, 422, 422, 422, 421, 422,
2459 421, 422, 422, 421, 421, 421, 421, 421,
2460 421, 422, 421, 421, 421, 421, 422, 422,
2461 422, 422, 422, 421, 421, 422, 421, 421,
2462 422, 421, 422, 421, 421, 422, 421, 421,
2463 421, 422, 422, 422, 422, 422, 422, 421,
2464 422, 422, 421, 422, 422, 421, 421, 421,
2465 421, 421, 421, 422, 422, 421, 421, 422,
2466 421, 422, 422, 422, 421, 423, 424, 425,
2467 426, 427, 428, 429, 430, 431, 432, 433,
2468 434, 435, 436, 437, 438, 439, 440, 441,
2469 442, 443, 444, 445, 446, 447, 448, 449,
2470 450, 451, 452, 453, 454, 421, 422, 421,
2471 422, 421, 422, 422, 421, 422, 422, 421,
2472 421, 421, 422, 421, 421, 421, 421, 421,
2473 421, 421, 422, 421, 421, 421, 421, 421,
2474 421, 421, 422, 422, 422, 422, 422, 422,
2475 422, 422, 422, 422, 422, 421, 421, 421,
2476 421, 421, 421, 421, 421, 422, 422, 422,
2477 422, 422, 422, 422, 422, 422, 421, 421,
2478 421, 421, 421, 421, 421, 421, 422, 422,
2479 422, 422, 422, 422, 422, 422, 422, 421,
2480 422, 422, 422, 422, 422, 422, 422, 422,
2481 421, 422, 422, 422, 422, 422, 422, 422,
2482 422, 422, 422, 422, 421, 422, 422, 422,
2483 422, 422, 422, 421, 422, 422, 422, 422,
2484 422, 422, 421, 421, 421, 421, 421, 421,
2485 421, 421, 422, 422, 422, 422, 422, 422,
2486 422, 422, 421, 422, 422, 422, 422, 422,
2487 422, 422, 422, 421, 422, 422, 422, 422,
2488 422, 421, 421, 421, 421, 421, 421, 421,
2489 421, 422, 422, 422, 422, 422, 422, 421,
2490 422, 422, 422, 422, 422, 422, 422, 421,
2491 422, 421, 422, 422, 421, 422, 422, 422,
2492 422, 422, 422, 422, 422, 422, 422, 422,
2493 422, 422, 421, 422, 422, 422, 422, 422,
2494 421, 422, 422, 422, 422, 422, 422, 422,
2495 421, 422, 422, 422, 421, 422, 422, 422,
2496 421, 422, 421, 455, 456, 457, 458, 459,
2497 460, 461, 462, 463, 464, 465, 466, 467,
2498 468, 469, 470, 471, 472, 473, 474, 475,
2499 476, 477, 478, 479, 480, 481, 482, 483,
2500 484, 485, 486, 487, 488, 489, 490, 427,
2501 491, 492, 493, 494, 495, 496, 427, 472,
2502 427, 421, 422, 421, 422, 422, 421, 421,
2503 422, 421, 421, 421, 421, 422, 421, 421,
2504 421, 421, 421, 422, 421, 421, 421, 421,
2505 421, 422, 422, 422, 422, 422, 421, 421,
2506 421, 422, 421, 421, 421, 422, 422, 422,
2507 421, 421, 421, 422, 422, 421, 421, 421,
2508 422, 422, 422, 421, 421, 421, 422, 422,
2509 422, 422, 421, 422, 422, 422, 422, 421,
2510 421, 421, 421, 421, 422, 422, 422, 422,
2511 421, 421, 422, 422, 422, 421, 421, 422,
2512 422, 422, 422, 421, 422, 422, 421, 422,
2513 422, 421, 421, 421, 422, 422, 422, 421,
2514 421, 421, 421, 422, 422, 422, 422, 422,
2515 421, 421, 421, 421, 422, 421, 422, 422,
2516 421, 422, 422, 421, 422, 421, 422, 422,
2517 422, 421, 422, 422, 421, 421, 421, 422,
2518 421, 421, 421, 421, 421, 421, 421, 422,
2519 422, 422, 422, 421, 422, 422, 422, 422,
2520 422, 422, 422, 421, 497, 498, 499, 500,
2521 501, 502, 503, 504, 505, 427, 506, 507,
2522 508, 509, 510, 421, 422, 421, 421, 421,
2523 421, 421, 422, 422, 421, 422, 422, 422,
2524 421, 422, 422, 422, 422, 422, 422, 422,
2525 422, 422, 422, 421, 422, 422, 422, 421,
2526 421, 422, 422, 422, 421, 421, 422, 421,
2527 421, 422, 422, 422, 422, 422, 421, 421,
2528 421, 421, 422, 422, 422, 422, 422, 422,
2529 421, 422, 422, 422, 422, 422, 421, 511,
2530 466, 512, 513, 514, 427, 515, 516, 472,
2531 427, 421, 422, 422, 422, 422, 421, 421,
2532 421, 422, 421, 421, 422, 422, 422, 421,
2533 421, 421, 422, 422, 421, 477, 421, 472,
2534 427, 427, 517, 421, 427, 421, 422, 472,
2535 518, 519, 472, 520, 521, 472, 522, 523,
2536 524, 525, 526, 527, 472, 528, 529, 530,
2537 472, 531, 532, 533, 491, 534, 535, 536,
2538 491, 537, 472, 427, 421, 421, 422, 422,
2539 421, 421, 421, 422, 422, 422, 422, 421,
2540 422, 422, 421, 421, 421, 421, 422, 422,
2541 421, 421, 422, 422, 421, 421, 421, 421,
2542 421, 421, 422, 422, 422, 421, 421, 421,
2543 422, 421, 421, 421, 422, 422, 421, 422,
2544 422, 422, 422, 421, 422, 422, 422, 422,
2545 421, 422, 422, 422, 422, 422, 422, 421,
2546 421, 421, 422, 422, 422, 422, 421, 538,
2547 539, 421, 427, 421, 422, 421, 421, 422,
2548 472, 540, 541, 542, 543, 522, 544, 545,
2549 546, 547, 548, 549, 550, 551, 552, 553,
2550 554, 555, 427, 421, 421, 422, 421, 422,
2551 422, 422, 422, 422, 422, 422, 421, 422,
2552 422, 422, 421, 422, 421, 421, 422, 421,
2553 422, 421, 421, 422, 422, 422, 422, 421,
2554 422, 422, 422, 421, 421, 422, 422, 422,
2555 422, 421, 422, 422, 421, 421, 422, 422,
2556 422, 422, 422, 421, 556, 557, 558, 559,
2557 560, 561, 562, 563, 564, 565, 566, 562,
2558 568, 569, 570, 571, 567, 421, 572, 573,
2559 472, 574, 575, 576, 577, 578, 579, 580,
2560 581, 582, 472, 427, 583, 584, 585, 586,
2561 472, 587, 588, 589, 590, 591, 592, 593,
2562 594, 595, 596, 597, 598, 599, 600, 601,
2563 472, 503, 427, 602, 421, 422, 422, 422,
2564 422, 422, 421, 421, 421, 422, 421, 422,
2565 422, 421, 422, 421, 422, 422, 421, 421,
2566 421, 422, 422, 422, 421, 421, 421, 422,
2567 422, 422, 421, 421, 421, 421, 422, 421,
2568 421, 422, 421, 421, 422, 422, 422, 421,
2569 421, 422, 421, 422, 422, 422, 421, 422,
2570 422, 422, 422, 422, 422, 421, 421, 421,
2571 422, 422, 421, 422, 422, 421, 422, 422,
2572 421, 422, 422, 421, 422, 422, 422, 422,
2573 422, 422, 422, 421, 422, 421, 422, 421,
2574 422, 422, 421, 422, 421, 422, 422, 421,
2575 422, 421, 422, 421, 603, 574, 604, 605,
2576 606, 607, 608, 609, 610, 611, 612, 455,
2577 613, 472, 614, 615, 616, 472, 617, 487,
2578 618, 619, 620, 621, 622, 623, 624, 625,
2579 472, 421, 421, 421, 422, 422, 422, 421,
2580 422, 422, 421, 422, 422, 421, 421, 421,
2581 421, 421, 422, 422, 422, 422, 421, 422,
2582 422, 422, 422, 422, 422, 421, 421, 421,
2583 422, 422, 422, 422, 422, 422, 422, 422,
2584 422, 421, 422, 422, 422, 422, 422, 422,
2585 422, 422, 421, 422, 422, 421, 421, 421,
2586 421, 422, 422, 422, 421, 421, 421, 422,
2587 421, 421, 421, 422, 422, 421, 422, 422,
2588 422, 421, 422, 421, 421, 421, 422, 422,
2589 421, 422, 422, 422, 421, 422, 422, 422,
2590 421, 421, 421, 421, 422, 472, 541, 626,
2591 627, 427, 472, 427, 421, 421, 422, 421,
2592 422, 472, 626, 427, 421, 472, 628, 427,
2593 421, 421, 422, 472, 629, 630, 631, 532,
2594 632, 633, 472, 634, 635, 636, 427, 421,
2595 421, 422, 422, 422, 421, 422, 422, 421,
2596 422, 422, 422, 422, 421, 421, 422, 421,
2597 421, 422, 422, 421, 422, 421, 472, 427,
2598 421, 637, 472, 638, 421, 427, 421, 422,
2599 421, 422, 639, 472, 640, 641, 421, 422,
2600 421, 421, 421, 422, 422, 422, 422, 421,
2601 642, 643, 644, 472, 645, 646, 647, 648,
2602 649, 650, 651, 652, 653, 654, 655, 656,
2603 657, 658, 427, 421, 422, 422, 422, 421,
2604 421, 421, 421, 422, 422, 421, 421, 422,
2605 421, 421, 421, 421, 421, 421, 421, 422,
2606 421, 422, 421, 421, 421, 421, 421, 421,
2607 422, 422, 422, 422, 422, 421, 421, 422,
2608 421, 421, 421, 422, 421, 421, 422, 421,
2609 421, 422, 421, 421, 422, 421, 421, 421,
2610 422, 422, 422, 421, 421, 421, 422, 422,
2611 422, 422, 421, 659, 472, 660, 472, 661,
2612 662, 663, 664, 427, 421, 422, 422, 422,
2613 422, 422, 421, 421, 421, 422, 421, 421,
2614 422, 422, 422, 422, 422, 422, 422, 422,
2615 422, 422, 421, 422, 422, 422, 422, 422,
2616 422, 422, 422, 422, 422, 422, 422, 422,
2617 422, 422, 422, 422, 422, 422, 421, 422,
2618 422, 422, 422, 422, 421, 665, 472, 427,
2619 421, 422, 666, 472, 457, 427, 421, 422,
2620 667, 421, 427, 421, 422, 472, 668, 427,
2621 421, 421, 422, 669, 421, 472, 670, 427,
2622 421, 421, 422, 672, 671, 422, 422, 422,
2623 422, 672, 671, 422, 672, 671, 672, 672,
2624 422, 672, 671, 422, 672, 422, 672, 671,
2625 422, 672, 422, 672, 422, 671, 672, 672,
2626 672, 672, 672, 672, 672, 672, 671, 422,
2627 422, 672, 672, 422, 672, 422, 672, 671,
2628 672, 672, 672, 672, 672, 422, 672, 422,
2629 672, 422, 672, 671, 672, 672, 422, 672,
2630 422, 672, 671, 672, 672, 672, 672, 672,
2631 422, 672, 422, 672, 671, 422, 422, 672,
2632 422, 672, 671, 672, 672, 672, 422, 672,
2633 422, 672, 422, 672, 422, 672, 671, 672,
2634 422, 672, 422, 672, 671, 422, 672, 672,
2635 672, 672, 422, 672, 422, 672, 422, 672,
2636 422, 672, 422, 672, 422, 672, 671, 422,
2637 672, 671, 672, 672, 672, 422, 672, 422,
2638 672, 671, 672, 422, 672, 422, 672, 671,
2639 422, 672, 672, 672, 672, 422, 672, 422,
2640 672, 671, 422, 672, 422, 672, 422, 672,
2641 671, 672, 672, 422, 672, 422, 672, 671,
2642 422, 672, 422, 672, 422, 672, 422, 671,
2643 672, 672, 672, 422, 672, 422, 672, 671,
2644 422, 672, 671, 672, 672, 422, 672, 671,
2645 672, 672, 672, 422, 672, 672, 672, 672,
2646 672, 672, 422, 422, 672, 422, 672, 422,
2647 672, 422, 672, 671, 672, 422, 672, 422,
2648 672, 671, 422, 672, 671, 672, 422, 672,
2649 671, 672, 422, 672, 671, 422, 422, 672,
2650 671, 422, 672, 422, 672, 422, 672, 422,
2651 672, 422, 672, 422, 671, 672, 672, 422,
2652 672, 672, 672, 672, 422, 422, 672, 672,
2653 672, 672, 672, 422, 672, 672, 672, 672,
2654 672, 671, 422, 672, 672, 422, 672, 422,
2655 671, 672, 672, 422, 672, 671, 422, 422,
2656 672, 422, 671, 672, 672, 671, 422, 672,
2657 422, 671, 672, 671, 422, 672, 422, 672,
2658 422, 671, 672, 672, 671, 422, 672, 422,
2659 672, 422, 672, 671, 672, 422, 672, 422,
2660 672, 671, 422, 672, 671, 422, 422, 672,
2661 671, 672, 422, 671, 672, 671, 422, 672,
2662 422, 672, 422, 671, 672, 671, 422, 422,
2663 672, 671, 672, 422, 672, 422, 672, 671,
2664 422, 672, 422, 671, 672, 671, 422, 422,
2665 672, 422, 671, 672, 671, 422, 422, 672,
2666 671, 672, 422, 672, 671, 672, 422, 672,
2667 671, 672, 422, 672, 422, 672, 422, 671,
2668 672, 671, 422, 422, 672, 671, 672, 422,
2669 672, 422, 672, 671, 422, 672, 671, 672,
2670 672, 422, 672, 422, 672, 671, 671, 422,
2671 671, 422, 672, 672, 422, 672, 672, 672,
2672 672, 672, 672, 672, 671, 422, 672, 672,
2673 672, 422, 671, 672, 672, 672, 422, 672,
2674 422, 672, 422, 672, 422, 672, 422, 672,
2675 671, 422, 422, 672, 671, 672, 422, 672,
2676 671, 422, 422, 672, 422, 422, 422, 672,
2677 422, 672, 422, 672, 422, 672, 422, 671,
2678 422, 672, 422, 672, 422, 671, 672, 671,
2679 422, 672, 422, 671, 672, 422, 672, 672,
2680 672, 671, 422, 672, 422, 422, 672, 422,
2681 671, 672, 672, 671, 422, 672, 672, 672,
2682 672, 422, 672, 422, 671, 672, 672, 672,
2683 422, 672, 671, 672, 422, 672, 422, 672,
2684 422, 672, 422, 672, 671, 672, 672, 422,
2685 672, 671, 422, 672, 422, 672, 422, 671,
2686 672, 672, 671, 422, 672, 422, 671, 672,
2687 671, 422, 672, 671, 422, 672, 422, 672,
2688 671, 672, 672, 672, 671, 422, 422, 422,
2689 672, 671, 422, 672, 422, 671, 672, 671,
2690 422, 672, 422, 672, 422, 671, 672, 672,
2691 672, 671, 422, 672, 422, 671, 672, 672,
2692 672, 672, 671, 422, 672, 422, 672, 671,
2693 422, 422, 672, 422, 672, 671, 672, 422,
2694 672, 422, 671, 672, 672, 671, 422, 672,
2695 422, 672, 671, 422, 672, 672, 672, 422,
2696 672, 422, 671, 422, 672, 671, 672, 422,
2697 422, 672, 422, 672, 422, 671, 672, 672,
2698 672, 672, 671, 422, 672, 422, 672, 422,
2699 672, 422, 672, 422, 672, 671, 672, 672,
2700 672, 422, 672, 422, 672, 422, 672, 422,
2701 671, 672, 672, 422, 422, 672, 671, 672,
2702 422, 672, 672, 671, 422, 672, 422, 672,
2703 671, 422, 422, 672, 672, 672, 672, 422,
2704 672, 422, 672, 422, 671, 672, 672, 422,
2705 671, 672, 671, 422, 672, 422, 671, 672,
2706 671, 422, 672, 422, 671, 672, 422, 672,
2707 672, 671, 422, 672, 672, 422, 671, 672,
2708 671, 422, 672, 422, 672, 671, 672, 422,
2709 672, 422, 671, 672, 671, 422, 672, 422,
2710 672, 422, 672, 422, 672, 422, 672, 671,
2711 673, 671, 674, 675, 676, 677, 678, 679,
2712 680, 681, 682, 683, 684, 676, 685, 686,
2713 687, 688, 689, 676, 690, 691, 692, 693,
2714 694, 695, 696, 697, 698, 699, 700, 701,
2715 702, 703, 704, 676, 705, 673, 685, 673,
2716 706, 673, 671, 672, 672, 672, 672, 422,
2717 671, 672, 672, 671, 422, 672, 671, 422,
2718 422, 672, 671, 422, 672, 422, 671, 672,
2719 671, 422, 422, 672, 422, 671, 672, 672,
2720 671, 422, 672, 672, 672, 671, 422, 672,
2721 422, 672, 672, 671, 422, 422, 672, 422,
2722 671, 672, 671, 422, 672, 671, 422, 422,
2723 672, 422, 672, 671, 422, 672, 422, 422,
2724 672, 422, 672, 422, 671, 672, 672, 671,
2725 422, 672, 672, 422, 672, 671, 422, 672,
2726 422, 672, 671, 422, 672, 422, 671, 422,
2727 672, 672, 672, 422, 672, 671, 672, 422,
2728 672, 671, 422, 672, 671, 672, 422, 672,
2729 671, 422, 672, 671, 422, 672, 422, 672,
2730 671, 422, 672, 671, 422, 672, 671, 707,
2731 708, 709, 710, 711, 712, 713, 714, 715,
2732 716, 717, 718, 678, 719, 720, 721, 722,
2733 723, 720, 724, 725, 726, 727, 728, 729,
2734 730, 731, 732, 673, 671, 672, 422, 672,
2735 671, 672, 422, 672, 671, 672, 422, 672,
2736 671, 672, 422, 672, 671, 422, 672, 422,
2737 672, 671, 672, 422, 672, 671, 672, 422,
2738 422, 422, 672, 671, 672, 422, 672, 671,
2739 672, 672, 672, 672, 422, 672, 422, 671,
2740 672, 671, 422, 422, 672, 422, 672, 671,
2741 672, 422, 672, 671, 422, 672, 671, 672,
2742 672, 422, 672, 671, 422, 672, 671, 672,
2743 422, 672, 671, 422, 672, 671, 422, 672,
2744 671, 422, 672, 671, 672, 671, 422, 422,
2745 672, 671, 672, 422, 672, 671, 422, 672,
2746 422, 671, 672, 671, 422, 676, 733, 673,
2747 676, 734, 676, 735, 685, 673, 671, 672,
2748 671, 422, 672, 671, 422, 676, 734, 685,
2749 673, 671, 676, 736, 673, 685, 673, 671,
2750 672, 671, 422, 676, 737, 694, 738, 720,
2751 739, 732, 676, 740, 741, 742, 673, 685,
2752 673, 671, 672, 671, 422, 672, 422, 672,
2753 671, 422, 672, 422, 672, 422, 671, 672,
2754 672, 671, 422, 672, 422, 672, 671, 422,
2755 672, 671, 676, 685, 427, 671, 743, 676,
2756 744, 685, 673, 671, 427, 672, 671, 422,
2757 672, 671, 422, 745, 676, 746, 747, 673,
2758 671, 422, 672, 671, 672, 672, 671, 422,
2759 422, 672, 422, 672, 671, 676, 748, 749,
2760 750, 751, 752, 753, 754, 755, 756, 757,
2761 758, 673, 685, 673, 671, 672, 422, 672,
2762 672, 672, 672, 672, 672, 672, 422, 672,
2763 422, 672, 672, 672, 672, 672, 672, 671,
2764 422, 672, 672, 422, 672, 422, 671, 672,
2765 422, 672, 672, 672, 422, 672, 672, 422,
2766 672, 672, 422, 672, 672, 422, 672, 672,
2767 671, 422, 676, 759, 676, 735, 760, 761,
2768 762, 673, 685, 673, 671, 672, 671, 422,
2769 672, 672, 672, 422, 672, 672, 672, 422,
2770 672, 422, 672, 671, 422, 422, 422, 422,
2771 672, 672, 422, 422, 422, 422, 422, 672,
2772 672, 672, 672, 672, 672, 672, 422, 672,
2773 422, 672, 422, 671, 672, 672, 672, 422,
2774 672, 422, 672, 671, 685, 427, 763, 676,
2775 685, 427, 672, 671, 422, 764, 676, 765,
2776 685, 427, 672, 671, 422, 672, 422, 766,
2777 685, 673, 671, 427, 672, 671, 422, 676,
2778 767, 673, 685, 673, 671, 672, 671, 422,
2779 768, 769, 768, 770, 771, 768, 772, 768,
2780 773, 768, 771, 774, 775, 774, 777, 776,
2781 778, 779, 778, 780, 781, 776, 782, 776,
2782 783, 778, 784, 779, 785, 780, 787, 786,
2783 788, 789, 789, 786, 790, 786, 791, 788,
2784 792, 789, 793, 789, 795, 795, 795, 795,
2785 794, 795, 795, 795, 794, 795, 794, 795,
2786 795, 794, 794, 794, 794, 794, 794, 795,
2787 794, 794, 794, 794, 795, 795, 795, 795,
2788 795, 794, 794, 795, 794, 794, 795, 794,
2789 795, 794, 794, 795, 794, 794, 794, 795,
2790 795, 795, 795, 795, 795, 794, 795, 795,
2791 794, 795, 795, 794, 794, 794, 794, 794,
2792 794, 795, 795, 794, 794, 795, 794, 795,
2793 795, 795, 794, 797, 798, 799, 800, 801,
2794 802, 803, 804, 805, 806, 807, 808, 809,
2795 810, 811, 812, 813, 814, 815, 816, 817,
2796 818, 819, 820, 821, 822, 823, 824, 825,
2797 826, 827, 828, 794, 795, 794, 795, 794,
2798 795, 795, 794, 795, 795, 794, 794, 794,
2799 795, 794, 794, 794, 794, 794, 794, 794,
2800 795, 794, 794, 794, 794, 794, 794, 794,
2801 795, 795, 795, 795, 795, 795, 795, 795,
2802 795, 795, 795, 794, 794, 794, 794, 794,
2803 794, 794, 794, 795, 795, 795, 795, 795,
2804 795, 795, 795, 795, 794, 794, 794, 794,
2805 794, 794, 794, 794, 795, 795, 795, 795,
2806 795, 795, 795, 795, 795, 794, 795, 795,
2807 795, 795, 795, 795, 795, 795, 794, 795,
2808 795, 795, 795, 795, 795, 795, 795, 795,
2809 795, 795, 794, 795, 795, 795, 795, 795,
2810 795, 794, 795, 795, 795, 795, 795, 795,
2811 794, 794, 794, 794, 794, 794, 794, 794,
2812 795, 795, 795, 795, 795, 795, 795, 795,
2813 794, 795, 795, 795, 795, 795, 795, 795,
2814 795, 794, 795, 795, 795, 795, 795, 794,
2815 794, 794, 794, 794, 794, 794, 794, 795,
2816 795, 795, 795, 795, 795, 794, 795, 795,
2817 795, 795, 795, 795, 795, 794, 795, 794,
2818 795, 795, 794, 795, 795, 795, 795, 795,
2819 795, 795, 795, 795, 795, 795, 795, 795,
2820 794, 795, 795, 795, 795, 795, 794, 795,
2821 795, 795, 795, 795, 795, 795, 794, 795,
2822 795, 795, 794, 795, 795, 795, 794, 795,
2823 794, 829, 830, 831, 832, 833, 834, 835,
2824 836, 837, 838, 839, 840, 841, 842, 843,
2825 844, 845, 846, 847, 848, 849, 850, 851,
2826 852, 853, 854, 855, 856, 857, 858, 859,
2827 860, 861, 862, 863, 864, 801, 865, 866,
2828 867, 868, 869, 870, 801, 846, 801, 794,
2829 795, 794, 795, 795, 794, 794, 795, 794,
2830 794, 794, 794, 795, 794, 794, 794, 794,
2831 794, 795, 794, 794, 794, 794, 794, 795,
2832 795, 795, 795, 795, 794, 794, 794, 795,
2833 794, 794, 794, 795, 795, 795, 794, 794,
2834 794, 795, 795, 794, 794, 794, 795, 795,
2835 795, 794, 794, 794, 795, 795, 795, 795,
2836 794, 795, 795, 795, 795, 794, 794, 794,
2837 794, 794, 795, 795, 795, 795, 794, 794,
2838 795, 795, 795, 794, 794, 795, 795, 795,
2839 795, 794, 795, 795, 794, 795, 795, 794,
2840 794, 794, 795, 795, 795, 794, 794, 794,
2841 794, 795, 795, 795, 795, 795, 794, 794,
2842 794, 794, 795, 794, 795, 795, 794, 795,
2843 795, 794, 795, 794, 795, 795, 795, 794,
2844 795, 795, 794, 794, 794, 795, 794, 794,
2845 794, 794, 794, 794, 794, 795, 795, 795,
2846 795, 794, 795, 795, 795, 795, 795, 795,
2847 795, 794, 871, 872, 873, 874, 875, 876,
2848 877, 878, 879, 801, 880, 881, 882, 883,
2849 884, 794, 795, 794, 794, 794, 794, 794,
2850 795, 795, 794, 795, 795, 795, 794, 795,
2851 795, 795, 795, 795, 795, 795, 795, 795,
2852 795, 794, 795, 795, 795, 794, 794, 795,
2853 795, 795, 794, 794, 795, 794, 794, 795,
2854 795, 795, 795, 795, 794, 794, 794, 794,
2855 795, 795, 795, 795, 795, 795, 794, 795,
2856 795, 795, 795, 795, 794, 885, 840, 886,
2857 887, 888, 801, 889, 890, 846, 801, 794,
2858 795, 795, 795, 795, 794, 794, 794, 795,
2859 794, 794, 795, 795, 795, 794, 794, 794,
2860 795, 795, 794, 851, 794, 846, 801, 801,
2861 891, 794, 801, 794, 795, 846, 892, 893,
2862 846, 894, 895, 846, 896, 897, 898, 899,
2863 900, 901, 846, 902, 903, 904, 846, 905,
2864 906, 907, 865, 908, 909, 910, 865, 911,
2865 846, 801, 794, 794, 795, 795, 794, 794,
2866 794, 795, 795, 795, 795, 794, 795, 795,
2867 794, 794, 794, 794, 795, 795, 794, 794,
2868 795, 795, 794, 794, 794, 794, 794, 794,
2869 795, 795, 795, 794, 794, 794, 795, 794,
2870 794, 794, 795, 795, 794, 795, 795, 795,
2871 795, 794, 795, 795, 795, 795, 794, 795,
2872 795, 795, 795, 795, 795, 794, 794, 794,
2873 795, 795, 795, 795, 794, 912, 913, 794,
2874 801, 794, 795, 794, 794, 795, 846, 914,
2875 915, 916, 917, 896, 918, 919, 920, 921,
2876 922, 923, 924, 925, 926, 927, 928, 929,
2877 801, 794, 794, 795, 794, 795, 795, 795,
2878 795, 795, 795, 795, 794, 795, 795, 795,
2879 794, 795, 794, 794, 795, 794, 795, 794,
2880 794, 795, 795, 795, 795, 794, 795, 795,
2881 795, 794, 794, 795, 795, 795, 795, 794,
2882 795, 795, 794, 794, 795, 795, 795, 795,
2883 795, 794, 930, 931, 932, 933, 934, 935,
2884 936, 937, 938, 939, 940, 936, 942, 943,
2885 944, 945, 941, 794, 946, 947, 846, 948,
2886 949, 950, 951, 952, 953, 954, 955, 956,
2887 846, 801, 957, 958, 959, 960, 846, 961,
2888 962, 963, 964, 965, 966, 967, 968, 969,
2889 970, 971, 972, 973, 974, 975, 846, 877,
2890 801, 976, 794, 795, 795, 795, 795, 795,
2891 794, 794, 794, 795, 794, 795, 795, 794,
2892 795, 794, 795, 795, 794, 794, 794, 795,
2893 795, 795, 794, 794, 794, 795, 795, 795,
2894 794, 794, 794, 794, 795, 794, 794, 795,
2895 794, 794, 795, 795, 795, 794, 794, 795,
2896 794, 795, 795, 795, 794, 795, 795, 795,
2897 795, 795, 795, 794, 794, 794, 795, 795,
2898 794, 795, 795, 794, 795, 795, 794, 795,
2899 795, 794, 795, 795, 795, 795, 795, 795,
2900 795, 794, 795, 794, 795, 794, 795, 795,
2901 794, 795, 794, 795, 795, 794, 795, 794,
2902 795, 794, 977, 948, 978, 979, 980, 981,
2903 982, 983, 984, 985, 986, 829, 987, 846,
2904 988, 989, 990, 846, 991, 861, 992, 993,
2905 994, 995, 996, 997, 998, 999, 846, 794,
2906 794, 794, 795, 795, 795, 794, 795, 795,
2907 794, 795, 795, 794, 794, 794, 794, 794,
2908 795, 795, 795, 795, 794, 795, 795, 795,
2909 795, 795, 795, 794, 794, 794, 795, 795,
2910 795, 795, 795, 795, 795, 795, 795, 794,
2911 795, 795, 795, 795, 795, 795, 795, 795,
2912 794, 795, 795, 794, 794, 794, 794, 795,
2913 795, 795, 794, 794, 794, 795, 794, 794,
2914 794, 795, 795, 794, 795, 795, 795, 794,
2915 795, 794, 794, 794, 795, 795, 794, 795,
2916 795, 795, 794, 795, 795, 795, 794, 794,
2917 794, 794, 795, 846, 915, 1000, 1001, 801,
2918 846, 801, 794, 794, 795, 794, 795, 846,
2919 1000, 801, 794, 846, 1002, 801, 794, 794,
2920 795, 846, 1003, 1004, 1005, 906, 1006, 1007,
2921 846, 1008, 1009, 1010, 801, 794, 794, 795,
2922 795, 795, 794, 795, 795, 794, 795, 795,
2923 795, 795, 794, 794, 795, 794, 794, 795,
2924 795, 794, 795, 794, 846, 801, 794, 1011,
2925 846, 1012, 794, 801, 794, 795, 794, 795,
2926 1013, 846, 1014, 1015, 794, 795, 794, 794,
2927 794, 795, 795, 795, 795, 794, 1016, 1017,
2928 1018, 846, 1019, 1020, 1021, 1022, 1023, 1024,
2929 1025, 1026, 1027, 1028, 1029, 1030, 1031, 1032,
2930 801, 794, 795, 795, 795, 794, 794, 794,
2931 794, 795, 795, 794, 794, 795, 794, 794,
2932 794, 794, 794, 794, 794, 795, 794, 795,
2933 794, 794, 794, 794, 794, 794, 795, 795,
2934 795, 795, 795, 794, 794, 795, 794, 794,
2935 794, 795, 794, 794, 795, 794, 794, 795,
2936 794, 794, 795, 794, 794, 794, 795, 795,
2937 795, 794, 794, 794, 795, 795, 795, 795,
2938 794, 1033, 846, 1034, 846, 1035, 1036, 1037,
2939 1038, 801, 794, 795, 795, 795, 795, 795,
2940 794, 794, 794, 795, 794, 794, 795, 795,
2941 795, 795, 795, 795, 795, 795, 795, 795,
2942 794, 795, 795, 795, 795, 795, 795, 795,
2943 795, 795, 795, 795, 795, 795, 795, 795,
2944 795, 795, 795, 795, 794, 795, 795, 795,
2945 795, 795, 794, 1039, 846, 801, 794, 795,
2946 1040, 846, 831, 801, 794, 795, 1041, 794,
2947 801, 794, 795, 846, 1042, 801, 794, 794,
2948 795, 1043, 794, 846, 1044, 801, 794, 794,
2949 795, 1046, 1045, 795, 795, 795, 795, 1046,
2950 1045, 795, 1046, 1045, 1046, 1046, 795, 1046,
2951 1045, 795, 1046, 795, 1046, 1045, 795, 1046,
2952 795, 1046, 795, 1045, 1046, 1046, 1046, 1046,
2953 1046, 1046, 1046, 1046, 1045, 795, 795, 1046,
2954 1046, 795, 1046, 795, 1046, 1045, 1046, 1046,
2955 1046, 1046, 1046, 795, 1046, 795, 1046, 795,
2956 1046, 1045, 1046, 1046, 795, 1046, 795, 1046,
2957 1045, 1046, 1046, 1046, 1046, 1046, 795, 1046,
2958 795, 1046, 1045, 795, 795, 1046, 795, 1046,
2959 1045, 1046, 1046, 1046, 795, 1046, 795, 1046,
2960 795, 1046, 795, 1046, 1045, 1046, 795, 1046,
2961 795, 1046, 1045, 795, 1046, 1046, 1046, 1046,
2962 795, 1046, 795, 1046, 795, 1046, 795, 1046,
2963 795, 1046, 795, 1046, 1045, 795, 1046, 1045,
2964 1046, 1046, 1046, 795, 1046, 795, 1046, 1045,
2965 1046, 795, 1046, 795, 1046, 1045, 795, 1046,
2966 1046, 1046, 1046, 795, 1046, 795, 1046, 1045,
2967 795, 1046, 795, 1046, 795, 1046, 1045, 1046,
2968 1046, 795, 1046, 795, 1046, 1045, 795, 1046,
2969 795, 1046, 795, 1046, 795, 1045, 1046, 1046,
2970 1046, 795, 1046, 795, 1046, 1045, 795, 1046,
2971 1045, 1046, 1046, 795, 1046, 1045, 1046, 1046,
2972 1046, 795, 1046, 1046, 1046, 1046, 1046, 1046,
2973 795, 795, 1046, 795, 1046, 795, 1046, 795,
2974 1046, 1045, 1046, 795, 1046, 795, 1046, 1045,
2975 795, 1046, 1045, 1046, 795, 1046, 1045, 1046,
2976 795, 1046, 1045, 795, 795, 1046, 1045, 795,
2977 1046, 795, 1046, 795, 1046, 795, 1046, 795,
2978 1046, 795, 1045, 1046, 1046, 795, 1046, 1046,
2979 1046, 1046, 795, 795, 1046, 1046, 1046, 1046,
2980 1046, 795, 1046, 1046, 1046, 1046, 1046, 1045,
2981 795, 1046, 1046, 795, 1046, 795, 1045, 1046,
2982 1046, 795, 1046, 1045, 795, 795, 1046, 795,
2983 1045, 1046, 1046, 1045, 795, 1046, 795, 1045,
2984 1046, 1045, 795, 1046, 795, 1046, 795, 1045,
2985 1046, 1046, 1045, 795, 1046, 795, 1046, 795,
2986 1046, 1045, 1046, 795, 1046, 795, 1046, 1045,
2987 795, 1046, 1045, 795, 795, 1046, 1045, 1046,
2988 795, 1045, 1046, 1045, 795, 1046, 795, 1046,
2989 795, 1045, 1046, 1045, 795, 795, 1046, 1045,
2990 1046, 795, 1046, 795, 1046, 1045, 795, 1046,
2991 795, 1045, 1046, 1045, 795, 795, 1046, 795,
2992 1045, 1046, 1045, 795, 795, 1046, 1045, 1046,
2993 795, 1046, 1045, 1046, 795, 1046, 1045, 1046,
2994 795, 1046, 795, 1046, 795, 1045, 1046, 1045,
2995 795, 795, 1046, 1045, 1046, 795, 1046, 795,
2996 1046, 1045, 795, 1046, 1045, 1046, 1046, 795,
2997 1046, 795, 1046, 1045, 1045, 795, 1045, 795,
2998 1046, 1046, 795, 1046, 1046, 1046, 1046, 1046,
2999 1046, 1046, 1045, 795, 1046, 1046, 1046, 795,
3000 1045, 1046, 1046, 1046, 795, 1046, 795, 1046,
3001 795, 1046, 795, 1046, 795, 1046, 1045, 795,
3002 795, 1046, 1045, 1046, 795, 1046, 1045, 795,
3003 795, 1046, 795, 795, 795, 1046, 795, 1046,
3004 795, 1046, 795, 1046, 795, 1045, 795, 1046,
3005 795, 1046, 795, 1045, 1046, 1045, 795, 1046,
3006 795, 1045, 1046, 795, 1046, 1046, 1046, 1045,
3007 795, 1046, 795, 795, 1046, 795, 1045, 1046,
3008 1046, 1045, 795, 1046, 1046, 1046, 1046, 795,
3009 1046, 795, 1045, 1046, 1046, 1046, 795, 1046,
3010 1045, 1046, 795, 1046, 795, 1046, 795, 1046,
3011 795, 1046, 1045, 1046, 1046, 795, 1046, 1045,
3012 795, 1046, 795, 1046, 795, 1045, 1046, 1046,
3013 1045, 795, 1046, 795, 1045, 1046, 1045, 795,
3014 1046, 1045, 795, 1046, 795, 1046, 1045, 1046,
3015 1046, 1046, 1045, 795, 795, 795, 1046, 1045,
3016 795, 1046, 795, 1045, 1046, 1045, 795, 1046,
3017 795, 1046, 795, 1045, 1046, 1046, 1046, 1045,
3018 795, 1046, 795, 1045, 1046, 1046, 1046, 1046,
3019 1045, 795, 1046, 795, 1046, 1045, 795, 795,
3020 1046, 795, 1046, 1045, 1046, 795, 1046, 795,
3021 1045, 1046, 1046, 1045, 795, 1046, 795, 1046,
3022 1045, 795, 1046, 1046, 1046, 795, 1046, 795,
3023 1045, 795, 1046, 1045, 1046, 795, 795, 1046,
3024 795, 1046, 795, 1045, 1046, 1046, 1046, 1046,
3025 1045, 795, 1046, 795, 1046, 795, 1046, 795,
3026 1046, 795, 1046, 1045, 1046, 1046, 1046, 795,
3027 1046, 795, 1046, 795, 1046, 795, 1045, 1046,
3028 1046, 795, 795, 1046, 1045, 1046, 795, 1046,
3029 1046, 1045, 795, 1046, 795, 1046, 1045, 795,
3030 795, 1046, 1046, 1046, 1046, 795, 1046, 795,
3031 1046, 795, 1045, 1046, 1046, 795, 1045, 1046,
3032 1045, 795, 1046, 795, 1045, 1046, 1045, 795,
3033 1046, 795, 1045, 1046, 795, 1046, 1046, 1045,
3034 795, 1046, 1046, 795, 1045, 1046, 1045, 795,
3035 1046, 795, 1046, 1045, 1046, 795, 1046, 795,
3036 1045, 1046, 1045, 795, 1046, 795, 1046, 795,
3037 1046, 795, 1046, 795, 1046, 1045, 1047, 1045,
3038 1048, 1049, 1050, 1051, 1052, 1053, 1054, 1055,
3039 1056, 1057, 1058, 1050, 1059, 1060, 1061, 1062,
3040 1063, 1050, 1064, 1065, 1066, 1067, 1068, 1069,
3041 1070, 1071, 1072, 1073, 1074, 1075, 1076, 1077,
3042 1078, 1050, 1079, 1047, 1059, 1047, 1080, 1047,
3043 1045, 1046, 1046, 1046, 1046, 795, 1045, 1046,
3044 1046, 1045, 795, 1046, 1045, 795, 795, 1046,
3045 1045, 795, 1046, 795, 1045, 1046, 1045, 795,
3046 795, 1046, 795, 1045, 1046, 1046, 1045, 795,
3047 1046, 1046, 1046, 1045, 795, 1046, 795, 1046,
3048 1046, 1045, 795, 795, 1046, 795, 1045, 1046,
3049 1045, 795, 1046, 1045, 795, 795, 1046, 795,
3050 1046, 1045, 795, 1046, 795, 795, 1046, 795,
3051 1046, 795, 1045, 1046, 1046, 1045, 795, 1046,
3052 1046, 795, 1046, 1045, 795, 1046, 795, 1046,
3053 1045, 795, 1046, 795, 1045, 795, 1046, 1046,
3054 1046, 795, 1046, 1045, 1046, 795, 1046, 1045,
3055 795, 1046, 1045, 1046, 795, 1046, 1045, 795,
3056 1046, 1045, 795, 1046, 795, 1046, 1045, 795,
3057 1046, 1045, 795, 1046, 1045, 1081, 1082, 1083,
3058 1084, 1085, 1086, 1087, 1088, 1089, 1090, 1091,
3059 1092, 1052, 1093, 1094, 1095, 1096, 1097, 1094,
3060 1098, 1099, 1100, 1101, 1102, 1103, 1104, 1105,
3061 1106, 1047, 1045, 1046, 795, 1046, 1045, 1046,
3062 795, 1046, 1045, 1046, 795, 1046, 1045, 1046,
3063 795, 1046, 1045, 795, 1046, 795, 1046, 1045,
3064 1046, 795, 1046, 1045, 1046, 795, 795, 795,
3065 1046, 1045, 1046, 795, 1046, 1045, 1046, 1046,
3066 1046, 1046, 795, 1046, 795, 1045, 1046, 1045,
3067 795, 795, 1046, 795, 1046, 1045, 1046, 795,
3068 1046, 1045, 795, 1046, 1045, 1046, 1046, 795,
3069 1046, 1045, 795, 1046, 1045, 1046, 795, 1046,
3070 1045, 795, 1046, 1045, 795, 1046, 1045, 795,
3071 1046, 1045, 1046, 1045, 795, 795, 1046, 1045,
3072 1046, 795, 1046, 1045, 795, 1046, 795, 1045,
3073 1046, 1045, 795, 1050, 1107, 1047, 1050, 1108,
3074 1050, 1109, 1059, 1047, 1045, 1046, 1045, 795,
3075 1046, 1045, 795, 1050, 1108, 1059, 1047, 1045,
3076 1050, 1110, 1047, 1059, 1047, 1045, 1046, 1045,
3077 795, 1050, 1111, 1068, 1112, 1094, 1113, 1106,
3078 1050, 1114, 1115, 1116, 1047, 1059, 1047, 1045,
3079 1046, 1045, 795, 1046, 795, 1046, 1045, 795,
3080 1046, 795, 1046, 795, 1045, 1046, 1046, 1045,
3081 795, 1046, 795, 1046, 1045, 795, 1046, 1045,
3082 1050, 1059, 801, 1045, 1117, 1050, 1118, 1059,
3083 1047, 1045, 801, 1046, 1045, 795, 1046, 1045,
3084 795, 1119, 1050, 1120, 1121, 1047, 1045, 795,
3085 1046, 1045, 1046, 1046, 1045, 795, 795, 1046,
3086 795, 1046, 1045, 1050, 1122, 1123, 1124, 1125,
3087 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1047,
3088 1059, 1047, 1045, 1046, 795, 1046, 1046, 1046,
3089 1046, 1046, 1046, 1046, 795, 1046, 795, 1046,
3090 1046, 1046, 1046, 1046, 1046, 1045, 795, 1046,
3091 1046, 795, 1046, 795, 1045, 1046, 795, 1046,
3092 1046, 1046, 795, 1046, 1046, 795, 1046, 1046,
3093 795, 1046, 1046, 795, 1046, 1046, 1045, 795,
3094 1050, 1133, 1050, 1109, 1134, 1135, 1136, 1047,
3095 1059, 1047, 1045, 1046, 1045, 795, 1046, 1046,
3096 1046, 795, 1046, 1046, 1046, 795, 1046, 795,
3097 1046, 1045, 795, 795, 795, 795, 1046, 1046,
3098 795, 795, 795, 795, 795, 1046, 1046, 1046,
3099 1046, 1046, 1046, 1046, 795, 1046, 795, 1046,
3100 795, 1045, 1046, 1046, 1046, 795, 1046, 795,
3101 1046, 1045, 1059, 801, 1137, 1050, 1059, 801,
3102 1046, 1045, 795, 1138, 1050, 1139, 1059, 801,
3103 1046, 1045, 795, 1046, 795, 1140, 1059, 1047,
3104 1045, 801, 1046, 1045, 795, 1050, 1141, 1047,
3105 1059, 1047, 1045, 1046, 1045, 795, 1142, 1143,
3106 1144, 1142, 1145, 1146, 1147, 1148, 1149, 1150,
3107 1151, 1152, 1153, 1154, 672, 672, 422, 1155,
3108 1156, 1157, 1158, 672, 1161, 1162, 1164, 1165,
3109 1166, 1160, 1167, 1168, 1169, 1170, 1171, 1172,
3110 1173, 1174, 1175, 1176, 1177, 1178, 1179, 1180,
3111 1181, 1182, 1183, 1184, 1185, 1186, 1188, 1189,
3112 1190, 1191, 1192, 1193, 672, 1148, 10, 1148,
3113 422, 1148, 422, 1160, 1163, 1187, 1194, 1159,
3114 1142, 1142, 1195, 1143, 1196, 1198, 1197, 2,
3115 1, 1199, 1197, 1200, 1197, 5, 1, 1197,
3116 6, 5, 9, 11, 11, 10, 1202, 1203,
3117 1204, 1197, 1205, 1206, 1197, 1207, 1197, 422,
3118 422, 1209, 1210, 491, 472, 1211, 472, 1212,
3119 1213, 1214, 1215, 1216, 1217, 1218, 1219, 1220,
3120 1221, 1222, 546, 1223, 522, 1224, 1225, 1226,
3121 1227, 1228, 1229, 1230, 1231, 1232, 1233, 1234,
3122 1235, 422, 422, 422, 427, 567, 1208, 1236,
3123 1197, 1237, 1197, 672, 1238, 422, 422, 422,
3124 672, 1238, 672, 672, 422, 1238, 422, 1238,
3125 422, 1238, 422, 672, 672, 672, 672, 672,
3126 1238, 422, 672, 672, 672, 422, 672, 422,
3127 1238, 422, 672, 672, 672, 672, 422, 1238,
3128 672, 422, 672, 422, 672, 422, 672, 672,
3129 422, 672, 1238, 422, 672, 422, 672, 422,
3130 672, 1238, 672, 422, 1238, 672, 422, 672,
3131 422, 1238, 672, 672, 672, 672, 672, 1238,
3132 422, 422, 672, 422, 672, 1238, 672, 422,
3133 1238, 672, 672, 1238, 422, 422, 672, 422,
3134 672, 422, 672, 1238, 1239, 1240, 1241, 1242,
3135 1243, 1244, 1245, 1246, 1247, 1248, 1249, 717,
3136 1250, 1251, 1252, 1253, 1254, 1255, 1256, 1257,
3137 1258, 1259, 1260, 1261, 1260, 1262, 1263, 1264,
3138 1265, 1266, 673, 1238, 1267, 1268, 1269, 1270,
3139 1271, 1272, 1273, 1274, 1275, 1276, 1277, 1278,
3140 1279, 1280, 1281, 1282, 1283, 1284, 1285, 727,
3141 1286, 1287, 1288, 694, 1289, 1290, 1291, 1292,
3142 1293, 1294, 673, 1295, 1296, 1297, 1298, 1299,
3143 1300, 1301, 1302, 676, 1303, 673, 676, 1304,
3144 1305, 1306, 1307, 685, 1238, 1308, 1309, 1310,
3145 1311, 705, 1312, 1313, 685, 1314, 1315, 1316,
3146 1317, 1318, 673, 1238, 1319, 1278, 1320, 1321,
3147 1322, 685, 1323, 1324, 676, 673, 685, 427,
3148 1238, 1288, 673, 676, 685, 427, 685, 427,
3149 1325, 685, 1238, 427, 676, 1326, 1327, 676,
3150 1328, 1329, 683, 1330, 1331, 1332, 1333, 1334,
3151 1284, 1335, 1336, 1337, 1338, 1339, 1340, 1341,
3152 1342, 1343, 1344, 1345, 1346, 1303, 1347, 676,
3153 685, 427, 1238, 1348, 1349, 685, 673, 1238,
3154 427, 673, 1238, 676, 1350, 733, 1351, 1352,
3155 1353, 1354, 1355, 1356, 1357, 1358, 673, 1359,
3156 1360, 1361, 1362, 1363, 1364, 673, 685, 1238,
3157 1366, 1367, 1368, 1369, 1370, 1371, 1372, 1373,
3158 1374, 1375, 1376, 1372, 1378, 1379, 1380, 1381,
3159 1365, 1377, 1365, 1238, 1365, 1238, 1382, 1382,
3160 1383, 1384, 1385, 1386, 1387, 1388, 1389, 1390,
3161 1387, 771, 1391, 1391, 1391, 1392, 1393, 1386,
3162 1391, 772, 773, 1394, 1391, 771, 1395, 1395,
3163 1395, 1397, 1398, 1399, 1395, 1400, 1401, 1402,
3164 1395, 1396, 1403, 1403, 1403, 1405, 1406, 1407,
3165 1403, 1408, 1409, 1410, 1403, 1404, 1391, 1391,
3166 1411, 1412, 1386, 1391, 772, 773, 1394, 1391,
3167 771, 1413, 1414, 1415, 771, 1416, 1417, 1418,
3168 769, 769, 769, 769, 1420, 1421, 1422, 1396,
3169 769, 1423, 1424, 1425, 769, 1419, 770, 770,
3170 770, 1427, 1428, 1429, 1396, 770, 1430, 1431,
3171 1432, 770, 1426, 769, 769, 769, 1434, 1435,
3172 1436, 1404, 769, 1437, 1438, 1439, 769, 1433,
3173 1395, 1395, 771, 1440, 1441, 1399, 1395, 1400,
3174 1401, 1402, 1395, 1396, 1442, 1443, 1444, 771,
3175 1445, 1446, 1447, 770, 770, 770, 770, 1449,
3176 1450, 1451, 1404, 770, 1452, 1453, 1454, 770,
3177 1448, 1403, 1403, 771, 1455, 1456, 1407, 1403,
3178 1408, 1409, 1410, 1403, 1404, 1403, 1403, 1403,
3179 1405, 1406, 1407, 771, 1408, 1409, 1410, 1403,
3180 1404, 1403, 1403, 1403, 1405, 1406, 1407, 772,
3181 1408, 1409, 1410, 1403, 1404, 1403, 1403, 1403,
3182 1405, 1406, 1407, 773, 1408, 1409, 1410, 1403,
3183 1404, 1395, 1395, 1395, 1397, 1398, 1399, 771,
3184 1400, 1401, 1402, 1395, 1396, 1395, 1395, 1395,
3185 1397, 1398, 1399, 772, 1400, 1401, 1402, 1395,
3186 1396, 1395, 1395, 1395, 1397, 1398, 1399, 773,
3187 1400, 1401, 1402, 1395, 1396, 1458, 769, 1460,
3188 1459, 1461, 770, 1463, 1462, 771, 1464, 775,
3189 1464, 1465, 1464, 777, 1466, 1467, 1468, 1469,
3190 1470, 1471, 1472, 1469, 781, 777, 1466, 1474,
3191 1475, 1473, 782, 783, 1476, 1473, 781, 1479,
3192 1480, 1481, 1482, 1477, 1483, 1484, 1485, 1477,
3193 1478, 1488, 1489, 1490, 1491, 1486, 1492, 1493,
3194 1494, 1486, 1487, 1496, 1495, 1498, 1497, 781,
3195 1499, 782, 1499, 783, 1499, 787, 1500, 1501,
3196 1502, 1503, 1504, 1505, 1506, 1503, 789, 787,
3197 1500, 1508, 1507, 790, 791, 1509, 1507, 789,
3198 1511, 1510, 1513, 1512, 789, 1514, 790, 1514,
3199 791, 1514, 795, 1517, 1518, 1520, 1521, 1522,
3200 1516, 1523, 1524, 1525, 1526, 1527, 1528, 1529,
3201 1530, 1531, 1532, 1533, 1534, 1535, 1536, 1537,
3202 1538, 1539, 1540, 1541, 1542, 1544, 1545, 1546,
3203 1547, 1548, 1549, 795, 795, 1515, 1516, 1519,
3204 1543, 1550, 1515, 1046, 795, 795, 1552, 1553,
3205 865, 846, 1554, 846, 1555, 1556, 1557, 1558,
3206 1559, 1560, 1561, 1562, 1563, 1564, 1565, 920,
3207 1566, 896, 1567, 1568, 1569, 1570, 1571, 1572,
3208 1573, 1574, 1575, 1576, 1577, 1578, 795, 795,
3209 795, 801, 941, 1551, 1046, 1579, 795, 795,
3210 795, 1046, 1579, 1046, 1046, 795, 1579, 795,
3211 1579, 795, 1579, 795, 1046, 1046, 1046, 1046,
3212 1046, 1579, 795, 1046, 1046, 1046, 795, 1046,
3213 795, 1579, 795, 1046, 1046, 1046, 1046, 795,
3214 1579, 1046, 795, 1046, 795, 1046, 795, 1046,
3215 1046, 795, 1046, 1579, 795, 1046, 795, 1046,
3216 795, 1046, 1579, 1046, 795, 1579, 1046, 795,
3217 1046, 795, 1579, 1046, 1046, 1046, 1046, 1046,
3218 1579, 795, 795, 1046, 795, 1046, 1579, 1046,
3219 795, 1579, 1046, 1046, 1579, 795, 795, 1046,
3220 795, 1046, 795, 1046, 1579, 1580, 1581, 1582,
3221 1583, 1584, 1585, 1586, 1587, 1588, 1589, 1590,
3222 1091, 1591, 1592, 1593, 1594, 1595, 1596, 1597,
3223 1598, 1599, 1600, 1601, 1602, 1601, 1603, 1604,
3224 1605, 1606, 1607, 1047, 1579, 1608, 1609, 1610,
3225 1611, 1612, 1613, 1614, 1615, 1616, 1617, 1618,
3226 1619, 1620, 1621, 1622, 1623, 1624, 1625, 1626,
3227 1101, 1627, 1628, 1629, 1068, 1630, 1631, 1632,
3228 1633, 1634, 1635, 1047, 1636, 1637, 1638, 1639,
3229 1640, 1641, 1642, 1643, 1050, 1644, 1047, 1050,
3230 1645, 1646, 1647, 1648, 1059, 1579, 1649, 1650,
3231 1651, 1652, 1079, 1653, 1654, 1059, 1655, 1656,
3232 1657, 1658, 1659, 1047, 1579, 1660, 1619, 1661,
3233 1662, 1663, 1059, 1664, 1665, 1050, 1047, 1059,
3234 801, 1579, 1629, 1047, 1050, 1059, 801, 1059,
3235 801, 1666, 1059, 1579, 801, 1050, 1667, 1668,
3236 1050, 1669, 1670, 1057, 1671, 1672, 1673, 1674,
3237 1675, 1625, 1676, 1677, 1678, 1679, 1680, 1681,
3238 1682, 1683, 1684, 1685, 1686, 1687, 1644, 1688,
3239 1050, 1059, 801, 1579, 1689, 1690, 1059, 1047,
3240 1579, 801, 1047, 1579, 1050, 1691, 1107, 1692,
3241 1693, 1694, 1695, 1696, 1697, 1698, 1699, 1047,
3242 1700, 1701, 1702, 1703, 1704, 1705, 1047, 1059,
3243 1579, 1707, 1708, 1709, 1710, 1711, 1712, 1713,
3244 1714, 1715, 1716, 1717, 1713, 1719, 1720, 1721,
3245 1722, 1706, 1718, 1706, 1579, 1706, 1579,
3246}
3247
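// _hcltok_trans_targs gives, for each transition of the Ragel-generated
// scanner state machine, the numeric ID of the state entered when that
// transition is taken.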
3248var _hcltok_trans_targs []int16 = []int16{
3249 1464, 1, 1464, 1464, 1464, 3, 4, 1472,
3250 1464, 5, 1473, 6, 7, 9, 10, 287,
3251 13, 14, 15, 16, 17, 288, 289, 20,
3252 290, 22, 23, 291, 292, 293, 294, 295,
3253 296, 297, 298, 299, 300, 329, 349, 354,
3254 128, 129, 130, 357, 152, 372, 376, 1464,
3255 11, 12, 18, 19, 21, 24, 25, 26,
3256 27, 28, 29, 30, 31, 32, 33, 65,
3257 106, 121, 132, 155, 171, 284, 34, 35,
3258 36, 37, 38, 39, 40, 41, 42, 43,
3259 44, 45, 46, 47, 48, 49, 50, 51,
3260 52, 53, 54, 55, 56, 57, 58, 59,
3261 60, 61, 62, 63, 64, 66, 67, 68,
3262 69, 70, 71, 72, 73, 74, 75, 76,
3263 77, 78, 79, 80, 81, 82, 83, 84,
3264 85, 86, 87, 88, 89, 90, 91, 92,
3265 93, 94, 95, 96, 97, 98, 99, 100,
3266 101, 102, 103, 104, 105, 107, 108, 109,
3267 110, 111, 112, 113, 114, 115, 116, 117,
3268 118, 119, 120, 122, 123, 124, 125, 126,
3269 127, 131, 133, 134, 135, 136, 137, 138,
3270 139, 140, 141, 142, 143, 144, 145, 146,
3271 147, 148, 149, 150, 151, 153, 154, 156,
3272 157, 158, 159, 160, 161, 162, 163, 164,
3273 165, 166, 167, 168, 169, 170, 172, 204,
3274 228, 231, 232, 234, 243, 244, 247, 251,
3275 269, 276, 278, 280, 282, 173, 174, 175,
3276 176, 177, 178, 179, 180, 181, 182, 183,
3277 184, 185, 186, 187, 188, 189, 190, 191,
3278 192, 193, 194, 195, 196, 197, 198, 199,
3279 200, 201, 202, 203, 205, 206, 207, 208,
3280 209, 210, 211, 212, 213, 214, 215, 216,
3281 217, 218, 219, 220, 221, 222, 223, 224,
3282 225, 226, 227, 229, 230, 233, 235, 236,
3283 237, 238, 239, 240, 241, 242, 245, 246,
3284 248, 249, 250, 252, 253, 254, 255, 256,
3285 257, 258, 259, 260, 261, 262, 263, 264,
3286 265, 266, 267, 268, 270, 271, 272, 273,
3287 274, 275, 277, 279, 281, 283, 285, 286,
3288 301, 302, 303, 304, 305, 306, 307, 308,
3289 309, 310, 311, 312, 313, 314, 315, 316,
3290 317, 318, 319, 320, 321, 322, 323, 324,
3291 325, 326, 327, 328, 330, 331, 332, 333,
3292 334, 335, 336, 337, 338, 339, 340, 341,
3293 342, 343, 344, 345, 346, 347, 348, 350,
3294 351, 352, 353, 355, 356, 358, 359, 360,
3295 361, 362, 363, 364, 365, 366, 367, 368,
3296 369, 370, 371, 373, 374, 375, 377, 383,
3297 405, 410, 412, 414, 378, 379, 380, 381,
3298 382, 384, 385, 386, 387, 388, 389, 390,
3299 391, 392, 393, 394, 395, 396, 397, 398,
3300 399, 400, 401, 402, 403, 404, 406, 407,
3301 408, 409, 411, 413, 415, 1464, 1477, 438,
3302 439, 440, 441, 418, 442, 443, 444, 445,
3303 446, 447, 448, 449, 450, 451, 452, 453,
3304 454, 455, 456, 457, 458, 459, 460, 461,
3305 462, 463, 464, 465, 466, 467, 468, 470,
3306 471, 472, 473, 474, 475, 476, 477, 478,
3307 479, 480, 481, 482, 483, 484, 485, 486,
3308 420, 487, 488, 489, 490, 491, 492, 493,
3309 494, 495, 496, 497, 498, 499, 500, 501,
3310 502, 503, 504, 419, 505, 506, 507, 508,
3311 509, 511, 512, 513, 514, 515, 516, 517,
3312 518, 519, 520, 521, 522, 523, 524, 526,
3313 527, 528, 529, 530, 531, 535, 537, 538,
3314 539, 540, 435, 541, 542, 543, 544, 545,
3315 546, 547, 548, 549, 550, 551, 552, 553,
3316 554, 555, 557, 558, 560, 561, 562, 563,
3317 564, 565, 433, 566, 567, 568, 569, 570,
3318 571, 572, 573, 574, 576, 608, 632, 635,
3319 636, 638, 647, 648, 651, 655, 673, 533,
3320 680, 682, 684, 686, 577, 578, 579, 580,
3321 581, 582, 583, 584, 585, 586, 587, 588,
3322 589, 590, 591, 592, 593, 594, 595, 596,
3323 597, 598, 599, 600, 601, 602, 603, 604,
3324 605, 606, 607, 609, 610, 611, 612, 613,
3325 614, 615, 616, 617, 618, 619, 620, 621,
3326 622, 623, 624, 625, 626, 627, 628, 629,
3327 630, 631, 633, 634, 637, 639, 640, 641,
3328 642, 643, 644, 645, 646, 649, 650, 652,
3329 653, 654, 656, 657, 658, 659, 660, 661,
3330 662, 663, 664, 665, 666, 667, 668, 669,
3331 670, 671, 672, 674, 675, 676, 677, 678,
3332 679, 681, 683, 685, 687, 689, 690, 1464,
3333 1464, 691, 828, 829, 760, 830, 831, 832,
3334 833, 834, 835, 789, 836, 725, 837, 838,
3335 839, 840, 841, 842, 843, 844, 745, 845,
3336 846, 847, 848, 849, 850, 851, 852, 853,
3337 854, 770, 855, 857, 858, 859, 860, 861,
3338 862, 863, 864, 865, 866, 703, 867, 868,
3339 869, 870, 871, 872, 873, 874, 875, 741,
3340 876, 877, 878, 879, 880, 811, 882, 883,
3341 886, 888, 889, 890, 891, 892, 893, 896,
3342 897, 899, 900, 901, 903, 904, 905, 906,
3343 907, 908, 909, 910, 911, 912, 913, 915,
3344 916, 917, 918, 921, 923, 924, 926, 928,
3345 1515, 1517, 1518, 1516, 931, 932, 1515, 934,
3346 1541, 1541, 1541, 1543, 1544, 1542, 939, 940,
3347 1545, 1546, 1550, 1550, 1550, 1551, 946, 947,
3348 1552, 1553, 1557, 1558, 1557, 973, 974, 975,
3349 976, 953, 977, 978, 979, 980, 981, 982,
3350 983, 984, 985, 986, 987, 988, 989, 990,
3351 991, 992, 993, 994, 995, 996, 997, 998,
3352 999, 1000, 1001, 1002, 1003, 1005, 1006, 1007,
3353 1008, 1009, 1010, 1011, 1012, 1013, 1014, 1015,
3354 1016, 1017, 1018, 1019, 1020, 1021, 955, 1022,
3355 1023, 1024, 1025, 1026, 1027, 1028, 1029, 1030,
3356 1031, 1032, 1033, 1034, 1035, 1036, 1037, 1038,
3357 1039, 954, 1040, 1041, 1042, 1043, 1044, 1046,
3358 1047, 1048, 1049, 1050, 1051, 1052, 1053, 1054,
3359 1055, 1056, 1057, 1058, 1059, 1061, 1062, 1063,
3360 1064, 1065, 1066, 1070, 1072, 1073, 1074, 1075,
3361 970, 1076, 1077, 1078, 1079, 1080, 1081, 1082,
3362 1083, 1084, 1085, 1086, 1087, 1088, 1089, 1090,
3363 1092, 1093, 1095, 1096, 1097, 1098, 1099, 1100,
3364 968, 1101, 1102, 1103, 1104, 1105, 1106, 1107,
3365 1108, 1109, 1111, 1143, 1167, 1170, 1171, 1173,
3366 1182, 1183, 1186, 1190, 1208, 1068, 1215, 1217,
3367 1219, 1221, 1112, 1113, 1114, 1115, 1116, 1117,
3368 1118, 1119, 1120, 1121, 1122, 1123, 1124, 1125,
3369 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133,
3370 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141,
3371 1142, 1144, 1145, 1146, 1147, 1148, 1149, 1150,
3372 1151, 1152, 1153, 1154, 1155, 1156, 1157, 1158,
3373 1159, 1160, 1161, 1162, 1163, 1164, 1165, 1166,
3374 1168, 1169, 1172, 1174, 1175, 1176, 1177, 1178,
3375 1179, 1180, 1181, 1184, 1185, 1187, 1188, 1189,
3376 1191, 1192, 1193, 1194, 1195, 1196, 1197, 1198,
3377 1199, 1200, 1201, 1202, 1203, 1204, 1205, 1206,
3378 1207, 1209, 1210, 1211, 1212, 1213, 1214, 1216,
3379 1218, 1220, 1222, 1224, 1225, 1557, 1557, 1226,
3380 1363, 1364, 1295, 1365, 1366, 1367, 1368, 1369,
3381 1370, 1324, 1371, 1260, 1372, 1373, 1374, 1375,
3382 1376, 1377, 1378, 1379, 1280, 1380, 1381, 1382,
3383 1383, 1384, 1385, 1386, 1387, 1388, 1389, 1305,
3384 1390, 1392, 1393, 1394, 1395, 1396, 1397, 1398,
3385 1399, 1400, 1401, 1238, 1402, 1403, 1404, 1405,
3386 1406, 1407, 1408, 1409, 1410, 1276, 1411, 1412,
3387 1413, 1414, 1415, 1346, 1417, 1418, 1421, 1423,
3388 1424, 1425, 1426, 1427, 1428, 1431, 1432, 1434,
3389 1435, 1436, 1438, 1439, 1440, 1441, 1442, 1443,
3390 1444, 1445, 1446, 1447, 1448, 1450, 1451, 1452,
3391 1453, 1456, 1458, 1459, 1461, 1463, 1465, 1464,
3392 1466, 1467, 1464, 1468, 1464, 1469, 1470, 1471,
3393 1474, 1475, 1476, 1464, 1478, 1464, 1479, 1464,
3394 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487,
3395 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495,
3396 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503,
3397 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511,
3398 1512, 1513, 1514, 1464, 1464, 1464, 1464, 1464,
3399 2, 1464, 1464, 8, 1464, 1464, 1464, 1464,
3400 1464, 416, 417, 421, 422, 423, 424, 425,
3401 426, 427, 428, 429, 430, 431, 432, 434,
3402 436, 437, 469, 510, 525, 532, 534, 536,
3403 556, 559, 575, 688, 1464, 1464, 1464, 692,
3404 693, 694, 695, 696, 697, 698, 699, 700,
3405 701, 702, 704, 705, 706, 707, 708, 709,
3406 710, 711, 712, 713, 714, 715, 716, 717,
3407 718, 719, 720, 721, 722, 723, 724, 726,
3408 727, 728, 729, 730, 731, 732, 733, 734,
3409 735, 736, 737, 738, 739, 740, 742, 743,
3410 744, 746, 747, 748, 749, 750, 751, 752,
3411 753, 754, 755, 756, 757, 758, 759, 761,
3412 762, 763, 764, 765, 766, 767, 768, 769,
3413 771, 772, 773, 774, 775, 776, 777, 778,
3414 779, 780, 781, 782, 783, 784, 785, 786,
3415 787, 788, 790, 791, 792, 793, 794, 795,
3416 796, 797, 798, 799, 800, 801, 802, 803,
3417 804, 805, 806, 807, 808, 809, 810, 812,
3418 813, 814, 815, 816, 817, 818, 819, 820,
3419 821, 822, 823, 824, 825, 826, 827, 856,
3420 881, 884, 885, 887, 894, 895, 898, 902,
3421 914, 919, 920, 922, 925, 927, 1515, 1515,
3422 1534, 1536, 1519, 1515, 1538, 1539, 1540, 1515,
3423 929, 930, 933, 1515, 1516, 929, 930, 1519,
3424 931, 932, 933, 1515, 1516, 929, 930, 1519,
3425 931, 932, 933, 1520, 1525, 1521, 1522, 1524,
3426 1531, 1532, 1533, 1517, 1521, 1522, 1524, 1531,
3427 1532, 1533, 1518, 1523, 1526, 1527, 1528, 1529,
3428 1530, 1517, 1521, 1522, 1524, 1531, 1532, 1533,
3429 1520, 1525, 1523, 1526, 1527, 1528, 1529, 1530,
3430 1518, 1523, 1526, 1527, 1528, 1529, 1530, 1520,
3431 1525, 1515, 1535, 1515, 1515, 1537, 1515, 1515,
3432 1515, 935, 936, 942, 943, 1541, 1547, 1548,
3433 1549, 1541, 937, 938, 941, 1541, 1542, 1541,
3434 936, 937, 938, 939, 940, 941, 1541, 1542,
3435 1541, 936, 937, 938, 939, 940, 941, 1541,
3436 1541, 1541, 1541, 1541, 944, 949, 950, 1550,
3437 1554, 1555, 1556, 1550, 945, 948, 1550, 1550,
3438 1550, 1550, 1550, 1557, 1559, 1560, 1561, 1562,
3439 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570,
3440 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578,
3441 1579, 1580, 1581, 1582, 1583, 1584, 1585, 1586,
3442 1587, 1588, 1589, 1590, 1591, 1592, 1593, 1557,
3443 951, 952, 956, 957, 958, 959, 960, 961,
3444 962, 963, 964, 965, 966, 967, 969, 971,
3445 972, 1004, 1045, 1060, 1067, 1069, 1071, 1091,
3446 1094, 1110, 1223, 1557, 1227, 1228, 1229, 1230,
3447 1231, 1232, 1233, 1234, 1235, 1236, 1237, 1239,
3448 1240, 1241, 1242, 1243, 1244, 1245, 1246, 1247,
3449 1248, 1249, 1250, 1251, 1252, 1253, 1254, 1255,
3450 1256, 1257, 1258, 1259, 1261, 1262, 1263, 1264,
3451 1265, 1266, 1267, 1268, 1269, 1270, 1271, 1272,
3452 1273, 1274, 1275, 1277, 1278, 1279, 1281, 1282,
3453 1283, 1284, 1285, 1286, 1287, 1288, 1289, 1290,
3454 1291, 1292, 1293, 1294, 1296, 1297, 1298, 1299,
3455 1300, 1301, 1302, 1303, 1304, 1306, 1307, 1308,
3456 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316,
3457 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1325,
3458 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333,
3459 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341,
3460 1342, 1343, 1344, 1345, 1347, 1348, 1349, 1350,
3461 1351, 1352, 1353, 1354, 1355, 1356, 1357, 1358,
3462 1359, 1360, 1361, 1362, 1391, 1416, 1419, 1420,
3463 1422, 1429, 1430, 1433, 1437, 1449, 1454, 1455,
3464 1457, 1460, 1462,
3465}
3466
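// _hcltok_trans_actions gives, for each transition, an index into the
// shared actions table selecting which actions, if any, run when that
// transition fires.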
3467var _hcltok_trans_actions []byte = []byte{
3468 151, 0, 93, 147, 109, 0, 0, 201,
3469 143, 0, 13, 0, 0, 0, 0, 0,
3470 0, 0, 0, 0, 0, 0, 0, 0,
3471 0, 0, 0, 0, 0, 0, 0, 0,
3472 0, 0, 0, 0, 0, 0, 0, 0,
3473 0, 0, 0, 0, 0, 0, 0, 123,
3474 0, 0, 0, 0, 0, 0, 0, 0,
3475 0, 0, 0, 0, 0, 0, 0, 0,
3476 0, 0, 0, 0, 0, 0, 0, 0,
3477 0, 0, 0, 0, 0, 0, 0, 0,
3478 0, 0, 0, 0, 0, 0, 0, 0,
3479 0, 0, 0, 0, 0, 0, 0, 0,
3480 0, 0, 0, 0, 0, 0, 0, 0,
3481 0, 0, 0, 0, 0, 0, 0, 0,
3482 0, 0, 0, 0, 0, 0, 0, 0,
3483 0, 0, 0, 0, 0, 0, 0, 0,
3484 0, 0, 0, 0, 0, 0, 0, 0,
3485 0, 0, 0, 0, 0, 0, 0, 0,
3486 0, 0, 0, 0, 0, 0, 0, 0,
3487 0, 0, 0, 0, 0, 0, 0, 0,
3488 0, 0, 0, 0, 0, 0, 0, 0,
3489 0, 0, 0, 0, 0, 0, 0, 0,
3490 0, 0, 0, 0, 0, 0, 0, 0,
3491 0, 0, 0, 0, 0, 0, 0, 0,
3492 0, 0, 0, 0, 0, 0, 0, 0,
3493 0, 0, 0, 0, 0, 0, 0, 0,
3494 0, 0, 0, 0, 0, 0, 0, 0,
3495 0, 0, 0, 0, 0, 0, 0, 0,
3496 0, 0, 0, 0, 0, 0, 0, 0,
3497 0, 0, 0, 0, 0, 0, 0, 0,
3498 0, 0, 0, 0, 0, 0, 0, 0,
3499 0, 0, 0, 0, 0, 0, 0, 0,
3500 0, 0, 0, 0, 0, 0, 0, 0,
3501 0, 0, 0, 0, 0, 0, 0, 0,
3502 0, 0, 0, 0, 0, 0, 0, 0,
3503 0, 0, 0, 0, 0, 0, 0, 0,
3504 0, 0, 0, 0, 0, 0, 0, 0,
3505 0, 0, 0, 0, 0, 0, 0, 0,
3506 0, 0, 0, 0, 0, 0, 0, 0,
3507 0, 0, 0, 0, 0, 0, 0, 0,
3508 0, 0, 0, 0, 0, 0, 0, 0,
3509 0, 0, 0, 0, 0, 0, 0, 0,
3510 0, 0, 0, 0, 0, 0, 0, 0,
3511 0, 0, 0, 0, 0, 0, 0, 0,
3512 0, 0, 0, 0, 0, 0, 0, 0,
3513 0, 0, 0, 0, 0, 0, 0, 0,
3514 0, 0, 0, 0, 0, 0, 0, 0,
3515 0, 0, 0, 0, 0, 0, 0, 0,
3516 0, 0, 0, 0, 0, 0, 0, 0,
3517 0, 0, 0, 0, 0, 0, 0, 0,
3518 0, 0, 0, 0, 0, 0, 0, 0,
3519 0, 0, 0, 0, 0, 0, 0, 0,
3520 0, 0, 0, 0, 0, 145, 198, 0,
3521 0, 0, 0, 0, 0, 0, 0, 0,
3522 0, 0, 0, 0, 0, 0, 0, 0,
3523 0, 0, 0, 0, 0, 0, 0, 0,
3524 0, 0, 0, 0, 0, 0, 0, 0,
3525 0, 0, 0, 0, 0, 0, 0, 0,
3526 0, 0, 0, 0, 0, 0, 0, 0,
3527 0, 0, 0, 0, 0, 0, 0, 0,
3528 0, 0, 0, 0, 0, 0, 0, 0,
3529 0, 0, 0, 0, 0, 0, 0, 0,
3530 0, 0, 0, 0, 0, 0, 0, 0,
3531 0, 0, 0, 0, 0, 0, 0, 0,
3532 0, 0, 0, 0, 0, 0, 0, 0,
3533 0, 0, 0, 0, 0, 0, 0, 0,
3534 0, 0, 0, 0, 0, 0, 0, 0,
3535 0, 0, 0, 0, 0, 0, 0, 0,
3536 0, 0, 0, 0, 0, 0, 0, 0,
3537 0, 0, 0, 0, 0, 0, 0, 0,
3538 0, 0, 0, 0, 0, 0, 0, 0,
3539 0, 0, 0, 0, 0, 0, 0, 0,
3540 0, 0, 0, 0, 0, 0, 0, 0,
3541 0, 0, 0, 0, 0, 0, 0, 0,
3542 0, 0, 0, 0, 0, 0, 0, 0,
3543 0, 0, 0, 0, 0, 0, 0, 0,
3544 0, 0, 0, 0, 0, 0, 0, 0,
3545 0, 0, 0, 0, 0, 0, 0, 0,
3546 0, 0, 0, 0, 0, 0, 0, 0,
3547 0, 0, 0, 0, 0, 0, 0, 0,
3548 0, 0, 0, 0, 0, 0, 0, 0,
3549 0, 0, 0, 0, 0, 0, 0, 0,
3550 0, 0, 0, 0, 0, 0, 0, 0,
3551 0, 0, 0, 0, 0, 0, 0, 149,
3552 127, 0, 0, 0, 0, 0, 0, 0,
3553 0, 0, 0, 0, 0, 0, 0, 0,
3554 0, 0, 0, 0, 0, 0, 0, 0,
3555 0, 0, 0, 0, 0, 0, 0, 0,
3556 0, 0, 0, 0, 0, 0, 0, 0,
3557 0, 0, 0, 0, 0, 0, 0, 0,
3558 0, 0, 0, 0, 0, 0, 0, 0,
3559 0, 0, 0, 0, 0, 0, 0, 0,
3560 0, 0, 0, 0, 0, 0, 0, 0,
3561 0, 0, 0, 0, 0, 0, 0, 0,
3562 0, 0, 0, 0, 0, 0, 0, 0,
3563 0, 0, 0, 0, 0, 0, 0, 0,
3564 35, 13, 13, 13, 0, 0, 37, 0,
3565 57, 43, 55, 180, 180, 180, 0, 0,
3566 0, 0, 77, 63, 75, 186, 0, 0,
3567 0, 0, 87, 192, 91, 0, 0, 0,
3568 0, 0, 0, 0, 0, 0, 0, 0,
3569 0, 0, 0, 0, 0, 0, 0, 0,
3570 0, 0, 0, 0, 0, 0, 0, 0,
3571 0, 0, 0, 0, 0, 0, 0, 0,
3572 0, 0, 0, 0, 0, 0, 0, 0,
3573 0, 0, 0, 0, 0, 0, 0, 0,
3574 0, 0, 0, 0, 0, 0, 0, 0,
3575 0, 0, 0, 0, 0, 0, 0, 0,
3576 0, 0, 0, 0, 0, 0, 0, 0,
3577 0, 0, 0, 0, 0, 0, 0, 0,
3578 0, 0, 0, 0, 0, 0, 0, 0,
3579 0, 0, 0, 0, 0, 0, 0, 0,
3580 0, 0, 0, 0, 0, 0, 0, 0,
3581 0, 0, 0, 0, 0, 0, 0, 0,
3582 0, 0, 0, 0, 0, 0, 0, 0,
3583 0, 0, 0, 0, 0, 0, 0, 0,
3584 0, 0, 0, 0, 0, 0, 0, 0,
3585 0, 0, 0, 0, 0, 0, 0, 0,
3586 0, 0, 0, 0, 0, 0, 0, 0,
3587 0, 0, 0, 0, 0, 0, 0, 0,
3588 0, 0, 0, 0, 0, 0, 0, 0,
3589 0, 0, 0, 0, 0, 0, 0, 0,
3590 0, 0, 0, 0, 0, 0, 0, 0,
3591 0, 0, 0, 0, 0, 0, 0, 0,
3592 0, 0, 0, 0, 0, 0, 0, 0,
3593 0, 0, 0, 0, 0, 0, 0, 0,
3594 0, 0, 0, 0, 0, 0, 0, 0,
3595 0, 0, 0, 0, 0, 0, 0, 0,
3596 0, 0, 0, 0, 0, 0, 0, 0,
3597 0, 0, 0, 0, 0, 0, 0, 0,
3598 0, 0, 0, 0, 0, 89, 81, 0,
3599 0, 0, 0, 0, 0, 0, 0, 0,
3600 0, 0, 0, 0, 0, 0, 0, 0,
3601 0, 0, 0, 0, 0, 0, 0, 0,
3602 0, 0, 0, 0, 0, 0, 0, 0,
3603 0, 0, 0, 0, 0, 0, 0, 0,
3604 0, 0, 0, 0, 0, 0, 0, 0,
3605 0, 0, 0, 0, 0, 0, 0, 0,
3606 0, 0, 0, 0, 0, 0, 0, 0,
3607 0, 0, 0, 0, 0, 0, 0, 0,
3608 0, 0, 0, 0, 0, 0, 0, 0,
3609 0, 0, 0, 0, 0, 0, 0, 0,
3610 0, 0, 0, 0, 0, 0, 0, 95,
3611 0, 0, 121, 210, 113, 0, 13, 204,
3612 13, 0, 0, 115, 0, 117, 0, 125,
3613 0, 0, 0, 0, 0, 0, 0, 0,
3614 0, 0, 0, 0, 0, 0, 0, 0,
3615 0, 0, 0, 0, 0, 0, 13, 13,
3616 13, 207, 207, 207, 207, 207, 207, 13,
3617 13, 207, 13, 129, 141, 137, 99, 105,
3618 0, 135, 131, 0, 103, 97, 111, 101,
3619 133, 0, 0, 0, 0, 0, 0, 0,
3620 0, 0, 0, 0, 0, 0, 0, 0,
3621 0, 0, 0, 0, 0, 0, 0, 0,
3622 0, 0, 0, 0, 107, 119, 139, 0,
3623 0, 0, 0, 0, 0, 0, 0, 0,
3624 0, 0, 0, 0, 0, 0, 0, 0,
3625 0, 0, 0, 0, 0, 0, 0, 0,
3626 0, 0, 0, 0, 0, 0, 0, 0,
3627 0, 0, 0, 0, 0, 0, 0, 0,
3628 0, 0, 0, 0, 0, 0, 0, 0,
3629 0, 0, 0, 0, 0, 0, 0, 0,
3630 0, 0, 0, 0, 0, 0, 0, 0,
3631 0, 0, 0, 0, 0, 0, 0, 0,
3632 0, 0, 0, 0, 0, 0, 0, 0,
3633 0, 0, 0, 0, 0, 0, 0, 0,
3634 0, 0, 0, 0, 0, 0, 0, 0,
3635 0, 0, 0, 0, 0, 0, 0, 0,
3636 0, 0, 0, 0, 0, 0, 0, 0,
3637 0, 0, 0, 0, 0, 0, 0, 0,
3638 0, 0, 0, 0, 0, 0, 0, 0,
3639 0, 0, 0, 0, 0, 0, 0, 0,
3640 0, 0, 0, 0, 0, 0, 21, 19,
3641 0, 0, 13, 23, 0, 13, 13, 29,
3642 0, 0, 0, 153, 174, 1, 1, 174,
3643 1, 1, 1, 156, 177, 3, 3, 177,
3644 3, 3, 3, 0, 0, 0, 0, 13,
3645 13, 13, 13, 174, 1, 1, 174, 174,
3646 174, 174, 174, 1, 1, 174, 174, 174,
3647 174, 177, 3, 3, 177, 177, 177, 177,
3648 1, 1, 0, 0, 13, 13, 13, 13,
3649 177, 3, 3, 177, 177, 177, 177, 3,
3650 3, 31, 0, 25, 15, 0, 27, 17,
3651 33, 0, 0, 0, 0, 45, 0, 183,
3652 183, 51, 0, 0, 0, 162, 213, 159,
3653 5, 5, 5, 5, 5, 5, 168, 217,
3654 165, 7, 7, 7, 7, 7, 7, 47,
3655 39, 49, 41, 53, 0, 0, 0, 65,
3656 0, 189, 189, 71, 0, 0, 67, 59,
3657 69, 61, 73, 79, 0, 0, 0, 0,
3658 0, 0, 0, 0, 0, 0, 0, 0,
3659 0, 0, 0, 0, 0, 0, 0, 0,
3660 0, 0, 13, 13, 13, 195, 195, 195,
3661 195, 195, 195, 13, 13, 195, 13, 83,
3662 0, 0, 0, 0, 0, 0, 0, 0,
3663 0, 0, 0, 0, 0, 0, 0, 0,
3664 0, 0, 0, 0, 0, 0, 0, 0,
3665 0, 0, 0, 85, 0, 0, 0, 0,
3666 0, 0, 0, 0, 0, 0, 0, 0,
3667 0, 0, 0, 0, 0, 0, 0, 0,
3668 0, 0, 0, 0, 0, 0, 0, 0,
3669 0, 0, 0, 0, 0, 0, 0, 0,
3670 0, 0, 0, 0, 0, 0, 0, 0,
3671 0, 0, 0, 0, 0, 0, 0, 0,
3672 0, 0, 0, 0, 0, 0, 0, 0,
3673 0, 0, 0, 0, 0, 0, 0, 0,
3674 0, 0, 0, 0, 0, 0, 0, 0,
3675 0, 0, 0, 0, 0, 0, 0, 0,
3676 0, 0, 0, 0, 0, 0, 0, 0,
3677 0, 0, 0, 0, 0, 0, 0, 0,
3678 0, 0, 0, 0, 0, 0, 0, 0,
3679 0, 0, 0, 0, 0, 0, 0, 0,
3680 0, 0, 0, 0, 0, 0, 0, 0,
3681 0, 0, 0, 0, 0, 0, 0, 0,
3682 0, 0, 0, 0, 0, 0, 0, 0,
3683 0, 0, 0,
3684}
3685
3686var _hcltok_to_state_actions []byte = []byte{
3687 0, 0, 0, 0, 0, 0, 0, 0,
3688 0, 0, 0, 0, 0, 0, 0, 0,
3689 0, 0, 0, 0, 0, 0, 0, 0,
3690 0, 0, 0, 0, 0, 0, 0, 0,
3691 0, 0, 0, 0, 0, 0, 0, 0,
3692 0, 0, 0, 0, 0, 0, 0, 0,
3693 0, 0, 0, 0, 0, 0, 0, 0,
3694 0, 0, 0, 0, 0, 0, 0, 0,
3695 0, 0, 0, 0, 0, 0, 0, 0,
3696 0, 0, 0, 0, 0, 0, 0, 0,
3697 0, 0, 0, 0, 0, 0, 0, 0,
3698 0, 0, 0, 0, 0, 0, 0, 0,
3699 0, 0, 0, 0, 0, 0, 0, 0,
3700 0, 0, 0, 0, 0, 0, 0, 0,
3701 0, 0, 0, 0, 0, 0, 0, 0,
3702 0, 0, 0, 0, 0, 0, 0, 0,
3703 0, 0, 0, 0, 0, 0, 0, 0,
3704 0, 0, 0, 0, 0, 0, 0, 0,
3705 0, 0, 0, 0, 0, 0, 0, 0,
3706 0, 0, 0, 0, 0, 0, 0, 0,
3707 0, 0, 0, 0, 0, 0, 0, 0,
3708 0, 0, 0, 0, 0, 0, 0, 0,
3709 0, 0, 0, 0, 0, 0, 0, 0,
3710 0, 0, 0, 0, 0, 0, 0, 0,
3711 0, 0, 0, 0, 0, 0, 0, 0,
3712 0, 0, 0, 0, 0, 0, 0, 0,
3713 0, 0, 0, 0, 0, 0, 0, 0,
3714 0, 0, 0, 0, 0, 0, 0, 0,
3715 0, 0, 0, 0, 0, 0, 0, 0,
3716 0, 0, 0, 0, 0, 0, 0, 0,
3717 0, 0, 0, 0, 0, 0, 0, 0,
3718 0, 0, 0, 0, 0, 0, 0, 0,
3719 0, 0, 0, 0, 0, 0, 0, 0,
3720 0, 0, 0, 0, 0, 0, 0, 0,
3721 0, 0, 0, 0, 0, 0, 0, 0,
3722 0, 0, 0, 0, 0, 0, 0, 0,
3723 0, 0, 0, 0, 0, 0, 0, 0,
3724 0, 0, 0, 0, 0, 0, 0, 0,
3725 0, 0, 0, 0, 0, 0, 0, 0,
3726 0, 0, 0, 0, 0, 0, 0, 0,
3727 0, 0, 0, 0, 0, 0, 0, 0,
3728 0, 0, 0, 0, 0, 0, 0, 0,
3729 0, 0, 0, 0, 0, 0, 0, 0,
3730 0, 0, 0, 0, 0, 0, 0, 0,
3731 0, 0, 0, 0, 0, 0, 0, 0,
3732 0, 0, 0, 0, 0, 0, 0, 0,
3733 0, 0, 0, 0, 0, 0, 0, 0,
3734 0, 0, 0, 0, 0, 0, 0, 0,
3735 0, 0, 0, 0, 0, 0, 0, 0,
3736 0, 0, 0, 0, 0, 0, 0, 0,
3737 0, 0, 0, 0, 0, 0, 0, 0,
3738 0, 0, 0, 0, 0, 0, 0, 0,
3739 0, 0, 0, 0, 0, 0, 0, 0,
3740 0, 0, 0, 0, 0, 0, 0, 0,
3741 0, 0, 0, 0, 0, 0, 0, 0,
3742 0, 0, 0, 0, 0, 0, 0, 0,
3743 0, 0, 0, 0, 0, 0, 0, 0,
3744 0, 0, 0, 0, 0, 0, 0, 0,
3745 0, 0, 0, 0, 0, 0, 0, 0,
3746 0, 0, 0, 0, 0, 0, 0, 0,
3747 0, 0, 0, 0, 0, 0, 0, 0,
3748 0, 0, 0, 0, 0, 0, 0, 0,
3749 0, 0, 0, 0, 0, 0, 0, 0,
3750 0, 0, 0, 0, 0, 0, 0, 0,
3751 0, 0, 0, 0, 0, 0, 0, 0,
3752 0, 0, 0, 0, 0, 0, 0, 0,
3753 0, 0, 0, 0, 0, 0, 0, 0,
3754 0, 0, 0, 0, 0, 0, 0, 0,
3755 0, 0, 0, 0, 0, 0, 0, 0,
3756 0, 0, 0, 0, 0, 0, 0, 0,
3757 0, 0, 0, 0, 0, 0, 0, 0,
3758 0, 0, 0, 0, 0, 0, 0, 0,
3759 0, 0, 0, 0, 0, 0, 0, 0,
3760 0, 0, 0, 0, 0, 0, 0, 0,
3761 0, 0, 0, 0, 0, 0, 0, 0,
3762 0, 0, 0, 0, 0, 0, 0, 0,
3763 0, 0, 0, 0, 0, 0, 0, 0,
3764 0, 0, 0, 0, 0, 0, 0, 0,
3765 0, 0, 0, 0, 0, 0, 0, 0,
3766 0, 0, 0, 0, 0, 0, 0, 0,
3767 0, 0, 0, 0, 0, 0, 0, 0,
3768 0, 0, 0, 0, 0, 0, 0, 0,
3769 0, 0, 0, 0, 0, 0, 0, 0,
3770 0, 0, 0, 0, 0, 0, 0, 0,
3771 0, 0, 0, 0, 0, 0, 0, 0,
3772 0, 0, 0, 0, 0, 0, 0, 0,
3773 0, 0, 0, 0, 0, 0, 0, 0,
3774 0, 0, 0, 0, 0, 0, 0, 0,
3775 0, 0, 0, 0, 0, 0, 0, 0,
3776 0, 0, 0, 0, 0, 0, 0, 0,
3777 0, 0, 0, 0, 0, 0, 0, 0,
3778 0, 0, 0, 0, 0, 0, 0, 0,
3779 0, 0, 0, 0, 0, 0, 0, 0,
3780 0, 0, 0, 0, 0, 0, 0, 0,
3781 0, 0, 0, 0, 0, 0, 0, 0,
3782 0, 0, 0, 0, 0, 0, 0, 0,
3783 0, 0, 0, 0, 0, 0, 0, 0,
3784 0, 0, 0, 0, 0, 0, 0, 0,
3785 0, 0, 0, 0, 0, 0, 0, 0,
3786 0, 0, 0, 0, 0, 0, 0, 0,
3787 0, 0, 0, 0, 0, 0, 0, 0,
3788 0, 0, 0, 0, 0, 0, 0, 0,
3789 0, 0, 0, 0, 0, 0, 0, 0,
3790 0, 0, 0, 0, 0, 0, 0, 0,
3791 0, 0, 0, 0, 0, 0, 0, 0,
3792 0, 0, 0, 0, 0, 0, 0, 0,
3793 0, 0, 0, 0, 0, 0, 0, 0,
3794 0, 0, 0, 0, 0, 0, 0, 0,
3795 0, 0, 0, 0, 0, 0, 0, 0,
3796 0, 0, 0, 0, 0, 0, 0, 0,
3797 0, 0, 0, 0, 0, 0, 0, 0,
3798 0, 0, 0, 0, 0, 0, 0, 0,
3799 0, 0, 0, 0, 0, 0, 0, 0,
3800 0, 0, 0, 0, 0, 0, 0, 0,
3801 0, 0, 0, 0, 0, 0, 0, 0,
3802 0, 0, 0, 0, 0, 0, 0, 0,
3803 0, 0, 0, 0, 0, 0, 0, 0,
3804 0, 0, 0, 0, 0, 0, 0, 0,
3805 0, 0, 0, 0, 0, 0, 0, 0,
3806 0, 0, 0, 0, 0, 0, 0, 0,
3807 0, 0, 0, 0, 0, 0, 0, 0,
3808 0, 0, 0, 0, 0, 0, 0, 0,
3809 0, 0, 0, 0, 0, 0, 0, 0,
3810 0, 0, 0, 0, 0, 0, 0, 0,
3811 0, 0, 0, 0, 0, 0, 0, 0,
3812 0, 0, 0, 0, 0, 0, 0, 0,
3813 0, 0, 0, 0, 0, 0, 0, 0,
3814 0, 0, 0, 0, 0, 0, 0, 0,
3815 0, 0, 0, 0, 0, 0, 0, 0,
3816 0, 0, 0, 0, 0, 0, 0, 0,
3817 0, 0, 0, 0, 0, 0, 0, 0,
3818 0, 0, 0, 0, 0, 0, 0, 0,
3819 0, 0, 0, 0, 0, 0, 0, 0,
3820 0, 0, 0, 0, 0, 0, 0, 0,
3821 0, 0, 0, 0, 0, 0, 0, 0,
3822 0, 0, 0, 0, 0, 0, 0, 0,
3823 0, 0, 0, 0, 0, 0, 0, 0,
3824 0, 0, 0, 0, 0, 0, 0, 0,
3825 0, 0, 0, 0, 0, 0, 0, 0,
3826 0, 0, 0, 0, 0, 0, 0, 0,
3827 0, 0, 0, 0, 0, 0, 0, 0,
3828 0, 0, 0, 0, 0, 0, 0, 0,
3829 0, 0, 0, 0, 0, 0, 0, 0,
3830 0, 0, 0, 0, 0, 0, 0, 0,
3831 0, 0, 0, 0, 0, 0, 0, 0,
3832 0, 0, 0, 0, 0, 0, 0, 0,
3833 0, 0, 0, 0, 0, 0, 0, 0,
3834 0, 0, 0, 0, 0, 0, 0, 0,
3835 0, 0, 0, 0, 0, 0, 0, 0,
3836 0, 0, 0, 0, 0, 0, 0, 0,
3837 0, 0, 0, 0, 0, 0, 0, 0,
3838 0, 0, 0, 0, 0, 0, 0, 0,
3839 0, 0, 0, 0, 0, 0, 0, 0,
3840 0, 0, 0, 0, 0, 0, 0, 0,
3841 0, 0, 0, 0, 0, 0, 0, 0,
3842 0, 0, 0, 0, 0, 0, 0, 0,
3843 0, 0, 0, 0, 0, 0, 0, 0,
3844 0, 0, 0, 0, 0, 0, 0, 0,
3845 0, 0, 0, 0, 0, 0, 0, 0,
3846 0, 0, 0, 0, 0, 0, 0, 0,
3847 0, 0, 0, 0, 0, 0, 0, 0,
3848 0, 0, 0, 0, 0, 0, 0, 0,
3849 0, 0, 0, 0, 0, 0, 0, 0,
3850 0, 0, 0, 0, 0, 0, 0, 0,
3851 0, 0, 0, 0, 0, 0, 0, 0,
3852 0, 0, 0, 0, 0, 0, 0, 0,
3853 0, 0, 0, 0, 0, 0, 0, 0,
3854 0, 0, 0, 0, 0, 0, 0, 0,
3855 0, 0, 0, 0, 0, 0, 0, 0,
3856 0, 0, 0, 0, 0, 0, 0, 0,
3857 0, 0, 0, 0, 0, 0, 0, 0,
3858 0, 0, 0, 0, 0, 0, 0, 0,
3859 0, 0, 0, 0, 0, 0, 0, 0,
3860 0, 0, 0, 0, 0, 0, 0, 0,
3861 0, 0, 0, 0, 0, 0, 0, 0,
3862 0, 0, 0, 0, 0, 0, 0, 0,
3863 0, 0, 0, 0, 0, 0, 0, 0,
3864 0, 0, 0, 0, 0, 0, 0, 0,
3865 0, 0, 0, 0, 0, 0, 0, 0,
3866 0, 0, 0, 0, 0, 0, 0, 0,
3867 0, 0, 0, 0, 0, 0, 0, 0,
3868 0, 0, 0, 0, 0, 0, 0, 0,
3869 0, 0, 0, 0, 0, 0, 0, 0,
3870 9, 0, 0, 0, 0, 0, 0, 0,
3871 0, 0, 0, 0, 0, 0, 0, 0,
3872 0, 0, 0, 0, 0, 0, 0, 0,
3873 0, 0, 0, 0, 0, 0, 0, 0,
3874 0, 0, 0, 0, 0, 0, 0, 0,
3875 0, 0, 0, 0, 0, 0, 0, 0,
3876 0, 0, 0, 9, 0, 0, 0, 0,
3877 0, 0, 0, 0, 0, 0, 0, 0,
3878 0, 0, 0, 0, 0, 0, 0, 0,
3879 0, 0, 0, 0, 0, 171, 0, 0,
3880 0, 0, 0, 0, 0, 0, 171, 0,
3881 0, 0, 0, 0, 0, 9, 0, 0,
3882 0, 0, 0, 0, 0, 0, 0, 0,
3883 0, 0, 0, 0, 0, 0, 0, 0,
3884 0, 0, 0, 0, 0, 0, 0, 0,
3885 0, 0, 0, 0, 0, 0, 0, 0,
3886 0, 0,
3887}
3888
3889var _hcltok_from_state_actions []byte = []byte{
3890 0, 0, 0, 0, 0, 0, 0, 0,
3891 0, 0, 0, 0, 0, 0, 0, 0,
3892 0, 0, 0, 0, 0, 0, 0, 0,
3893 0, 0, 0, 0, 0, 0, 0, 0,
3894 0, 0, 0, 0, 0, 0, 0, 0,
3895 0, 0, 0, 0, 0, 0, 0, 0,
3896 0, 0, 0, 0, 0, 0, 0, 0,
3897 0, 0, 0, 0, 0, 0, 0, 0,
3898 0, 0, 0, 0, 0, 0, 0, 0,
3899 0, 0, 0, 0, 0, 0, 0, 0,
3900 0, 0, 0, 0, 0, 0, 0, 0,
3901 0, 0, 0, 0, 0, 0, 0, 0,
3902 0, 0, 0, 0, 0, 0, 0, 0,
3903 0, 0, 0, 0, 0, 0, 0, 0,
3904 0, 0, 0, 0, 0, 0, 0, 0,
3905 0, 0, 0, 0, 0, 0, 0, 0,
3906 0, 0, 0, 0, 0, 0, 0, 0,
3907 0, 0, 0, 0, 0, 0, 0, 0,
3908 0, 0, 0, 0, 0, 0, 0, 0,
3909 0, 0, 0, 0, 0, 0, 0, 0,
3910 0, 0, 0, 0, 0, 0, 0, 0,
3911 0, 0, 0, 0, 0, 0, 0, 0,
3912 0, 0, 0, 0, 0, 0, 0, 0,
3913 0, 0, 0, 0, 0, 0, 0, 0,
3914 0, 0, 0, 0, 0, 0, 0, 0,
3915 0, 0, 0, 0, 0, 0, 0, 0,
3916 0, 0, 0, 0, 0, 0, 0, 0,
3917 0, 0, 0, 0, 0, 0, 0, 0,
3918 0, 0, 0, 0, 0, 0, 0, 0,
3919 0, 0, 0, 0, 0, 0, 0, 0,
3920 0, 0, 0, 0, 0, 0, 0, 0,
3921 0, 0, 0, 0, 0, 0, 0, 0,
3922 0, 0, 0, 0, 0, 0, 0, 0,
3923 0, 0, 0, 0, 0, 0, 0, 0,
3924 0, 0, 0, 0, 0, 0, 0, 0,
3925 0, 0, 0, 0, 0, 0, 0, 0,
3926 0, 0, 0, 0, 0, 0, 0, 0,
3927 0, 0, 0, 0, 0, 0, 0, 0,
3928 0, 0, 0, 0, 0, 0, 0, 0,
3929 0, 0, 0, 0, 0, 0, 0, 0,
3930 0, 0, 0, 0, 0, 0, 0, 0,
3931 0, 0, 0, 0, 0, 0, 0, 0,
3932 0, 0, 0, 0, 0, 0, 0, 0,
3933 0, 0, 0, 0, 0, 0, 0, 0,
3934 0, 0, 0, 0, 0, 0, 0, 0,
3935 0, 0, 0, 0, 0, 0, 0, 0,
3936 0, 0, 0, 0, 0, 0, 0, 0,
3937 0, 0, 0, 0, 0, 0, 0, 0,
3938 0, 0, 0, 0, 0, 0, 0, 0,
3939 0, 0, 0, 0, 0, 0, 0, 0,
3940 0, 0, 0, 0, 0, 0, 0, 0,
3941 0, 0, 0, 0, 0, 0, 0, 0,
3942 0, 0, 0, 0, 0, 0, 0, 0,
3943 0, 0, 0, 0, 0, 0, 0, 0,
3944 0, 0, 0, 0, 0, 0, 0, 0,
3945 0, 0, 0, 0, 0, 0, 0, 0,
3946 0, 0, 0, 0, 0, 0, 0, 0,
3947 0, 0, 0, 0, 0, 0, 0, 0,
3948 0, 0, 0, 0, 0, 0, 0, 0,
3949 0, 0, 0, 0, 0, 0, 0, 0,
3950 0, 0, 0, 0, 0, 0, 0, 0,
3951 0, 0, 0, 0, 0, 0, 0, 0,
3952 0, 0, 0, 0, 0, 0, 0, 0,
3953 0, 0, 0, 0, 0, 0, 0, 0,
3954 0, 0, 0, 0, 0, 0, 0, 0,
3955 0, 0, 0, 0, 0, 0, 0, 0,
3956 0, 0, 0, 0, 0, 0, 0, 0,
3957 0, 0, 0, 0, 0, 0, 0, 0,
3958 0, 0, 0, 0, 0, 0, 0, 0,
3959 0, 0, 0, 0, 0, 0, 0, 0,
3960 0, 0, 0, 0, 0, 0, 0, 0,
3961 0, 0, 0, 0, 0, 0, 0, 0,
3962 0, 0, 0, 0, 0, 0, 0, 0,
3963 0, 0, 0, 0, 0, 0, 0, 0,
3964 0, 0, 0, 0, 0, 0, 0, 0,
3965 0, 0, 0, 0, 0, 0, 0, 0,
3966 0, 0, 0, 0, 0, 0, 0, 0,
3967 0, 0, 0, 0, 0, 0, 0, 0,
3968 0, 0, 0, 0, 0, 0, 0, 0,
3969 0, 0, 0, 0, 0, 0, 0, 0,
3970 0, 0, 0, 0, 0, 0, 0, 0,
3971 0, 0, 0, 0, 0, 0, 0, 0,
3972 0, 0, 0, 0, 0, 0, 0, 0,
3973 0, 0, 0, 0, 0, 0, 0, 0,
3974 0, 0, 0, 0, 0, 0, 0, 0,
3975 0, 0, 0, 0, 0, 0, 0, 0,
3976 0, 0, 0, 0, 0, 0, 0, 0,
3977 0, 0, 0, 0, 0, 0, 0, 0,
3978 0, 0, 0, 0, 0, 0, 0, 0,
3979 0, 0, 0, 0, 0, 0, 0, 0,
3980 0, 0, 0, 0, 0, 0, 0, 0,
3981 0, 0, 0, 0, 0, 0, 0, 0,
3982 0, 0, 0, 0, 0, 0, 0, 0,
3983 0, 0, 0, 0, 0, 0, 0, 0,
3984 0, 0, 0, 0, 0, 0, 0, 0,
3985 0, 0, 0, 0, 0, 0, 0, 0,
3986 0, 0, 0, 0, 0, 0, 0, 0,
3987 0, 0, 0, 0, 0, 0, 0, 0,
3988 0, 0, 0, 0, 0, 0, 0, 0,
3989 0, 0, 0, 0, 0, 0, 0, 0,
3990 0, 0, 0, 0, 0, 0, 0, 0,
3991 0, 0, 0, 0, 0, 0, 0, 0,
3992 0, 0, 0, 0, 0, 0, 0, 0,
3993 0, 0, 0, 0, 0, 0, 0, 0,
3994 0, 0, 0, 0, 0, 0, 0, 0,
3995 0, 0, 0, 0, 0, 0, 0, 0,
3996 0, 0, 0, 0, 0, 0, 0, 0,
3997 0, 0, 0, 0, 0, 0, 0, 0,
3998 0, 0, 0, 0, 0, 0, 0, 0,
3999 0, 0, 0, 0, 0, 0, 0, 0,
4000 0, 0, 0, 0, 0, 0, 0, 0,
4001 0, 0, 0, 0, 0, 0, 0, 0,
4002 0, 0, 0, 0, 0, 0, 0, 0,
4003 0, 0, 0, 0, 0, 0, 0, 0,
4004 0, 0, 0, 0, 0, 0, 0, 0,
4005 0, 0, 0, 0, 0, 0, 0, 0,
4006 0, 0, 0, 0, 0, 0, 0, 0,
4007 0, 0, 0, 0, 0, 0, 0, 0,
4008 0, 0, 0, 0, 0, 0, 0, 0,
4009 0, 0, 0, 0, 0, 0, 0, 0,
4010 0, 0, 0, 0, 0, 0, 0, 0,
4011 0, 0, 0, 0, 0, 0, 0, 0,
4012 0, 0, 0, 0, 0, 0, 0, 0,
4013 0, 0, 0, 0, 0, 0, 0, 0,
4014 0, 0, 0, 0, 0, 0, 0, 0,
4015 0, 0, 0, 0, 0, 0, 0, 0,
4016 0, 0, 0, 0, 0, 0, 0, 0,
4017 0, 0, 0, 0, 0, 0, 0, 0,
4018 0, 0, 0, 0, 0, 0, 0, 0,
4019 0, 0, 0, 0, 0, 0, 0, 0,
4020 0, 0, 0, 0, 0, 0, 0, 0,
4021 0, 0, 0, 0, 0, 0, 0, 0,
4022 0, 0, 0, 0, 0, 0, 0, 0,
4023 0, 0, 0, 0, 0, 0, 0, 0,
4024 0, 0, 0, 0, 0, 0, 0, 0,
4025 0, 0, 0, 0, 0, 0, 0, 0,
4026 0, 0, 0, 0, 0, 0, 0, 0,
4027 0, 0, 0, 0, 0, 0, 0, 0,
4028 0, 0, 0, 0, 0, 0, 0, 0,
4029 0, 0, 0, 0, 0, 0, 0, 0,
4030 0, 0, 0, 0, 0, 0, 0, 0,
4031 0, 0, 0, 0, 0, 0, 0, 0,
4032 0, 0, 0, 0, 0, 0, 0, 0,
4033 0, 0, 0, 0, 0, 0, 0, 0,
4034 0, 0, 0, 0, 0, 0, 0, 0,
4035 0, 0, 0, 0, 0, 0, 0, 0,
4036 0, 0, 0, 0, 0, 0, 0, 0,
4037 0, 0, 0, 0, 0, 0, 0, 0,
4038 0, 0, 0, 0, 0, 0, 0, 0,
4039 0, 0, 0, 0, 0, 0, 0, 0,
4040 0, 0, 0, 0, 0, 0, 0, 0,
4041 0, 0, 0, 0, 0, 0, 0, 0,
4042 0, 0, 0, 0, 0, 0, 0, 0,
4043 0, 0, 0, 0, 0, 0, 0, 0,
4044 0, 0, 0, 0, 0, 0, 0, 0,
4045 0, 0, 0, 0, 0, 0, 0, 0,
4046 0, 0, 0, 0, 0, 0, 0, 0,
4047 0, 0, 0, 0, 0, 0, 0, 0,
4048 0, 0, 0, 0, 0, 0, 0, 0,
4049 0, 0, 0, 0, 0, 0, 0, 0,
4050 0, 0, 0, 0, 0, 0, 0, 0,
4051 0, 0, 0, 0, 0, 0, 0, 0,
4052 0, 0, 0, 0, 0, 0, 0, 0,
4053 0, 0, 0, 0, 0, 0, 0, 0,
4054 0, 0, 0, 0, 0, 0, 0, 0,
4055 0, 0, 0, 0, 0, 0, 0, 0,
4056 0, 0, 0, 0, 0, 0, 0, 0,
4057 0, 0, 0, 0, 0, 0, 0, 0,
4058 0, 0, 0, 0, 0, 0, 0, 0,
4059 0, 0, 0, 0, 0, 0, 0, 0,
4060 0, 0, 0, 0, 0, 0, 0, 0,
4061 0, 0, 0, 0, 0, 0, 0, 0,
4062 0, 0, 0, 0, 0, 0, 0, 0,
4063 0, 0, 0, 0, 0, 0, 0, 0,
4064 0, 0, 0, 0, 0, 0, 0, 0,
4065 0, 0, 0, 0, 0, 0, 0, 0,
4066 0, 0, 0, 0, 0, 0, 0, 0,
4067 0, 0, 0, 0, 0, 0, 0, 0,
4068 0, 0, 0, 0, 0, 0, 0, 0,
4069 0, 0, 0, 0, 0, 0, 0, 0,
4070 0, 0, 0, 0, 0, 0, 0, 0,
4071 0, 0, 0, 0, 0, 0, 0, 0,
4072 0, 0, 0, 0, 0, 0, 0, 0,
4073 11, 0, 0, 0, 0, 0, 0, 0,
4074 0, 0, 0, 0, 0, 0, 0, 0,
4075 0, 0, 0, 0, 0, 0, 0, 0,
4076 0, 0, 0, 0, 0, 0, 0, 0,
4077 0, 0, 0, 0, 0, 0, 0, 0,
4078 0, 0, 0, 0, 0, 0, 0, 0,
4079 0, 0, 0, 11, 0, 0, 0, 0,
4080 0, 0, 0, 0, 0, 0, 0, 0,
4081 0, 0, 0, 0, 0, 0, 0, 0,
4082 0, 0, 0, 0, 0, 11, 0, 0,
4083 0, 0, 0, 0, 0, 0, 11, 0,
4084 0, 0, 0, 0, 0, 11, 0, 0,
4085 0, 0, 0, 0, 0, 0, 0, 0,
4086 0, 0, 0, 0, 0, 0, 0, 0,
4087 0, 0, 0, 0, 0, 0, 0, 0,
4088 0, 0, 0, 0, 0, 0, 0, 0,
4089 0, 0,
4090}
4091
4092var _hcltok_eof_trans []int16 = []int16{
4093 0, 1, 4, 1, 1, 9, 9, 9,
4094 4, 4, 4, 4, 4, 4, 4, 4,
4095 4, 4, 4, 4, 4, 4, 4, 4,
4096 4, 4, 4, 4, 4, 4, 4, 4,
4097 4, 4, 4, 4, 4, 4, 4, 4,
4098 4, 4, 4, 4, 4, 4, 4, 4,
4099 4, 4, 4, 4, 4, 4, 4, 4,
4100 4, 4, 4, 4, 4, 4, 4, 4,
4101 4, 4, 4, 4, 4, 4, 4, 4,
4102 4, 4, 4, 4, 4, 4, 4, 4,
4103 4, 4, 4, 4, 4, 4, 4, 4,
4104 4, 4, 4, 4, 4, 4, 4, 4,
4105 4, 4, 4, 4, 4, 4, 4, 4,
4106 4, 4, 4, 4, 4, 4, 4, 4,
4107 4, 4, 4, 4, 4, 4, 4, 4,
4108 4, 4, 4, 4, 4, 4, 4, 4,
4109 4, 4, 4, 4, 4, 4, 4, 4,
4110 4, 4, 4, 4, 4, 4, 4, 4,
4111 4, 4, 4, 4, 4, 4, 4, 4,
4112 4, 4, 4, 4, 4, 4, 4, 4,
4113 4, 4, 4, 4, 4, 4, 4, 4,
4114 4, 4, 4, 4, 4, 4, 4, 4,
4115 4, 4, 4, 4, 4, 4, 4, 4,
4116 4, 4, 4, 4, 4, 4, 4, 4,
4117 4, 4, 4, 4, 4, 4, 4, 4,
4118 4, 4, 4, 4, 4, 4, 4, 4,
4119 4, 4, 4, 4, 4, 4, 4, 4,
4120 4, 4, 4, 4, 4, 4, 4, 4,
4121 4, 4, 4, 4, 4, 4, 4, 4,
4122 4, 4, 4, 4, 4, 4, 4, 4,
4123 4, 4, 4, 4, 4, 4, 4, 4,
4124 4, 4, 4, 4, 4, 4, 4, 4,
4125 4, 4, 4, 4, 4, 4, 4, 4,
4126 4, 4, 4, 4, 4, 4, 4, 4,
4127 4, 4, 4, 4, 4, 4, 4, 4,
4128 4, 4, 4, 4, 4, 4, 4, 4,
4129 4, 4, 4, 4, 4, 4, 4, 4,
4130 4, 4, 4, 4, 4, 4, 4, 4,
4131 4, 4, 4, 4, 4, 4, 4, 4,
4132 4, 4, 4, 4, 4, 4, 4, 4,
4133 4, 4, 4, 4, 4, 4, 4, 4,
4134 4, 4, 4, 4, 4, 4, 4, 4,
4135 4, 4, 4, 4, 4, 4, 4, 4,
4136 4, 4, 4, 4, 4, 4, 4, 4,
4137 4, 4, 4, 4, 4, 4, 4, 4,
4138 4, 4, 4, 4, 4, 4, 4, 4,
4139 4, 4, 4, 4, 4, 4, 4, 4,
4140 4, 4, 4, 4, 4, 4, 4, 4,
4141 4, 4, 4, 4, 4, 4, 4, 4,
4142 4, 4, 4, 4, 4, 4, 4, 4,
4143 4, 4, 4, 4, 4, 4, 4, 4,
4144 4, 4, 4, 4, 4, 4, 4, 4,
4145 422, 422, 1, 422, 422, 422, 422, 422,
4146 422, 422, 422, 422, 422, 422, 422, 422,
4147 422, 422, 422, 422, 422, 422, 422, 422,
4148 422, 422, 422, 422, 422, 422, 422, 422,
4149 422, 422, 422, 422, 422, 422, 422, 422,
4150 422, 422, 422, 422, 422, 422, 422, 422,
4151 422, 422, 422, 422, 422, 422, 422, 422,
4152 422, 422, 422, 422, 422, 422, 422, 422,
4153 422, 422, 422, 422, 422, 422, 422, 422,
4154 422, 422, 422, 422, 422, 422, 422, 422,
4155 422, 422, 422, 422, 422, 422, 422, 422,
4156 422, 422, 422, 422, 422, 422, 422, 422,
4157 422, 422, 422, 422, 422, 422, 422, 422,
4158 422, 422, 422, 422, 422, 422, 422, 422,
4159 422, 422, 422, 422, 422, 422, 422, 422,
4160 422, 422, 422, 422, 422, 422, 422, 422,
4161 422, 422, 422, 422, 422, 422, 422, 422,
4162 422, 422, 422, 422, 422, 422, 422, 422,
4163 422, 422, 422, 422, 422, 422, 422, 422,
4164 422, 422, 422, 422, 422, 422, 422, 422,
4165 422, 422, 422, 422, 422, 422, 422, 422,
4166 422, 422, 422, 422, 422, 422, 422, 422,
4167 422, 422, 422, 422, 422, 422, 422, 422,
4168 422, 422, 422, 422, 422, 422, 422, 422,
4169 422, 422, 422, 422, 422, 422, 422, 422,
4170 422, 422, 422, 422, 422, 422, 422, 422,
4171 422, 422, 422, 422, 422, 422, 422, 422,
4172 422, 422, 422, 422, 422, 422, 422, 422,
4173 422, 422, 422, 422, 422, 422, 422, 422,
4174 422, 422, 422, 422, 422, 422, 422, 422,
4175 422, 422, 422, 422, 422, 422, 422, 422,
4176 422, 422, 422, 422, 422, 422, 422, 422,
4177 422, 422, 422, 422, 422, 422, 422, 422,
4178 422, 422, 422, 422, 422, 422, 422, 422,
4179 422, 422, 422, 672, 672, 672, 672, 672,
4180 672, 672, 672, 672, 672, 672, 672, 672,
4181 672, 672, 672, 672, 672, 672, 672, 672,
4182 672, 672, 672, 672, 672, 672, 672, 672,
4183 672, 672, 672, 672, 672, 672, 672, 672,
4184 672, 672, 672, 672, 672, 672, 672, 672,
4185 672, 672, 672, 672, 672, 672, 672, 672,
4186 672, 672, 672, 672, 672, 672, 672, 672,
4187 672, 672, 672, 672, 672, 672, 672, 672,
4188 672, 672, 672, 672, 672, 672, 672, 672,
4189 672, 672, 672, 672, 672, 672, 672, 672,
4190 672, 672, 672, 672, 672, 672, 672, 672,
4191 672, 672, 672, 672, 672, 672, 672, 672,
4192 672, 672, 672, 672, 672, 672, 672, 672,
4193 672, 672, 672, 672, 672, 672, 672, 672,
4194 672, 672, 672, 672, 672, 672, 672, 672,
4195 672, 672, 672, 672, 672, 672, 672, 672,
4196 672, 672, 672, 672, 672, 672, 672, 672,
4197 672, 672, 672, 672, 672, 672, 672, 672,
4198 672, 672, 672, 672, 672, 672, 672, 672,
4199 672, 672, 672, 672, 672, 672, 672, 672,
4200 672, 672, 672, 672, 672, 672, 672, 672,
4201 672, 672, 672, 672, 672, 672, 672, 672,
4202 672, 672, 672, 672, 672, 672, 672, 672,
4203 672, 672, 672, 672, 672, 672, 672, 672,
4204 672, 672, 672, 672, 672, 672, 672, 672,
4205 672, 672, 672, 672, 672, 672, 672, 672,
4206 672, 672, 672, 672, 672, 672, 672, 672,
4207 672, 672, 672, 672, 672, 672, 672, 672,
4208 672, 672, 672, 672, 672, 672, 672, 672,
4209 672, 769, 769, 769, 769, 769, 775, 775,
4210 777, 779, 779, 777, 777, 779, 0, 0,
4211 787, 789, 787, 787, 789, 0, 0, 795,
4212 795, 797, 795, 795, 795, 795, 795, 795,
4213 795, 795, 795, 795, 795, 795, 795, 795,
4214 795, 795, 795, 795, 795, 795, 795, 795,
4215 795, 795, 795, 795, 795, 795, 795, 795,
4216 795, 795, 795, 795, 795, 795, 795, 795,
4217 795, 795, 795, 795, 795, 795, 795, 795,
4218 795, 795, 795, 795, 795, 795, 795, 795,
4219 795, 795, 795, 795, 795, 795, 795, 795,
4220 795, 795, 795, 795, 795, 795, 795, 795,
4221 795, 795, 795, 795, 795, 795, 795, 795,
4222 795, 795, 795, 795, 795, 795, 795, 795,
4223 795, 795, 795, 795, 795, 795, 795, 795,
4224 795, 795, 795, 795, 795, 795, 795, 795,
4225 795, 795, 795, 795, 795, 795, 795, 795,
4226 795, 795, 795, 795, 795, 795, 795, 795,
4227 795, 795, 795, 795, 795, 795, 795, 795,
4228 795, 795, 795, 795, 795, 795, 795, 795,
4229 795, 795, 795, 795, 795, 795, 795, 795,
4230 795, 795, 795, 795, 795, 795, 795, 795,
4231 795, 795, 795, 795, 795, 795, 795, 795,
4232 795, 795, 795, 795, 795, 795, 795, 795,
4233 795, 795, 795, 795, 795, 795, 795, 795,
4234 795, 795, 795, 795, 795, 795, 795, 795,
4235 795, 795, 795, 795, 795, 795, 795, 795,
4236 795, 795, 795, 795, 795, 795, 795, 795,
4237 795, 795, 795, 795, 795, 795, 795, 795,
4238 795, 795, 795, 795, 795, 795, 795, 795,
4239 795, 795, 795, 795, 795, 795, 795, 795,
4240 795, 795, 795, 795, 795, 795, 795, 795,
4241 795, 795, 795, 795, 795, 795, 795, 795,
4242 795, 795, 795, 795, 795, 795, 795, 795,
4243 795, 795, 795, 795, 795, 795, 795, 795,
4244 795, 795, 795, 795, 795, 795, 795, 795,
4245 795, 795, 795, 795, 795, 795, 795, 795,
4246 795, 795, 1046, 1046, 1046, 1046, 1046, 1046,
4247 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4248 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4249 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4250 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4251 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4252 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4253 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4254 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4255 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4256 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4257 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4258 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4259 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4260 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4261 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4262 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4263 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4264 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4265 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4266 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4267 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4268 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4269 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4270 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4271 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4272 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4273 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4274 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4275 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
4276 0, 1196, 1197, 1198, 1197, 1198, 1198, 1198,
4277 1202, 1203, 1198, 1198, 1198, 1209, 1198, 1198,
4278 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239,
4279 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239,
4280 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239,
4281 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239,
4282 1239, 1239, 1239, 0, 1392, 1396, 1404, 1392,
4283 1392, 1396, 1396, 1404, 1396, 1392, 1404, 1404,
4284 1404, 1404, 1404, 1396, 1396, 1396, 1458, 1460,
4285 1458, 1463, 1465, 1465, 1465, 0, 1474, 1478,
4286 1487, 1496, 1498, 1500, 1500, 1500, 0, 1508,
4287 1511, 1513, 1515, 1515, 1515, 0, 1552, 1580,
4288 1580, 1580, 1580, 1580, 1580, 1580, 1580, 1580,
4289 1580, 1580, 1580, 1580, 1580, 1580, 1580, 1580,
4290 1580, 1580, 1580, 1580, 1580, 1580, 1580, 1580,
4291 1580, 1580, 1580, 1580, 1580, 1580, 1580, 1580,
4292 1580, 1580,
4293}
4294
4295const hcltok_start int = 1464
4296const hcltok_first_final int = 1464
4297const hcltok_error int = 0
4298
4299const hcltok_en_stringTemplate int = 1515
4300const hcltok_en_heredocTemplate int = 1541
4301const hcltok_en_bareTemplate int = 1550
4302const hcltok_en_identOnly int = 1557
4303const hcltok_en_main int = 1464
4304
4305// line 16 "scan_tokens.rl"
4306
4307func scanTokens(data []byte, filename string, start hcl.Pos, mode scanMode) []Token {
4308 f := &tokenAccum{
4309 Filename: filename,
4310 Bytes: data,
4311 Pos: start,
4312 }
4313
4314 // line 294 "scan_tokens.rl"
4315
4316 // Ragel state
4317 p := 0 // "Pointer" into data
4318 pe := len(data) // End-of-data "pointer"
4319 ts := 0
4320 te := 0
4321 act := 0
4322 eof := pe
4323 var stack []int
4324 var top int
4325
4326 var cs int // current state
4327 switch mode {
4328 case scanNormal:
4329 cs = hcltok_en_main
4330 case scanTemplate:
4331 cs = hcltok_en_bareTemplate
4332 case scanIdentOnly:
4333 cs = hcltok_en_identOnly
4334 default:
4335 panic("invalid scanMode")
4336 }
4337
4338 braces := 0
4339 var retBraces []int // stack of brace levels that cause us to use fret
4340 var heredocs []heredocInProgress // stack of heredocs we're currently processing
4341
4342 // line 329 "scan_tokens.rl"
4343
4344 // Make Go compiler happy
4345 _ = ts
4346 _ = te
4347 _ = act
4348 _ = eof
4349
4350 token := func(ty TokenType) {
4351 f.emitToken(ty, ts, te)
4352 }
4353 selfToken := func() {
4354 b := data[ts:te]
4355 if len(b) != 1 {
4356 // should never happen
4357 panic("selfToken only works for single-character tokens")
4358 }
4359 f.emitToken(TokenType(b[0]), ts, te)
4360 }
4361
4362 // line 4372 "scan_tokens.go"
4363 {
4364 top = 0
4365 ts = 0
4366 te = 0
4367 act = 0
4368 }
4369
4370 // line 4380 "scan_tokens.go"
4371 {
4372 var _klen int
4373 var _trans int
4374 var _acts int
4375 var _nacts uint
4376 var _keys int
4377 if p == pe {
4378 goto _test_eof
4379 }
4380 if cs == 0 {
4381 goto _out
4382 }
4383 _resume:
4384 _acts = int(_hcltok_from_state_actions[cs])
4385 _nacts = uint(_hcltok_actions[_acts])
4386 _acts++
4387 for ; _nacts > 0; _nacts-- {
4388 _acts++
4389 switch _hcltok_actions[_acts-1] {
4390 case 6:
4391 // line 1 "NONE"
4392
4393 ts = p
4394
4395 // line 4404 "scan_tokens.go"
4396 }
4397 }
4398
4399 _keys = int(_hcltok_key_offsets[cs])
4400 _trans = int(_hcltok_index_offsets[cs])
4401
4402 _klen = int(_hcltok_single_lengths[cs])
4403 if _klen > 0 {
4404 _lower := int(_keys)
4405 var _mid int
4406 _upper := int(_keys + _klen - 1)
4407 for {
4408 if _upper < _lower {
4409 break
4410 }
4411
4412 _mid = _lower + ((_upper - _lower) >> 1)
4413 switch {
4414 case data[p] < _hcltok_trans_keys[_mid]:
4415 _upper = _mid - 1
4416 case data[p] > _hcltok_trans_keys[_mid]:
4417 _lower = _mid + 1
4418 default:
4419 _trans += int(_mid - int(_keys))
4420 goto _match
4421 }
4422 }
4423 _keys += _klen
4424 _trans += _klen
4425 }
4426
4427 _klen = int(_hcltok_range_lengths[cs])
4428 if _klen > 0 {
4429 _lower := int(_keys)
4430 var _mid int
4431 _upper := int(_keys + (_klen << 1) - 2)
4432 for {
4433 if _upper < _lower {
4434 break
4435 }
4436
4437 _mid = _lower + (((_upper - _lower) >> 1) & ^1)
4438 switch {
4439 case data[p] < _hcltok_trans_keys[_mid]:
4440 _upper = _mid - 2
4441 case data[p] > _hcltok_trans_keys[_mid+1]:
4442 _lower = _mid + 2
4443 default:
4444 _trans += int((_mid - int(_keys)) >> 1)
4445 goto _match
4446 }
4447 }
4448 _trans += _klen
4449 }
4450
4451 _match:
4452 _trans = int(_hcltok_indicies[_trans])
4453 _eof_trans:
4454 cs = int(_hcltok_trans_targs[_trans])
4455
4456 if _hcltok_trans_actions[_trans] == 0 {
4457 goto _again
4458 }
4459
4460 _acts = int(_hcltok_trans_actions[_trans])
4461 _nacts = uint(_hcltok_actions[_acts])
4462 _acts++
4463 for ; _nacts > 0; _nacts-- {
4464 _acts++
4465 switch _hcltok_actions[_acts-1] {
4466 case 0:
4467 // line 218 "scan_tokens.rl"
4468
4469 p--
4470
4471 case 1:
4472 // line 219 "scan_tokens.rl"
4473
4474 p--
4475
4476 case 2:
4477 // line 224 "scan_tokens.rl"
4478
4479 p--
4480
4481 case 3:
4482 // line 225 "scan_tokens.rl"
4483
4484 p--
4485
4486 case 7:
4487 // line 1 "NONE"
4488
4489 te = p + 1
4490
4491 case 8:
4492 // line 155 "scan_tokens.rl"
4493
4494 te = p + 1
4495 {
4496 token(TokenTemplateInterp)
4497 braces++
4498 retBraces = append(retBraces, braces)
4499 if len(heredocs) > 0 {
4500 heredocs[len(heredocs)-1].StartOfLine = false
4501 }
4502 {
4503 stack = append(stack, 0)
4504 stack[top] = cs
4505 top++
4506 cs = 1464
4507 goto _again
4508 }
4509 }
4510 case 9:
4511 // line 165 "scan_tokens.rl"
4512
4513 te = p + 1
4514 {
4515 token(TokenTemplateControl)
4516 braces++
4517 retBraces = append(retBraces, braces)
4518 if len(heredocs) > 0 {
4519 heredocs[len(heredocs)-1].StartOfLine = false
4520 }
4521 {
4522 stack = append(stack, 0)
4523 stack[top] = cs
4524 top++
4525 cs = 1464
4526 goto _again
4527 }
4528 }
4529 case 10:
4530 // line 79 "scan_tokens.rl"
4531
4532 te = p + 1
4533 {
4534 token(TokenCQuote)
4535 top--
4536 cs = stack[top]
4537 {
4538 stack = stack[:len(stack)-1]
4539 }
4540 goto _again
4541
4542 }
4543 case 11:
4544 // line 239 "scan_tokens.rl"
4545
4546 te = p + 1
4547 {
4548 token(TokenInvalid)
4549 }
4550 case 12:
4551 // line 240 "scan_tokens.rl"
4552
4553 te = p + 1
4554 {
4555 token(TokenBadUTF8)
4556 }
4557 case 13:
4558 // line 155 "scan_tokens.rl"
4559
4560 te = p
4561 p--
4562 {
4563 token(TokenTemplateInterp)
4564 braces++
4565 retBraces = append(retBraces, braces)
4566 if len(heredocs) > 0 {
4567 heredocs[len(heredocs)-1].StartOfLine = false
4568 }
4569 {
4570 stack = append(stack, 0)
4571 stack[top] = cs
4572 top++
4573 cs = 1464
4574 goto _again
4575 }
4576 }
4577 case 14:
4578 // line 165 "scan_tokens.rl"
4579
4580 te = p
4581 p--
4582 {
4583 token(TokenTemplateControl)
4584 braces++
4585 retBraces = append(retBraces, braces)
4586 if len(heredocs) > 0 {
4587 heredocs[len(heredocs)-1].StartOfLine = false
4588 }
4589 {
4590 stack = append(stack, 0)
4591 stack[top] = cs
4592 top++
4593 cs = 1464
4594 goto _again
4595 }
4596 }
4597 case 15:
4598 // line 238 "scan_tokens.rl"
4599
4600 te = p
4601 p--
4602 {
4603 token(TokenQuotedLit)
4604 }
4605 case 16:
4606 // line 239 "scan_tokens.rl"
4607
4608 te = p
4609 p--
4610 {
4611 token(TokenInvalid)
4612 }
4613 case 17:
4614 // line 240 "scan_tokens.rl"
4615
4616 te = p
4617 p--
4618 {
4619 token(TokenBadUTF8)
4620 }
4621 case 18:
4622 // line 238 "scan_tokens.rl"
4623
4624 p = (te) - 1
4625 {
4626 token(TokenQuotedLit)
4627 }
4628 case 19:
4629 // line 240 "scan_tokens.rl"
4630
4631 p = (te) - 1
4632 {
4633 token(TokenBadUTF8)
4634 }
4635 case 20:
4636 // line 143 "scan_tokens.rl"
4637
4638 act = 10
4639 case 21:
4640 // line 248 "scan_tokens.rl"
4641
4642 act = 11
4643 case 22:
4644 // line 155 "scan_tokens.rl"
4645
4646 te = p + 1
4647 {
4648 token(TokenTemplateInterp)
4649 braces++
4650 retBraces = append(retBraces, braces)
4651 if len(heredocs) > 0 {
4652 heredocs[len(heredocs)-1].StartOfLine = false
4653 }
4654 {
4655 stack = append(stack, 0)
4656 stack[top] = cs
4657 top++
4658 cs = 1464
4659 goto _again
4660 }
4661 }
4662 case 23:
4663 // line 165 "scan_tokens.rl"
4664
4665 te = p + 1
4666 {
4667 token(TokenTemplateControl)
4668 braces++
4669 retBraces = append(retBraces, braces)
4670 if len(heredocs) > 0 {
4671 heredocs[len(heredocs)-1].StartOfLine = false
4672 }
4673 {
4674 stack = append(stack, 0)
4675 stack[top] = cs
4676 top++
4677 cs = 1464
4678 goto _again
4679 }
4680 }
4681 case 24:
4682 // line 106 "scan_tokens.rl"
4683
4684 te = p + 1
4685 {
4686				// This action is called specifically when a heredoc literal
4687 // ends with a newline character.
4688
4689 // This might actually be our end marker.
4690 topdoc := &heredocs[len(heredocs)-1]
4691 if topdoc.StartOfLine {
4692 maybeMarker := bytes.TrimSpace(data[ts:te])
4693 if bytes.Equal(maybeMarker, topdoc.Marker) {
4694 // We actually emit two tokens here: the end-of-heredoc
4695 // marker first, and then separately the newline that
4696 // follows it. This then avoids issues with the closing
4697 // marker consuming a newline that would normally be used
4698 // to mark the end of an attribute definition.
4699 // We might have either a \n sequence or an \r\n sequence
4700 // here, so we must handle both.
4701 nls := te - 1
4702 nle := te
4703 te--
4704 if data[te-1] == '\r' {
4705 // back up one more byte
4706 nls--
4707 te--
4708 }
4709 token(TokenCHeredoc)
4710 ts = nls
4711 te = nle
4712 token(TokenNewline)
4713 heredocs = heredocs[:len(heredocs)-1]
4714 top--
4715 cs = stack[top]
4716 {
4717 stack = stack[:len(stack)-1]
4718 }
4719 goto _again
4720
4721 }
4722 }
4723
4724 topdoc.StartOfLine = true
4725 token(TokenStringLit)
4726 }
4727 case 25:
4728 // line 248 "scan_tokens.rl"
4729
4730 te = p + 1
4731 {
4732 token(TokenBadUTF8)
4733 }
4734 case 26:
4735 // line 155 "scan_tokens.rl"
4736
4737 te = p
4738 p--
4739 {
4740 token(TokenTemplateInterp)
4741 braces++
4742 retBraces = append(retBraces, braces)
4743 if len(heredocs) > 0 {
4744 heredocs[len(heredocs)-1].StartOfLine = false
4745 }
4746 {
4747 stack = append(stack, 0)
4748 stack[top] = cs
4749 top++
4750 cs = 1464
4751 goto _again
4752 }
4753 }
4754 case 27:
4755 // line 165 "scan_tokens.rl"
4756
4757 te = p
4758 p--
4759 {
4760 token(TokenTemplateControl)
4761 braces++
4762 retBraces = append(retBraces, braces)
4763 if len(heredocs) > 0 {
4764 heredocs[len(heredocs)-1].StartOfLine = false
4765 }
4766 {
4767 stack = append(stack, 0)
4768 stack[top] = cs
4769 top++
4770 cs = 1464
4771 goto _again
4772 }
4773 }
4774 case 28:
4775 // line 143 "scan_tokens.rl"
4776
4777 te = p
4778 p--
4779 {
4780 // This action is called when a heredoc literal _doesn't_ end
4781 // with a newline character, e.g. because we're about to enter
4782 // an interpolation sequence.
4783 heredocs[len(heredocs)-1].StartOfLine = false
4784 token(TokenStringLit)
4785 }
4786 case 29:
4787 // line 248 "scan_tokens.rl"
4788
4789 te = p
4790 p--
4791 {
4792 token(TokenBadUTF8)
4793 }
4794 case 30:
4795 // line 143 "scan_tokens.rl"
4796
4797 p = (te) - 1
4798 {
4799 // This action is called when a heredoc literal _doesn't_ end
4800 // with a newline character, e.g. because we're about to enter
4801 // an interpolation sequence.
4802 heredocs[len(heredocs)-1].StartOfLine = false
4803 token(TokenStringLit)
4804 }
4805 case 31:
4806 // line 1 "NONE"
4807
4808 switch act {
4809 case 0:
4810 {
4811 cs = 0
4812 goto _again
4813 }
4814 case 10:
4815 {
4816 p = (te) - 1
4817
4818 // This action is called when a heredoc literal _doesn't_ end
4819 // with a newline character, e.g. because we're about to enter
4820 // an interpolation sequence.
4821 heredocs[len(heredocs)-1].StartOfLine = false
4822 token(TokenStringLit)
4823 }
4824 case 11:
4825 {
4826 p = (te) - 1
4827 token(TokenBadUTF8)
4828 }
4829 }
4830
4831 case 32:
4832 // line 151 "scan_tokens.rl"
4833
4834 act = 14
4835 case 33:
4836 // line 255 "scan_tokens.rl"
4837
4838 act = 15
4839 case 34:
4840 // line 155 "scan_tokens.rl"
4841
4842 te = p + 1
4843 {
4844 token(TokenTemplateInterp)
4845 braces++
4846 retBraces = append(retBraces, braces)
4847 if len(heredocs) > 0 {
4848 heredocs[len(heredocs)-1].StartOfLine = false
4849 }
4850 {
4851 stack = append(stack, 0)
4852 stack[top] = cs
4853 top++
4854 cs = 1464
4855 goto _again
4856 }
4857 }
4858 case 35:
4859 // line 165 "scan_tokens.rl"
4860
4861 te = p + 1
4862 {
4863 token(TokenTemplateControl)
4864 braces++
4865 retBraces = append(retBraces, braces)
4866 if len(heredocs) > 0 {
4867 heredocs[len(heredocs)-1].StartOfLine = false
4868 }
4869 {
4870 stack = append(stack, 0)
4871 stack[top] = cs
4872 top++
4873 cs = 1464
4874 goto _again
4875 }
4876 }
4877 case 36:
4878 // line 151 "scan_tokens.rl"
4879
4880 te = p + 1
4881 {
4882 token(TokenStringLit)
4883 }
4884 case 37:
4885 // line 255 "scan_tokens.rl"
4886
4887 te = p + 1
4888 {
4889 token(TokenBadUTF8)
4890 }
4891 case 38:
4892 // line 155 "scan_tokens.rl"
4893
4894 te = p
4895 p--
4896 {
4897 token(TokenTemplateInterp)
4898 braces++
4899 retBraces = append(retBraces, braces)
4900 if len(heredocs) > 0 {
4901 heredocs[len(heredocs)-1].StartOfLine = false
4902 }
4903 {
4904 stack = append(stack, 0)
4905 stack[top] = cs
4906 top++
4907 cs = 1464
4908 goto _again
4909 }
4910 }
4911 case 39:
4912 // line 165 "scan_tokens.rl"
4913
4914 te = p
4915 p--
4916 {
4917 token(TokenTemplateControl)
4918 braces++
4919 retBraces = append(retBraces, braces)
4920 if len(heredocs) > 0 {
4921 heredocs[len(heredocs)-1].StartOfLine = false
4922 }
4923 {
4924 stack = append(stack, 0)
4925 stack[top] = cs
4926 top++
4927 cs = 1464
4928 goto _again
4929 }
4930 }
4931 case 40:
4932 // line 151 "scan_tokens.rl"
4933
4934 te = p
4935 p--
4936 {
4937 token(TokenStringLit)
4938 }
4939 case 41:
4940 // line 255 "scan_tokens.rl"
4941
4942 te = p
4943 p--
4944 {
4945 token(TokenBadUTF8)
4946 }
4947 case 42:
4948 // line 151 "scan_tokens.rl"
4949
4950 p = (te) - 1
4951 {
4952 token(TokenStringLit)
4953 }
4954 case 43:
4955 // line 1 "NONE"
4956
4957 switch act {
4958 case 0:
4959 {
4960 cs = 0
4961 goto _again
4962 }
4963 case 14:
4964 {
4965 p = (te) - 1
4966
4967 token(TokenStringLit)
4968 }
4969 case 15:
4970 {
4971 p = (te) - 1
4972 token(TokenBadUTF8)
4973 }
4974 }
4975
4976 case 44:
4977 // line 259 "scan_tokens.rl"
4978
4979 act = 16
4980 case 45:
4981 // line 260 "scan_tokens.rl"
4982
4983 act = 17
4984 case 46:
4985 // line 260 "scan_tokens.rl"
4986
4987 te = p + 1
4988 {
4989 token(TokenBadUTF8)
4990 }
4991 case 47:
4992 // line 261 "scan_tokens.rl"
4993
4994 te = p + 1
4995 {
4996 token(TokenInvalid)
4997 }
4998 case 48:
4999 // line 259 "scan_tokens.rl"
5000
5001 te = p
5002 p--
5003 {
5004 token(TokenIdent)
5005 }
5006 case 49:
5007 // line 260 "scan_tokens.rl"
5008
5009 te = p
5010 p--
5011 {
5012 token(TokenBadUTF8)
5013 }
5014 case 50:
5015 // line 259 "scan_tokens.rl"
5016
5017 p = (te) - 1
5018 {
5019 token(TokenIdent)
5020 }
5021 case 51:
5022 // line 260 "scan_tokens.rl"
5023
5024 p = (te) - 1
5025 {
5026 token(TokenBadUTF8)
5027 }
5028 case 52:
5029 // line 1 "NONE"
5030
5031 switch act {
5032 case 16:
5033 {
5034 p = (te) - 1
5035 token(TokenIdent)
5036 }
5037 case 17:
5038 {
5039 p = (te) - 1
5040 token(TokenBadUTF8)
5041 }
5042 }
5043
5044 case 53:
5045 // line 267 "scan_tokens.rl"
5046
5047 act = 21
5048 case 54:
5049 // line 269 "scan_tokens.rl"
5050
5051 act = 22
5052 case 55:
5053 // line 280 "scan_tokens.rl"
5054
5055 act = 32
5056 case 56:
5057 // line 290 "scan_tokens.rl"
5058
5059 act = 38
5060 case 57:
5061 // line 291 "scan_tokens.rl"
5062
5063 act = 39
5064 case 58:
5065 // line 269 "scan_tokens.rl"
5066
5067 te = p + 1
5068 {
5069 token(TokenComment)
5070 }
5071 case 59:
5072 // line 270 "scan_tokens.rl"
5073
5074 te = p + 1
5075 {
5076 token(TokenNewline)
5077 }
5078 case 60:
5079 // line 272 "scan_tokens.rl"
5080
5081 te = p + 1
5082 {
5083 token(TokenEqualOp)
5084 }
5085 case 61:
5086 // line 273 "scan_tokens.rl"
5087
5088 te = p + 1
5089 {
5090 token(TokenNotEqual)
5091 }
5092 case 62:
5093 // line 274 "scan_tokens.rl"
5094
5095 te = p + 1
5096 {
5097 token(TokenGreaterThanEq)
5098 }
5099 case 63:
5100 // line 275 "scan_tokens.rl"
5101
5102 te = p + 1
5103 {
5104 token(TokenLessThanEq)
5105 }
5106 case 64:
5107 // line 276 "scan_tokens.rl"
5108
5109 te = p + 1
5110 {
5111 token(TokenAnd)
5112 }
5113 case 65:
5114 // line 277 "scan_tokens.rl"
5115
5116 te = p + 1
5117 {
5118 token(TokenOr)
5119 }
5120 case 66:
5121 // line 278 "scan_tokens.rl"
5122
5123 te = p + 1
5124 {
5125 token(TokenEllipsis)
5126 }
5127 case 67:
5128 // line 279 "scan_tokens.rl"
5129
5130 te = p + 1
5131 {
5132 token(TokenFatArrow)
5133 }
5134 case 68:
5135 // line 280 "scan_tokens.rl"
5136
5137 te = p + 1
5138 {
5139 selfToken()
5140 }
5141 case 69:
5142 // line 175 "scan_tokens.rl"
5143
5144 te = p + 1
5145 {
5146 token(TokenOBrace)
5147 braces++
5148 }
5149 case 70:
5150 // line 180 "scan_tokens.rl"
5151
5152 te = p + 1
5153 {
5154 if len(retBraces) > 0 && retBraces[len(retBraces)-1] == braces {
5155 token(TokenTemplateSeqEnd)
5156 braces--
5157 retBraces = retBraces[0 : len(retBraces)-1]
5158 top--
5159 cs = stack[top]
5160 {
5161 stack = stack[:len(stack)-1]
5162 }
5163 goto _again
5164
5165 } else {
5166 token(TokenCBrace)
5167 braces--
5168 }
5169 }
5170 case 71:
5171 // line 192 "scan_tokens.rl"
5172
5173 te = p + 1
5174 {
5175 // Only consume from the retBraces stack and return if we are at
5176 // a suitable brace nesting level, otherwise things will get
5177 // confused. (Not entering this branch indicates a syntax error,
5178 // which we will catch in the parser.)
5179 if len(retBraces) > 0 && retBraces[len(retBraces)-1] == braces {
5180 token(TokenTemplateSeqEnd)
5181 braces--
5182 retBraces = retBraces[0 : len(retBraces)-1]
5183 top--
5184 cs = stack[top]
5185 {
5186 stack = stack[:len(stack)-1]
5187 }
5188 goto _again
5189
5190 } else {
5191 // We intentionally generate a TokenTemplateSeqEnd here,
5192 // even though the user apparently wanted a brace, because
5193 // we want to allow the parser to catch the incorrect use
5194 // of a ~} to balance a generic opening brace, rather than
5195 // a template sequence.
5196 token(TokenTemplateSeqEnd)
5197 braces--
5198 }
5199 }
5200 case 72:
5201 // line 74 "scan_tokens.rl"
5202
5203 te = p + 1
5204 {
5205 token(TokenOQuote)
5206 {
5207 stack = append(stack, 0)
5208 stack[top] = cs
5209 top++
5210 cs = 1515
5211 goto _again
5212 }
5213 }
5214 case 73:
5215 // line 84 "scan_tokens.rl"
5216
5217 te = p + 1
5218 {
5219 token(TokenOHeredoc)
5220 // the token is currently the whole heredoc introducer, like
5221 // <<EOT or <<-EOT, followed by a newline. We want to extract
5222 // just the "EOT" portion that we'll use as the closing marker.
5223
5224 marker := data[ts+2 : te-1]
5225 if marker[0] == '-' {
5226 marker = marker[1:]
5227 }
5228 if marker[len(marker)-1] == '\r' {
5229 marker = marker[:len(marker)-1]
5230 }
5231
5232 heredocs = append(heredocs, heredocInProgress{
5233 Marker: marker,
5234 StartOfLine: true,
5235 })
5236
5237 {
5238 stack = append(stack, 0)
5239 stack[top] = cs
5240 top++
5241 cs = 1541
5242 goto _again
5243 }
5244 }
5245 case 74:
5246 // line 290 "scan_tokens.rl"
5247
5248 te = p + 1
5249 {
5250 token(TokenBadUTF8)
5251 }
5252 case 75:
5253 // line 291 "scan_tokens.rl"
5254
5255 te = p + 1
5256 {
5257 token(TokenInvalid)
5258 }
5259 case 76:
5260 // line 265 "scan_tokens.rl"
5261
5262 te = p
5263 p--
5264
5265 case 77:
5266 // line 266 "scan_tokens.rl"
5267
5268 te = p
5269 p--
5270 {
5271 token(TokenNumberLit)
5272 }
5273 case 78:
5274 // line 267 "scan_tokens.rl"
5275
5276 te = p
5277 p--
5278 {
5279 token(TokenIdent)
5280 }
5281 case 79:
5282 // line 269 "scan_tokens.rl"
5283
5284 te = p
5285 p--
5286 {
5287 token(TokenComment)
5288 }
5289 case 80:
5290 // line 280 "scan_tokens.rl"
5291
5292 te = p
5293 p--
5294 {
5295 selfToken()
5296 }
5297 case 81:
5298 // line 290 "scan_tokens.rl"
5299
5300 te = p
5301 p--
5302 {
5303 token(TokenBadUTF8)
5304 }
5305 case 82:
5306 // line 291 "scan_tokens.rl"
5307
5308 te = p
5309 p--
5310 {
5311 token(TokenInvalid)
5312 }
5313 case 83:
5314 // line 266 "scan_tokens.rl"
5315
5316 p = (te) - 1
5317 {
5318 token(TokenNumberLit)
5319 }
5320 case 84:
5321 // line 267 "scan_tokens.rl"
5322
5323 p = (te) - 1
5324 {
5325 token(TokenIdent)
5326 }
5327 case 85:
5328 // line 280 "scan_tokens.rl"
5329
5330 p = (te) - 1
5331 {
5332 selfToken()
5333 }
5334 case 86:
5335 // line 290 "scan_tokens.rl"
5336
5337 p = (te) - 1
5338 {
5339 token(TokenBadUTF8)
5340 }
5341 case 87:
5342 // line 1 "NONE"
5343
5344 switch act {
5345 case 21:
5346 {
5347 p = (te) - 1
5348 token(TokenIdent)
5349 }
5350 case 22:
5351 {
5352 p = (te) - 1
5353 token(TokenComment)
5354 }
5355 case 32:
5356 {
5357 p = (te) - 1
5358 selfToken()
5359 }
5360 case 38:
5361 {
5362 p = (te) - 1
5363 token(TokenBadUTF8)
5364 }
5365 case 39:
5366 {
5367 p = (te) - 1
5368 token(TokenInvalid)
5369 }
5370 }
5371
5372 // line 5232 "scan_tokens.go"
5373 }
5374 }
5375
5376 _again:
5377 _acts = int(_hcltok_to_state_actions[cs])
5378 _nacts = uint(_hcltok_actions[_acts])
5379 _acts++
5380 for ; _nacts > 0; _nacts-- {
5381 _acts++
5382 switch _hcltok_actions[_acts-1] {
5383 case 4:
5384 // line 1 "NONE"
5385
5386 ts = 0
5387
5388 case 5:
5389 // line 1 "NONE"
5390
5391 act = 0
5392
5393 // line 5252 "scan_tokens.go"
5394 }
5395 }
5396
5397 if cs == 0 {
5398 goto _out
5399 }
5400 p++
5401 if p != pe {
5402 goto _resume
5403 }
5404 _test_eof:
5405 {
5406 }
5407 if p == eof {
5408 if _hcltok_eof_trans[cs] > 0 {
5409 _trans = int(_hcltok_eof_trans[cs] - 1)
5410 goto _eof_trans
5411 }
5412 }
5413
5414 _out:
5415 {
5416 }
5417 }
5418
5419 // line 352 "scan_tokens.rl"
5420
5421 // If we fall out here without being in a final state then we've
5422 // encountered something that the scanner can't match, which we'll
5423 // deal with as an invalid.
5424 if cs < hcltok_first_final {
5425 if mode == scanTemplate && len(stack) == 0 {
5426 // If we're scanning a bare template then any straggling
5427 // top-level stuff is actually literal string, rather than
5428 // invalid. This handles the case where the template ends
5429 // with a single "$" or "%", which trips us up because we
5430 // want to see another character to decide if it's a sequence
5431 // or an escape.
5432 f.emitToken(TokenStringLit, ts, len(data))
5433 } else {
5434 f.emitToken(TokenInvalid, ts, len(data))
5435 }
5436 }
5437
5438 // We always emit a synthetic EOF token at the end, since it gives the
5439 // parser position information for an "unexpected EOF" diagnostic.
5440 f.emitToken(TokenEOF, len(data), len(data))
5441
5442 return f.Tokens
5443}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_tokens.rl b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_tokens.rl
new file mode 100644
index 0000000..83ef65b
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_tokens.rl
@@ -0,0 +1,376 @@
1
2package hclsyntax
3
4import (
5 "bytes"
6
7 "github.com/hashicorp/hcl2/hcl"
8)
9
10// This file is generated from scan_tokens.rl. DO NOT EDIT.
11%%{
12 # (except you are actually in scan_tokens.rl here, so edit away!)
13
14 machine hcltok;
15 write data;
16}%%
17
18func scanTokens(data []byte, filename string, start hcl.Pos, mode scanMode) []Token {
19 f := &tokenAccum{
20 Filename: filename,
21 Bytes: data,
22 Pos: start,
23 }
24
25 %%{
26 include UnicodeDerived "unicode_derived.rl";
27
28 UTF8Cont = 0x80 .. 0xBF;
29 AnyUTF8 = (
30 0x00..0x7F |
31 0xC0..0xDF . UTF8Cont |
32 0xE0..0xEF . UTF8Cont . UTF8Cont |
33 0xF0..0xF7 . UTF8Cont . UTF8Cont . UTF8Cont
34 );
35 BrokenUTF8 = any - AnyUTF8;
36
37 NumberLitContinue = (digit|'.'|('e'|'E') ('+'|'-')? digit);
38 NumberLit = digit ("" | (NumberLitContinue - '.') | (NumberLitContinue* (NumberLitContinue - '.')));
39 Ident = (ID_Start | '_') (ID_Continue | '-')*;
40
41 # Symbols that just represent themselves are handled as a single rule.
42 SelfToken = "[" | "]" | "(" | ")" | "." | "," | "*" | "/" | "%" | "+" | "-" | "=" | "<" | ">" | "!" | "?" | ":" | "\n" | "&" | "|" | "~" | "^" | ";" | "`";
43
44 EqualOp = "==";
45 NotEqual = "!=";
46 GreaterThanEqual = ">=";
47 LessThanEqual = "<=";
48 LogicalAnd = "&&";
49 LogicalOr = "||";
50
51 Ellipsis = "...";
52 FatArrow = "=>";
53
54 Newline = '\r' ? '\n';
55 EndOfLine = Newline;
56
57 BeginStringTmpl = '"';
58 BeginHeredocTmpl = '<<' ('-')? Ident Newline;
59
60 Comment = (
61 ("#" (any - EndOfLine)* EndOfLine) |
62 ("//" (any - EndOfLine)* EndOfLine) |
63 ("/*" any* "*/")
64 );
65
66 # Note: hclwrite assumes that only ASCII spaces appear between tokens,
67 # and uses this assumption to recreate the spaces between tokens by
68 # looking at byte offset differences. This means it will produce
69 # incorrect results in the presence of tabs, but that's acceptable
70 # because the canonical style (which hclwrite itself can impose
71   # automatically) is to never use tabs.
72 Spaces = (' ' | 0x09)+;
73
74 action beginStringTemplate {
75 token(TokenOQuote);
76 fcall stringTemplate;
77 }
78
79 action endStringTemplate {
80 token(TokenCQuote);
81 fret;
82 }
83
84 action beginHeredocTemplate {
85 token(TokenOHeredoc);
86 // the token is currently the whole heredoc introducer, like
87 // <<EOT or <<-EOT, followed by a newline. We want to extract
88 // just the "EOT" portion that we'll use as the closing marker.
89
90 marker := data[ts+2:te-1]
91 if marker[0] == '-' {
92 marker = marker[1:]
93 }
94 if marker[len(marker)-1] == '\r' {
95 marker = marker[:len(marker)-1]
96 }
97
98 heredocs = append(heredocs, heredocInProgress{
99 Marker: marker,
100 StartOfLine: true,
101 })
102
103 fcall heredocTemplate;
104 }
105
106 action heredocLiteralEOL {
107    // This action is called specifically when a heredoc literal
108 // ends with a newline character.
109
110 // This might actually be our end marker.
111 topdoc := &heredocs[len(heredocs)-1]
112 if topdoc.StartOfLine {
113 maybeMarker := bytes.TrimSpace(data[ts:te])
114 if bytes.Equal(maybeMarker, topdoc.Marker) {
115 // We actually emit two tokens here: the end-of-heredoc
116 // marker first, and then separately the newline that
117 // follows it. This then avoids issues with the closing
118 // marker consuming a newline that would normally be used
119 // to mark the end of an attribute definition.
120 // We might have either a \n sequence or an \r\n sequence
121 // here, so we must handle both.
122 nls := te-1
123 nle := te
124 te--
125 if data[te-1] == '\r' {
126 // back up one more byte
127 nls--
128 te--
129 }
130 token(TokenCHeredoc);
131 ts = nls
132 te = nle
133 token(TokenNewline);
134 heredocs = heredocs[:len(heredocs)-1]
135 fret;
136 }
137 }
138
139 topdoc.StartOfLine = true;
140 token(TokenStringLit);
141 }
142
143 action heredocLiteralMidline {
144 // This action is called when a heredoc literal _doesn't_ end
145 // with a newline character, e.g. because we're about to enter
146 // an interpolation sequence.
147 heredocs[len(heredocs)-1].StartOfLine = false;
148 token(TokenStringLit);
149 }
150
151 action bareTemplateLiteral {
152 token(TokenStringLit);
153 }
154
155 action beginTemplateInterp {
156 token(TokenTemplateInterp);
157 braces++;
158 retBraces = append(retBraces, braces);
159 if len(heredocs) > 0 {
160 heredocs[len(heredocs)-1].StartOfLine = false;
161 }
162 fcall main;
163 }
164
165 action beginTemplateControl {
166 token(TokenTemplateControl);
167 braces++;
168 retBraces = append(retBraces, braces);
169 if len(heredocs) > 0 {
170 heredocs[len(heredocs)-1].StartOfLine = false;
171 }
172 fcall main;
173 }
174
175 action openBrace {
176 token(TokenOBrace);
177 braces++;
178 }
179
180 action closeBrace {
181 if len(retBraces) > 0 && retBraces[len(retBraces)-1] == braces {
182 token(TokenTemplateSeqEnd);
183 braces--;
184 retBraces = retBraces[0:len(retBraces)-1]
185 fret;
186 } else {
187 token(TokenCBrace);
188 braces--;
189 }
190 }
191
192 action closeTemplateSeqEatWhitespace {
193 // Only consume from the retBraces stack and return if we are at
194 // a suitable brace nesting level, otherwise things will get
195 // confused. (Not entering this branch indicates a syntax error,
196 // which we will catch in the parser.)
197 if len(retBraces) > 0 && retBraces[len(retBraces)-1] == braces {
198 token(TokenTemplateSeqEnd);
199 braces--;
200 retBraces = retBraces[0:len(retBraces)-1]
201 fret;
202 } else {
203 // We intentionally generate a TokenTemplateSeqEnd here,
204 // even though the user apparently wanted a brace, because
205 // we want to allow the parser to catch the incorrect use
206 // of a ~} to balance a generic opening brace, rather than
207 // a template sequence.
208 token(TokenTemplateSeqEnd);
209 braces--;
210 }
211 }
212
213 TemplateInterp = "${" ("~")?;
214 TemplateControl = "%{" ("~")?;
215 EndStringTmpl = '"';
216 StringLiteralChars = (AnyUTF8 - ("\r"|"\n"));
217 TemplateStringLiteral = (
218 ('$' ^'{' %{ fhold; }) |
219 ('%' ^'{' %{ fhold; }) |
220 ('\\' StringLiteralChars) |
221 (StringLiteralChars - ("$" | '%' | '"'))
222 )+;
223 HeredocStringLiteral = (
224 ('$' ^'{' %{ fhold; }) |
225 ('%' ^'{' %{ fhold; }) |
226 (StringLiteralChars - ("$" | '%'))
227 )*;
228 BareStringLiteral = (
229 ('$' ^'{') |
230 ('%' ^'{') |
231 (StringLiteralChars - ("$" | '%'))
232 )* Newline?;
233
234 stringTemplate := |*
235 TemplateInterp => beginTemplateInterp;
236 TemplateControl => beginTemplateControl;
237 EndStringTmpl => endStringTemplate;
238 TemplateStringLiteral => { token(TokenQuotedLit); };
239 AnyUTF8 => { token(TokenInvalid); };
240 BrokenUTF8 => { token(TokenBadUTF8); };
241 *|;
242
243 heredocTemplate := |*
244 TemplateInterp => beginTemplateInterp;
245 TemplateControl => beginTemplateControl;
246 HeredocStringLiteral EndOfLine => heredocLiteralEOL;
247 HeredocStringLiteral => heredocLiteralMidline;
248 BrokenUTF8 => { token(TokenBadUTF8); };
249 *|;
250
251 bareTemplate := |*
252 TemplateInterp => beginTemplateInterp;
253 TemplateControl => beginTemplateControl;
254 BareStringLiteral => bareTemplateLiteral;
255 BrokenUTF8 => { token(TokenBadUTF8); };
256 *|;
257
258 identOnly := |*
259 Ident => { token(TokenIdent) };
260 BrokenUTF8 => { token(TokenBadUTF8) };
261 AnyUTF8 => { token(TokenInvalid) };
262 *|;
263
264 main := |*
265 Spaces => {};
266 NumberLit => { token(TokenNumberLit) };
267 Ident => { token(TokenIdent) };
268
269 Comment => { token(TokenComment) };
270 Newline => { token(TokenNewline) };
271
272 EqualOp => { token(TokenEqualOp); };
273 NotEqual => { token(TokenNotEqual); };
274 GreaterThanEqual => { token(TokenGreaterThanEq); };
275 LessThanEqual => { token(TokenLessThanEq); };
276 LogicalAnd => { token(TokenAnd); };
277 LogicalOr => { token(TokenOr); };
278 Ellipsis => { token(TokenEllipsis); };
279 FatArrow => { token(TokenFatArrow); };
280 SelfToken => { selfToken() };
281
282 "{" => openBrace;
283 "}" => closeBrace;
284
285 "~}" => closeTemplateSeqEatWhitespace;
286
287 BeginStringTmpl => beginStringTemplate;
288 BeginHeredocTmpl => beginHeredocTemplate;
289
290 BrokenUTF8 => { token(TokenBadUTF8) };
291 AnyUTF8 => { token(TokenInvalid) };
292 *|;
293
294 }%%
295
296 // Ragel state
297 p := 0 // "Pointer" into data
298 pe := len(data) // End-of-data "pointer"
299 ts := 0
300 te := 0
301 act := 0
302 eof := pe
303 var stack []int
304 var top int
305
306 var cs int // current state
307 switch mode {
308 case scanNormal:
309 cs = hcltok_en_main
310 case scanTemplate:
311 cs = hcltok_en_bareTemplate
312 case scanIdentOnly:
313 cs = hcltok_en_identOnly
314 default:
315 panic("invalid scanMode")
316 }
317
318 braces := 0
319 var retBraces []int // stack of brace levels that cause us to use fret
320 var heredocs []heredocInProgress // stack of heredocs we're currently processing
321
322 %%{
323 prepush {
324 stack = append(stack, 0);
325 }
326 postpop {
327 stack = stack[:len(stack)-1];
328 }
329 }%%
330
331 // Make Go compiler happy
332 _ = ts
333 _ = te
334 _ = act
335 _ = eof
336
337 token := func (ty TokenType) {
338 f.emitToken(ty, ts, te)
339 }
340 selfToken := func () {
341 b := data[ts:te]
342 if len(b) != 1 {
343 // should never happen
344 panic("selfToken only works for single-character tokens")
345 }
346 f.emitToken(TokenType(b[0]), ts, te)
347 }
348
349 %%{
350 write init nocs;
351 write exec;
352 }%%
353
354 // If we fall out here without being in a final state then we've
355 // encountered something that the scanner can't match, which we'll
356 // deal with as an invalid.
357 if cs < hcltok_first_final {
358 if mode == scanTemplate && len(stack) == 0 {
359 // If we're scanning a bare template then any straggling
360 // top-level stuff is actually literal string, rather than
361 // invalid. This handles the case where the template ends
362 // with a single "$" or "%", which trips us up because we
363 // want to see another character to decide if it's a sequence
364 // or an escape.
365 f.emitToken(TokenStringLit, ts, len(data))
366 } else {
367 f.emitToken(TokenInvalid, ts, len(data))
368 }
369 }
370
371 // We always emit a synthetic EOF token at the end, since it gives the
372 // parser position information for an "unexpected EOF" diagnostic.
373 f.emitToken(TokenEOF, len(data), len(data))
374
375 return f.Tokens
376}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/spec.md b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/spec.md
new file mode 100644
index 0000000..49b9a3e
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/spec.md
@@ -0,0 +1,923 @@
1# HCL Native Syntax Specification
2
3This is the specification of the syntax and semantics of the native syntax
4for HCL. HCL is a system for defining configuration languages for applications.
5The HCL information model is designed to support multiple concrete syntaxes
6for configuration, but this native syntax is considered the primary format
7and is optimized for human authoring and maintenance, as opposed to machine
8generation of configuration.
9
10The language consists of three integrated sub-languages:
11
12* The _structural_ language defines the overall hierarchical configuration
13 structure, and is a serialization of HCL bodies, blocks and attributes.
14
15* The _expression_ language is used to express attribute values, either as
16 literals or as derivations of other values.
17
18* The _template_ language is used to compose values together into strings,
19 as one of several types of expression in the expression language.
20
21In normal use these three sub-languages are used together within configuration
22files to describe an overall configuration, with the structural language
23being used at the top level. The expression and template languages can also
24be used in isolation, to implement features such as REPLs, debuggers, and
25integration into more limited HCL syntaxes such as the JSON profile.
26
27## Syntax Notation
28
29Within this specification a semi-formal notation is used to illustrate the
30details of syntax. This notation is intended for human consumption rather
31than machine consumption, with the following conventions:
32
33* A naked name starting with an uppercase letter is a global production,
34 common to all of the syntax specifications in this document.
35* A naked name starting with a lowercase letter is a local production,
36 meaningful only within the specification where it is defined.
37* Double and single quotes (`"` and `'`) are used to mark literal character
38 sequences, which may be either punctuation markers or keywords.
39* The default operator for combining items, which has no punctuation,
40 is concatenation.
41* The symbol `|` indicates that any one of its left and right operands may
42 be present.
43* The `*` symbol indicates zero or more repetitions of the item to its left.
44* The `?` symbol indicates zero or one of the item to its left.
45* Parentheses (`(` and `)`) are used to group items together to apply
46 the `|`, `*` and `?` operators to them collectively.
47
48The grammar notation does not fully describe the language. The prose may
49augment or conflict with the illustrated grammar. In case of conflict, prose
50has priority.
51
52## Source Code Representation
53
54Source code is unicode text expressed in the UTF-8 encoding. The language
55itself does not perform unicode normalization, so syntax features such as
56identifiers are sequences of unicode code points and so e.g. a precombined
57accented character is distinct from a letter associated with a combining
58accent. (String literals have some special handling with regard to Unicode
59normalization which will be covered later in the relevant section.)
60
61UTF-8 encoded Unicode byte order marks are not permitted. Invalid or
62non-normalized UTF-8 encoding is always a parse error.
63
64## Lexical Elements
65
66### Comments and Whitespace
67
68Comments and Whitespace are recognized as lexical elements but are ignored
69except as described below.
70
71Whitespace is defined as a sequence of zero or more space characters
72(U+0020). Newline sequences (either U+000A or U+000D followed by U+000A)
73are _not_ considered whitespace but are ignored as such in certain contexts.
74
75Horizontal tab characters (U+0009) are not considered to be whitespace and
76are not valid within HCL native syntax.
77
78Comments serve as program documentation and come in two forms:
79
80* _Line comments_ start with either the `//` or `#` sequences and end with
81 the next newline sequence. A line comment is considered equivalent to a
82 newline sequence.
83
84* _Inline comments_ start with the `/*` sequence and end with the `*/`
85 sequence, and may have any characters within except the ending sequence.
86 An inline comment is considered equivalent to a whitespace sequence.
87
88Comments and whitespace cannot begin within other comments, or within
89template literals except inside an interpolation sequence or template directive.
90
91### Identifiers
92
93Identifiers name entities such as blocks, attributes and expression variables.
94Identifiers are interpreted as per [UAX #31][UAX31] Section 2. Specifically,
95their syntax is defined in terms of the `ID_Start` and `ID_Continue`
96character properties as follows:
97
98```ebnf
99Identifier = ID_Start (ID_Continue | '-')*;
100```
101
102The Unicode specification provides the normative requirements for identifier
103parsing. Non-normatively, the spirit of this specification is that `ID_Start`
104consists of Unicode letters and certain unambiguous punctuation tokens, while
105`ID_Continue` augments that set with Unicode digits, combining marks, etc.
106
107The dash character `-` is additionally allowed in identifiers, even though
108that is not part of the unicode `ID_Continue` definition. This is to allow
109attribute names and block type names to contain dashes, although underscores
110as word separators are considered the idiomatic usage.
111
112[UAX31]: http://unicode.org/reports/tr31/ "Unicode Identifier and Pattern Syntax"
113
114### Keywords
115
116There are no globally-reserved words, but in some contexts certain identifiers
117are reserved to function as keywords. These are discussed further in the
118relevant documentation sections that follow. In such situations, the
119identifier's role as a keyword supersedes any other valid interpretation that
120may be possible. Outside of these specific situations, the keywords have no
121special meaning and are interpreted as regular identifiers.
122
123### Operators and Delimiters
124
125The following character sequences represent operators, delimiters, and other
126special tokens:
127
128```
129+ && == < : { [ ( ${
130- || != > ? } ] ) %{
131* ! <= = .
132/ >= => ,
133% ...
134```
135
136### Numeric Literals
137
138A numeric literal is a decimal representation of a
139real number. It has an integer part, a fractional part,
140and an exponent part.
141
142```ebnf
143NumericLit = decimal+ ("." decimal+)? (expmark decimal+)?;
144decimal = '0' .. '9';
145expmark = ('e' | 'E') ("+" | "-")?;
146```
147
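As a non-normative illustration (not part of the grammar above), the following minimal Go sketch parses and evaluates a numeric literal using the `hclsyntax` package from the hcl2 module vendored in this tree; the literal and the `example.hcl` file name are arbitrary placeholders.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	// "1.5e2" exercises all three parts of NumericLit: the integer part,
	// the fractional part, and the exponent part.
	expr, diags := hclsyntax.ParseExpression([]byte("1.5e2"), "example.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags.Error())
	}

	// A literal needs no variables or functions, so a nil evaluation
	// context is enough here.
	val, diags := expr.Value(nil)
	if diags.HasErrors() {
		panic(diags.Error())
	}
	fmt.Printf("%#v\n", val) // a cty number equal to 150
}
```
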
148## Structural Elements
149
150The structural language consists of syntax representing the following
151constructs:
152
153* _Attributes_, which assign a value to a specified name.
154* _Blocks_, which create a child body annotated by a type and optional labels.
155* _Body Content_, which consists of a collection of attributes and blocks.
156
157These constructs correspond to the similarly-named concepts in the
158language-agnostic HCL information model.
159
160```ebnf
161ConfigFile = Body;
162Body = (Attribute | Block)*;
163Attribute = Identifier "=" Expression Newline;
164Block = Identifier (StringLit|Identifier)* "{" Newline Body "}" Newline;
165```
166
167### Configuration Files
168
169A _configuration file_ is a sequence of characters whose top-level is
170interpreted as a Body.
171
172### Bodies
173
174A _body_ is a collection of associated attributes and blocks. The meaning of
175this association is defined by the calling application.
176
177### Attribute Definitions
178
179An _attribute definition_ assigns a value to a particular attribute name within
180a body. Each distinct attribute name may be defined no more than once within a
181single body.
182
183The attribute value is given as an expression, which is retained literally
184for later evaluation by the calling application.
185
186### Blocks
187
188A _block_ creates a child body that is annotated with a block _type_ and
189zero or more block _labels_. Blocks create a structural hierarchy which can be
190interpreted by the calling application.
191
192Block labels can either be quoted literal strings or naked identifiers.
193
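To make the structural constructs above concrete, here is a hedged Go sketch (illustrative only) that parses a configuration file and reads its top-level body against a schema through the generic `hcl.Body` interface; the `io_mode` attribute and `service` block type are invented for the example.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	src := `
io_mode = "async"

service "http" {
  listen_addr = "127.0.0.1:8080"
}
`
	file, diags := hclsyntax.ParseConfig([]byte(src), "example.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags.Error())
	}

	// The schema names the attributes and block types the caller expects
	// in this body; anything else would be reported as a diagnostic.
	schema := &hcl.BodySchema{
		Attributes: []hcl.AttributeSchema{{Name: "io_mode"}},
		Blocks:     []hcl.BlockHeaderSchema{{Type: "service", LabelNames: []string{"name"}}},
	}
	content, diags := file.Body.Content(schema)
	if diags.HasErrors() {
		panic(diags.Error())
	}

	for name := range content.Attributes {
		fmt.Println("attribute:", name) // io_mode
	}
	for _, block := range content.Blocks {
		fmt.Println("block:", block.Type, block.Labels) // service [http]
	}
}
```
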
194## Expressions
195
196The expression sub-language is used within attribute definitions to specify
197values.
198
199```ebnf
200Expression = (
201 ExprTerm |
202 Operation |
203 Conditional
204);
205```
206
207### Types
208
209The value types used within the expression language are those defined by the
210syntax-agnostic HCL information model. An expression may return any valid
211type, but only a subset of the available types have first-class syntax.
212A calling application may make other types available via _variables_ and
213_functions_.
214
215### Expression Terms
216
217Expression _terms_ are the operands for unary and binary expressions, as well
218as acting as expressions in their own right.
219
220```ebnf
221ExprTerm = (
222 LiteralValue |
223 CollectionValue |
224 TemplateExpr |
225 VariableExpr |
226 FunctionCall |
227 ForExpr |
228 ExprTerm Index |
229 ExprTerm GetAttr |
230 ExprTerm Splat |
231 "(" Expression ")"
232);
233```
234
235The productions for these different term types are given in their corresponding
236sections.
237
238Between the `(` and `)` characters denoting a sub-expression, newline
239characters are ignored as whitespace.
240
241### Literal Values
242
243A _literal value_ immediately represents a particular value of a primitive
244type.
245
246```ebnf
247LiteralValue = (
248 NumericLit |
249 "true" |
250 "false" |
251 "null"
252);
253```
254
255* Numeric literals represent values of type _number_.
256* The `true` and `false` keywords represent values of type _bool_.
257* The `null` keyword represents a null value of the dynamic pseudo-type.
258
259String literals are not directly available in the expression sub-language, but
260are available via the template sub-language, which can in turn be incorporated
261via _template expressions_.
262
263### Collection Values
264
265A _collection value_ combines zero or more other expressions to produce a
266collection value.
267
268```ebnf
269CollectionValue = tuple | object;
270tuple = "[" (
271 (Expression ("," Expression)* ","?)?
272) "]";
273object = "{" (
274 (objectelem ("," objectelem)* ","?)?
275) "}";
276objectelem = (Identifier | Expression) "=" Expression;
277```
278
279Only tuple and object values can be directly constructed via native syntax.
280Tuple and object values can in turn be converted to list, set and map values
281with other operations, which behave as defined by the syntax-agnostic HCL
282information model.
283
284When specifying an object element, an identifier is interpreted as a literal
285attribute name as opposed to a variable reference. To populate an item key
286from a variable, use parentheses to disambiguate:
287
288* `{foo = "baz"}` is interpreted as an attribute literally named `foo`.
289* `{(foo) = "baz"}` is interpreted as an attribute whose name is taken
290 from the variable named `foo`.
291
292Between the open and closing delimiters of these sequences, newline sequences
293are ignored as whitespace.
294
295There is a syntax ambiguity between _for expressions_ and collection values
296whose first element is a reference to a variable named `for`. The
297_for expression_ interpretation has priority, so to produce a tuple whose
298first element is the value of a variable named `for`, or an object with a
299key named `for`, use parentheses to disambiguate:
300
301* `[for, foo, baz]` is a syntax error.
302* `[(for), foo, baz]` is a tuple whose first element is the value of variable
303 `for`.
304* `{for: 1, baz: 2}` is a syntax error.
305* `{(for): 1, baz: 2}` is an object with an attribute literally named `for`.
306* `{baz: 2, for: 1}` is equivalent to the previous example, and resolves the
307 ambiguity by reordering.
308
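The parenthesized-key rule above can be demonstrated with a short, hedged Go sketch (illustrative only); the variable name `env` is invented.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	// A naked identifier key is a literal attribute name; wrapping it in
	// parentheses makes it a variable reference instead.
	src := `{ env = "literal", (env) = "from variable" }`
	expr, diags := hclsyntax.ParseExpression([]byte(src), "example.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags.Error())
	}

	ctx := &hcl.EvalContext{
		Variables: map[string]cty.Value{
			"env": cty.StringVal("production"),
		},
	}
	val, diags := expr.Value(ctx)
	if diags.HasErrors() {
		panic(diags.Error())
	}
	// The result is an object with two attributes: "env" and "production".
	fmt.Printf("%#v\n", val)
}
```
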
309### Template Expressions
310
311A _template expression_ embeds a program written in the template sub-language
312as an expression. Template expressions come in two forms:
313
314* A _quoted_ template expression is delimited by quote characters (`"`) and
315 defines a template as a single-line expression with escape characters.
316* A _heredoc_ template expression is introduced by a `<<` sequence and
317 defines a template via a multi-line sequence terminated by a user-chosen
318 delimiter.
319
320In both cases the template interpolation and directive syntax is available for
321use within the delimiters, and any text outside of these special sequences is
322interpreted as a literal string.
323
324In _quoted_ template expressions any literal string sequences within the
325template behave in a special way: literal newline sequences are not permitted
326and instead _escape sequences_ can be included, starting with the
327backslash `\`:
328
329```
330 \n Unicode newline control character
331 \r Unicode carriage return control character
332 \t Unicode tab control character
333 \" Literal quote mark, used to prevent interpretation as end of string
334 \\ Literal backslash, used to prevent interpretation as escape sequence
335 \uNNNN Unicode character from Basic Multilingual Plane (NNNN is four hexadecimal digits)
336 \UNNNNNNNN Unicode character from supplementary planes (NNNNNNNN is eight hexadecimal digits)
337```
338
339The _heredoc_ template expression type is introduced by either `<<` or `<<-`,
340followed by an identifier. The template expression ends when the given
341identifier subsequently appears again on a line of its own.
342
343If a heredoc template is introduced with the `<<-` symbol, any literal string
344at the start of each line is analyzed to find the minimum number of leading
345spaces, and then that number of prefix spaces is removed from all line-leading
346literal strings. The final closing marker may also have an arbitrary number
347of spaces preceding it on its line.
348
349```ebnf
350TemplateExpr = quotedTemplate | heredocTemplate;
351quotedTemplate = (as defined in prose above);
352heredocTemplate = (
353 ("<<" | "<<-") Identifier Newline
354 (content as defined in prose above)
355 Identifier Newline
356);
357```
358
359A quoted template expression containing only a single literal string serves
360as a syntax for defining literal string _expressions_. In certain contexts
361the template syntax is restricted in this manner:
362
363```ebnf
364StringLit = '"' (quoted literals as defined in prose above) '"';
365```
366
367The `StringLit` production permits the escape sequences discussed for quoted
368template expressions as above, but does _not_ permit template interpolation
369or directive sequences.
370
371### Variables and Variable Expressions
372
373A _variable_ is a value that has been assigned a symbolic name. Variables are
374made available for use in expressions by the calling application, by populating
375the _global scope_ used for expression evaluation.
376
377Variables can also be created by expressions themselves, which always creates
378a _child scope_ that incorporates the variables from its parent scope but
379(re-)defines zero or more names with new values.
380
381The value of a variable is accessed using a _variable expression_, which is
382a standalone `Identifier` whose name corresponds to a defined variable:
383
384```ebnf
385VariableExpr = Identifier;
386```
387
388Variables in a particular scope are immutable, but child scopes may _hide_
389a variable from an ancestor scope by defining a new variable of the same name.
390When looking up variables, the most locally-defined variable of the given name
391is used, and ancestor-scoped variables of the same name cannot be accessed.
392
393No direct syntax is provided for declaring or assigning variables, but other
394expression constructs implicitly create child scopes and define variables as
395part of their evaluation.
396
397### Functions and Function Calls
398
399A _function_ is an operation that has been assigned a symbolic name. Functions
400are made available for use in expressions by the calling application, by
401populating the _function table_ used for expression evaluation.
402
403The namespace of functions is distinct from the namespace of variables. A
404function and a variable may share the same name with no implication that they
405are in any way related.
406
407A function can be executed via a _function call_ expression:
408
409```ebnf
410FunctionCall = Identifier "(" Arguments ")";
411Arguments = (
412 () |
413 (Expression ("," Expression)* ("," | "...")?)
414);
415```
416
417The definition of functions and the semantics of calling them are defined by
418the language-agnostic HCL information model. The given arguments are mapped
419onto the function's _parameters_ and the result of a function call expression
420is the return value of the named function when given those arguments.
421
422If the final argument expression is followed by the ellipsis symbol (`...`),
423the final argument expression must evaluate to either a list or tuple value.
424The elements of the value are each mapped to a single parameter of the
425named function, beginning at the first parameter remaining after all other
426argument expressions have been mapped.
427
428Within the parentheses that delimit the function arguments, newline sequences
429are ignored as whitespace.
430
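Since the function table is supplied by the calling application, a hedged Go sketch of a call looks like the following (illustrative only; the `upper` function is invented and built with the go-cty `function` package that hcl2 uses for its values).

```go
package main

import (
	"fmt"
	"strings"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/function"
)

func main() {
	// The application defines the functions; HCL defines only the call
	// syntax and the argument-to-parameter mapping.
	upper := function.New(&function.Spec{
		Params: []function.Parameter{{Name: "str", Type: cty.String}},
		Type:   function.StaticReturnType(cty.String),
		Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
			return cty.StringVal(strings.ToUpper(args[0].AsString())), nil
		},
	})

	expr, diags := hclsyntax.ParseExpression([]byte(`upper("hello")`), "example.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags.Error())
	}

	ctx := &hcl.EvalContext{
		Functions: map[string]function.Function{"upper": upper},
	}
	val, diags := expr.Value(ctx)
	if diags.HasErrors() {
		panic(diags.Error())
	}
	fmt.Println(val.AsString()) // HELLO
}
```
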
431### For Expressions
432
433A _for expression_ is a construct for constructing a collection by projecting
434the items from another collection.
435
436```ebnf
437ForExpr = forTupleExpr | forObjectExpr;
438forTupleExpr = "[" forIntro Expression forCond? "]";
439forObjectExpr = "{" forIntro Expression "=>" Expression "..."? forCond? "}";
440forIntro = "for" Identifier ("," Identifier)? "in" Expression ":";
441forCond = "if" Expression;
442```
443
444The punctuation used to delimit a for expression decides whether it will produce
445a tuple value (`[` and `]`) or an object value (`{` and `}`).
446
447The "introduction" is equivalent in both cases: the keyword `for` followed by
448either one or two identifiers separated by a comma which define the temporary
449variable names used for iteration, followed by the keyword `in` and then
450an expression that must evaluate to a value that can be iterated. The
451introduction is then terminated by the colon (`:`) symbol.
452
453If only one identifier is provided, it is the name of a variable that will
454be temporarily assigned the value of each element during iteration. If both
455are provided, the first is the key and the second is the value.
456
457Tuple, object, list, map, and set types are iterable. The type of collection
458used defines how the key and value variables are populated:
459
460* For tuple and list types, the _key_ is the zero-based index into the
461 sequence for each element, and the _value_ is the element value. The
462 elements are visited in index order.
463* For object and map types, the _key_ is the string attribute name or element
464 key, and the _value_ is the attribute or element value. The elements are
465 visited in the order defined by a lexicographic sort of the attribute names
466 or keys.
467* For set types, the _key_ and _value_ are both the element value. The elements
468 are visited in an undefined but consistent order.
469
470The expression after the colon and (in the case of object `for`) the expression
471after the `=>` are both evaluated once for each element of the source
472collection, in a local scope that defines the key and value variable names
473specified.
474
475The results of evaluating these expressions for each input element are used
476to populate an element in the new collection. In the case of tuple `for`, the
477single expression becomes an element, appending values to the tuple in visit
478order. In the case of object `for`, the pair of expressions is used as an
479attribute name and value respectively, creating an element in the resulting
480object.
481
482In the case of object `for`, it is an error if two input elements produce
483the same result from the attribute name expression, since duplicate
484attributes are not possible. If the ellipsis symbol (`...`) appears
485immediately after the value expression, this activates the grouping mode in
486which each value in the resulting object is a _tuple_ of all of the values
487that were produced against each distinct key.
488
489* `[for v in ["a", "b"]: v]` returns `["a", "b"]`.
490* `[for i, v in ["a", "b"]: i]` returns `[0, 1]`.
491* `{for i, v in ["a", "b"]: v => i}` returns `{a = 0, b = 1}`.
492* `{for i, v in ["a", "a", "b"]: v => i}` produces an error, because attribute
493 `a` is defined twice.
494* `{for i, v in ["a", "a", "b"]: v => i...}` returns `{a = [0, 1], b = [2]}`.
495
496If the `if` keyword is used after the element expression(s), it applies an
497additional predicate that can be used to conditionally filter elements from
498the source collection from consideration. The expression following `if` is
499evaluated once for each source element, in the same scope used for the
500element expression(s). It must evaluate to a boolean value; if `true`, the
501element will be evaluated as normal, while if `false` the element will be
502skipped.
503
504* `[for i, v in ["a", "b", "c"]: v if i < 2]` returns `["a", "b"]`.
505
506If the collection value, element expression(s) or condition expression return
507unknown values that are otherwise type-valid, the result is a value of the
508dynamic pseudo-type.
509
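One of the tuple examples above, evaluated through a hedged Go sketch (illustrative only); no variables are needed because the source collection is a literal.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	// A tuple "for" with two iteration symbols and an "if" filter.
	src := `[for i, v in ["a", "b", "c"]: v if i < 2]`
	expr, diags := hclsyntax.ParseExpression([]byte(src), "example.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags.Error())
	}

	val, diags := expr.Value(&hcl.EvalContext{})
	if diags.HasErrors() {
		panic(diags.Error())
	}
	fmt.Printf("%#v\n", val) // a tuple equivalent to ["a", "b"]
}
```
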
510### Index Operator
511
512The _index_ operator returns the value of a single element of a collection
513value. It is a postfix operator and can be applied to any value that has
514a tuple, object, map, or list type.
515
516```ebnf
517Index = "[" Expression "]";
518```
519
520The expression delimited by the brackets is the _key_ by which an element
521will be looked up.
522
523If the index operator is applied to a value of tuple or list type, the
524key expression must be a non-negative integer number representing the
525zero-based element index to access. If applied to a value of object or map
526type, the key expression must be a string representing the attribute name
527or element key. If the given key value is not of the appropriate type, a
528conversion is attempted using the conversion rules from the HCL
529syntax-agnostic information model.
530
531An error is produced if the given key expression does not correspond to
532an element in the collection, either because it is of an unconvertible type,
533because it is outside the range of elements for a tuple or list, or because
534the given attribute or key does not exist.
535
536If either the collection or the key are an unknown value of an
537otherwise-suitable type, the return value is an unknown value whose type
538matches what type would be returned given known values, or a value of the
539dynamic pseudo-type if type information alone cannot determine a suitable
540return type.
541
542Within the brackets that delimit the index key, newline sequences are ignored
543as whitespace.
544
545### Attribute Access Operator
546
547The _attribute access_ operator returns the value of a single attribute in
548an object value. It is a postfix operator and can be applied to any value
549that has an object type.
550
551```ebnf
552GetAttr = "." Identifier;
553```
554
555The given identifier is interpreted as the name of the attribute to access.
556An error is produced if the object to which the operator is applied does not
557have an attribute with the given name.
558
559If the object is an unknown value of a type that has the attribute named, the
560result is an unknown value of the attribute's type.
561
562### Splat Operators
563
564The _splat operators_ allow convenient access to attributes or elements of
565elements in a tuple, list, or set value.
566
567There are two kinds of "splat" operator:
568
569* The _attribute-only_ splat operator supports only attribute lookups into
570 the elements from a list, but supports an arbitrary number of them.
571
572* The _full_ splat operator additionally supports indexing into the elements
573 from a list, and allows any combination of attribute access and index
574 operations.
575
576```ebnf
577Splat = attrSplat | fullSplat;
578attrSplat = "." "*" GetAttr*;
579fullSplat = "[" "*" "]" (GetAttr | Index)*;
580```
581
582The splat operators can be thought of as shorthands for common operations that
583could otherwise be performed using _for expressions_:
584
585* `tuple.*.foo.bar[0]` is approximately equivalent to
586 `[for v in tuple: v.foo.bar][0]`.
587* `tuple[*].foo.bar[0]` is approximately equivalent to
588 `[for v in tuple: v.foo.bar[0]]`
589
590Note the difference in how the trailing index operator is interpreted in
591each case. This different interpretation is the key difference between the
592_attribute-only_ and _full_ splat operators.
593
594Splat operators have one additional behavior compared to the equivalent
595_for expressions_ shown above: if a splat operator is applied to a value that
596is _not_ of tuple, list, or set type, the value is coerced automatically into
597a single-value list of the value type:
598
599* `any_object.*.id` is equivalent to `[any_object.id]`, assuming that `any_object`
600 is a single object.
601* `any_number.*` is equivalent to `[any_number]`, assuming that `any_number`
602 is a single number.
603
604If the left operand of a splat operator is an unknown value of any type, the
605result is a value of the dynamic pseudo-type.
606
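A hedged Go sketch of the attribute-only splat (illustrative only); the `users` variable and its `name` attribute are invented.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	// users.*.name projects "name" out of each element, roughly like
	// [for u in users: u.name].
	expr, diags := hclsyntax.ParseExpression([]byte("users.*.name"), "example.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags.Error())
	}

	ctx := &hcl.EvalContext{
		Variables: map[string]cty.Value{
			"users": cty.TupleVal([]cty.Value{
				cty.ObjectVal(map[string]cty.Value{"name": cty.StringVal("ann")}),
				cty.ObjectVal(map[string]cty.Value{"name": cty.StringVal("bob")}),
			}),
		},
	}
	val, diags := expr.Value(ctx)
	if diags.HasErrors() {
		panic(diags.Error())
	}
	fmt.Printf("%#v\n", val) // a tuple equivalent to ["ann", "bob"]
}
```
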
607### Operations
608
609Operations apply a particular operator to either one or two expression terms.
610
611```ebnf
612Operation = unaryOp | binaryOp;
613unaryOp = ("-" | "!") ExprTerm;
614binaryOp = ExprTerm binaryOperator ExprTerm;
615binaryOperator = compareOperator | arithmeticOperator | logicOperator;
616compareOperator = "==" | "!=" | "<" | ">" | "<=" | ">=";
617arithmeticOperator = "+" | "-" | "*" | "/" | "%";
618logicOperator = "&&" | "||" | "!";
619```
620
621The unary operators have the highest precedence.
622
623The binary operators are grouped into the following precedence levels:
624
625```
626Level Operators
627 6 * / %
628 5 + -
629 4 > >= < <=
630 3 == !=
631 2 &&
632 1 ||
633```
634
635Higher values of "level" bind tighter. Operators within the same precedence
636level have left-to-right associativity. For example, `x / y * z` is equivalent
637to `(x / y) * z`.
638
639### Comparison Operators
640
641Comparison operators always produce boolean values, as a result of testing
642the relationship between two values.
643
644The two equality operators apply to values of any type:
645
646```
647a == b equal
648a != b not equal
649```
650
651Two values are equal if they are of identical types and their values are
652equal as defined in the HCL syntax-agnostic information model. The equality
653operators are commutative and opposite, such that `(a == b) == !(a != b)`
654and `(a == b) == (b == a)` for all values `a` and `b`.
655
656The four numeric comparison operators apply only to numbers:
657
658```
659a < b less than
660a <= b less than or equal to
661a > b greater than
662a >= b greater than or equal to
663```
664
665If either operand of a comparison operator is a correctly-typed unknown value
666or a value of the dynamic pseudo-type, the result is an unknown boolean.
667
668### Arithmetic Operators
669
670Arithmetic operators apply only to number values and always produce number
671values as results.
672
673```
674a + b sum (addition)
675a - b difference (subtraction)
676a * b product (multiplication)
677a / b quotient (division)
678a % b remainder (modulo)
679-a negation
680```
681
682Arithmetic operations are considered to be performed in an arbitrary-precision
683number space.
684
685If either operand of an arithmetic operator is an unknown number or a value
686of the dynamic pseudo-type, the result is an unknown number.
687
688### Logic Operators
689
690Logic operators apply only to boolean values and always produce boolean values
691as results.
692
693```
694a && b logical AND
695a || b logical OR
696!a logical NOT
697```
698
699If either operand of a logic operator is an unknown bool value or a value
700of the dynamic pseudo-type, the result is an unknown bool value.
701
702### Conditional Operator
703
704The conditional operator allows selecting from one of two expressions based on
705the outcome of a boolean expression.
706
707```ebnf
708Conditional = Expression "?" Expression ":" Expression;
709```
710
711The first expression is the _predicate_, which is evaluated and must produce
712a boolean result. If the predicate value is `true`, the result of the second
713expression is the result of the conditional. If the predicate value is
714`false`, the result of the third expression is the result of the conditional.
715
716The second and third expressions must be of the same type or must be able to
717unify into a common type using the type unification rules defined in the
718HCL syntax-agnostic information model. This unified type is the result type
719of the conditional, with both expressions converted as necessary to the
720unified type.
721
722If the predicate is an unknown boolean value or a value of the dynamic
723pseudo-type then the result is an unknown value of the unified type of the
724other two expressions.
725
726If either the second or third expressions produce errors when evaluated,
727these errors are passed through only if the erroneous expression is selected.
728This allows for expressions such as
729`length(some_list) > 0 ? some_list[0] : default` (given some suitable `length`
730function) without producing an error when the predicate is `false`.
731
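A hedged Go sketch of the branch-selection behavior described above (illustrative only; the `length` and `items` variables are invented).

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	// Per the rule above, the error from indexing the empty tuple in the
	// second expression is not passed through while the predicate is false.
	src := `length > 0 ? items[0] : "default"`
	expr, diags := hclsyntax.ParseExpression([]byte(src), "example.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags.Error())
	}

	ctx := &hcl.EvalContext{
		Variables: map[string]cty.Value{
			"length": cty.NumberIntVal(0),
			"items":  cty.EmptyTupleVal,
		},
	}
	val, diags := expr.Value(ctx)
	if diags.HasErrors() {
		panic(diags.Error())
	}
	fmt.Println(val.AsString()) // default
}
```
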
732## Templates
733
734The template sub-language is used within template expressions to concisely
735combine strings and other values to produce other strings. It can also be
736used in isolation as a standalone template language.
737
738```ebnf
739Template = (
740 TemplateLiteral |
741 TemplateInterpolation |
742 TemplateDirective
743)*
744TemplateDirective = TemplateIf | TemplateFor;
745```
746
747A template behaves like an expression that always returns a string value.
748The different elements of the template are evaluated and combined into a
749single string to return. If any of the elements produce an unknown string
750or a value of the dynamic pseudo-type, the result is an unknown string.
751
752An important use-case for standalone templates is to enable the use of
753expressions in alternative HCL syntaxes where a native expression grammar is
754not available. For example, the HCL JSON profile treats the values of JSON
755strings as standalone templates when attributes are evaluated in expression
756mode.
757
758### Template Literals
759
760A template literal is a literal sequence of characters to include in the
761resulting string. When the template sub-language is used standalone, a
762template literal can contain any unicode character, with the exception
763of the sequences that introduce interpolations and directives, and of the
764sequences that escape those introductions.
765
766The interpolation and directive introductions are escaped by doubling their
767leading characters. The `${` sequence is escaped as `$${` and the `%{`
768sequence is escaped as `%%{`.
769
770When the template sub-language is embedded in the expression language via
771_template expressions_, additional constraints and transforms are applied to
772template literals, as described in the definition of template expressions.
773
774The value of a template literal can be modified by _strip markers_ in any
775interpolations or directives that are adjacent to it. A strip marker is
776a tilde (`~`) placed immediately after the opening `{` or before the closing
777`}` of a template sequence:
778
779* `hello ${~ "world" }` produces `"helloworld"`.
780* `%{ if true ~} hello %{~ endif }` produces `"hello"`.
781
782When a strip marker is present, any spaces adjacent to it in the corresponding
783string literal (if any) are removed before producing the final value. Space
784characters are interpreted as per Unicode's definition.
785
786Stripping is done at syntax level rather than value level. Values returned
787by interpolations or directives are not subject to stripping:
788
789* `${"hello" ~}${" world"}` produces `"hello world"`, and not `"helloworld"`,
790 because the space is not in a template literal directly adjacent to the
791 strip marker.
792
793### Template Interpolations
794
795An _interpolation sequence_ evaluates an expression (written in the
796expression sub-language), converts the result to a string value, and
797replaces itself with the resulting string.
798
799```ebnf
800TemplateInterpolation = ("${" | "${~") Expression ("}" | "~}");
801```
802
803If the expression result cannot be converted to a string, an error is
804produced.
805
806### Template If Directive
807
808The template `if` directive is the template equivalent of the
809_conditional expression_, allowing selection of one of two sub-templates based
810on the value of a predicate expression.
811
812```ebnf
813TemplateIf = (
814 ("%{" | "%{~") "if" Expression ("}" | "~}")
815 Template
816 (
817 ("%{" | "%{~") "else" ("}" | "~}")
818 Template
819 )?
820 ("%{" | "%{~") "endif" ("}" | "~}")
821);
822```
823
824The evaluation of the `if` directive is equivalent to the conditional
825expression, with the following exceptions:
826
827* The two sub-templates always produce strings, and thus the result value is
828 also always a string.
829* The `else` clause may be omitted, in which case the conditional's third
830 expression result is implied to be the empty string.
831
832### Template For Directive
833
834The template `for` directive is the template equivalent of the _for expression_,
835producing zero or more copies of its sub-template based on the elements of
836a collection.
837
838```ebnf
839TemplateFor = (
840 ("%{" | "%{~") "for" Identifier ("," Identifier)? "in" Expression ("}" | "~}")
841 Template
842 ("%{" | "%{~") "endfor" ("}" | "~}")
843);
844```
845
846The evaluation of the `for` directive is equivalent to the _for expression_
847when producing a tuple, with the following exceptions:
848
849* The sub-template always produces a string.
850* There is no equivalent of the "if" clause on the for expression.
851* The elements of the resulting tuple are all converted to strings and
852 concatenated to produce a flat string result.
853
854### Template Interpolation Unwrapping
855
856As a special case, a template that consists only of a single interpolation,
857with no surrounding literals, directives or other interpolations, is
858"unwrapped". In this case, the result of the interpolation expression is
859returned verbatim, without conversion to string.
860
861This special case exists primarily to enable the native template language
862to be used inside strings in alternative HCL syntaxes that lack a first-class
863template or expression syntax. Unwrapping allows arbitrary expressions to be
864used to populate attributes when strings in such languages are interpreted
865as templates.
866
867* `${true}` produces the boolean value `true`
868* `${"${true}"}` produces the boolean value `true`, because both the inner
869 and outer interpolations are subject to unwrapping.
870* `hello ${true}` produces the string `"hello true"`
871* `${""}${true}` produces the string `"true"` because there are two
872 interpolation sequences, even though one produces an empty result.
873* `%{ for v in [true] }${v}%{ endfor }` produces the string `true` because
874 the presence of the `for` directive circumvents the unwrapping even though
875 the final result is a single value.
876
877In some contexts this unwrapping behavior may be circumvented by the calling
878application, by converting the final template result to string. This is
879necessary, for example, if a standalone template is being used to produce
880the direct contents of a file, since the result in that case must always be a
881string.
882
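A hedged Go sketch of the unwrapping rule (illustrative only), using `hclsyntax.ParseTemplate` on standalone templates; error handling is elided for brevity.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	pos := hcl.Pos{Line: 1, Column: 1}

	// Exactly one interpolation and nothing else: the result is the
	// inner value itself, with no conversion to string.
	wrapped, _ := hclsyntax.ParseTemplate([]byte("${true}"), "a.tmpl", pos)
	val, _ := wrapped.Value(nil)
	fmt.Printf("%#v\n", val) // a cty bool, not a string

	// Any surrounding literal text defeats unwrapping and forces a
	// string result.
	mixed, _ := hclsyntax.ParseTemplate([]byte("hello ${true}"), "b.tmpl", pos)
	val, _ = mixed.Value(nil)
	fmt.Printf("%#v\n", val) // the string "hello true"
}
```
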
883## Static Analysis
884
885The HCL static analysis operations are implemented for some expression types
886in the native syntax, as described in the following sections.
887
888A goal for static analysis of the native syntax is for the interpretation to
889be as consistent as possible with the dynamic evaluation interpretation of
890the given expression, though some deviations are intentionally made in order
891to maximize the potential for analysis.
892
893### Static List
894
895The tuple construction syntax can be interpreted as a static list. All of
896the expression elements given are returned as the static list elements,
897with no further interpretation.
898
899### Static Map
900
901The object construction syntax can be interpreted as a static map. All of the
902key/value pairs given are returned as the static pairs, with no further
903interpretation.
904
905The usual requirement that an attribute name be interpretable as a string
906does not apply to this static analysis, allowing callers to provide map-like
907constructs with different key types by building on the map syntax.
908
909### Static Call
910
911The function call syntax can be interpreted as a static call. The called
912function name is returned verbatim and the given argument expressions are
913returned as the static arguments, with no further interpretation.
914
915### Static Traversal
916
917A variable expression and any attached attribute access operations and
918constant index operations can be interpreted as a static traversal.
919
920The keywords `true`, `false` and `null` can also be interpreted as
921static traversals, behaving as if they were references to variables of those
922names, to allow callers to redefine the meaning of those keywords in certain
923contexts.
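
As a hedged illustration of the static-analysis operations (not part of the specification), the sketch below uses two of the entry points exposed on the `hcl` package, `hcl.ExprList` and `hcl.AbsTraversalForExpr`; the identifiers in the parsed source are invented, and whether every static operation described above has a corresponding exported helper in this vendored version is an assumption.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	pos := hcl.Pos{Line: 1, Column: 1}

	// Static list: the tuple constructor's elements are returned as
	// expressions without being evaluated.
	listExpr, _ := hclsyntax.ParseExpression([]byte("[a, b, c]"), "example.hcl", pos)
	elems, diags := hcl.ExprList(listExpr)
	if diags.HasErrors() {
		panic(diags.Error())
	}
	fmt.Println("static list length:", len(elems)) // 3

	// Static traversal: a variable reference plus attribute accesses is
	// returned as a traversal, again without evaluation.
	travExpr, _ := hclsyntax.ParseExpression([]byte("aws_instance.web.id"), "example.hcl", pos)
	trav, diags := hcl.AbsTraversalForExpr(travExpr)
	if diags.HasErrors() {
		panic(diags.Error())
	}
	fmt.Println("root name:", trav.RootName()) // aws_instance
}
```
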
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/structure.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/structure.go
new file mode 100644
index 0000000..d69f65b
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/structure.go
@@ -0,0 +1,379 @@
1package hclsyntax
2
3import (
4 "fmt"
5 "strings"
6
7 "github.com/hashicorp/hcl2/hcl"
8)
9
10// AsHCLBlock returns the block data expressed as a *hcl.Block.
11func (b *Block) AsHCLBlock() *hcl.Block {
12 lastHeaderRange := b.TypeRange
13 if len(b.LabelRanges) > 0 {
14 lastHeaderRange = b.LabelRanges[len(b.LabelRanges)-1]
15 }
16
17 return &hcl.Block{
18 Type: b.Type,
19 Labels: b.Labels,
20 Body: b.Body,
21
22 DefRange: hcl.RangeBetween(b.TypeRange, lastHeaderRange),
23 TypeRange: b.TypeRange,
24 LabelRanges: b.LabelRanges,
25 }
26}
27
28// Body is the implementation of hcl.Body for the HCL native syntax.
29type Body struct {
30 Attributes Attributes
31 Blocks Blocks
32
33 // These are used with PartialContent to produce a "remaining items"
34 // body to return. They are nil on all bodies fresh out of the parser.
35 hiddenAttrs map[string]struct{}
36 hiddenBlocks map[string]struct{}
37
38 SrcRange hcl.Range
39 EndRange hcl.Range // Final token of the body, for reporting missing items
40}
41
42// Assert that *Body implements hcl.Body
43var assertBodyImplBody hcl.Body = &Body{}
44
45func (b *Body) walkChildNodes(w internalWalkFunc) {
46 b.Attributes = w(b.Attributes).(Attributes)
47 b.Blocks = w(b.Blocks).(Blocks)
48}
49
50func (b *Body) Range() hcl.Range {
51 return b.SrcRange
52}
53
54func (b *Body) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostics) {
55 content, remainHCL, diags := b.PartialContent(schema)
56
57 // Now we'll see if anything actually remains, to produce errors about
58 // extraneous items.
59 remain := remainHCL.(*Body)
60
61 for name, attr := range b.Attributes {
62 if _, hidden := remain.hiddenAttrs[name]; !hidden {
63 var suggestions []string
64 for _, attrS := range schema.Attributes {
65 if _, defined := content.Attributes[attrS.Name]; defined {
66 continue
67 }
68 suggestions = append(suggestions, attrS.Name)
69 }
70 suggestion := nameSuggestion(name, suggestions)
71 if suggestion != "" {
72 suggestion = fmt.Sprintf(" Did you mean %q?", suggestion)
73 } else {
74 // Is there a block of the same name?
75 for _, blockS := range schema.Blocks {
76 if blockS.Type == name {
77 suggestion = fmt.Sprintf(" Did you mean to define a block of type %q?", name)
78 break
79 }
80 }
81 }
82
83 diags = append(diags, &hcl.Diagnostic{
84 Severity: hcl.DiagError,
85 Summary: "Unsupported attribute",
86 Detail: fmt.Sprintf("An attribute named %q is not expected here.%s", name, suggestion),
87 Subject: &attr.NameRange,
88 })
89 }
90 }
91
92 for _, block := range b.Blocks {
93 blockTy := block.Type
94 if _, hidden := remain.hiddenBlocks[blockTy]; !hidden {
95 var suggestions []string
96 for _, blockS := range schema.Blocks {
97 suggestions = append(suggestions, blockS.Type)
98 }
99 suggestion := nameSuggestion(blockTy, suggestions)
100 if suggestion != "" {
101 suggestion = fmt.Sprintf(" Did you mean %q?", suggestion)
102 } else {
103 // Is there an attribute of the same name?
104 for _, attrS := range schema.Attributes {
105 if attrS.Name == blockTy {
106 suggestion = fmt.Sprintf(" Did you mean to define attribute %q?", blockTy)
107 break
108 }
109 }
110 }
111
112 diags = append(diags, &hcl.Diagnostic{
113 Severity: hcl.DiagError,
114 Summary: "Unsupported block type",
115 Detail: fmt.Sprintf("Blocks of type %q are not expected here.%s", blockTy, suggestion),
116 Subject: &block.TypeRange,
117 })
118 }
119 }
120
121 return content, diags
122}
123
124func (b *Body) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) {
125 attrs := make(hcl.Attributes)
126 var blocks hcl.Blocks
127 var diags hcl.Diagnostics
128 hiddenAttrs := make(map[string]struct{})
129 hiddenBlocks := make(map[string]struct{})
130
131 if b.hiddenAttrs != nil {
132 for k, v := range b.hiddenAttrs {
133 hiddenAttrs[k] = v
134 }
135 }
136 if b.hiddenBlocks != nil {
137 for k, v := range b.hiddenBlocks {
138 hiddenBlocks[k] = v
139 }
140 }
141
142 for _, attrS := range schema.Attributes {
143 name := attrS.Name
144 attr, exists := b.Attributes[name]
145 _, hidden := hiddenAttrs[name]
146 if hidden || !exists {
147 if attrS.Required {
148 diags = append(diags, &hcl.Diagnostic{
149 Severity: hcl.DiagError,
150 Summary: "Missing required attribute",
151 Detail: fmt.Sprintf("The attribute %q is required, but no definition was found.", attrS.Name),
152 Subject: b.MissingItemRange().Ptr(),
153 })
154 }
155 continue
156 }
157
158 hiddenAttrs[name] = struct{}{}
159 attrs[name] = attr.AsHCLAttribute()
160 }
161
162 blocksWanted := make(map[string]hcl.BlockHeaderSchema)
163 for _, blockS := range schema.Blocks {
164 blocksWanted[blockS.Type] = blockS
165 }
166
167 for _, block := range b.Blocks {
168 if _, hidden := hiddenBlocks[block.Type]; hidden {
169 continue
170 }
171 blockS, wanted := blocksWanted[block.Type]
172 if !wanted {
173 continue
174 }
175
176 if len(block.Labels) > len(blockS.LabelNames) {
177 name := block.Type
178 if len(blockS.LabelNames) == 0 {
179 diags = append(diags, &hcl.Diagnostic{
180 Severity: hcl.DiagError,
181 Summary: fmt.Sprintf("Extraneous label for %s", name),
182 Detail: fmt.Sprintf(
183 "No labels are expected for %s blocks.", name,
184 ),
185 Subject: block.LabelRanges[0].Ptr(),
186 Context: hcl.RangeBetween(block.TypeRange, block.OpenBraceRange).Ptr(),
187 })
188 } else {
189 diags = append(diags, &hcl.Diagnostic{
190 Severity: hcl.DiagError,
191 Summary: fmt.Sprintf("Extraneous label for %s", name),
192 Detail: fmt.Sprintf(
193 "Only %d labels (%s) are expected for %s blocks.",
194 len(blockS.LabelNames), strings.Join(blockS.LabelNames, ", "), name,
195 ),
196 Subject: block.LabelRanges[len(blockS.LabelNames)].Ptr(),
197 Context: hcl.RangeBetween(block.TypeRange, block.OpenBraceRange).Ptr(),
198 })
199 }
200 continue
201 }
202
203 if len(block.Labels) < len(blockS.LabelNames) {
204 name := block.Type
205 diags = append(diags, &hcl.Diagnostic{
206 Severity: hcl.DiagError,
207 Summary: fmt.Sprintf("Missing %s for %s", blockS.LabelNames[len(block.Labels)], name),
208 Detail: fmt.Sprintf(
209 "All %s blocks must have %d labels (%s).",
210 name, len(blockS.LabelNames), strings.Join(blockS.LabelNames, ", "),
211 ),
212 Subject: &block.OpenBraceRange,
213 Context: hcl.RangeBetween(block.TypeRange, block.OpenBraceRange).Ptr(),
214 })
215 continue
216 }
217
218 blocks = append(blocks, block.AsHCLBlock())
219 }
220
221 // We hide blocks only after we've processed all of them, since otherwise
222 // we can't process more than one of the same type.
223 for _, blockS := range schema.Blocks {
224 hiddenBlocks[blockS.Type] = struct{}{}
225 }
226
227 remain := &Body{
228 Attributes: b.Attributes,
229 Blocks: b.Blocks,
230
231 hiddenAttrs: hiddenAttrs,
232 hiddenBlocks: hiddenBlocks,
233
234 SrcRange: b.SrcRange,
235 EndRange: b.EndRange,
236 }
237
238 return &hcl.BodyContent{
239 Attributes: attrs,
240 Blocks: blocks,
241
242 MissingItemRange: b.MissingItemRange(),
243 }, remain, diags
244}
245
246func (b *Body) JustAttributes() (hcl.Attributes, hcl.Diagnostics) {
247 attrs := make(hcl.Attributes)
248 var diags hcl.Diagnostics
249
250 if len(b.Blocks) > 0 {
251 example := b.Blocks[0]
252 diags = append(diags, &hcl.Diagnostic{
253 Severity: hcl.DiagError,
254 Summary: fmt.Sprintf("Unexpected %s block", example.Type),
255 Detail: "Blocks are not allowed here.",
256 Context: &example.TypeRange,
257 })
258 // we will continue processing anyway, and return the attributes
259 // we are able to find so that certain analyses can still be done
260 // in the face of errors.
261 }
262
263 if b.Attributes == nil {
264 return attrs, diags
265 }
266
267 for name, attr := range b.Attributes {
268 if _, hidden := b.hiddenAttrs[name]; hidden {
269 continue
270 }
271 attrs[name] = attr.AsHCLAttribute()
272 }
273
274 return attrs, diags
275}
276
277func (b *Body) MissingItemRange() hcl.Range {
278 return b.EndRange
279}
280
281// Attributes is the collection of attribute definitions within a body.
282type Attributes map[string]*Attribute
283
284func (a Attributes) walkChildNodes(w internalWalkFunc) {
285 for k, attr := range a {
286 a[k] = w(attr).(*Attribute)
287 }
288}
289
290// Range returns the range of some arbitrary point within the set of
291// attributes, or an invalid range if there are no attributes.
292//
293// This is provided only to complete the Node interface, but has no practical
294// use.
295func (a Attributes) Range() hcl.Range {
296 // An Attributes doesn't really have a useful range to report, since
297 // it's just a grouping construct. So we'll arbitrarily take the
298 // range of one of the attributes, or produce an invalid range if we have
299 // none. In practice, there's little reason to ask for the range of
300 // an Attributes.
301 for _, attr := range a {
302 return attr.Range()
303 }
304 return hcl.Range{
305 Filename: "<unknown>",
306 }
307}
308
309// Attribute represents a single attribute definition within a body.
310type Attribute struct {
311 Name string
312 Expr Expression
313
314 SrcRange hcl.Range
315 NameRange hcl.Range
316 EqualsRange hcl.Range
317}
318
319func (a *Attribute) walkChildNodes(w internalWalkFunc) {
320 a.Expr = w(a.Expr).(Expression)
321}
322
323func (a *Attribute) Range() hcl.Range {
324 return a.SrcRange
325}
326
327// AsHCLAttribute returns the block data expressed as a *hcl.Attribute.
328func (a *Attribute) AsHCLAttribute() *hcl.Attribute {
329 return &hcl.Attribute{
330 Name: a.Name,
331 Expr: a.Expr,
332
333 Range: a.SrcRange,
334 NameRange: a.NameRange,
335 }
336}
337
338// Blocks is the list of nested blocks within a body.
339type Blocks []*Block
340
341func (bs Blocks) walkChildNodes(w internalWalkFunc) {
342 for i, block := range bs {
343 bs[i] = w(block).(*Block)
344 }
345}
346
347// Range returns the range of some arbitrary point within the list of
348// blocks, or an invalid range if there are no blocks.
349//
350// This is provided only to complete the Node interface, but has no practical
351// use.
352func (bs Blocks) Range() hcl.Range {
353 if len(bs) > 0 {
354 return bs[0].Range()
355 }
356 return hcl.Range{
357 Filename: "<unknown>",
358 }
359}
360
361// Block represents a nested block structure
362type Block struct {
363 Type string
364 Labels []string
365 Body *Body
366
367 TypeRange hcl.Range
368 LabelRanges []hcl.Range
369 OpenBraceRange hcl.Range
370 CloseBraceRange hcl.Range
371}
372
373func (b *Block) walkChildNodes(w internalWalkFunc) {
374 b.Body = w(b.Body).(*Body)
375}
376
377func (b *Block) Range() hcl.Range {
378 return hcl.RangeBetween(b.TypeRange, b.CloseBraceRange)
379}
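
A hedged usage sketch for the Content/PartialContent implementations above, driving them through the public hcl.Body interface rather than this package's internals; the attribute and block names are invented for illustration.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	src := `
project = "demo"
owner   = "ops"

backend "s3" {}
`
	file, diags := hclsyntax.ParseConfig([]byte(src), "example.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags.Error())
	}

	// PartialContent consumes only what the schema names and returns a
	// "remaining" body (with the consumed items hidden) for a later pass;
	// Content would instead report anything unconsumed as an error.
	schema := &hcl.BodySchema{
		Attributes: []hcl.AttributeSchema{{Name: "project", Required: true}},
		Blocks:     []hcl.BlockHeaderSchema{{Type: "backend", LabelNames: []string{"type"}}},
	}
	content, remain, diags := file.Body.PartialContent(schema)
	if diags.HasErrors() {
		panic(diags.Error())
	}
	fmt.Println("attributes:", len(content.Attributes), "blocks:", len(content.Blocks)) // 1 1

	// remain still carries "owner"; another layer can decode it against
	// its own schema.
	_ = remain
}
```
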
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/token.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/token.go
new file mode 100644
index 0000000..bcaa15f
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/token.go
@@ -0,0 +1,272 @@
1package hclsyntax
2
3import (
4 "fmt"
5
6 "github.com/apparentlymart/go-textseg/textseg"
7 "github.com/hashicorp/hcl2/hcl"
8)
9
10// Token represents a sequence of bytes from some HCL code that has been
11// tagged with a type and its range within the source file.
12type Token struct {
13 Type TokenType
14 Bytes []byte
15 Range hcl.Range
16}
17
18// Tokens is a slice of Token.
19type Tokens []Token
20
21// TokenType is an enumeration used for the Type field on Token.
22type TokenType rune
23
24const (
25 // Single-character tokens are represented by their own character, for
26 // convenience in producing these within the scanner. However, the values
27 // are otherwise arbitrary and just intended to be mnemonic for humans
28 // who might see them in debug output.
29
30 TokenOBrace TokenType = '{'
31 TokenCBrace TokenType = '}'
32 TokenOBrack TokenType = '['
33 TokenCBrack TokenType = ']'
34 TokenOParen TokenType = '('
35 TokenCParen TokenType = ')'
36 TokenOQuote TokenType = '«'
37 TokenCQuote TokenType = '»'
38 TokenOHeredoc TokenType = 'H'
39 TokenCHeredoc TokenType = 'h'
40
41 TokenStar TokenType = '*'
42 TokenSlash TokenType = '/'
43 TokenPlus TokenType = '+'
44 TokenMinus TokenType = '-'
45 TokenPercent TokenType = '%'
46
47 TokenEqual TokenType = '='
48 TokenEqualOp TokenType = '≔'
49 TokenNotEqual TokenType = '≠'
50 TokenLessThan TokenType = '<'
51 TokenLessThanEq TokenType = '≤'
52 TokenGreaterThan TokenType = '>'
53 TokenGreaterThanEq TokenType = '≥'
54
55 TokenAnd TokenType = '∧'
56 TokenOr TokenType = '∨'
57 TokenBang TokenType = '!'
58
59 TokenDot TokenType = '.'
60 TokenComma TokenType = ','
61
62 TokenEllipsis TokenType = '…'
63 TokenFatArrow TokenType = '⇒'
64
65 TokenQuestion TokenType = '?'
66 TokenColon TokenType = ':'
67
68 TokenTemplateInterp TokenType = '∫'
69 TokenTemplateControl TokenType = 'λ'
70 TokenTemplateSeqEnd TokenType = '∎'
71
72 TokenQuotedLit TokenType = 'Q' // might contain backslash escapes
73 TokenStringLit TokenType = 'S' // cannot contain backslash escapes
74 TokenNumberLit TokenType = 'N'
75 TokenIdent TokenType = 'I'
76
77 TokenComment TokenType = 'C'
78
79 TokenNewline TokenType = '\n'
80 TokenEOF TokenType = '␄'
81
82 // The rest are not used in the language but recognized by the scanner so
83 // we can generate good diagnostics in the parser when users try to write
84 // things that might work in other languages they are familiar with, or
85 // simply make incorrect assumptions about the HCL language.
86
87 TokenBitwiseAnd TokenType = '&'
88 TokenBitwiseOr TokenType = '|'
89 TokenBitwiseNot TokenType = '~'
90 TokenBitwiseXor TokenType = '^'
91 TokenStarStar TokenType = '➚'
92 TokenBacktick TokenType = '`'
93 TokenSemicolon TokenType = ';'
94 TokenTabs TokenType = '␉'
95 TokenInvalid TokenType = '�'
96 TokenBadUTF8 TokenType = '💩'
97
98 // TokenNil is a placeholder for when a token is required but none is
99 // available, e.g. when reporting errors. The scanner will never produce
100 // this as part of a token stream.
101 TokenNil TokenType = '\x00'
102)
103
104func (t TokenType) GoString() string {
105 return fmt.Sprintf("hclsyntax.%s", t.String())
106}
107
108type scanMode int
109
110const (
111 scanNormal scanMode = iota
112 scanTemplate
113 scanIdentOnly
114)
115
116type tokenAccum struct {
117 Filename string
118 Bytes []byte
119 Pos hcl.Pos
120 Tokens []Token
121}
122
123func (f *tokenAccum) emitToken(ty TokenType, startOfs, endOfs int) {
124 // Walk through our buffer to figure out how much we need to adjust
125 // the start pos to get our end pos.
126
127 start := f.Pos
128 start.Column += startOfs - f.Pos.Byte // Safe because only ASCII spaces can be in the offset
129 start.Byte = startOfs
130
131 end := start
132 end.Byte = endOfs
133 b := f.Bytes[startOfs:endOfs]
134 for len(b) > 0 {
135 advance, seq, _ := textseg.ScanGraphemeClusters(b, true)
136 if (len(seq) == 1 && seq[0] == '\n') || (len(seq) == 2 && seq[0] == '\r' && seq[1] == '\n') {
137 end.Line++
138 end.Column = 1
139 } else {
140 end.Column++
141 }
142 b = b[advance:]
143 }
144
145 f.Pos = end
146
147 f.Tokens = append(f.Tokens, Token{
148 Type: ty,
149 Bytes: f.Bytes[startOfs:endOfs],
150 Range: hcl.Range{
151 Filename: f.Filename,
152 Start: start,
153 End: end,
154 },
155 })
156}
157
158type heredocInProgress struct {
159 Marker []byte
160 StartOfLine bool
161}
162
163// checkInvalidTokens does a simple pass across the given tokens and generates
164// diagnostics for tokens that should _never_ appear in HCL source. This
165// is intended to avoid the need for the parser to have special support
166// for them all over.
167//
168// Returns a diagnostics with no errors if everything seems acceptable.
169// Otherwise, returns zero or more error diagnostics, though tries to limit
170// repetition of the same information.
171func checkInvalidTokens(tokens Tokens) hcl.Diagnostics {
172 var diags hcl.Diagnostics
173
174 toldBitwise := 0
175 toldExponent := 0
176 toldBacktick := 0
177 toldSemicolon := 0
178 toldTabs := 0
179 toldBadUTF8 := 0
180
181 for _, tok := range tokens {
182 switch tok.Type {
183 case TokenBitwiseAnd, TokenBitwiseOr, TokenBitwiseXor, TokenBitwiseNot:
184 if toldBitwise < 4 {
185 var suggestion string
186 switch tok.Type {
187 case TokenBitwiseAnd:
188 suggestion = " Did you mean boolean AND (\"&&\")?"
189 case TokenBitwiseOr:
190 suggestion = " Did you mean boolean OR (\"||\")?"
191 case TokenBitwiseNot:
192 suggestion = " Did you mean boolean NOT (\"!\")?"
193 }
194
195 diags = append(diags, &hcl.Diagnostic{
196 Severity: hcl.DiagError,
197 Summary: "Unsupported operator",
198 Detail: fmt.Sprintf("Bitwise operators are not supported.%s", suggestion),
199 Subject: &tok.Range,
200 })
201 toldBitwise++
202 }
203 case TokenStarStar:
204 if toldExponent < 1 {
205 diags = append(diags, &hcl.Diagnostic{
206 Severity: hcl.DiagError,
207 Summary: "Unsupported operator",
208 Detail: "\"**\" is not a supported operator. Exponentiation is not supported as an operator.",
209 Subject: &tok.Range,
210 })
211
212 toldExponent++
213 }
214 case TokenBacktick:
215 // Only report for alternating (even) backticks, so we won't report both the start and end of the same
216 // backtick-quoted string.
217 if toldBacktick < 4 && (toldBacktick%2) == 0 {
218 diags = append(diags, &hcl.Diagnostic{
219 Severity: hcl.DiagError,
220 Summary: "Invalid character",
221 Detail: "The \"`\" character is not valid. To create a multi-line string, use the \"heredoc\" syntax, like \"<<EOT\".",
222 Subject: &tok.Range,
223 })
224
225 toldBacktick++
226 }
227 case TokenSemicolon:
228 if toldSemicolon < 1 {
229 diags = append(diags, &hcl.Diagnostic{
230 Severity: hcl.DiagError,
231 Summary: "Invalid character",
232 Detail: "The \";\" character is not valid. Use newlines to separate attributes and blocks, and commas to separate items in collection values.",
233 Subject: &tok.Range,
234 })
235
236 toldSemicolon++
237 }
238 case TokenTabs:
239 if toldTabs < 1 {
240 diags = append(diags, &hcl.Diagnostic{
241 Severity: hcl.DiagError,
242 Summary: "Invalid character",
243 Detail: "Tab characters may not be used. The recommended indentation style is two spaces per indent.",
244 Subject: &tok.Range,
245 })
246
247 toldTabs++
248 }
249 case TokenBadUTF8:
250 if toldBadUTF8 < 1 {
251 diags = append(diags, &hcl.Diagnostic{
252 Severity: hcl.DiagError,
253 Summary: "Invalid character encoding",
254 Detail: "All input files must be UTF-8 encoded. Ensure that UTF-8 encoding is selected in your editor.",
255 Subject: &tok.Range,
256 })
257
258 toldBadUTF8++
259 }
260 case TokenInvalid:
261 diags = append(diags, &hcl.Diagnostic{
262 Severity: hcl.DiagError,
263 Summary: "Invalid character",
264 Detail: "This character is not used within the language.",
265 Subject: &tok.Range,
266 })
267
268 toldTabs++
269 }
270 }
271 return diags
272}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/token_type_string.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/token_type_string.go
new file mode 100644
index 0000000..93de7ee
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/token_type_string.go
@@ -0,0 +1,69 @@
1// Code generated by "stringer -type TokenType -output token_type_string.go"; DO NOT EDIT.
2
3package hclsyntax
4
5import "strconv"
6
7const _TokenType_name = "TokenNilTokenNewlineTokenBangTokenPercentTokenBitwiseAndTokenOParenTokenCParenTokenStarTokenPlusTokenCommaTokenMinusTokenDotTokenSlashTokenColonTokenSemicolonTokenLessThanTokenEqualTokenGreaterThanTokenQuestionTokenCommentTokenOHeredocTokenIdentTokenNumberLitTokenQuotedLitTokenStringLitTokenOBrackTokenCBrackTokenBitwiseXorTokenBacktickTokenCHeredocTokenOBraceTokenBitwiseOrTokenCBraceTokenBitwiseNotTokenOQuoteTokenCQuoteTokenTemplateControlTokenEllipsisTokenFatArrowTokenTemplateSeqEndTokenAndTokenOrTokenTemplateInterpTokenEqualOpTokenNotEqualTokenLessThanEqTokenGreaterThanEqTokenEOFTokenTabsTokenStarStarTokenInvalidTokenBadUTF8"
8
9var _TokenType_map = map[TokenType]string{
10 0: _TokenType_name[0:8],
11 10: _TokenType_name[8:20],
12 33: _TokenType_name[20:29],
13 37: _TokenType_name[29:41],
14 38: _TokenType_name[41:56],
15 40: _TokenType_name[56:67],
16 41: _TokenType_name[67:78],
17 42: _TokenType_name[78:87],
18 43: _TokenType_name[87:96],
19 44: _TokenType_name[96:106],
20 45: _TokenType_name[106:116],
21 46: _TokenType_name[116:124],
22 47: _TokenType_name[124:134],
23 58: _TokenType_name[134:144],
24 59: _TokenType_name[144:158],
25 60: _TokenType_name[158:171],
26 61: _TokenType_name[171:181],
27 62: _TokenType_name[181:197],
28 63: _TokenType_name[197:210],
29 67: _TokenType_name[210:222],
30 72: _TokenType_name[222:235],
31 73: _TokenType_name[235:245],
32 78: _TokenType_name[245:259],
33 81: _TokenType_name[259:273],
34 83: _TokenType_name[273:287],
35 91: _TokenType_name[287:298],
36 93: _TokenType_name[298:309],
37 94: _TokenType_name[309:324],
38 96: _TokenType_name[324:337],
39 104: _TokenType_name[337:350],
40 123: _TokenType_name[350:361],
41 124: _TokenType_name[361:375],
42 125: _TokenType_name[375:386],
43 126: _TokenType_name[386:401],
44 171: _TokenType_name[401:412],
45 187: _TokenType_name[412:423],
46 955: _TokenType_name[423:443],
47 8230: _TokenType_name[443:456],
48 8658: _TokenType_name[456:469],
49 8718: _TokenType_name[469:488],
50 8743: _TokenType_name[488:496],
51 8744: _TokenType_name[496:503],
52 8747: _TokenType_name[503:522],
53 8788: _TokenType_name[522:534],
54 8800: _TokenType_name[534:547],
55 8804: _TokenType_name[547:562],
56 8805: _TokenType_name[562:580],
57 9220: _TokenType_name[580:588],
58 9225: _TokenType_name[588:597],
59 10138: _TokenType_name[597:610],
60 65533: _TokenType_name[610:622],
61 128169: _TokenType_name[622:634],
62}
63
64func (i TokenType) String() string {
65 if str, ok := _TokenType_map[i]; ok {
66 return str
67 }
68 return "TokenType(" + strconv.FormatInt(int64(i), 10) + ")"
69}
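The generated code above backs TokenType's fmt.Stringer implementation: known values are looked up in _TokenType_map and sliced out of the single _TokenType_name constant, and anything else falls back to the "TokenType(N)" form. A minimal sketch of how that reads from calling code, assuming the vendored hclsyntax package and its exported token constants (not part of the vendored tree):

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	// Values present in _TokenType_map resolve to their generated names.
	fmt.Println(hclsyntax.TokenStarStar) // TokenStarStar
	fmt.Println(hclsyntax.TokenBacktick) // TokenBacktick

	// Values outside the map use the fallback formatting in String.
	fmt.Println(hclsyntax.TokenType(1)) // TokenType(1)
}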
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/unicode2ragel.rb b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/unicode2ragel.rb
new file mode 100644
index 0000000..422e4e5
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/unicode2ragel.rb
@@ -0,0 +1,335 @@
1#!/usr/bin/env ruby
2#
3# This script has been updated to accept more command-line arguments:
4#
5# -u, --url URL to process
6# -m, --machine Machine name
7# -p, --properties Properties to add to the machine
8# -o, --output Write output to file
9#
10# Updated by: Marty Schoch <marty.schoch@gmail.com>
11#
12# This script uses the unicode spec to generate a Ragel state machine
13# that recognizes unicode alphanumeric characters. It generates 5
14# character classes: uupper, ulower, ualpha, udigit, and ualnum.
15# Currently supported encodings are UTF-8 [default] and UCS-4.
16#
17# Usage: unicode2ragel.rb [options]
18# -e, --encoding [ucs4 | utf8] Data encoding
19# -h, --help Show this message
20#
21# This script was originally written as part of the Ferret search
22# engine library.
23#
24# Author: Rakan El-Khalil <rakan@well.com>
25
26require 'optparse'
27require 'open-uri'
28
29ENCODINGS = [ :utf8, :ucs4 ]
30ALPHTYPES = { :utf8 => "byte", :ucs4 => "rune" }
31DEFAULT_CHART_URL = "http://www.unicode.org/Public/5.1.0/ucd/DerivedCoreProperties.txt"
32DEFAULT_MACHINE_NAME= "WChar"
33
34###
35# Display vars & default option
36
37TOTAL_WIDTH = 80
38RANGE_WIDTH = 23
39@encoding = :utf8
40@chart_url = DEFAULT_CHART_URL
41machine_name = DEFAULT_MACHINE_NAME
42properties = []
43@output = $stdout
44
45###
46# Option parsing
47
48cli_opts = OptionParser.new do |opts|
49 opts.on("-e", "--encoding [ucs4 | utf8]", "Data encoding") do |o|
50 @encoding = o.downcase.to_sym
51 end
52 opts.on("-h", "--help", "Show this message") do
53 puts opts
54 exit
55 end
56 opts.on("-u", "--url URL", "URL to process") do |o|
57 @chart_url = o
58 end
59 opts.on("-m", "--machine MACHINE_NAME", "Machine name") do |o|
60 machine_name = o
61 end
62 opts.on("-p", "--properties x,y,z", Array, "Properties to add to machine") do |o|
63 properties = o
64 end
65 opts.on("-o", "--output FILE", "output file") do |o|
66 @output = File.new(o, "w+")
67 end
68end
69
70cli_opts.parse(ARGV)
71unless ENCODINGS.member? @encoding
72 puts "Invalid encoding: #{@encoding}"
73 puts cli_opts
74 exit
75end
76
77##
78# Downloads the document at url and yields every alpha line's hex
79# range and description.
80
81def each_alpha( url, property )
82 open( url ) do |file|
83 file.each_line do |line|
84 next if line =~ /^#/;
85 next if line !~ /; #{property} #/;
86
87 range, description = line.split(/;/)
88 range.strip!
89 description.gsub!(/.*#/, '').strip!
90
91 if range =~ /\.\./
92 start, stop = range.split '..'
93 else start = stop = range
94 end
95
96 yield start.hex .. stop.hex, description
97 end
98 end
99end
100
101###
102# Formats to hex at minimum width
103
104def to_hex( n )
105 r = "%0X" % n
106 r = "0#{r}" unless (r.length % 2).zero?
107 r
108end
109
110###
111# UCS4 is just a straight hex conversion of the unicode codepoint.
112
113def to_ucs4( range )
114 rangestr = "0x" + to_hex(range.begin)
115 rangestr << "..0x" + to_hex(range.end) if range.begin != range.end
116 [ rangestr ]
117end
118
119##
120# 0x00 - 0x7f -> 0zzzzzzz[7]
121# 0x80 - 0x7ff -> 110yyyyy[5] 10zzzzzz[6]
122# 0x800 - 0xffff -> 1110xxxx[4] 10yyyyyy[6] 10zzzzzz[6]
123# 0x010000 - 0x10ffff -> 11110www[3] 10xxxxxx[6] 10yyyyyy[6] 10zzzzzz[6]
124
125UTF8_BOUNDARIES = [0x7f, 0x7ff, 0xffff, 0x10ffff]
126
127def to_utf8_enc( n )
128 r = 0
129 if n <= 0x7f
130 r = n
131 elsif n <= 0x7ff
132 y = 0xc0 | (n >> 6)
133 z = 0x80 | (n & 0x3f)
134 r = y << 8 | z
135 elsif n <= 0xffff
136 x = 0xe0 | (n >> 12)
137 y = 0x80 | (n >> 6) & 0x3f
138 z = 0x80 | n & 0x3f
139 r = x << 16 | y << 8 | z
140 elsif n <= 0x10ffff
141 w = 0xf0 | (n >> 18)
142 x = 0x80 | (n >> 12) & 0x3f
143 y = 0x80 | (n >> 6) & 0x3f
144 z = 0x80 | n & 0x3f
145 r = w << 24 | x << 16 | y << 8 | z
146 end
147
148 to_hex(r)
149end
150
151def from_utf8_enc( n )
152 n = n.hex
153 r = 0
154 if n <= 0x7f
155 r = n
156 elsif n <= 0xdfff
157 y = (n >> 8) & 0x1f
158 z = n & 0x3f
159 r = y << 6 | z
160 elsif n <= 0xefffff
161 x = (n >> 16) & 0x0f
162 y = (n >> 8) & 0x3f
163 z = n & 0x3f
164 r = x << 10 | y << 6 | z
165 elsif n <= 0xf7ffffff
166 w = (n >> 24) & 0x07
167 x = (n >> 16) & 0x3f
168 y = (n >> 8) & 0x3f
169 z = n & 0x3f
170 r = w << 18 | x << 12 | y << 6 | z
171 end
172 r
173end
174
175###
176# Given a range, splits it up into ranges that can be continuously
177# encoded into utf8. Eg: 0x00 .. 0xff => [0x00..0x7f, 0x80..0xff]
178# This is not strictly needed since the current [5.1] unicode standard
179# doesn't have ranges that straddle utf8 boundaries. This is included
180# for completeness as there is no telling if that will ever change.
181
182def utf8_ranges( range )
183 ranges = []
184 UTF8_BOUNDARIES.each do |max|
185 if range.begin <= max
186 if range.end <= max
187 ranges << range
188 return ranges
189 end
190
191 ranges << (range.begin .. max)
192 range = (max + 1) .. range.end
193 end
194 end
195 ranges
196end
197
198def build_range( start, stop )
199 size = start.size/2
200 left = size - 1
201 return [""] if size < 1
202
203 a = start[0..1]
204 b = stop[0..1]
205
206 ###
207 # Shared prefix
208
209 if a == b
210 return build_range(start[2..-1], stop[2..-1]).map do |elt|
211 "0x#{a} " + elt
212 end
213 end
214
215 ###
216 # Unshared prefix, end of run
217
218 return ["0x#{a}..0x#{b} "] if left.zero?
219
220 ###
221 # Unshared prefix, not end of run
222 # Range can be 0x123456..0x56789A
223 # Which is equivalent to:
224 # 0x123456 .. 0x12FFFF
225 # 0x130000 .. 0x55FFFF
226 # 0x560000 .. 0x56789A
227
228 ret = []
229 ret << build_range(start, a + "FF" * left)
230
231 ###
232 # Only generate middle range if need be.
233
234 if a.hex+1 != b.hex
235 max = to_hex(b.hex - 1)
236 max = "FF" if b == "FF"
237 ret << "0x#{to_hex(a.hex+1)}..0x#{max} " + "0x00..0xFF " * left
238 end
239
240 ###
241 # Don't generate last range if it is covered by first range
242
243 ret << build_range(b + "00" * left, stop) unless b == "FF"
244 ret.flatten!
245end
246
247def to_utf8( range )
248 utf8_ranges( range ).map do |r|
249 begin_enc = to_utf8_enc(r.begin)
250 end_enc = to_utf8_enc(r.end)
251 build_range begin_enc, end_enc
252 end.flatten!
253end
254
255##
256# Perform a 3-way comparison of the number of codepoints advertised by
257# the unicode spec for the given range, the originally parsed range,
258# and the resulting utf8 encoded range.
259
260def count_codepoints( code )
261 code.split(' ').inject(1) do |acc, elt|
262 if elt =~ /0x(.+)\.\.0x(.+)/
263 if @encoding == :utf8
264 acc * (from_utf8_enc($2) - from_utf8_enc($1) + 1)
265 else
266 acc * ($2.hex - $1.hex + 1)
267 end
268 else
269 acc
270 end
271 end
272end
273
274def is_valid?( range, desc, codes )
275 spec_count = 1
276 spec_count = $1.to_i if desc =~ /\[(\d+)\]/
277 range_count = range.end - range.begin + 1
278
279 sum = codes.inject(0) { |acc, elt| acc + count_codepoints(elt) }
280 sum == spec_count and sum == range_count
281end
282
283##
284# Generate the state machine to stdout
285
286def generate_machine( name, property )
287 pipe = " "
288 @output.puts " #{name} = "
289 each_alpha( @chart_url, property ) do |range, desc|
290
291 codes = (@encoding == :ucs4) ? to_ucs4(range) : to_utf8(range)
292
293 #raise "Invalid encoding of range #{range}: #{codes.inspect}" unless
294 # is_valid? range, desc, codes
295
296 range_width = codes.map { |a| a.size }.max
297 range_width = RANGE_WIDTH if range_width < RANGE_WIDTH
298
299 desc_width = TOTAL_WIDTH - RANGE_WIDTH - 11
300 desc_width -= (range_width - RANGE_WIDTH) if range_width > RANGE_WIDTH
301
302 if desc.size > desc_width
303 desc = desc[0..desc_width - 4] + "..."
304 end
305
306 codes.each_with_index do |r, idx|
307 desc = "" unless idx.zero?
308 code = "%-#{range_width}s" % r
309 @output.puts " #{pipe} #{code} ##{desc}"
310 pipe = "|"
311 end
312 end
313 @output.puts " ;"
314 @output.puts ""
315end
316
317@output.puts <<EOF
318# The following Ragel file was autogenerated with #{$0}
319# from: #{@chart_url}
320#
321# It defines #{properties}.
322#
323# To use this, make sure that your alphtype is set to #{ALPHTYPES[@encoding]},
324# and that your input is in #{@encoding}.
325
326%%{
327 machine #{machine_name};
328
329EOF
330
331properties.each { |x| generate_machine( x, x ) }
332
333@output.puts <<EOF
334}%%
335EOF
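Judging from the option parsing above and the header of unicode_derived.rl below (machine UnicodeDerived, properties ID_Start and ID_Continue, the Unicode 9.0.0 DerivedCoreProperties.txt URL, and the default utf8 encoding), an invocation along these lines would regenerate that file; the exact command is not recorded in this diff, so treat it as an inference:

ruby unicode2ragel.rb -u http://www.unicode.org/Public/9.0.0/ucd/DerivedCoreProperties.txt -m UnicodeDerived -p ID_Start,ID_Continue -o unicode_derived.rl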
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/unicode_derived.rl b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/unicode_derived.rl
new file mode 100644
index 0000000..612ad62
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/unicode_derived.rl
@@ -0,0 +1,2135 @@
1# The following Ragel file was autogenerated with unicode2ragel.rb
2# from: http://www.unicode.org/Public/9.0.0/ucd/DerivedCoreProperties.txt
3#
4# It defines ["ID_Start", "ID_Continue"].
5#
6# To use this, make sure that your alphtype is set to byte,
7# and that your input is in utf8.
8
9%%{
10 machine UnicodeDerived;
11
12 ID_Start =
13 0x41..0x5A #L& [26] LATIN CAPITAL LETTER A..LATIN CAPI...
14 | 0x61..0x7A #L& [26] LATIN SMALL LETTER A..LATIN SMALL ...
15 | 0xC2 0xAA #Lo FEMININE ORDINAL INDICATOR
16 | 0xC2 0xB5 #L& MICRO SIGN
17 | 0xC2 0xBA #Lo MASCULINE ORDINAL INDICATOR
18 | 0xC3 0x80..0x96 #L& [23] LATIN CAPITAL LETTER A WITH GRAVE....
19 | 0xC3 0x98..0xB6 #L& [31] LATIN CAPITAL LETTER O WITH STROKE...
20 | 0xC3 0xB8..0xFF #L& [195] LATIN SMALL LETTER O WITH STROKE.....
21 | 0xC4..0xC5 0x00..0xFF #
22 | 0xC6 0x00..0xBA #
23 | 0xC6 0xBB #Lo LATIN LETTER TWO WITH STROKE
24 | 0xC6 0xBC..0xBF #L& [4] LATIN CAPITAL LETTER TONE FIVE..LA...
25 | 0xC7 0x80..0x83 #Lo [4] LATIN LETTER DENTAL CLICK..LATIN L...
26 | 0xC7 0x84..0xFF #L& [208] LATIN CAPITAL LETTER DZ WITH CARON...
27 | 0xC8..0xC9 0x00..0xFF #
28 | 0xCA 0x00..0x93 #
29 | 0xCA 0x94 #Lo LATIN LETTER GLOTTAL STOP
30 | 0xCA 0x95..0xAF #L& [27] LATIN LETTER PHARYNGEAL VOICED FRI...
31 | 0xCA 0xB0..0xFF #Lm [18] MODIFIER LETTER SMALL H..MODIFIER ...
32 | 0xCB 0x00..0x81 #
33 | 0xCB 0x86..0x91 #Lm [12] MODIFIER LETTER CIRCUMFLEX ACCENT....
34 | 0xCB 0xA0..0xA4 #Lm [5] MODIFIER LETTER SMALL GAMMA..MODIF...
35 | 0xCB 0xAC #Lm MODIFIER LETTER VOICING
36 | 0xCB 0xAE #Lm MODIFIER LETTER DOUBLE APOSTROPHE
37 | 0xCD 0xB0..0xB3 #L& [4] GREEK CAPITAL LETTER HETA..GREEK S...
38 | 0xCD 0xB4 #Lm GREEK NUMERAL SIGN
39 | 0xCD 0xB6..0xB7 #L& [2] GREEK CAPITAL LETTER PAMPHYLIAN DI...
40 | 0xCD 0xBA #Lm GREEK YPOGEGRAMMENI
41 | 0xCD 0xBB..0xBD #L& [3] GREEK SMALL REVERSED LUNATE SIGMA ...
42 | 0xCD 0xBF #L& GREEK CAPITAL LETTER YOT
43 | 0xCE 0x86 #L& GREEK CAPITAL LETTER ALPHA WITH TONOS
44 | 0xCE 0x88..0x8A #L& [3] GREEK CAPITAL LETTER EPSILON WITH ...
45 | 0xCE 0x8C #L& GREEK CAPITAL LETTER OMICRON WITH ...
46 | 0xCE 0x8E..0xA1 #L& [20] GREEK CAPITAL LETTER UPSILON WITH ...
47 | 0xCE 0xA3..0xFF #L& [83] GREEK CAPITAL LETTER SIGMA..GREEK ...
48 | 0xCF 0x00..0xB5 #
49 | 0xCF 0xB7..0xFF #L& [139] GREEK CAPITAL LETTER SHO..CYRILLIC...
50 | 0xD0..0xD1 0x00..0xFF #
51 | 0xD2 0x00..0x81 #
52 | 0xD2 0x8A..0xFF #L& [166] CYRILLIC CAPITAL LETTER SHORT I WI...
53 | 0xD3..0xD3 0x00..0xFF #
54 | 0xD4 0x00..0xAF #
55 | 0xD4 0xB1..0xFF #L& [38] ARMENIAN CAPITAL LETTER AYB..ARMEN...
56 | 0xD5 0x00..0x96 #
57 | 0xD5 0x99 #Lm ARMENIAN MODIFIER LETTER LEFT HALF...
58 | 0xD5 0xA1..0xFF #L& [39] ARMENIAN SMALL LETTER AYB..ARMENIA...
59 | 0xD6 0x00..0x87 #
60 | 0xD7 0x90..0xAA #Lo [27] HEBREW LETTER ALEF..HEBREW LETTER TAV
61 | 0xD7 0xB0..0xB2 #Lo [3] HEBREW LIGATURE YIDDISH DOUBLE VAV...
62 | 0xD8 0xA0..0xBF #Lo [32] ARABIC LETTER KASHMIRI YEH..ARABIC...
63 | 0xD9 0x80 #Lm ARABIC TATWEEL
64 | 0xD9 0x81..0x8A #Lo [10] ARABIC LETTER FEH..ARABIC LETTER YEH
65 | 0xD9 0xAE..0xAF #Lo [2] ARABIC LETTER DOTLESS BEH..ARABIC ...
66 | 0xD9 0xB1..0xFF #Lo [99] ARABIC LETTER ALEF WASLA..ARABIC L...
67 | 0xDA..0xDA 0x00..0xFF #
68 | 0xDB 0x00..0x93 #
69 | 0xDB 0x95 #Lo ARABIC LETTER AE
70 | 0xDB 0xA5..0xA6 #Lm [2] ARABIC SMALL WAW..ARABIC SMALL YEH
71 | 0xDB 0xAE..0xAF #Lo [2] ARABIC LETTER DAL WITH INVERTED V....
72 | 0xDB 0xBA..0xBC #Lo [3] ARABIC LETTER SHEEN WITH DOT BELOW...
73 | 0xDB 0xBF #Lo ARABIC LETTER HEH WITH INVERTED V
74 | 0xDC 0x90 #Lo SYRIAC LETTER ALAPH
75 | 0xDC 0x92..0xAF #Lo [30] SYRIAC LETTER BETH..SYRIAC LETTER ...
76 | 0xDD 0x8D..0xFF #Lo [89] SYRIAC LETTER SOGDIAN ZHAIN..THAAN...
77 | 0xDE 0x00..0xA5 #
78 | 0xDE 0xB1 #Lo THAANA LETTER NAA
79 | 0xDF 0x8A..0xAA #Lo [33] NKO LETTER A..NKO LETTER JONA RA
80 | 0xDF 0xB4..0xB5 #Lm [2] NKO HIGH TONE APOSTROPHE..NKO LOW ...
81 | 0xDF 0xBA #Lm NKO LAJANYALAN
82 | 0xE0 0xA0 0x80..0x95 #Lo [22] SAMARITAN LETTER ALAF..SAMARITAN L...
83 | 0xE0 0xA0 0x9A #Lm SAMARITAN MODIFIER LETTER EPENTHET...
84 | 0xE0 0xA0 0xA4 #Lm SAMARITAN MODIFIER LETTER SHORT A
85 | 0xE0 0xA0 0xA8 #Lm SAMARITAN MODIFIER LETTER I
86 | 0xE0 0xA1 0x80..0x98 #Lo [25] MANDAIC LETTER HALQA..MANDAIC LETT...
87 | 0xE0 0xA2 0xA0..0xB4 #Lo [21] ARABIC LETTER BEH WITH SMALL V BEL...
88 | 0xE0 0xA2 0xB6..0xBD #Lo [8] ARABIC LETTER BEH WITH SMALL MEEM ...
89 | 0xE0 0xA4 0x84..0xB9 #Lo [54] DEVANAGARI LETTER SHORT A..DEVANAG...
90 | 0xE0 0xA4 0xBD #Lo DEVANAGARI SIGN AVAGRAHA
91 | 0xE0 0xA5 0x90 #Lo DEVANAGARI OM
92 | 0xE0 0xA5 0x98..0xA1 #Lo [10] DEVANAGARI LETTER QA..DEVANAGARI L...
93 | 0xE0 0xA5 0xB1 #Lm DEVANAGARI SIGN HIGH SPACING DOT
94 | 0xE0 0xA5 0xB2..0xFF #Lo [15] DEVANAGARI LETTER CANDRA A..BENGAL...
95 | 0xE0 0xA6 0x00..0x80 #
96 | 0xE0 0xA6 0x85..0x8C #Lo [8] BENGALI LETTER A..BENGALI LETTER V...
97 | 0xE0 0xA6 0x8F..0x90 #Lo [2] BENGALI LETTER E..BENGALI LETTER AI
98 | 0xE0 0xA6 0x93..0xA8 #Lo [22] BENGALI LETTER O..BENGALI LETTER NA
99 | 0xE0 0xA6 0xAA..0xB0 #Lo [7] BENGALI LETTER PA..BENGALI LETTER RA
100 | 0xE0 0xA6 0xB2 #Lo BENGALI LETTER LA
101 | 0xE0 0xA6 0xB6..0xB9 #Lo [4] BENGALI LETTER SHA..BENGALI LETTER HA
102 | 0xE0 0xA6 0xBD #Lo BENGALI SIGN AVAGRAHA
103 | 0xE0 0xA7 0x8E #Lo BENGALI LETTER KHANDA TA
104 | 0xE0 0xA7 0x9C..0x9D #Lo [2] BENGALI LETTER RRA..BENGALI LETTER...
105 | 0xE0 0xA7 0x9F..0xA1 #Lo [3] BENGALI LETTER YYA..BENGALI LETTER...
106 | 0xE0 0xA7 0xB0..0xB1 #Lo [2] BENGALI LETTER RA WITH MIDDLE DIAG...
107 | 0xE0 0xA8 0x85..0x8A #Lo [6] GURMUKHI LETTER A..GURMUKHI LETTER UU
108 | 0xE0 0xA8 0x8F..0x90 #Lo [2] GURMUKHI LETTER EE..GURMUKHI LETTE...
109 | 0xE0 0xA8 0x93..0xA8 #Lo [22] GURMUKHI LETTER OO..GURMUKHI LETTE...
110 | 0xE0 0xA8 0xAA..0xB0 #Lo [7] GURMUKHI LETTER PA..GURMUKHI LETTE...
111 | 0xE0 0xA8 0xB2..0xB3 #Lo [2] GURMUKHI LETTER LA..GURMUKHI LETTE...
112 | 0xE0 0xA8 0xB5..0xB6 #Lo [2] GURMUKHI LETTER VA..GURMUKHI LETTE...
113 | 0xE0 0xA8 0xB8..0xB9 #Lo [2] GURMUKHI LETTER SA..GURMUKHI LETTE...
114 | 0xE0 0xA9 0x99..0x9C #Lo [4] GURMUKHI LETTER KHHA..GURMUKHI LET...
115 | 0xE0 0xA9 0x9E #Lo GURMUKHI LETTER FA
116 | 0xE0 0xA9 0xB2..0xB4 #Lo [3] GURMUKHI IRI..GURMUKHI EK ONKAR
117 | 0xE0 0xAA 0x85..0x8D #Lo [9] GUJARATI LETTER A..GUJARATI VOWEL ...
118 | 0xE0 0xAA 0x8F..0x91 #Lo [3] GUJARATI LETTER E..GUJARATI VOWEL ...
119 | 0xE0 0xAA 0x93..0xA8 #Lo [22] GUJARATI LETTER O..GUJARATI LETTER NA
120 | 0xE0 0xAA 0xAA..0xB0 #Lo [7] GUJARATI LETTER PA..GUJARATI LETTE...
121 | 0xE0 0xAA 0xB2..0xB3 #Lo [2] GUJARATI LETTER LA..GUJARATI LETTE...
122 | 0xE0 0xAA 0xB5..0xB9 #Lo [5] GUJARATI LETTER VA..GUJARATI LETTE...
123 | 0xE0 0xAA 0xBD #Lo GUJARATI SIGN AVAGRAHA
124 | 0xE0 0xAB 0x90 #Lo GUJARATI OM
125 | 0xE0 0xAB 0xA0..0xA1 #Lo [2] GUJARATI LETTER VOCALIC RR..GUJARA...
126 | 0xE0 0xAB 0xB9 #Lo GUJARATI LETTER ZHA
127 | 0xE0 0xAC 0x85..0x8C #Lo [8] ORIYA LETTER A..ORIYA LETTER VOCAL...
128 | 0xE0 0xAC 0x8F..0x90 #Lo [2] ORIYA LETTER E..ORIYA LETTER AI
129 | 0xE0 0xAC 0x93..0xA8 #Lo [22] ORIYA LETTER O..ORIYA LETTER NA
130 | 0xE0 0xAC 0xAA..0xB0 #Lo [7] ORIYA LETTER PA..ORIYA LETTER RA
131 | 0xE0 0xAC 0xB2..0xB3 #Lo [2] ORIYA LETTER LA..ORIYA LETTER LLA
132 | 0xE0 0xAC 0xB5..0xB9 #Lo [5] ORIYA LETTER VA..ORIYA LETTER HA
133 | 0xE0 0xAC 0xBD #Lo ORIYA SIGN AVAGRAHA
134 | 0xE0 0xAD 0x9C..0x9D #Lo [2] ORIYA LETTER RRA..ORIYA LETTER RHA
135 | 0xE0 0xAD 0x9F..0xA1 #Lo [3] ORIYA LETTER YYA..ORIYA LETTER VOC...
136 | 0xE0 0xAD 0xB1 #Lo ORIYA LETTER WA
137 | 0xE0 0xAE 0x83 #Lo TAMIL SIGN VISARGA
138 | 0xE0 0xAE 0x85..0x8A #Lo [6] TAMIL LETTER A..TAMIL LETTER UU
139 | 0xE0 0xAE 0x8E..0x90 #Lo [3] TAMIL LETTER E..TAMIL LETTER AI
140 | 0xE0 0xAE 0x92..0x95 #Lo [4] TAMIL LETTER O..TAMIL LETTER KA
141 | 0xE0 0xAE 0x99..0x9A #Lo [2] TAMIL LETTER NGA..TAMIL LETTER CA
142 | 0xE0 0xAE 0x9C #Lo TAMIL LETTER JA
143 | 0xE0 0xAE 0x9E..0x9F #Lo [2] TAMIL LETTER NYA..TAMIL LETTER TTA
144 | 0xE0 0xAE 0xA3..0xA4 #Lo [2] TAMIL LETTER NNA..TAMIL LETTER TA
145 | 0xE0 0xAE 0xA8..0xAA #Lo [3] TAMIL LETTER NA..TAMIL LETTER PA
146 | 0xE0 0xAE 0xAE..0xB9 #Lo [12] TAMIL LETTER MA..TAMIL LETTER HA
147 | 0xE0 0xAF 0x90 #Lo TAMIL OM
148 | 0xE0 0xB0 0x85..0x8C #Lo [8] TELUGU LETTER A..TELUGU LETTER VOC...
149 | 0xE0 0xB0 0x8E..0x90 #Lo [3] TELUGU LETTER E..TELUGU LETTER AI
150 | 0xE0 0xB0 0x92..0xA8 #Lo [23] TELUGU LETTER O..TELUGU LETTER NA
151 | 0xE0 0xB0 0xAA..0xB9 #Lo [16] TELUGU LETTER PA..TELUGU LETTER HA
152 | 0xE0 0xB0 0xBD #Lo TELUGU SIGN AVAGRAHA
153 | 0xE0 0xB1 0x98..0x9A #Lo [3] TELUGU LETTER TSA..TELUGU LETTER RRRA
154 | 0xE0 0xB1 0xA0..0xA1 #Lo [2] TELUGU LETTER VOCALIC RR..TELUGU L...
155 | 0xE0 0xB2 0x80 #Lo KANNADA SIGN SPACING CANDRABINDU
156 | 0xE0 0xB2 0x85..0x8C #Lo [8] KANNADA LETTER A..KANNADA LETTER V...
157 | 0xE0 0xB2 0x8E..0x90 #Lo [3] KANNADA LETTER E..KANNADA LETTER AI
158 | 0xE0 0xB2 0x92..0xA8 #Lo [23] KANNADA LETTER O..KANNADA LETTER NA
159 | 0xE0 0xB2 0xAA..0xB3 #Lo [10] KANNADA LETTER PA..KANNADA LETTER LLA
160 | 0xE0 0xB2 0xB5..0xB9 #Lo [5] KANNADA LETTER VA..KANNADA LETTER HA
161 | 0xE0 0xB2 0xBD #Lo KANNADA SIGN AVAGRAHA
162 | 0xE0 0xB3 0x9E #Lo KANNADA LETTER FA
163 | 0xE0 0xB3 0xA0..0xA1 #Lo [2] KANNADA LETTER VOCALIC RR..KANNADA...
164 | 0xE0 0xB3 0xB1..0xB2 #Lo [2] KANNADA SIGN JIHVAMULIYA..KANNADA ...
165 | 0xE0 0xB4 0x85..0x8C #Lo [8] MALAYALAM LETTER A..MALAYALAM LETT...
166 | 0xE0 0xB4 0x8E..0x90 #Lo [3] MALAYALAM LETTER E..MALAYALAM LETT...
167 | 0xE0 0xB4 0x92..0xBA #Lo [41] MALAYALAM LETTER O..MALAYALAM LETT...
168 | 0xE0 0xB4 0xBD #Lo MALAYALAM SIGN AVAGRAHA
169 | 0xE0 0xB5 0x8E #Lo MALAYALAM LETTER DOT REPH
170 | 0xE0 0xB5 0x94..0x96 #Lo [3] MALAYALAM LETTER CHILLU M..MALAYAL...
171 | 0xE0 0xB5 0x9F..0xA1 #Lo [3] MALAYALAM LETTER ARCHAIC II..MALAY...
172 | 0xE0 0xB5 0xBA..0xBF #Lo [6] MALAYALAM LETTER CHILLU NN..MALAYA...
173 | 0xE0 0xB6 0x85..0x96 #Lo [18] SINHALA LETTER AYANNA..SINHALA LET...
174 | 0xE0 0xB6 0x9A..0xB1 #Lo [24] SINHALA LETTER ALPAPRAANA KAYANNA....
175 | 0xE0 0xB6 0xB3..0xBB #Lo [9] SINHALA LETTER SANYAKA DAYANNA..SI...
176 | 0xE0 0xB6 0xBD #Lo SINHALA LETTER DANTAJA LAYANNA
177 | 0xE0 0xB7 0x80..0x86 #Lo [7] SINHALA LETTER VAYANNA..SINHALA LE...
178 | 0xE0 0xB8 0x81..0xB0 #Lo [48] THAI CHARACTER KO KAI..THAI CHARAC...
179 | 0xE0 0xB8 0xB2..0xB3 #Lo [2] THAI CHARACTER SARA AA..THAI CHARA...
180 | 0xE0 0xB9 0x80..0x85 #Lo [6] THAI CHARACTER SARA E..THAI CHARAC...
181 | 0xE0 0xB9 0x86 #Lm THAI CHARACTER MAIYAMOK
182 | 0xE0 0xBA 0x81..0x82 #Lo [2] LAO LETTER KO..LAO LETTER KHO SUNG
183 | 0xE0 0xBA 0x84 #Lo LAO LETTER KHO TAM
184 | 0xE0 0xBA 0x87..0x88 #Lo [2] LAO LETTER NGO..LAO LETTER CO
185 | 0xE0 0xBA 0x8A #Lo LAO LETTER SO TAM
186 | 0xE0 0xBA 0x8D #Lo LAO LETTER NYO
187 | 0xE0 0xBA 0x94..0x97 #Lo [4] LAO LETTER DO..LAO LETTER THO TAM
188 | 0xE0 0xBA 0x99..0x9F #Lo [7] LAO LETTER NO..LAO LETTER FO SUNG
189 | 0xE0 0xBA 0xA1..0xA3 #Lo [3] LAO LETTER MO..LAO LETTER LO LING
190 | 0xE0 0xBA 0xA5 #Lo LAO LETTER LO LOOT
191 | 0xE0 0xBA 0xA7 #Lo LAO LETTER WO
192 | 0xE0 0xBA 0xAA..0xAB #Lo [2] LAO LETTER SO SUNG..LAO LETTER HO ...
193 | 0xE0 0xBA 0xAD..0xB0 #Lo [4] LAO LETTER O..LAO VOWEL SIGN A
194 | 0xE0 0xBA 0xB2..0xB3 #Lo [2] LAO VOWEL SIGN AA..LAO VOWEL SIGN AM
195 | 0xE0 0xBA 0xBD #Lo LAO SEMIVOWEL SIGN NYO
196 | 0xE0 0xBB 0x80..0x84 #Lo [5] LAO VOWEL SIGN E..LAO VOWEL SIGN AI
197 | 0xE0 0xBB 0x86 #Lm LAO KO LA
198 | 0xE0 0xBB 0x9C..0x9F #Lo [4] LAO HO NO..LAO LETTER KHMU NYO
199 | 0xE0 0xBC 0x80 #Lo TIBETAN SYLLABLE OM
200 | 0xE0 0xBD 0x80..0x87 #Lo [8] TIBETAN LETTER KA..TIBETAN LETTER JA
201 | 0xE0 0xBD 0x89..0xAC #Lo [36] TIBETAN LETTER NYA..TIBETAN LETTER...
202 | 0xE0 0xBE 0x88..0x8C #Lo [5] TIBETAN SIGN LCE TSA CAN..TIBETAN ...
203 | 0xE1 0x80 0x80..0xAA #Lo [43] MYANMAR LETTER KA..MYANMAR LETTER AU
204 | 0xE1 0x80 0xBF #Lo MYANMAR LETTER GREAT SA
205 | 0xE1 0x81 0x90..0x95 #Lo [6] MYANMAR LETTER SHA..MYANMAR LETTER...
206 | 0xE1 0x81 0x9A..0x9D #Lo [4] MYANMAR LETTER MON NGA..MYANMAR LE...
207 | 0xE1 0x81 0xA1 #Lo MYANMAR LETTER SGAW KAREN SHA
208 | 0xE1 0x81 0xA5..0xA6 #Lo [2] MYANMAR LETTER WESTERN PWO KAREN T...
209 | 0xE1 0x81 0xAE..0xB0 #Lo [3] MYANMAR LETTER EASTERN PWO KAREN N...
210 | 0xE1 0x81 0xB5..0xFF #Lo [13] MYANMAR LETTER SHAN KA..MYANMAR LE...
211 | 0xE1 0x82 0x00..0x81 #
212 | 0xE1 0x82 0x8E #Lo MYANMAR LETTER RUMAI PALAUNG FA
213 | 0xE1 0x82 0xA0..0xFF #L& [38] GEORGIAN CAPITAL LETTER AN..GEORGI...
214 | 0xE1 0x83 0x00..0x85 #
215 | 0xE1 0x83 0x87 #L& GEORGIAN CAPITAL LETTER YN
216 | 0xE1 0x83 0x8D #L& GEORGIAN CAPITAL LETTER AEN
217 | 0xE1 0x83 0x90..0xBA #Lo [43] GEORGIAN LETTER AN..GEORGIAN LETTE...
218 | 0xE1 0x83 0xBC #Lm MODIFIER LETTER GEORGIAN NAR
219 | 0xE1 0x83 0xBD..0xFF #Lo [332] GEORGIAN LETTER AEN..ETHIOPIC ...
220 | 0xE1 0x84..0x88 0x00..0xFF #
221 | 0xE1 0x89 0x00..0x88 #
222 | 0xE1 0x89 0x8A..0x8D #Lo [4] ETHIOPIC SYLLABLE QWI..ETHIOPIC SY...
223 | 0xE1 0x89 0x90..0x96 #Lo [7] ETHIOPIC SYLLABLE QHA..ETHIOPIC SY...
224 | 0xE1 0x89 0x98 #Lo ETHIOPIC SYLLABLE QHWA
225 | 0xE1 0x89 0x9A..0x9D #Lo [4] ETHIOPIC SYLLABLE QHWI..ETHIOPIC S...
226 | 0xE1 0x89 0xA0..0xFF #Lo [41] ETHIOPIC SYLLABLE BA..ETHIOPIC SYL...
227 | 0xE1 0x8A 0x00..0x88 #
228 | 0xE1 0x8A 0x8A..0x8D #Lo [4] ETHIOPIC SYLLABLE XWI..ETHIOPIC SY...
229 | 0xE1 0x8A 0x90..0xB0 #Lo [33] ETHIOPIC SYLLABLE NA..ETHIOPIC SYL...
230 | 0xE1 0x8A 0xB2..0xB5 #Lo [4] ETHIOPIC SYLLABLE KWI..ETHIOPIC SY...
231 | 0xE1 0x8A 0xB8..0xBE #Lo [7] ETHIOPIC SYLLABLE KXA..ETHIOPIC SY...
232 | 0xE1 0x8B 0x80 #Lo ETHIOPIC SYLLABLE KXWA
233 | 0xE1 0x8B 0x82..0x85 #Lo [4] ETHIOPIC SYLLABLE KXWI..ETHIOPIC S...
234 | 0xE1 0x8B 0x88..0x96 #Lo [15] ETHIOPIC SYLLABLE WA..ETHIOPIC SYL...
235 | 0xE1 0x8B 0x98..0xFF #Lo [57] ETHIOPIC SYLLABLE ZA..ETHIOPIC SYL...
236 | 0xE1 0x8C 0x00..0x90 #
237 | 0xE1 0x8C 0x92..0x95 #Lo [4] ETHIOPIC SYLLABLE GWI..ETHIOPIC SY...
238 | 0xE1 0x8C 0x98..0xFF #Lo [67] ETHIOPIC SYLLABLE GGA..ETHIOPIC SY...
239 | 0xE1 0x8D 0x00..0x9A #
240 | 0xE1 0x8E 0x80..0x8F #Lo [16] ETHIOPIC SYLLABLE SEBATBEIT MWA..E...
241 | 0xE1 0x8E 0xA0..0xFF #L& [86] CHEROKEE LETTER A..CHEROKEE LETTER MV
242 | 0xE1 0x8F 0x00..0xB5 #
243 | 0xE1 0x8F 0xB8..0xBD #L& [6] CHEROKEE SMALL LETTER YE..CHEROKEE...
244 | 0xE1 0x90 0x81..0xFF #Lo [620] CANADIAN SYLLABICS E..CANADIAN...
245 | 0xE1 0x91..0x98 0x00..0xFF #
246 | 0xE1 0x99 0x00..0xAC #
247 | 0xE1 0x99 0xAF..0xBF #Lo [17] CANADIAN SYLLABICS QAI..CANADIAN S...
248 | 0xE1 0x9A 0x81..0x9A #Lo [26] OGHAM LETTER BEITH..OGHAM LETTER P...
249 | 0xE1 0x9A 0xA0..0xFF #Lo [75] RUNIC LETTER FEHU FEOH FE F..RUNIC...
250 | 0xE1 0x9B 0x00..0xAA #
251 | 0xE1 0x9B 0xAE..0xB0 #Nl [3] RUNIC ARLAUG SYMBOL..RUNIC BELGTHO...
252 | 0xE1 0x9B 0xB1..0xB8 #Lo [8] RUNIC LETTER K..RUNIC LETTER FRANK...
253 | 0xE1 0x9C 0x80..0x8C #Lo [13] TAGALOG LETTER A..TAGALOG LETTER YA
254 | 0xE1 0x9C 0x8E..0x91 #Lo [4] TAGALOG LETTER LA..TAGALOG LETTER HA
255 | 0xE1 0x9C 0xA0..0xB1 #Lo [18] HANUNOO LETTER A..HANUNOO LETTER HA
256 | 0xE1 0x9D 0x80..0x91 #Lo [18] BUHID LETTER A..BUHID LETTER HA
257 | 0xE1 0x9D 0xA0..0xAC #Lo [13] TAGBANWA LETTER A..TAGBANWA LETTER YA
258 | 0xE1 0x9D 0xAE..0xB0 #Lo [3] TAGBANWA LETTER LA..TAGBANWA LETTE...
259 | 0xE1 0x9E 0x80..0xB3 #Lo [52] KHMER LETTER KA..KHMER INDEPENDENT...
260 | 0xE1 0x9F 0x97 #Lm KHMER SIGN LEK TOO
261 | 0xE1 0x9F 0x9C #Lo KHMER SIGN AVAKRAHASANYA
262 | 0xE1 0xA0 0xA0..0xFF #Lo [35] MONGOLIAN LETTER A..MONGOLIAN LETT...
263 | 0xE1 0xA1 0x00..0x82 #
264 | 0xE1 0xA1 0x83 #Lm MONGOLIAN LETTER TODO LONG VOWEL SIGN
265 | 0xE1 0xA1 0x84..0xB7 #Lo [52] MONGOLIAN LETTER TODO E..MONGOLIAN...
266 | 0xE1 0xA2 0x80..0x84 #Lo [5] MONGOLIAN LETTER ALI GALI ANUSVARA...
267 | 0xE1 0xA2 0x85..0x86 #Mn [2] MONGOLIAN LETTER ALI GALI BALUDA.....
268 | 0xE1 0xA2 0x87..0xA8 #Lo [34] MONGOLIAN LETTER ALI GALI A..MONGO...
269 | 0xE1 0xA2 0xAA #Lo MONGOLIAN LETTER MANCHU ALI GALI LHA
270 | 0xE1 0xA2 0xB0..0xFF #Lo [70] CANADIAN SYLLABICS OY..CANADIAN SY...
271 | 0xE1 0xA3 0x00..0xB5 #
272 | 0xE1 0xA4 0x80..0x9E #Lo [31] LIMBU VOWEL-CARRIER LETTER..LIMBU ...
273 | 0xE1 0xA5 0x90..0xAD #Lo [30] TAI LE LETTER KA..TAI LE LETTER AI
274 | 0xE1 0xA5 0xB0..0xB4 #Lo [5] TAI LE LETTER TONE-2..TAI LE LETTE...
275 | 0xE1 0xA6 0x80..0xAB #Lo [44] NEW TAI LUE LETTER HIGH QA..NEW TA...
276 | 0xE1 0xA6 0xB0..0xFF #Lo [26] NEW TAI LUE VOWEL SIGN VOWEL SHORT...
277 | 0xE1 0xA7 0x00..0x89 #
278 | 0xE1 0xA8 0x80..0x96 #Lo [23] BUGINESE LETTER KA..BUGINESE LETTE...
279 | 0xE1 0xA8 0xA0..0xFF #Lo [53] TAI THAM LETTER HIGH KA..TAI THAM ...
280 | 0xE1 0xA9 0x00..0x94 #
281 | 0xE1 0xAA 0xA7 #Lm TAI THAM SIGN MAI YAMOK
282 | 0xE1 0xAC 0x85..0xB3 #Lo [47] BALINESE LETTER AKARA..BALINESE LE...
283 | 0xE1 0xAD 0x85..0x8B #Lo [7] BALINESE LETTER KAF SASAK..BALINES...
284 | 0xE1 0xAE 0x83..0xA0 #Lo [30] SUNDANESE LETTER A..SUNDANESE LETT...
285 | 0xE1 0xAE 0xAE..0xAF #Lo [2] SUNDANESE LETTER KHA..SUNDANESE LE...
286 | 0xE1 0xAE 0xBA..0xFF #Lo [44] SUNDANESE AVAGRAHA..BATAK LETTER U
287 | 0xE1 0xAF 0x00..0xA5 #
288 | 0xE1 0xB0 0x80..0xA3 #Lo [36] LEPCHA LETTER KA..LEPCHA LETTER A
289 | 0xE1 0xB1 0x8D..0x8F #Lo [3] LEPCHA LETTER TTA..LEPCHA LETTER DDA
290 | 0xE1 0xB1 0x9A..0xB7 #Lo [30] OL CHIKI LETTER LA..OL CHIKI LETTE...
291 | 0xE1 0xB1 0xB8..0xBD #Lm [6] OL CHIKI MU TTUDDAG..OL CHIKI AHAD
292 | 0xE1 0xB2 0x80..0x88 #L& [9] CYRILLIC SMALL LETTER ROUNDED VE.....
293 | 0xE1 0xB3 0xA9..0xAC #Lo [4] VEDIC SIGN ANUSVARA ANTARGOMUKHA.....
294 | 0xE1 0xB3 0xAE..0xB1 #Lo [4] VEDIC SIGN HEXIFORM LONG ANUSVARA....
295 | 0xE1 0xB3 0xB5..0xB6 #Lo [2] VEDIC SIGN JIHVAMULIYA..VEDIC SIGN...
296 | 0xE1 0xB4 0x80..0xAB #L& [44] LATIN LETTER SMALL CAPITAL A..CYRI...
297 | 0xE1 0xB4 0xAC..0xFF #Lm [63] MODIFIER LETTER CAPITAL A..GREEK S...
298 | 0xE1 0xB5 0x00..0xAA #
299 | 0xE1 0xB5 0xAB..0xB7 #L& [13] LATIN SMALL LETTER UE..LATIN SMALL...
300 | 0xE1 0xB5 0xB8 #Lm MODIFIER LETTER CYRILLIC EN
301 | 0xE1 0xB5 0xB9..0xFF #L& [34] LATIN SMALL LETTER INSULAR G..LATI...
302 | 0xE1 0xB6 0x00..0x9A #
303 | 0xE1 0xB6 0x9B..0xBF #Lm [37] MODIFIER LETTER SMALL TURNED ALPHA...
304 | 0xE1 0xB8 0x80..0xFF #L& [278] LATIN CAPITAL LETTER A WITH RI...
305 | 0xE1 0xB9..0xBB 0x00..0xFF #
306 | 0xE1 0xBC 0x00..0x95 #
307 | 0xE1 0xBC 0x98..0x9D #L& [6] GREEK CAPITAL LETTER EPSILON WITH ...
308 | 0xE1 0xBC 0xA0..0xFF #L& [38] GREEK SMALL LETTER ETA WITH PSILI....
309 | 0xE1 0xBD 0x00..0x85 #
310 | 0xE1 0xBD 0x88..0x8D #L& [6] GREEK CAPITAL LETTER OMICRON WITH ...
311 | 0xE1 0xBD 0x90..0x97 #L& [8] GREEK SMALL LETTER UPSILON WITH PS...
312 | 0xE1 0xBD 0x99 #L& GREEK CAPITAL LETTER UPSILON WITH ...
313 | 0xE1 0xBD 0x9B #L& GREEK CAPITAL LETTER UPSILON WITH ...
314 | 0xE1 0xBD 0x9D #L& GREEK CAPITAL LETTER UPSILON WITH ...
315 | 0xE1 0xBD 0x9F..0xBD #L& [31] GREEK CAPITAL LETTER UPSILON WITH ...
316 | 0xE1 0xBE 0x80..0xB4 #L& [53] GREEK SMALL LETTER ALPHA WITH PSIL...
317 | 0xE1 0xBE 0xB6..0xBC #L& [7] GREEK SMALL LETTER ALPHA WITH PERI...
318 | 0xE1 0xBE 0xBE #L& GREEK PROSGEGRAMMENI
319 | 0xE1 0xBF 0x82..0x84 #L& [3] GREEK SMALL LETTER ETA WITH VARIA ...
320 | 0xE1 0xBF 0x86..0x8C #L& [7] GREEK SMALL LETTER ETA WITH PERISP...
321 | 0xE1 0xBF 0x90..0x93 #L& [4] GREEK SMALL LETTER IOTA WITH VRACH...
322 | 0xE1 0xBF 0x96..0x9B #L& [6] GREEK SMALL LETTER IOTA WITH PERIS...
323 | 0xE1 0xBF 0xA0..0xAC #L& [13] GREEK SMALL LETTER UPSILON WITH VR...
324 | 0xE1 0xBF 0xB2..0xB4 #L& [3] GREEK SMALL LETTER OMEGA WITH VARI...
325 | 0xE1 0xBF 0xB6..0xBC #L& [7] GREEK SMALL LETTER OMEGA WITH PERI...
326 | 0xE2 0x81 0xB1 #Lm SUPERSCRIPT LATIN SMALL LETTER I
327 | 0xE2 0x81 0xBF #Lm SUPERSCRIPT LATIN SMALL LETTER N
328 | 0xE2 0x82 0x90..0x9C #Lm [13] LATIN SUBSCRIPT SMALL LETTER A..LA...
329 | 0xE2 0x84 0x82 #L& DOUBLE-STRUCK CAPITAL C
330 | 0xE2 0x84 0x87 #L& EULER CONSTANT
331 | 0xE2 0x84 0x8A..0x93 #L& [10] SCRIPT SMALL G..SCRIPT SMALL L
332 | 0xE2 0x84 0x95 #L& DOUBLE-STRUCK CAPITAL N
333 | 0xE2 0x84 0x98 #Sm SCRIPT CAPITAL P
334 | 0xE2 0x84 0x99..0x9D #L& [5] DOUBLE-STRUCK CAPITAL P..DOUBLE-ST...
335 | 0xE2 0x84 0xA4 #L& DOUBLE-STRUCK CAPITAL Z
336 | 0xE2 0x84 0xA6 #L& OHM SIGN
337 | 0xE2 0x84 0xA8 #L& BLACK-LETTER CAPITAL Z
338 | 0xE2 0x84 0xAA..0xAD #L& [4] KELVIN SIGN..BLACK-LETTER CAPITAL C
339 | 0xE2 0x84 0xAE #So ESTIMATED SYMBOL
340 | 0xE2 0x84 0xAF..0xB4 #L& [6] SCRIPT SMALL E..SCRIPT SMALL O
341 | 0xE2 0x84 0xB5..0xB8 #Lo [4] ALEF SYMBOL..DALET SYMBOL
342 | 0xE2 0x84 0xB9 #L& INFORMATION SOURCE
343 | 0xE2 0x84 0xBC..0xBF #L& [4] DOUBLE-STRUCK SMALL PI..DOUBLE-STR...
344 | 0xE2 0x85 0x85..0x89 #L& [5] DOUBLE-STRUCK ITALIC CAPITAL D..DO...
345 | 0xE2 0x85 0x8E #L& TURNED SMALL F
346 | 0xE2 0x85 0xA0..0xFF #Nl [35] ROMAN NUMERAL ONE..ROMAN NUMERAL T...
347 | 0xE2 0x86 0x00..0x82 #
348 | 0xE2 0x86 0x83..0x84 #L& [2] ROMAN NUMERAL REVERSED ONE HUNDRED...
349 | 0xE2 0x86 0x85..0x88 #Nl [4] ROMAN NUMERAL SIX LATE FORM..ROMAN...
350 | 0xE2 0xB0 0x80..0xAE #L& [47] GLAGOLITIC CAPITAL LETTER AZU..GLA...
351 | 0xE2 0xB0 0xB0..0xFF #L& [47] GLAGOLITIC SMALL LETTER AZU..GLAGO...
352 | 0xE2 0xB1 0x00..0x9E #
353 | 0xE2 0xB1 0xA0..0xBB #L& [28] LATIN CAPITAL LETTER L WITH DOUBLE...
354 | 0xE2 0xB1 0xBC..0xBD #Lm [2] LATIN SUBSCRIPT SMALL LETTER J..MO...
355 | 0xE2 0xB1 0xBE..0xFF #L& [103] LATIN CAPITAL LETTER S WITH SW...
356 | 0xE2 0xB2..0xB2 0x00..0xFF #
357 | 0xE2 0xB3 0x00..0xA4 #
358 | 0xE2 0xB3 0xAB..0xAE #L& [4] COPTIC CAPITAL LETTER CRYPTOGRAMMI...
359 | 0xE2 0xB3 0xB2..0xB3 #L& [2] COPTIC CAPITAL LETTER BOHAIRIC KHE...
360 | 0xE2 0xB4 0x80..0xA5 #L& [38] GEORGIAN SMALL LETTER AN..GEORGIAN...
361 | 0xE2 0xB4 0xA7 #L& GEORGIAN SMALL LETTER YN
362 | 0xE2 0xB4 0xAD #L& GEORGIAN SMALL LETTER AEN
363 | 0xE2 0xB4 0xB0..0xFF #Lo [56] TIFINAGH LETTER YA..TIFINAGH LETTE...
364 | 0xE2 0xB5 0x00..0xA7 #
365 | 0xE2 0xB5 0xAF #Lm TIFINAGH MODIFIER LETTER LABIALIZA...
366 | 0xE2 0xB6 0x80..0x96 #Lo [23] ETHIOPIC SYLLABLE LOA..ETHIOPIC SY...
367 | 0xE2 0xB6 0xA0..0xA6 #Lo [7] ETHIOPIC SYLLABLE SSA..ETHIOPIC SY...
368 | 0xE2 0xB6 0xA8..0xAE #Lo [7] ETHIOPIC SYLLABLE CCA..ETHIOPIC SY...
369 | 0xE2 0xB6 0xB0..0xB6 #Lo [7] ETHIOPIC SYLLABLE ZZA..ETHIOPIC SY...
370 | 0xE2 0xB6 0xB8..0xBE #Lo [7] ETHIOPIC SYLLABLE CCHA..ETHIOPIC S...
371 | 0xE2 0xB7 0x80..0x86 #Lo [7] ETHIOPIC SYLLABLE QYA..ETHIOPIC SY...
372 | 0xE2 0xB7 0x88..0x8E #Lo [7] ETHIOPIC SYLLABLE KYA..ETHIOPIC SY...
373 | 0xE2 0xB7 0x90..0x96 #Lo [7] ETHIOPIC SYLLABLE XYA..ETHIOPIC SY...
374 | 0xE2 0xB7 0x98..0x9E #Lo [7] ETHIOPIC SYLLABLE GYA..ETHIOPIC SY...
375 | 0xE3 0x80 0x85 #Lm IDEOGRAPHIC ITERATION MARK
376 | 0xE3 0x80 0x86 #Lo IDEOGRAPHIC CLOSING MARK
377 | 0xE3 0x80 0x87 #Nl IDEOGRAPHIC NUMBER ZERO
378 | 0xE3 0x80 0xA1..0xA9 #Nl [9] HANGZHOU NUMERAL ONE..HANGZHOU NUM...
379 | 0xE3 0x80 0xB1..0xB5 #Lm [5] VERTICAL KANA REPEAT MARK..VERTICA...
380 | 0xE3 0x80 0xB8..0xBA #Nl [3] HANGZHOU NUMERAL TEN..HANGZHOU NUM...
381 | 0xE3 0x80 0xBB #Lm VERTICAL IDEOGRAPHIC ITERATION MARK
382 | 0xE3 0x80 0xBC #Lo MASU MARK
383 | 0xE3 0x81 0x81..0xFF #Lo [86] HIRAGANA LETTER SMALL A..HIRAGANA ...
384 | 0xE3 0x82 0x00..0x96 #
385 | 0xE3 0x82 0x9B..0x9C #Sk [2] KATAKANA-HIRAGANA VOICED SOUND MAR...
386 | 0xE3 0x82 0x9D..0x9E #Lm [2] HIRAGANA ITERATION MARK..HIRAGANA ...
387 | 0xE3 0x82 0x9F #Lo HIRAGANA DIGRAPH YORI
388 | 0xE3 0x82 0xA1..0xFF #Lo [90] KATAKANA LETTER SMALL A..KATAKANA ...
389 | 0xE3 0x83 0x00..0xBA #
390 | 0xE3 0x83 0xBC..0xBE #Lm [3] KATAKANA-HIRAGANA PROLONGED SOUND ...
391 | 0xE3 0x83 0xBF #Lo KATAKANA DIGRAPH KOTO
392 | 0xE3 0x84 0x85..0xAD #Lo [41] BOPOMOFO LETTER B..BOPOMOFO LETTER IH
393 | 0xE3 0x84 0xB1..0xFF #Lo [94] HANGUL LETTER KIYEOK..HANGUL L...
394 | 0xE3 0x85..0x85 0x00..0xFF #
395 | 0xE3 0x86 0x00..0x8E #
396 | 0xE3 0x86 0xA0..0xBA #Lo [27] BOPOMOFO LETTER BU..BOPOMOFO LETTE...
397 | 0xE3 0x87 0xB0..0xBF #Lo [16] KATAKANA LETTER SMALL KU..KATAKANA...
398 | 0xE3 0x90 0x80..0xFF #Lo [6582] CJK UNIFIED IDEOGRAPH-3400..C...
399 | 0xE3 0x91..0xFF 0x00..0xFF #
400 | 0xE4 0x00 0x00..0xFF #
401 | 0xE4 0x01..0xB5 0x00..0xFF #
402 | 0xE4 0xB6 0x00..0xB5 #
403 | 0xE4 0xB8 0x80..0xFF #Lo [20950] CJK UNIFIED IDEOGRAPH-...
404 | 0xE4 0xB9..0xFF 0x00..0xFF #
405 | 0xE5..0xE8 0x00..0xFF 0x00..0xFF #
406 | 0xE9 0x00 0x00..0xFF #
407 | 0xE9 0x01..0xBE 0x00..0xFF #
408 | 0xE9 0xBF 0x00..0x95 #
409 | 0xEA 0x80 0x80..0x94 #Lo [21] YI SYLLABLE IT..YI SYLLABLE E
410 | 0xEA 0x80 0x95 #Lm YI SYLLABLE WU
411 | 0xEA 0x80 0x96..0xFF #Lo [1143] YI SYLLABLE BIT..YI SYLLABLE YYR
412 | 0xEA 0x81..0x91 0x00..0xFF #
413 | 0xEA 0x92 0x00..0x8C #
414 | 0xEA 0x93 0x90..0xB7 #Lo [40] LISU LETTER BA..LISU LETTER OE
415 | 0xEA 0x93 0xB8..0xBD #Lm [6] LISU LETTER TONE MYA TI..LISU LETT...
416 | 0xEA 0x94 0x80..0xFF #Lo [268] VAI SYLLABLE EE..VAI SYLLABLE NG
417 | 0xEA 0x95..0x97 0x00..0xFF #
418 | 0xEA 0x98 0x00..0x8B #
419 | 0xEA 0x98 0x8C #Lm VAI SYLLABLE LENGTHENER
420 | 0xEA 0x98 0x90..0x9F #Lo [16] VAI SYLLABLE NDOLE FA..VAI SYMBOL ...
421 | 0xEA 0x98 0xAA..0xAB #Lo [2] VAI SYLLABLE NDOLE MA..VAI SYLLABL...
422 | 0xEA 0x99 0x80..0xAD #L& [46] CYRILLIC CAPITAL LETTER ZEMLYA..CY...
423 | 0xEA 0x99 0xAE #Lo CYRILLIC LETTER MULTIOCULAR O
424 | 0xEA 0x99 0xBF #Lm CYRILLIC PAYEROK
425 | 0xEA 0x9A 0x80..0x9B #L& [28] CYRILLIC CAPITAL LETTER DWE..CYRIL...
426 | 0xEA 0x9A 0x9C..0x9D #Lm [2] MODIFIER LETTER CYRILLIC HARD SIGN...
427 | 0xEA 0x9A 0xA0..0xFF #Lo [70] BAMUM LETTER A..BAMUM LETTER KI
428 | 0xEA 0x9B 0x00..0xA5 #
429 | 0xEA 0x9B 0xA6..0xAF #Nl [10] BAMUM LETTER MO..BAMUM LETTER KOGHOM
430 | 0xEA 0x9C 0x97..0x9F #Lm [9] MODIFIER LETTER DOT VERTICAL BAR.....
431 | 0xEA 0x9C 0xA2..0xFF #L& [78] LATIN CAPITAL LETTER EGYPTOLOGICAL...
432 | 0xEA 0x9D 0x00..0xAF #
433 | 0xEA 0x9D 0xB0 #Lm MODIFIER LETTER US
434 | 0xEA 0x9D 0xB1..0xFF #L& [23] LATIN SMALL LETTER DUM..LATIN SMAL...
435 | 0xEA 0x9E 0x00..0x87 #
436 | 0xEA 0x9E 0x88 #Lm MODIFIER LETTER LOW CIRCUMFLEX ACCENT
437 | 0xEA 0x9E 0x8B..0x8E #L& [4] LATIN CAPITAL LETTER SALTILLO..LAT...
438 | 0xEA 0x9E 0x8F #Lo LATIN LETTER SINOLOGICAL DOT
439 | 0xEA 0x9E 0x90..0xAE #L& [31] LATIN CAPITAL LETTER N WITH DESCEN...
440 | 0xEA 0x9E 0xB0..0xB7 #L& [8] LATIN CAPITAL LETTER TURNED K..LAT...
441 | 0xEA 0x9F 0xB7 #Lo LATIN EPIGRAPHIC LETTER SIDEWAYS I
442 | 0xEA 0x9F 0xB8..0xB9 #Lm [2] MODIFIER LETTER CAPITAL H WITH STR...
443 | 0xEA 0x9F 0xBA #L& LATIN LETTER SMALL CAPITAL TURNED M
444 | 0xEA 0x9F 0xBB..0xFF #Lo [7] LATIN EPIGRAPHIC LETTER REVERSED F...
445 | 0xEA 0xA0 0x00..0x81 #
446 | 0xEA 0xA0 0x83..0x85 #Lo [3] SYLOTI NAGRI LETTER U..SYLOTI NAGR...
447 | 0xEA 0xA0 0x87..0x8A #Lo [4] SYLOTI NAGRI LETTER KO..SYLOTI NAG...
448 | 0xEA 0xA0 0x8C..0xA2 #Lo [23] SYLOTI NAGRI LETTER CO..SYLOTI NAG...
449 | 0xEA 0xA1 0x80..0xB3 #Lo [52] PHAGS-PA LETTER KA..PHAGS-PA LETTE...
450 | 0xEA 0xA2 0x82..0xB3 #Lo [50] SAURASHTRA LETTER A..SAURASHTRA LE...
451 | 0xEA 0xA3 0xB2..0xB7 #Lo [6] DEVANAGARI SIGN SPACING CANDRABIND...
452 | 0xEA 0xA3 0xBB #Lo DEVANAGARI HEADSTROKE
453 | 0xEA 0xA3 0xBD #Lo DEVANAGARI JAIN OM
454 | 0xEA 0xA4 0x8A..0xA5 #Lo [28] KAYAH LI LETTER KA..KAYAH LI LETTE...
455 | 0xEA 0xA4 0xB0..0xFF #Lo [23] REJANG LETTER KA..REJANG LETTER A
456 | 0xEA 0xA5 0x00..0x86 #
457 | 0xEA 0xA5 0xA0..0xBC #Lo [29] HANGUL CHOSEONG TIKEUT-MIEUM..HANG...
458 | 0xEA 0xA6 0x84..0xB2 #Lo [47] JAVANESE LETTER A..JAVANESE LETTER HA
459 | 0xEA 0xA7 0x8F #Lm JAVANESE PANGRANGKEP
460 | 0xEA 0xA7 0xA0..0xA4 #Lo [5] MYANMAR LETTER SHAN GHA..MYANMAR L...
461 | 0xEA 0xA7 0xA6 #Lm MYANMAR MODIFIER LETTER SHAN REDUP...
462 | 0xEA 0xA7 0xA7..0xAF #Lo [9] MYANMAR LETTER TAI LAING NYA..MYAN...
463 | 0xEA 0xA7 0xBA..0xBE #Lo [5] MYANMAR LETTER TAI LAING LLA..MYAN...
464 | 0xEA 0xA8 0x80..0xA8 #Lo [41] CHAM LETTER A..CHAM LETTER HA
465 | 0xEA 0xA9 0x80..0x82 #Lo [3] CHAM LETTER FINAL K..CHAM LETTER F...
466 | 0xEA 0xA9 0x84..0x8B #Lo [8] CHAM LETTER FINAL CH..CHAM LETTER ...
467 | 0xEA 0xA9 0xA0..0xAF #Lo [16] MYANMAR LETTER KHAMTI GA..MYANMAR ...
468 | 0xEA 0xA9 0xB0 #Lm MYANMAR MODIFIER LETTER KHAMTI RED...
469 | 0xEA 0xA9 0xB1..0xB6 #Lo [6] MYANMAR LETTER KHAMTI XA..MYANMAR ...
470 | 0xEA 0xA9 0xBA #Lo MYANMAR LETTER AITON RA
471 | 0xEA 0xA9 0xBE..0xFF #Lo [50] MYANMAR LETTER SHWE PALAUNG CHA..T...
472 | 0xEA 0xAA 0x00..0xAF #
473 | 0xEA 0xAA 0xB1 #Lo TAI VIET VOWEL AA
474 | 0xEA 0xAA 0xB5..0xB6 #Lo [2] TAI VIET VOWEL E..TAI VIET VOWEL O
475 | 0xEA 0xAA 0xB9..0xBD #Lo [5] TAI VIET VOWEL UEA..TAI VIET VOWEL AN
476 | 0xEA 0xAB 0x80 #Lo TAI VIET TONE MAI NUENG
477 | 0xEA 0xAB 0x82 #Lo TAI VIET TONE MAI SONG
478 | 0xEA 0xAB 0x9B..0x9C #Lo [2] TAI VIET SYMBOL KON..TAI VIET SYMB...
479 | 0xEA 0xAB 0x9D #Lm TAI VIET SYMBOL SAM
480 | 0xEA 0xAB 0xA0..0xAA #Lo [11] MEETEI MAYEK LETTER E..MEETEI MAYE...
481 | 0xEA 0xAB 0xB2 #Lo MEETEI MAYEK ANJI
482 | 0xEA 0xAB 0xB3..0xB4 #Lm [2] MEETEI MAYEK SYLLABLE REPETITION M...
483 | 0xEA 0xAC 0x81..0x86 #Lo [6] ETHIOPIC SYLLABLE TTHU..ETHIOPIC S...
484 | 0xEA 0xAC 0x89..0x8E #Lo [6] ETHIOPIC SYLLABLE DDHU..ETHIOPIC S...
485 | 0xEA 0xAC 0x91..0x96 #Lo [6] ETHIOPIC SYLLABLE DZU..ETHIOPIC SY...
486 | 0xEA 0xAC 0xA0..0xA6 #Lo [7] ETHIOPIC SYLLABLE CCHHA..ETHIOPIC ...
487 | 0xEA 0xAC 0xA8..0xAE #Lo [7] ETHIOPIC SYLLABLE BBA..ETHIOPIC SY...
488 | 0xEA 0xAC 0xB0..0xFF #L& [43] LATIN SMALL LETTER BARRED ALPHA..L...
489 | 0xEA 0xAD 0x00..0x9A #
490 | 0xEA 0xAD 0x9C..0x9F #Lm [4] MODIFIER LETTER SMALL HENG..MODIFI...
491 | 0xEA 0xAD 0xA0..0xA5 #L& [6] LATIN SMALL LETTER SAKHA YAT..GREE...
492 | 0xEA 0xAD 0xB0..0xFF #L& [80] CHEROKEE SMALL LETTER A..CHEROKEE ...
493 | 0xEA 0xAE 0x00..0xBF #
494 | 0xEA 0xAF 0x80..0xA2 #Lo [35] MEETEI MAYEK LETTER KOK..MEETEI MA...
495 | 0xEA 0xB0 0x80..0xFF #Lo [11172] HANGUL SYLLABLE GA..HA...
496 | 0xEA 0xB1..0xFF 0x00..0xFF #
497 | 0xEB..0xEC 0x00..0xFF 0x00..0xFF #
498 | 0xED 0x00 0x00..0xFF #
499 | 0xED 0x01..0x9D 0x00..0xFF #
500 | 0xED 0x9E 0x00..0xA3 #
501 | 0xED 0x9E 0xB0..0xFF #Lo [23] HANGUL JUNGSEONG O-YEO..HANGUL JUN...
502 | 0xED 0x9F 0x00..0x86 #
503 | 0xED 0x9F 0x8B..0xBB #Lo [49] HANGUL JONGSEONG NIEUN-RIEUL..HANG...
504 | 0xEF 0xA4 0x80..0xFF #Lo [366] CJK COMPATIBILITY IDEOGRAPH-F9...
505 | 0xEF 0xA5..0xA8 0x00..0xFF #
506 | 0xEF 0xA9 0x00..0xAD #
507 | 0xEF 0xA9 0xB0..0xFF #Lo [106] CJK COMPATIBILITY IDEOGRAPH-FA...
508 | 0xEF 0xAA..0xAA 0x00..0xFF #
509 | 0xEF 0xAB 0x00..0x99 #
510 | 0xEF 0xAC 0x80..0x86 #L& [7] LATIN SMALL LIGATURE FF..LATIN SMA...
511 | 0xEF 0xAC 0x93..0x97 #L& [5] ARMENIAN SMALL LIGATURE MEN NOW..A...
512 | 0xEF 0xAC 0x9D #Lo HEBREW LETTER YOD WITH HIRIQ
513 | 0xEF 0xAC 0x9F..0xA8 #Lo [10] HEBREW LIGATURE YIDDISH YOD YOD PA...
514 | 0xEF 0xAC 0xAA..0xB6 #Lo [13] HEBREW LETTER SHIN WITH SHIN DOT.....
515 | 0xEF 0xAC 0xB8..0xBC #Lo [5] HEBREW LETTER TET WITH DAGESH..HEB...
516 | 0xEF 0xAC 0xBE #Lo HEBREW LETTER MEM WITH DAGESH
517 | 0xEF 0xAD 0x80..0x81 #Lo [2] HEBREW LETTER NUN WITH DAGESH..HEB...
518 | 0xEF 0xAD 0x83..0x84 #Lo [2] HEBREW LETTER FINAL PE WITH DAGESH...
519 | 0xEF 0xAD 0x86..0xFF #Lo [108] HEBREW LETTER TSADI WITH DAGESH..A...
520 | 0xEF 0xAE 0x00..0xB1 #
521 | 0xEF 0xAF 0x93..0xFF #Lo [363] ARABIC LETTER NG ISOLATED FORM...
522 | 0xEF 0xB0..0xB3 0x00..0xFF #
523 | 0xEF 0xB4 0x00..0xBD #
524 | 0xEF 0xB5 0x90..0xFF #Lo [64] ARABIC LIGATURE TEH WITH JEEM WITH...
525 | 0xEF 0xB6 0x00..0x8F #
526 | 0xEF 0xB6 0x92..0xFF #Lo [54] ARABIC LIGATURE MEEM WITH JEEM WIT...
527 | 0xEF 0xB7 0x00..0x87 #
528 | 0xEF 0xB7 0xB0..0xBB #Lo [12] ARABIC LIGATURE SALLA USED AS KORA...
529 | 0xEF 0xB9 0xB0..0xB4 #Lo [5] ARABIC FATHATAN ISOLATED FORM..ARA...
530 | 0xEF 0xB9 0xB6..0xFF #Lo [135] ARABIC FATHA ISOLATED FORM..AR...
531 | 0xEF 0xBA..0xBA 0x00..0xFF #
532 | 0xEF 0xBB 0x00..0xBC #
533 | 0xEF 0xBC 0xA1..0xBA #L& [26] FULLWIDTH LATIN CAPITAL LETTER A.....
534 | 0xEF 0xBD 0x81..0x9A #L& [26] FULLWIDTH LATIN SMALL LETTER A..FU...
535 | 0xEF 0xBD 0xA6..0xAF #Lo [10] HALFWIDTH KATAKANA LETTER WO..HALF...
536 | 0xEF 0xBD 0xB0 #Lm HALFWIDTH KATAKANA-HIRAGANA PROLON...
537 | 0xEF 0xBD 0xB1..0xFF #Lo [45] HALFWIDTH KATAKANA LETTER A..HALFW...
538 | 0xEF 0xBE 0x00..0x9D #
539 | 0xEF 0xBE 0x9E..0x9F #Lm [2] HALFWIDTH KATAKANA VOICED SOUND MA...
540 | 0xEF 0xBE 0xA0..0xBE #Lo [31] HALFWIDTH HANGUL FILLER..HALFWIDTH...
541 | 0xEF 0xBF 0x82..0x87 #Lo [6] HALFWIDTH HANGUL LETTER A..HALFWID...
542 | 0xEF 0xBF 0x8A..0x8F #Lo [6] HALFWIDTH HANGUL LETTER YEO..HALFW...
543 | 0xEF 0xBF 0x92..0x97 #Lo [6] HALFWIDTH HANGUL LETTER YO..HALFWI...
544 | 0xEF 0xBF 0x9A..0x9C #Lo [3] HALFWIDTH HANGUL LETTER EU..HALFWI...
545 | 0xF0 0x90 0x80 0x80..0x8B #Lo [12] LINEAR B SYLLABLE B008 A..LINEA...
546 | 0xF0 0x90 0x80 0x8D..0xA6 #Lo [26] LINEAR B SYLLABLE B036 JO..LINE...
547 | 0xF0 0x90 0x80 0xA8..0xBA #Lo [19] LINEAR B SYLLABLE B060 RA..LINE...
548 | 0xF0 0x90 0x80 0xBC..0xBD #Lo [2] LINEAR B SYLLABLE B017 ZA..LINE...
549 | 0xF0 0x90 0x80 0xBF..0xFF #Lo [15] LINEAR B SYLLABLE B020 ZO..LINE...
550 | 0xF0 0x90 0x81 0x00..0x8D #
551 | 0xF0 0x90 0x81 0x90..0x9D #Lo [14] LINEAR B SYMBOL B018..LINEAR B ...
552 | 0xF0 0x90 0x82 0x80..0xFF #Lo [123] LINEAR B IDEOGRAM B100 MAN..LIN...
553 | 0xF0 0x90 0x83 0x00..0xBA #
554 | 0xF0 0x90 0x85 0x80..0xB4 #Nl [53] GREEK ACROPHONIC ATTIC ONE QUAR...
555 | 0xF0 0x90 0x8A 0x80..0x9C #Lo [29] LYCIAN LETTER A..LYCIAN LETTER X
556 | 0xF0 0x90 0x8A 0xA0..0xFF #Lo [49] CARIAN LETTER A..CARIAN LETTER ...
557 | 0xF0 0x90 0x8B 0x00..0x90 #
558 | 0xF0 0x90 0x8C 0x80..0x9F #Lo [32] OLD ITALIC LETTER A..OLD ITALIC...
559 | 0xF0 0x90 0x8C 0xB0..0xFF #Lo [17] GOTHIC LETTER AHSA..GOTHIC LETT...
560 | 0xF0 0x90 0x8D 0x00..0x80 #
561 | 0xF0 0x90 0x8D 0x81 #Nl GOTHIC LETTER NINETY
562 | 0xF0 0x90 0x8D 0x82..0x89 #Lo [8] GOTHIC LETTER RAIDA..GOTHIC LET...
563 | 0xF0 0x90 0x8D 0x8A #Nl GOTHIC LETTER NINE HUNDRED
564 | 0xF0 0x90 0x8D 0x90..0xB5 #Lo [38] OLD PERMIC LETTER AN..OLD PERMI...
565 | 0xF0 0x90 0x8E 0x80..0x9D #Lo [30] UGARITIC LETTER ALPA..UGARITIC ...
566 | 0xF0 0x90 0x8E 0xA0..0xFF #Lo [36] OLD PERSIAN SIGN A..OLD PERSIAN...
567 | 0xF0 0x90 0x8F 0x00..0x83 #
568 | 0xF0 0x90 0x8F 0x88..0x8F #Lo [8] OLD PERSIAN SIGN AURAMAZDAA..OL...
569 | 0xF0 0x90 0x8F 0x91..0x95 #Nl [5] OLD PERSIAN NUMBER ONE..OLD PER...
570 | 0xF0 0x90 0x90 0x80..0xFF #L& [80] DESERET CAPITAL LETTER LONG I.....
571 | 0xF0 0x90 0x91 0x00..0x8F #
572 | 0xF0 0x90 0x91 0x90..0xFF #Lo [78] SHAVIAN LETTER PEEP..OSMANYA LE...
573 | 0xF0 0x90 0x92 0x00..0x9D #
574 | 0xF0 0x90 0x92 0xB0..0xFF #L& [36] OSAGE CAPITAL LETTER A..OSAGE C...
575 | 0xF0 0x90 0x93 0x00..0x93 #
576 | 0xF0 0x90 0x93 0x98..0xBB #L& [36] OSAGE SMALL LETTER A..OSAGE SMA...
577 | 0xF0 0x90 0x94 0x80..0xA7 #Lo [40] ELBASAN LETTER A..ELBASAN LETTE...
578 | 0xF0 0x90 0x94 0xB0..0xFF #Lo [52] CAUCASIAN ALBANIAN LETTER ALT.....
579 | 0xF0 0x90 0x95 0x00..0xA3 #
580 | 0xF0 0x90 0x98 0x80..0xFF #Lo [311] LINEAR A SIGN AB001..LINE...
581 | 0xF0 0x90 0x99..0x9B 0x00..0xFF #
582 | 0xF0 0x90 0x9C 0x00..0xB6 #
583 | 0xF0 0x90 0x9D 0x80..0x95 #Lo [22] LINEAR A SIGN A701 A..LINEAR A ...
584 | 0xF0 0x90 0x9D 0xA0..0xA7 #Lo [8] LINEAR A SIGN A800..LINEAR A SI...
585 | 0xF0 0x90 0xA0 0x80..0x85 #Lo [6] CYPRIOT SYLLABLE A..CYPRIOT SYL...
586 | 0xF0 0x90 0xA0 0x88 #Lo CYPRIOT SYLLABLE JO
587 | 0xF0 0x90 0xA0 0x8A..0xB5 #Lo [44] CYPRIOT SYLLABLE KA..CYPRIOT SY...
588 | 0xF0 0x90 0xA0 0xB7..0xB8 #Lo [2] CYPRIOT SYLLABLE XA..CYPRIOT SY...
589 | 0xF0 0x90 0xA0 0xBC #Lo CYPRIOT SYLLABLE ZA
590 | 0xF0 0x90 0xA0 0xBF..0xFF #Lo [23] CYPRIOT SYLLABLE ZO..IMPERIAL A...
591 | 0xF0 0x90 0xA1 0x00..0x95 #
592 | 0xF0 0x90 0xA1 0xA0..0xB6 #Lo [23] PALMYRENE LETTER ALEPH..PALMYRE...
593 | 0xF0 0x90 0xA2 0x80..0x9E #Lo [31] NABATAEAN LETTER FINAL ALEPH..N...
594 | 0xF0 0x90 0xA3 0xA0..0xB2 #Lo [19] HATRAN LETTER ALEPH..HATRAN LET...
595 | 0xF0 0x90 0xA3 0xB4..0xB5 #Lo [2] HATRAN LETTER SHIN..HATRAN LETT...
596 | 0xF0 0x90 0xA4 0x80..0x95 #Lo [22] PHOENICIAN LETTER ALF..PHOENICI...
597 | 0xF0 0x90 0xA4 0xA0..0xB9 #Lo [26] LYDIAN LETTER A..LYDIAN LETTER C
598 | 0xF0 0x90 0xA6 0x80..0xB7 #Lo [56] MEROITIC HIEROGLYPHIC LETTER A....
599 | 0xF0 0x90 0xA6 0xBE..0xBF #Lo [2] MEROITIC CURSIVE LOGOGRAM RMT.....
600 | 0xF0 0x90 0xA8 0x80 #Lo KHAROSHTHI LETTER A
601 | 0xF0 0x90 0xA8 0x90..0x93 #Lo [4] KHAROSHTHI LETTER KA..KHAROSHTH...
602 | 0xF0 0x90 0xA8 0x95..0x97 #Lo [3] KHAROSHTHI LETTER CA..KHAROSHTH...
603 | 0xF0 0x90 0xA8 0x99..0xB3 #Lo [27] KHAROSHTHI LETTER NYA..KHAROSHT...
604 | 0xF0 0x90 0xA9 0xA0..0xBC #Lo [29] OLD SOUTH ARABIAN LETTER HE..OL...
605 | 0xF0 0x90 0xAA 0x80..0x9C #Lo [29] OLD NORTH ARABIAN LETTER HEH..O...
606 | 0xF0 0x90 0xAB 0x80..0x87 #Lo [8] MANICHAEAN LETTER ALEPH..MANICH...
607 | 0xF0 0x90 0xAB 0x89..0xA4 #Lo [28] MANICHAEAN LETTER ZAYIN..MANICH...
608 | 0xF0 0x90 0xAC 0x80..0xB5 #Lo [54] AVESTAN LETTER A..AVESTAN LETTE...
609 | 0xF0 0x90 0xAD 0x80..0x95 #Lo [22] INSCRIPTIONAL PARTHIAN LETTER A...
610 | 0xF0 0x90 0xAD 0xA0..0xB2 #Lo [19] INSCRIPTIONAL PAHLAVI LETTER AL...
611 | 0xF0 0x90 0xAE 0x80..0x91 #Lo [18] PSALTER PAHLAVI LETTER ALEPH..P...
612 | 0xF0 0x90 0xB0 0x80..0xFF #Lo [73] OLD TURKIC LETTER ORKHON A..OLD...
613 | 0xF0 0x90 0xB1 0x00..0x88 #
614 | 0xF0 0x90 0xB2 0x80..0xB2 #L& [51] OLD HUNGARIAN CAPITAL LETTER A....
615 | 0xF0 0x90 0xB3 0x80..0xB2 #L& [51] OLD HUNGARIAN SMALL LETTER A..O...
616 | 0xF0 0x91 0x80 0x83..0xB7 #Lo [53] BRAHMI SIGN JIHVAMULIYA..BRAHMI...
617 | 0xF0 0x91 0x82 0x83..0xAF #Lo [45] KAITHI LETTER A..KAITHI LETTER HA
618 | 0xF0 0x91 0x83 0x90..0xA8 #Lo [25] SORA SOMPENG LETTER SAH..SORA S...
619 | 0xF0 0x91 0x84 0x83..0xA6 #Lo [36] CHAKMA LETTER AA..CHAKMA LETTER...
620 | 0xF0 0x91 0x85 0x90..0xB2 #Lo [35] MAHAJANI LETTER A..MAHAJANI LET...
621 | 0xF0 0x91 0x85 0xB6 #Lo MAHAJANI LIGATURE SHRI
622 | 0xF0 0x91 0x86 0x83..0xB2 #Lo [48] SHARADA LETTER A..SHARADA LETTE...
623 | 0xF0 0x91 0x87 0x81..0x84 #Lo [4] SHARADA SIGN AVAGRAHA..SHARADA OM
624 | 0xF0 0x91 0x87 0x9A #Lo SHARADA EKAM
625 | 0xF0 0x91 0x87 0x9C #Lo SHARADA HEADSTROKE
626 | 0xF0 0x91 0x88 0x80..0x91 #Lo [18] KHOJKI LETTER A..KHOJKI LETTER JJA
627 | 0xF0 0x91 0x88 0x93..0xAB #Lo [25] KHOJKI LETTER NYA..KHOJKI LETTE...
628 | 0xF0 0x91 0x8A 0x80..0x86 #Lo [7] MULTANI LETTER A..MULTANI LETTE...
629 | 0xF0 0x91 0x8A 0x88 #Lo MULTANI LETTER GHA
630 | 0xF0 0x91 0x8A 0x8A..0x8D #Lo [4] MULTANI LETTER CA..MULTANI LETT...
631 | 0xF0 0x91 0x8A 0x8F..0x9D #Lo [15] MULTANI LETTER NYA..MULTANI LET...
632 | 0xF0 0x91 0x8A 0x9F..0xA8 #Lo [10] MULTANI LETTER BHA..MULTANI LET...
633 | 0xF0 0x91 0x8A 0xB0..0xFF #Lo [47] KHUDAWADI LETTER A..KHUDAWADI L...
634 | 0xF0 0x91 0x8B 0x00..0x9E #
635 | 0xF0 0x91 0x8C 0x85..0x8C #Lo [8] GRANTHA LETTER A..GRANTHA LETTE...
636 | 0xF0 0x91 0x8C 0x8F..0x90 #Lo [2] GRANTHA LETTER EE..GRANTHA LETT...
637 | 0xF0 0x91 0x8C 0x93..0xA8 #Lo [22] GRANTHA LETTER OO..GRANTHA LETT...
638 | 0xF0 0x91 0x8C 0xAA..0xB0 #Lo [7] GRANTHA LETTER PA..GRANTHA LETT...
639 | 0xF0 0x91 0x8C 0xB2..0xB3 #Lo [2] GRANTHA LETTER LA..GRANTHA LETT...
640 | 0xF0 0x91 0x8C 0xB5..0xB9 #Lo [5] GRANTHA LETTER VA..GRANTHA LETT...
641 | 0xF0 0x91 0x8C 0xBD #Lo GRANTHA SIGN AVAGRAHA
642 | 0xF0 0x91 0x8D 0x90 #Lo GRANTHA OM
643 | 0xF0 0x91 0x8D 0x9D..0xA1 #Lo [5] GRANTHA SIGN PLUTA..GRANTHA LET...
644 | 0xF0 0x91 0x90 0x80..0xB4 #Lo [53] NEWA LETTER A..NEWA LETTER HA
645 | 0xF0 0x91 0x91 0x87..0x8A #Lo [4] NEWA SIGN AVAGRAHA..NEWA SIDDHI
646 | 0xF0 0x91 0x92 0x80..0xAF #Lo [48] TIRHUTA ANJI..TIRHUTA LETTER HA
647 | 0xF0 0x91 0x93 0x84..0x85 #Lo [2] TIRHUTA SIGN AVAGRAHA..TIRHUTA ...
648 | 0xF0 0x91 0x93 0x87 #Lo TIRHUTA OM
649 | 0xF0 0x91 0x96 0x80..0xAE #Lo [47] SIDDHAM LETTER A..SIDDHAM LETTE...
650 | 0xF0 0x91 0x97 0x98..0x9B #Lo [4] SIDDHAM LETTER THREE-CIRCLE ALT...
651 | 0xF0 0x91 0x98 0x80..0xAF #Lo [48] MODI LETTER A..MODI LETTER LLA
652 | 0xF0 0x91 0x99 0x84 #Lo MODI SIGN HUVA
653 | 0xF0 0x91 0x9A 0x80..0xAA #Lo [43] TAKRI LETTER A..TAKRI LETTER RRA
654 | 0xF0 0x91 0x9C 0x80..0x99 #Lo [26] AHOM LETTER KA..AHOM LETTER JHA
655 | 0xF0 0x91 0xA2 0xA0..0xFF #L& [64] WARANG CITI CAPITAL LETTER NGAA...
656 | 0xF0 0x91 0xA3 0x00..0x9F #
657 | 0xF0 0x91 0xA3 0xBF #Lo WARANG CITI OM
658 | 0xF0 0x91 0xAB 0x80..0xB8 #Lo [57] PAU CIN HAU LETTER PA..PAU CIN ...
659 | 0xF0 0x91 0xB0 0x80..0x88 #Lo [9] BHAIKSUKI LETTER A..BHAIKSUKI L...
660 | 0xF0 0x91 0xB0 0x8A..0xAE #Lo [37] BHAIKSUKI LETTER E..BHAIKSUKI L...
661 | 0xF0 0x91 0xB1 0x80 #Lo BHAIKSUKI SIGN AVAGRAHA
662 | 0xF0 0x91 0xB1 0xB2..0xFF #Lo [30] MARCHEN LETTER KA..MARCHEN LETT...
663 | 0xF0 0x91 0xB2 0x00..0x8F #
664 | 0xF0 0x92 0x80 0x80..0xFF #Lo [922] CUNEIFORM SIGN A..CUNEIFO...
665 | 0xF0 0x92 0x81..0x8D 0x00..0xFF #
666 | 0xF0 0x92 0x8E 0x00..0x99 #
667 | 0xF0 0x92 0x90 0x80..0xFF #Nl [111] CUNEIFORM NUMERIC SIGN TWO ASH....
668 | 0xF0 0x92 0x91 0x00..0xAE #
669 | 0xF0 0x92 0x92 0x80..0xFF #Lo [196] CUNEIFORM SIGN AB TIMES N...
670 | 0xF0 0x92 0x93..0x94 0x00..0xFF #
671 | 0xF0 0x92 0x95 0x00..0x83 #
672 | 0xF0 0x93 0x80 0x80..0xFF #Lo [1071] EGYPTIAN HIEROGLYPH A001...
673 | 0xF0 0x93 0x81..0x8F 0x00..0xFF #
674 | 0xF0 0x93 0x90 0x00..0xAE #
675 | 0xF0 0x94 0x90 0x80..0xFF #Lo [583] ANATOLIAN HIEROGLYPH A001...
676 | 0xF0 0x94 0x91..0x98 0x00..0xFF #
677 | 0xF0 0x94 0x99 0x00..0x86 #
678 | 0xF0 0x96 0xA0 0x80..0xFF #Lo [569] BAMUM LETTER PHASE-A NGKU...
679 | 0xF0 0x96 0xA1..0xA7 0x00..0xFF #
680 | 0xF0 0x96 0xA8 0x00..0xB8 #
681 | 0xF0 0x96 0xA9 0x80..0x9E #Lo [31] MRO LETTER TA..MRO LETTER TEK
682 | 0xF0 0x96 0xAB 0x90..0xAD #Lo [30] BASSA VAH LETTER ENNI..BASSA VA...
683 | 0xF0 0x96 0xAC 0x80..0xAF #Lo [48] PAHAWH HMONG VOWEL KEEB..PAHAWH...
684 | 0xF0 0x96 0xAD 0x80..0x83 #Lm [4] PAHAWH HMONG SIGN VOS SEEV..PAH...
685 | 0xF0 0x96 0xAD 0xA3..0xB7 #Lo [21] PAHAWH HMONG SIGN VOS LUB..PAHA...
686 | 0xF0 0x96 0xAD 0xBD..0xFF #Lo [19] PAHAWH HMONG CLAN SIGN TSHEEJ.....
687 | 0xF0 0x96 0xAE 0x00..0x8F #
688 | 0xF0 0x96 0xBC 0x80..0xFF #Lo [69] MIAO LETTER PA..MIAO LETTER HHA
689 | 0xF0 0x96 0xBD 0x00..0x84 #
690 | 0xF0 0x96 0xBD 0x90 #Lo MIAO LETTER NASALIZATION
691 | 0xF0 0x96 0xBE 0x93..0x9F #Lm [13] MIAO LETTER TONE-2..MIAO LETTER...
692 | 0xF0 0x96 0xBF 0xA0 #Lm TANGUT ITERATION MARK
693 | 0xF0 0x97 0x80 0x80..0xFF #Lo [6125] TANGUT IDEOGRAPH-17000.....
694 | 0xF0 0x97 0x81..0xFF 0x00..0xFF #
695 | 0xF0 0x98 0x00 0x00..0xFF #
696 | 0xF0 0x98 0x01..0x9E 0x00..0xFF #
697 | 0xF0 0x98 0x9F 0x00..0xAC #
698 | 0xF0 0x98 0xA0 0x80..0xFF #Lo [755] TANGUT COMPONENT-001..TAN...
699 | 0xF0 0x98 0xA1..0xAA 0x00..0xFF #
700 | 0xF0 0x98 0xAB 0x00..0xB2 #
701 | 0xF0 0x9B 0x80 0x80..0x81 #Lo [2] KATAKANA LETTER ARCHAIC E..HIRA...
702 | 0xF0 0x9B 0xB0 0x80..0xFF #Lo [107] DUPLOYAN LETTER H..DUPLOYAN LET...
703 | 0xF0 0x9B 0xB1 0x00..0xAA #
704 | 0xF0 0x9B 0xB1 0xB0..0xBC #Lo [13] DUPLOYAN AFFIX LEFT HORIZONTAL ...
705 | 0xF0 0x9B 0xB2 0x80..0x88 #Lo [9] DUPLOYAN AFFIX HIGH ACUTE..DUPL...
706 | 0xF0 0x9B 0xB2 0x90..0x99 #Lo [10] DUPLOYAN AFFIX LOW ACUTE..DUPLO...
707 | 0xF0 0x9D 0x90 0x80..0xFF #L& [85] MATHEMATICAL BOLD CAPITAL A..MA...
708 | 0xF0 0x9D 0x91 0x00..0x94 #
709 | 0xF0 0x9D 0x91 0x96..0xFF #L& [71] MATHEMATICAL ITALIC SMALL I..MA...
710 | 0xF0 0x9D 0x92 0x00..0x9C #
711 | 0xF0 0x9D 0x92 0x9E..0x9F #L& [2] MATHEMATICAL SCRIPT CAPITAL C.....
712 | 0xF0 0x9D 0x92 0xA2 #L& MATHEMATICAL SCRIPT CAPITAL G
713 | 0xF0 0x9D 0x92 0xA5..0xA6 #L& [2] MATHEMATICAL SCRIPT CAPITAL J.....
714 | 0xF0 0x9D 0x92 0xA9..0xAC #L& [4] MATHEMATICAL SCRIPT CAPITAL N.....
715 | 0xF0 0x9D 0x92 0xAE..0xB9 #L& [12] MATHEMATICAL SCRIPT CAPITAL S.....
716 | 0xF0 0x9D 0x92 0xBB #L& MATHEMATICAL SCRIPT SMALL F
717 | 0xF0 0x9D 0x92 0xBD..0xFF #L& [7] MATHEMATICAL SCRIPT SMALL H..MA...
718 | 0xF0 0x9D 0x93 0x00..0x83 #
719 | 0xF0 0x9D 0x93 0x85..0xFF #L& [65] MATHEMATICAL SCRIPT SMALL P..MA...
720 | 0xF0 0x9D 0x94 0x00..0x85 #
721 | 0xF0 0x9D 0x94 0x87..0x8A #L& [4] MATHEMATICAL FRAKTUR CAPITAL D....
722 | 0xF0 0x9D 0x94 0x8D..0x94 #L& [8] MATHEMATICAL FRAKTUR CAPITAL J....
723 | 0xF0 0x9D 0x94 0x96..0x9C #L& [7] MATHEMATICAL FRAKTUR CAPITAL S....
724 | 0xF0 0x9D 0x94 0x9E..0xB9 #L& [28] MATHEMATICAL FRAKTUR SMALL A..M...
725 | 0xF0 0x9D 0x94 0xBB..0xBE #L& [4] MATHEMATICAL DOUBLE-STRUCK CAPI...
726 | 0xF0 0x9D 0x95 0x80..0x84 #L& [5] MATHEMATICAL DOUBLE-STRUCK CAPI...
727 | 0xF0 0x9D 0x95 0x86 #L& MATHEMATICAL DOUBLE-STRUCK CAPITAL O
728 | 0xF0 0x9D 0x95 0x8A..0x90 #L& [7] MATHEMATICAL DOUBLE-STRUCK CAPI...
729 | 0xF0 0x9D 0x95 0x92..0xFF #L& [340] MATHEMATICAL DOUBLE-STRUC...
730 | 0xF0 0x9D 0x96..0x99 0x00..0xFF #
731 | 0xF0 0x9D 0x9A 0x00..0xA5 #
732 | 0xF0 0x9D 0x9A 0xA8..0xFF #L& [25] MATHEMATICAL BOLD CAPITAL ALPHA...
733 | 0xF0 0x9D 0x9B 0x00..0x80 #
734 | 0xF0 0x9D 0x9B 0x82..0x9A #L& [25] MATHEMATICAL BOLD SMALL ALPHA.....
735 | 0xF0 0x9D 0x9B 0x9C..0xBA #L& [31] MATHEMATICAL BOLD EPSILON SYMBO...
736 | 0xF0 0x9D 0x9B 0xBC..0xFF #L& [25] MATHEMATICAL ITALIC SMALL ALPHA...
737 | 0xF0 0x9D 0x9C 0x00..0x94 #
738 | 0xF0 0x9D 0x9C 0x96..0xB4 #L& [31] MATHEMATICAL ITALIC EPSILON SYM...
739 | 0xF0 0x9D 0x9C 0xB6..0xFF #L& [25] MATHEMATICAL BOLD ITALIC SMALL ...
740 | 0xF0 0x9D 0x9D 0x00..0x8E #
741 | 0xF0 0x9D 0x9D 0x90..0xAE #L& [31] MATHEMATICAL BOLD ITALIC EPSILO...
742 | 0xF0 0x9D 0x9D 0xB0..0xFF #L& [25] MATHEMATICAL SANS-SERIF BOLD SM...
743 | 0xF0 0x9D 0x9E 0x00..0x88 #
744 | 0xF0 0x9D 0x9E 0x8A..0xA8 #L& [31] MATHEMATICAL SANS-SERIF BOLD EP...
745 | 0xF0 0x9D 0x9E 0xAA..0xFF #L& [25] MATHEMATICAL SANS-SERIF BOLD IT...
746 | 0xF0 0x9D 0x9F 0x00..0x82 #
747 | 0xF0 0x9D 0x9F 0x84..0x8B #L& [8] MATHEMATICAL SANS-SERIF BOLD IT...
748 | 0xF0 0x9E 0xA0 0x80..0xFF #Lo [197] MENDE KIKAKUI SYLLABLE M0...
749 | 0xF0 0x9E 0xA1..0xA2 0x00..0xFF #
750 | 0xF0 0x9E 0xA3 0x00..0x84 #
751 | 0xF0 0x9E 0xA4 0x80..0xFF #L& [68] ADLAM CAPITAL LETTER ALIF..ADLA...
752 | 0xF0 0x9E 0xA5 0x00..0x83 #
753 | 0xF0 0x9E 0xB8 0x80..0x83 #Lo [4] ARABIC MATHEMATICAL ALEF..ARABI...
754 | 0xF0 0x9E 0xB8 0x85..0x9F #Lo [27] ARABIC MATHEMATICAL WAW..ARABIC...
755 | 0xF0 0x9E 0xB8 0xA1..0xA2 #Lo [2] ARABIC MATHEMATICAL INITIAL BEH...
756 | 0xF0 0x9E 0xB8 0xA4 #Lo ARABIC MATHEMATICAL INITIAL HEH
757 | 0xF0 0x9E 0xB8 0xA7 #Lo ARABIC MATHEMATICAL INITIAL HAH
758 | 0xF0 0x9E 0xB8 0xA9..0xB2 #Lo [10] ARABIC MATHEMATICAL INITIAL YEH...
759 | 0xF0 0x9E 0xB8 0xB4..0xB7 #Lo [4] ARABIC MATHEMATICAL INITIAL SHE...
760 | 0xF0 0x9E 0xB8 0xB9 #Lo ARABIC MATHEMATICAL INITIAL DAD
761 | 0xF0 0x9E 0xB8 0xBB #Lo ARABIC MATHEMATICAL INITIAL GHAIN
762 | 0xF0 0x9E 0xB9 0x82 #Lo ARABIC MATHEMATICAL TAILED JEEM
763 | 0xF0 0x9E 0xB9 0x87 #Lo ARABIC MATHEMATICAL TAILED HAH
764 | 0xF0 0x9E 0xB9 0x89 #Lo ARABIC MATHEMATICAL TAILED YEH
765 | 0xF0 0x9E 0xB9 0x8B #Lo ARABIC MATHEMATICAL TAILED LAM
766 | 0xF0 0x9E 0xB9 0x8D..0x8F #Lo [3] ARABIC MATHEMATICAL TAILED NOON...
767 | 0xF0 0x9E 0xB9 0x91..0x92 #Lo [2] ARABIC MATHEMATICAL TAILED SAD....
768 | 0xF0 0x9E 0xB9 0x94 #Lo ARABIC MATHEMATICAL TAILED SHEEN
769 | 0xF0 0x9E 0xB9 0x97 #Lo ARABIC MATHEMATICAL TAILED KHAH
770 | 0xF0 0x9E 0xB9 0x99 #Lo ARABIC MATHEMATICAL TAILED DAD
771 | 0xF0 0x9E 0xB9 0x9B #Lo ARABIC MATHEMATICAL TAILED GHAIN
772 | 0xF0 0x9E 0xB9 0x9D #Lo ARABIC MATHEMATICAL TAILED DOTLESS...
773 | 0xF0 0x9E 0xB9 0x9F #Lo ARABIC MATHEMATICAL TAILED DOTLESS...
774 | 0xF0 0x9E 0xB9 0xA1..0xA2 #Lo [2] ARABIC MATHEMATICAL STRETCHED B...
775 | 0xF0 0x9E 0xB9 0xA4 #Lo ARABIC MATHEMATICAL STRETCHED HEH
776 | 0xF0 0x9E 0xB9 0xA7..0xAA #Lo [4] ARABIC MATHEMATICAL STRETCHED H...
777 | 0xF0 0x9E 0xB9 0xAC..0xB2 #Lo [7] ARABIC MATHEMATICAL STRETCHED M...
778 | 0xF0 0x9E 0xB9 0xB4..0xB7 #Lo [4] ARABIC MATHEMATICAL STRETCHED S...
779 | 0xF0 0x9E 0xB9 0xB9..0xBC #Lo [4] ARABIC MATHEMATICAL STRETCHED D...
780 | 0xF0 0x9E 0xB9 0xBE #Lo ARABIC MATHEMATICAL STRETCHED DOTL...
781 | 0xF0 0x9E 0xBA 0x80..0x89 #Lo [10] ARABIC MATHEMATICAL LOOPED ALEF...
782 | 0xF0 0x9E 0xBA 0x8B..0x9B #Lo [17] ARABIC MATHEMATICAL LOOPED LAM....
783 | 0xF0 0x9E 0xBA 0xA1..0xA3 #Lo [3] ARABIC MATHEMATICAL DOUBLE-STRU...
784 | 0xF0 0x9E 0xBA 0xA5..0xA9 #Lo [5] ARABIC MATHEMATICAL DOUBLE-STRU...
785 | 0xF0 0x9E 0xBA 0xAB..0xBB #Lo [17] ARABIC MATHEMATICAL DOUBLE-STRU...
786 | 0xF0 0xA0 0x80 0x80..0xFF #Lo [42711] CJK UNIFIED IDEOG...
787 | 0xF0 0xA0 0x81..0xFF 0x00..0xFF #
788 | 0xF0 0xA1..0xA9 0x00..0xFF 0x00..0xFF #
789 | 0xF0 0xAA 0x00 0x00..0xFF #
790 | 0xF0 0xAA 0x01..0x9A 0x00..0xFF #
791 | 0xF0 0xAA 0x9B 0x00..0x96 #
792 | 0xF0 0xAA 0x9C 0x80..0xFF #Lo [4149] CJK UNIFIED IDEOGRAPH-2A...
793 | 0xF0 0xAA 0x9D..0xFF 0x00..0xFF #
794 | 0xF0 0xAB 0x00 0x00..0xFF #
795 | 0xF0 0xAB 0x01..0x9B 0x00..0xFF #
796 | 0xF0 0xAB 0x9C 0x00..0xB4 #
797 | 0xF0 0xAB 0x9D 0x80..0xFF #Lo [222] CJK UNIFIED IDEOGRAPH-2B7...
798 | 0xF0 0xAB 0x9E..0x9F 0x00..0xFF #
799 | 0xF0 0xAB 0xA0 0x00..0x9D #
800 | 0xF0 0xAB 0xA0 0xA0..0xFF #Lo [5762] CJK UNIFIED IDEOGRAPH-2B...
801 | 0xF0 0xAB 0xA1..0xFF 0x00..0xFF #
802 | 0xF0 0xAC 0x00 0x00..0xFF #
803 | 0xF0 0xAC 0x01..0xB9 0x00..0xFF #
804 | 0xF0 0xAC 0xBA 0x00..0xA1 #
805 | 0xF0 0xAF 0xA0 0x80..0xFF #Lo [542] CJK COMPATIBILITY IDEOGRA...
806 | 0xF0 0xAF 0xA1..0xA7 0x00..0xFF #
807 | 0xF0 0xAF 0xA8 0x00..0x9D #
808 ;
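# A minimal usage sketch (not part of the generated tables above): a Ragel
# scanner that includes these definitions could compose an identifier machine
# from the two character classes, for example:
#
#   Identifier = ID_Start ID_Continue*;
#
# The exact composition (whether extra characters such as '_' or '-' are
# admitted) depends on the host grammar; the line above is an illustrative
# assumption, not something this generated file defines.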
809
810 ID_Continue =
811 0x30..0x39 #Nd [10] DIGIT ZERO..DIGIT NINE
812 | 0x41..0x5A #L& [26] LATIN CAPITAL LETTER A..LATIN CAPI...
813 | 0x5F #Pc LOW LINE
814 | 0x61..0x7A #L& [26] LATIN SMALL LETTER A..LATIN SMALL ...
815 | 0xC2 0xAA #Lo FEMININE ORDINAL INDICATOR
816 | 0xC2 0xB5 #L& MICRO SIGN
817 | 0xC2 0xB7 #Po MIDDLE DOT
818 | 0xC2 0xBA #Lo MASCULINE ORDINAL INDICATOR
819 | 0xC3 0x80..0x96 #L& [23] LATIN CAPITAL LETTER A WITH GRAVE....
820 | 0xC3 0x98..0xB6 #L& [31] LATIN CAPITAL LETTER O WITH STROKE...
821 | 0xC3 0xB8..0xFF #L& [195] LATIN SMALL LETTER O WITH STROKE.....
822 | 0xC4..0xC5 0x00..0xFF #
823 | 0xC6 0x00..0xBA #
824 | 0xC6 0xBB #Lo LATIN LETTER TWO WITH STROKE
825 | 0xC6 0xBC..0xBF #L& [4] LATIN CAPITAL LETTER TONE FIVE..LA...
826 | 0xC7 0x80..0x83 #Lo [4] LATIN LETTER DENTAL CLICK..LATIN L...
827 | 0xC7 0x84..0xFF #L& [208] LATIN CAPITAL LETTER DZ WITH CARON...
828 | 0xC8..0xC9 0x00..0xFF #
829 | 0xCA 0x00..0x93 #
830 | 0xCA 0x94 #Lo LATIN LETTER GLOTTAL STOP
831 | 0xCA 0x95..0xAF #L& [27] LATIN LETTER PHARYNGEAL VOICED FRI...
832 | 0xCA 0xB0..0xFF #Lm [18] MODIFIER LETTER SMALL H..MODIFIER ...
833 | 0xCB 0x00..0x81 #
834 | 0xCB 0x86..0x91 #Lm [12] MODIFIER LETTER CIRCUMFLEX ACCENT....
835 | 0xCB 0xA0..0xA4 #Lm [5] MODIFIER LETTER SMALL GAMMA..MODIF...
836 | 0xCB 0xAC #Lm MODIFIER LETTER VOICING
837 | 0xCB 0xAE #Lm MODIFIER LETTER DOUBLE APOSTROPHE
838 | 0xCC 0x80..0xFF #Mn [112] COMBINING GRAVE ACCENT..COMBINING ...
839 | 0xCD 0x00..0xAF #
840 | 0xCD 0xB0..0xB3 #L& [4] GREEK CAPITAL LETTER HETA..GREEK S...
841 | 0xCD 0xB4 #Lm GREEK NUMERAL SIGN
842 | 0xCD 0xB6..0xB7 #L& [2] GREEK CAPITAL LETTER PAMPHYLIAN DI...
843 | 0xCD 0xBA #Lm GREEK YPOGEGRAMMENI
844 | 0xCD 0xBB..0xBD #L& [3] GREEK SMALL REVERSED LUNATE SIGMA ...
845 | 0xCD 0xBF #L& GREEK CAPITAL LETTER YOT
846 | 0xCE 0x86 #L& GREEK CAPITAL LETTER ALPHA WITH TONOS
847 | 0xCE 0x87 #Po GREEK ANO TELEIA
848 | 0xCE 0x88..0x8A #L& [3] GREEK CAPITAL LETTER EPSILON WITH ...
849 | 0xCE 0x8C #L& GREEK CAPITAL LETTER OMICRON WITH ...
850 | 0xCE 0x8E..0xA1 #L& [20] GREEK CAPITAL LETTER UPSILON WITH ...
851 | 0xCE 0xA3..0xFF #L& [83] GREEK CAPITAL LETTER SIGMA..GREEK ...
852 | 0xCF 0x00..0xB5 #
853 | 0xCF 0xB7..0xFF #L& [139] GREEK CAPITAL LETTER SHO..CYRILLIC...
854 | 0xD0..0xD1 0x00..0xFF #
855 | 0xD2 0x00..0x81 #
856 | 0xD2 0x83..0x87 #Mn [5] COMBINING CYRILLIC TITLO..COMBININ...
857 | 0xD2 0x8A..0xFF #L& [166] CYRILLIC CAPITAL LETTER SHORT I WI...
858 | 0xD3..0xD3 0x00..0xFF #
859 | 0xD4 0x00..0xAF #
860 | 0xD4 0xB1..0xFF #L& [38] ARMENIAN CAPITAL LETTER AYB..ARMEN...
861 | 0xD5 0x00..0x96 #
862 | 0xD5 0x99 #Lm ARMENIAN MODIFIER LETTER LEFT HALF...
863 | 0xD5 0xA1..0xFF #L& [39] ARMENIAN SMALL LETTER AYB..ARMENIA...
864 | 0xD6 0x00..0x87 #
865 | 0xD6 0x91..0xBD #Mn [45] HEBREW ACCENT ETNAHTA..HEBREW POIN...
866 | 0xD6 0xBF #Mn HEBREW POINT RAFE
867 | 0xD7 0x81..0x82 #Mn [2] HEBREW POINT SHIN DOT..HEBREW POIN...
868 | 0xD7 0x84..0x85 #Mn [2] HEBREW MARK UPPER DOT..HEBREW MARK...
869 | 0xD7 0x87 #Mn HEBREW POINT QAMATS QATAN
870 | 0xD7 0x90..0xAA #Lo [27] HEBREW LETTER ALEF..HEBREW LETTER TAV
871 | 0xD7 0xB0..0xB2 #Lo [3] HEBREW LIGATURE YIDDISH DOUBLE VAV...
872 | 0xD8 0x90..0x9A #Mn [11] ARABIC SIGN SALLALLAHOU ALAYHE WAS...
873 | 0xD8 0xA0..0xBF #Lo [32] ARABIC LETTER KASHMIRI YEH..ARABIC...
874 | 0xD9 0x80 #Lm ARABIC TATWEEL
875 | 0xD9 0x81..0x8A #Lo [10] ARABIC LETTER FEH..ARABIC LETTER YEH
876 | 0xD9 0x8B..0x9F #Mn [21] ARABIC FATHATAN..ARABIC WAVY HAMZA...
877 | 0xD9 0xA0..0xA9 #Nd [10] ARABIC-INDIC DIGIT ZERO..ARABIC-IN...
878 | 0xD9 0xAE..0xAF #Lo [2] ARABIC LETTER DOTLESS BEH..ARABIC ...
879 | 0xD9 0xB0 #Mn ARABIC LETTER SUPERSCRIPT ALEF
880 | 0xD9 0xB1..0xFF #Lo [99] ARABIC LETTER ALEF WASLA..ARABIC L...
881 | 0xDA..0xDA 0x00..0xFF #
882 | 0xDB 0x00..0x93 #
883 | 0xDB 0x95 #Lo ARABIC LETTER AE
884 | 0xDB 0x96..0x9C #Mn [7] ARABIC SMALL HIGH LIGATURE SAD WIT...
885 | 0xDB 0x9F..0xA4 #Mn [6] ARABIC SMALL HIGH ROUNDED ZERO..AR...
886 | 0xDB 0xA5..0xA6 #Lm [2] ARABIC SMALL WAW..ARABIC SMALL YEH
887 | 0xDB 0xA7..0xA8 #Mn [2] ARABIC SMALL HIGH YEH..ARABIC SMAL...
888 | 0xDB 0xAA..0xAD #Mn [4] ARABIC EMPTY CENTRE LOW STOP..ARAB...
889 | 0xDB 0xAE..0xAF #Lo [2] ARABIC LETTER DAL WITH INVERTED V....
890 | 0xDB 0xB0..0xB9 #Nd [10] EXTENDED ARABIC-INDIC DIGIT ZERO.....
891 | 0xDB 0xBA..0xBC #Lo [3] ARABIC LETTER SHEEN WITH DOT BELOW...
892 | 0xDB 0xBF #Lo ARABIC LETTER HEH WITH INVERTED V
893 | 0xDC 0x90 #Lo SYRIAC LETTER ALAPH
894 | 0xDC 0x91 #Mn SYRIAC LETTER SUPERSCRIPT ALAPH
895 | 0xDC 0x92..0xAF #Lo [30] SYRIAC LETTER BETH..SYRIAC LETTER ...
896 | 0xDC 0xB0..0xFF #Mn [27] SYRIAC PTHAHA ABOVE..SYRIAC BARREKH
897 | 0xDD 0x00..0x8A #
898 | 0xDD 0x8D..0xFF #Lo [89] SYRIAC LETTER SOGDIAN ZHAIN..THAAN...
899 | 0xDE 0x00..0xA5 #
900 | 0xDE 0xA6..0xB0 #Mn [11] THAANA ABAFILI..THAANA SUKUN
901 | 0xDE 0xB1 #Lo THAANA LETTER NAA
902 | 0xDF 0x80..0x89 #Nd [10] NKO DIGIT ZERO..NKO DIGIT NINE
903 | 0xDF 0x8A..0xAA #Lo [33] NKO LETTER A..NKO LETTER JONA RA
904 | 0xDF 0xAB..0xB3 #Mn [9] NKO COMBINING SHORT HIGH TONE..NKO...
905 | 0xDF 0xB4..0xB5 #Lm [2] NKO HIGH TONE APOSTROPHE..NKO LOW ...
906 | 0xDF 0xBA #Lm NKO LAJANYALAN
907 | 0xE0 0xA0 0x80..0x95 #Lo [22] SAMARITAN LETTER ALAF..SAMARITAN L...
908 | 0xE0 0xA0 0x96..0x99 #Mn [4] SAMARITAN MARK IN..SAMARITAN MARK ...
909 | 0xE0 0xA0 0x9A #Lm SAMARITAN MODIFIER LETTER EPENTHET...
910 | 0xE0 0xA0 0x9B..0xA3 #Mn [9] SAMARITAN MARK EPENTHETIC YUT..SAM...
911 | 0xE0 0xA0 0xA4 #Lm SAMARITAN MODIFIER LETTER SHORT A
912 | 0xE0 0xA0 0xA5..0xA7 #Mn [3] SAMARITAN VOWEL SIGN SHORT A..SAMA...
913 | 0xE0 0xA0 0xA8 #Lm SAMARITAN MODIFIER LETTER I
914 | 0xE0 0xA0 0xA9..0xAD #Mn [5] SAMARITAN VOWEL SIGN LONG I..SAMAR...
915 | 0xE0 0xA1 0x80..0x98 #Lo [25] MANDAIC LETTER HALQA..MANDAIC LETT...
916 | 0xE0 0xA1 0x99..0x9B #Mn [3] MANDAIC AFFRICATION MARK..MANDAIC ...
917 | 0xE0 0xA2 0xA0..0xB4 #Lo [21] ARABIC LETTER BEH WITH SMALL V BEL...
918 | 0xE0 0xA2 0xB6..0xBD #Lo [8] ARABIC LETTER BEH WITH SMALL MEEM ...
919 | 0xE0 0xA3 0x94..0xA1 #Mn [14] ARABIC SMALL HIGH WORD AR-RUB..ARA...
920 | 0xE0 0xA3 0xA3..0xFF #Mn [32] ARABIC TURNED DAMMA BELOW..DEVANAG...
921 | 0xE0 0xA4 0x00..0x82 #
922 | 0xE0 0xA4 0x83 #Mc DEVANAGARI SIGN VISARGA
923 | 0xE0 0xA4 0x84..0xB9 #Lo [54] DEVANAGARI LETTER SHORT A..DEVANAG...
924 | 0xE0 0xA4 0xBA #Mn DEVANAGARI VOWEL SIGN OE
925 | 0xE0 0xA4 0xBB #Mc DEVANAGARI VOWEL SIGN OOE
926 | 0xE0 0xA4 0xBC #Mn DEVANAGARI SIGN NUKTA
927 | 0xE0 0xA4 0xBD #Lo DEVANAGARI SIGN AVAGRAHA
928 | 0xE0 0xA4 0xBE..0xFF #Mc [3] DEVANAGARI VOWEL SIGN AA..DEVANAGA...
929 | 0xE0 0xA5 0x00..0x80 #
930 | 0xE0 0xA5 0x81..0x88 #Mn [8] DEVANAGARI VOWEL SIGN U..DEVANAGAR...
931 | 0xE0 0xA5 0x89..0x8C #Mc [4] DEVANAGARI VOWEL SIGN CANDRA O..DE...
932 | 0xE0 0xA5 0x8D #Mn DEVANAGARI SIGN VIRAMA
933 | 0xE0 0xA5 0x8E..0x8F #Mc [2] DEVANAGARI VOWEL SIGN PRISHTHAMATR...
934 | 0xE0 0xA5 0x90 #Lo DEVANAGARI OM
935 | 0xE0 0xA5 0x91..0x97 #Mn [7] DEVANAGARI STRESS SIGN UDATTA..DEV...
936 | 0xE0 0xA5 0x98..0xA1 #Lo [10] DEVANAGARI LETTER QA..DEVANAGARI L...
937 | 0xE0 0xA5 0xA2..0xA3 #Mn [2] DEVANAGARI VOWEL SIGN VOCALIC L..D...
938 | 0xE0 0xA5 0xA6..0xAF #Nd [10] DEVANAGARI DIGIT ZERO..DEVANAGARI ...
939 | 0xE0 0xA5 0xB1 #Lm DEVANAGARI SIGN HIGH SPACING DOT
940 | 0xE0 0xA5 0xB2..0xFF #Lo [15] DEVANAGARI LETTER CANDRA A..BENGAL...
941 | 0xE0 0xA6 0x00..0x80 #
942 | 0xE0 0xA6 0x81 #Mn BENGALI SIGN CANDRABINDU
943 | 0xE0 0xA6 0x82..0x83 #Mc [2] BENGALI SIGN ANUSVARA..BENGALI SIG...
944 | 0xE0 0xA6 0x85..0x8C #Lo [8] BENGALI LETTER A..BENGALI LETTER V...
945 | 0xE0 0xA6 0x8F..0x90 #Lo [2] BENGALI LETTER E..BENGALI LETTER AI
946 | 0xE0 0xA6 0x93..0xA8 #Lo [22] BENGALI LETTER O..BENGALI LETTER NA
947 | 0xE0 0xA6 0xAA..0xB0 #Lo [7] BENGALI LETTER PA..BENGALI LETTER RA
948 | 0xE0 0xA6 0xB2 #Lo BENGALI LETTER LA
949 | 0xE0 0xA6 0xB6..0xB9 #Lo [4] BENGALI LETTER SHA..BENGALI LETTER HA
950 | 0xE0 0xA6 0xBC #Mn BENGALI SIGN NUKTA
951 | 0xE0 0xA6 0xBD #Lo BENGALI SIGN AVAGRAHA
952 | 0xE0 0xA6 0xBE..0xFF #Mc [3] BENGALI VOWEL SIGN AA..BENGALI VOW...
953 | 0xE0 0xA7 0x00..0x80 #
954 | 0xE0 0xA7 0x81..0x84 #Mn [4] BENGALI VOWEL SIGN U..BENGALI VOWE...
955 | 0xE0 0xA7 0x87..0x88 #Mc [2] BENGALI VOWEL SIGN E..BENGALI VOWE...
956 | 0xE0 0xA7 0x8B..0x8C #Mc [2] BENGALI VOWEL SIGN O..BENGALI VOWE...
957 | 0xE0 0xA7 0x8D #Mn BENGALI SIGN VIRAMA
958 | 0xE0 0xA7 0x8E #Lo BENGALI LETTER KHANDA TA
959 | 0xE0 0xA7 0x97 #Mc BENGALI AU LENGTH MARK
960 | 0xE0 0xA7 0x9C..0x9D #Lo [2] BENGALI LETTER RRA..BENGALI LETTER...
961 | 0xE0 0xA7 0x9F..0xA1 #Lo [3] BENGALI LETTER YYA..BENGALI LETTER...
962 | 0xE0 0xA7 0xA2..0xA3 #Mn [2] BENGALI VOWEL SIGN VOCALIC L..BENG...
963 | 0xE0 0xA7 0xA6..0xAF #Nd [10] BENGALI DIGIT ZERO..BENGALI DIGIT ...
964 | 0xE0 0xA7 0xB0..0xB1 #Lo [2] BENGALI LETTER RA WITH MIDDLE DIAG...
965 | 0xE0 0xA8 0x81..0x82 #Mn [2] GURMUKHI SIGN ADAK BINDI..GURMUKHI...
966 | 0xE0 0xA8 0x83 #Mc GURMUKHI SIGN VISARGA
967 | 0xE0 0xA8 0x85..0x8A #Lo [6] GURMUKHI LETTER A..GURMUKHI LETTER UU
968 | 0xE0 0xA8 0x8F..0x90 #Lo [2] GURMUKHI LETTER EE..GURMUKHI LETTE...
969 | 0xE0 0xA8 0x93..0xA8 #Lo [22] GURMUKHI LETTER OO..GURMUKHI LETTE...
970 | 0xE0 0xA8 0xAA..0xB0 #Lo [7] GURMUKHI LETTER PA..GURMUKHI LETTE...
971 | 0xE0 0xA8 0xB2..0xB3 #Lo [2] GURMUKHI LETTER LA..GURMUKHI LETTE...
972 | 0xE0 0xA8 0xB5..0xB6 #Lo [2] GURMUKHI LETTER VA..GURMUKHI LETTE...
973 | 0xE0 0xA8 0xB8..0xB9 #Lo [2] GURMUKHI LETTER SA..GURMUKHI LETTE...
974 | 0xE0 0xA8 0xBC #Mn GURMUKHI SIGN NUKTA
975 | 0xE0 0xA8 0xBE..0xFF #Mc [3] GURMUKHI VOWEL SIGN AA..GURMUKHI V...
976 | 0xE0 0xA9 0x00..0x80 #
977 | 0xE0 0xA9 0x81..0x82 #Mn [2] GURMUKHI VOWEL SIGN U..GURMUKHI VO...
978 | 0xE0 0xA9 0x87..0x88 #Mn [2] GURMUKHI VOWEL SIGN EE..GURMUKHI V...
979 | 0xE0 0xA9 0x8B..0x8D #Mn [3] GURMUKHI VOWEL SIGN OO..GURMUKHI S...
980 | 0xE0 0xA9 0x91 #Mn GURMUKHI SIGN UDAAT
981 | 0xE0 0xA9 0x99..0x9C #Lo [4] GURMUKHI LETTER KHHA..GURMUKHI LET...
982 | 0xE0 0xA9 0x9E #Lo GURMUKHI LETTER FA
983 | 0xE0 0xA9 0xA6..0xAF #Nd [10] GURMUKHI DIGIT ZERO..GURMUKHI DIGI...
984 | 0xE0 0xA9 0xB0..0xB1 #Mn [2] GURMUKHI TIPPI..GURMUKHI ADDAK
985 | 0xE0 0xA9 0xB2..0xB4 #Lo [3] GURMUKHI IRI..GURMUKHI EK ONKAR
986 | 0xE0 0xA9 0xB5 #Mn GURMUKHI SIGN YAKASH
987 | 0xE0 0xAA 0x81..0x82 #Mn [2] GUJARATI SIGN CANDRABINDU..GUJARAT...
988 | 0xE0 0xAA 0x83 #Mc GUJARATI SIGN VISARGA
989 | 0xE0 0xAA 0x85..0x8D #Lo [9] GUJARATI LETTER A..GUJARATI VOWEL ...
990 | 0xE0 0xAA 0x8F..0x91 #Lo [3] GUJARATI LETTER E..GUJARATI VOWEL ...
991 | 0xE0 0xAA 0x93..0xA8 #Lo [22] GUJARATI LETTER O..GUJARATI LETTER NA
992 | 0xE0 0xAA 0xAA..0xB0 #Lo [7] GUJARATI LETTER PA..GUJARATI LETTE...
993 | 0xE0 0xAA 0xB2..0xB3 #Lo [2] GUJARATI LETTER LA..GUJARATI LETTE...
994 | 0xE0 0xAA 0xB5..0xB9 #Lo [5] GUJARATI LETTER VA..GUJARATI LETTE...
995 | 0xE0 0xAA 0xBC #Mn GUJARATI SIGN NUKTA
996 | 0xE0 0xAA 0xBD #Lo GUJARATI SIGN AVAGRAHA
997 | 0xE0 0xAA 0xBE..0xFF #Mc [3] GUJARATI VOWEL SIGN AA..GUJARATI V...
998 | 0xE0 0xAB 0x00..0x80 #
999 | 0xE0 0xAB 0x81..0x85 #Mn [5] GUJARATI VOWEL SIGN U..GUJARATI VO...
1000 | 0xE0 0xAB 0x87..0x88 #Mn [2] GUJARATI VOWEL SIGN E..GUJARATI VO...
1001 | 0xE0 0xAB 0x89 #Mc GUJARATI VOWEL SIGN CANDRA O
1002 | 0xE0 0xAB 0x8B..0x8C #Mc [2] GUJARATI VOWEL SIGN O..GUJARATI VO...
1003 | 0xE0 0xAB 0x8D #Mn GUJARATI SIGN VIRAMA
1004 | 0xE0 0xAB 0x90 #Lo GUJARATI OM
1005 | 0xE0 0xAB 0xA0..0xA1 #Lo [2] GUJARATI LETTER VOCALIC RR..GUJARA...
1006 | 0xE0 0xAB 0xA2..0xA3 #Mn [2] GUJARATI VOWEL SIGN VOCALIC L..GUJ...
1007 | 0xE0 0xAB 0xA6..0xAF #Nd [10] GUJARATI DIGIT ZERO..GUJARATI DIGI...
1008 | 0xE0 0xAB 0xB9 #Lo GUJARATI LETTER ZHA
1009 | 0xE0 0xAC 0x81 #Mn ORIYA SIGN CANDRABINDU
1010 | 0xE0 0xAC 0x82..0x83 #Mc [2] ORIYA SIGN ANUSVARA..ORIYA SIGN VI...
1011 | 0xE0 0xAC 0x85..0x8C #Lo [8] ORIYA LETTER A..ORIYA LETTER VOCAL...
1012 | 0xE0 0xAC 0x8F..0x90 #Lo [2] ORIYA LETTER E..ORIYA LETTER AI
1013 | 0xE0 0xAC 0x93..0xA8 #Lo [22] ORIYA LETTER O..ORIYA LETTER NA
1014 | 0xE0 0xAC 0xAA..0xB0 #Lo [7] ORIYA LETTER PA..ORIYA LETTER RA
1015 | 0xE0 0xAC 0xB2..0xB3 #Lo [2] ORIYA LETTER LA..ORIYA LETTER LLA
1016 | 0xE0 0xAC 0xB5..0xB9 #Lo [5] ORIYA LETTER VA..ORIYA LETTER HA
1017 | 0xE0 0xAC 0xBC #Mn ORIYA SIGN NUKTA
1018 | 0xE0 0xAC 0xBD #Lo ORIYA SIGN AVAGRAHA
1019 | 0xE0 0xAC 0xBE #Mc ORIYA VOWEL SIGN AA
1020 | 0xE0 0xAC 0xBF #Mn ORIYA VOWEL SIGN I
1021 | 0xE0 0xAD 0x80 #Mc ORIYA VOWEL SIGN II
1022 | 0xE0 0xAD 0x81..0x84 #Mn [4] ORIYA VOWEL SIGN U..ORIYA VOWEL SI...
1023 | 0xE0 0xAD 0x87..0x88 #Mc [2] ORIYA VOWEL SIGN E..ORIYA VOWEL SI...
1024 | 0xE0 0xAD 0x8B..0x8C #Mc [2] ORIYA VOWEL SIGN O..ORIYA VOWEL SI...
1025 | 0xE0 0xAD 0x8D #Mn ORIYA SIGN VIRAMA
1026 | 0xE0 0xAD 0x96 #Mn ORIYA AI LENGTH MARK
1027 | 0xE0 0xAD 0x97 #Mc ORIYA AU LENGTH MARK
1028 | 0xE0 0xAD 0x9C..0x9D #Lo [2] ORIYA LETTER RRA..ORIYA LETTER RHA
1029 | 0xE0 0xAD 0x9F..0xA1 #Lo [3] ORIYA LETTER YYA..ORIYA LETTER VOC...
1030 | 0xE0 0xAD 0xA2..0xA3 #Mn [2] ORIYA VOWEL SIGN VOCALIC L..ORIYA ...
1031 | 0xE0 0xAD 0xA6..0xAF #Nd [10] ORIYA DIGIT ZERO..ORIYA DIGIT NINE
1032 | 0xE0 0xAD 0xB1 #Lo ORIYA LETTER WA
1033 | 0xE0 0xAE 0x82 #Mn TAMIL SIGN ANUSVARA
1034 | 0xE0 0xAE 0x83 #Lo TAMIL SIGN VISARGA
1035 | 0xE0 0xAE 0x85..0x8A #Lo [6] TAMIL LETTER A..TAMIL LETTER UU
1036 | 0xE0 0xAE 0x8E..0x90 #Lo [3] TAMIL LETTER E..TAMIL LETTER AI
1037 | 0xE0 0xAE 0x92..0x95 #Lo [4] TAMIL LETTER O..TAMIL LETTER KA
1038 | 0xE0 0xAE 0x99..0x9A #Lo [2] TAMIL LETTER NGA..TAMIL LETTER CA
1039 | 0xE0 0xAE 0x9C #Lo TAMIL LETTER JA
1040 | 0xE0 0xAE 0x9E..0x9F #Lo [2] TAMIL LETTER NYA..TAMIL LETTER TTA
1041 | 0xE0 0xAE 0xA3..0xA4 #Lo [2] TAMIL LETTER NNA..TAMIL LETTER TA
1042 | 0xE0 0xAE 0xA8..0xAA #Lo [3] TAMIL LETTER NA..TAMIL LETTER PA
1043 | 0xE0 0xAE 0xAE..0xB9 #Lo [12] TAMIL LETTER MA..TAMIL LETTER HA
1044 | 0xE0 0xAE 0xBE..0xBF #Mc [2] TAMIL VOWEL SIGN AA..TAMIL VOWEL S...
1045 | 0xE0 0xAF 0x80 #Mn TAMIL VOWEL SIGN II
1046 | 0xE0 0xAF 0x81..0x82 #Mc [2] TAMIL VOWEL SIGN U..TAMIL VOWEL SI...
1047 | 0xE0 0xAF 0x86..0x88 #Mc [3] TAMIL VOWEL SIGN E..TAMIL VOWEL SI...
1048 | 0xE0 0xAF 0x8A..0x8C #Mc [3] TAMIL VOWEL SIGN O..TAMIL VOWEL SI...
1049 | 0xE0 0xAF 0x8D #Mn TAMIL SIGN VIRAMA
1050 | 0xE0 0xAF 0x90 #Lo TAMIL OM
1051 | 0xE0 0xAF 0x97 #Mc TAMIL AU LENGTH MARK
1052 | 0xE0 0xAF 0xA6..0xAF #Nd [10] TAMIL DIGIT ZERO..TAMIL DIGIT NINE
1053 | 0xE0 0xB0 0x80 #Mn TELUGU SIGN COMBINING CANDRABINDU ...
1054 | 0xE0 0xB0 0x81..0x83 #Mc [3] TELUGU SIGN CANDRABINDU..TELUGU SI...
1055 | 0xE0 0xB0 0x85..0x8C #Lo [8] TELUGU LETTER A..TELUGU LETTER VOC...
1056 | 0xE0 0xB0 0x8E..0x90 #Lo [3] TELUGU LETTER E..TELUGU LETTER AI
1057 | 0xE0 0xB0 0x92..0xA8 #Lo [23] TELUGU LETTER O..TELUGU LETTER NA
1058 | 0xE0 0xB0 0xAA..0xB9 #Lo [16] TELUGU LETTER PA..TELUGU LETTER HA
1059 | 0xE0 0xB0 0xBD #Lo TELUGU SIGN AVAGRAHA
1060 | 0xE0 0xB0 0xBE..0xFF #Mn [3] TELUGU VOWEL SIGN AA..TELUGU VOWEL...
1061 | 0xE0 0xB1 0x00..0x80 #
1062 | 0xE0 0xB1 0x81..0x84 #Mc [4] TELUGU VOWEL SIGN U..TELUGU VOWEL ...
1063 | 0xE0 0xB1 0x86..0x88 #Mn [3] TELUGU VOWEL SIGN E..TELUGU VOWEL ...
1064 | 0xE0 0xB1 0x8A..0x8D #Mn [4] TELUGU VOWEL SIGN O..TELUGU SIGN V...
1065 | 0xE0 0xB1 0x95..0x96 #Mn [2] TELUGU LENGTH MARK..TELUGU AI LENG...
1066 | 0xE0 0xB1 0x98..0x9A #Lo [3] TELUGU LETTER TSA..TELUGU LETTER RRRA
1067 | 0xE0 0xB1 0xA0..0xA1 #Lo [2] TELUGU LETTER VOCALIC RR..TELUGU L...
1068 | 0xE0 0xB1 0xA2..0xA3 #Mn [2] TELUGU VOWEL SIGN VOCALIC L..TELUG...
1069 | 0xE0 0xB1 0xA6..0xAF #Nd [10] TELUGU DIGIT ZERO..TELUGU DIGIT NINE
1070 | 0xE0 0xB2 0x80 #Lo KANNADA SIGN SPACING CANDRABINDU
1071 | 0xE0 0xB2 0x81 #Mn KANNADA SIGN CANDRABINDU
1072 | 0xE0 0xB2 0x82..0x83 #Mc [2] KANNADA SIGN ANUSVARA..KANNADA SIG...
1073 | 0xE0 0xB2 0x85..0x8C #Lo [8] KANNADA LETTER A..KANNADA LETTER V...
1074 | 0xE0 0xB2 0x8E..0x90 #Lo [3] KANNADA LETTER E..KANNADA LETTER AI
1075 | 0xE0 0xB2 0x92..0xA8 #Lo [23] KANNADA LETTER O..KANNADA LETTER NA
1076 | 0xE0 0xB2 0xAA..0xB3 #Lo [10] KANNADA LETTER PA..KANNADA LETTER LLA
1077 | 0xE0 0xB2 0xB5..0xB9 #Lo [5] KANNADA LETTER VA..KANNADA LETTER HA
1078 | 0xE0 0xB2 0xBC #Mn KANNADA SIGN NUKTA
1079 | 0xE0 0xB2 0xBD #Lo KANNADA SIGN AVAGRAHA
1080 | 0xE0 0xB2 0xBE #Mc KANNADA VOWEL SIGN AA
1081 | 0xE0 0xB2 0xBF #Mn KANNADA VOWEL SIGN I
1082 | 0xE0 0xB3 0x80..0x84 #Mc [5] KANNADA VOWEL SIGN II..KANNADA VOW...
1083 | 0xE0 0xB3 0x86 #Mn KANNADA VOWEL SIGN E
1084 | 0xE0 0xB3 0x87..0x88 #Mc [2] KANNADA VOWEL SIGN EE..KANNADA VOW...
1085 | 0xE0 0xB3 0x8A..0x8B #Mc [2] KANNADA VOWEL SIGN O..KANNADA VOWE...
1086 | 0xE0 0xB3 0x8C..0x8D #Mn [2] KANNADA VOWEL SIGN AU..KANNADA SIG...
1087 | 0xE0 0xB3 0x95..0x96 #Mc [2] KANNADA LENGTH MARK..KANNADA AI LE...
1088 | 0xE0 0xB3 0x9E #Lo KANNADA LETTER FA
1089 | 0xE0 0xB3 0xA0..0xA1 #Lo [2] KANNADA LETTER VOCALIC RR..KANNADA...
1090 | 0xE0 0xB3 0xA2..0xA3 #Mn [2] KANNADA VOWEL SIGN VOCALIC L..KANN...
1091 | 0xE0 0xB3 0xA6..0xAF #Nd [10] KANNADA DIGIT ZERO..KANNADA DIGIT ...
1092 | 0xE0 0xB3 0xB1..0xB2 #Lo [2] KANNADA SIGN JIHVAMULIYA..KANNADA ...
1093 | 0xE0 0xB4 0x81 #Mn MALAYALAM SIGN CANDRABINDU
1094 | 0xE0 0xB4 0x82..0x83 #Mc [2] MALAYALAM SIGN ANUSVARA..MALAYALAM...
1095 | 0xE0 0xB4 0x85..0x8C #Lo [8] MALAYALAM LETTER A..MALAYALAM LETT...
1096 | 0xE0 0xB4 0x8E..0x90 #Lo [3] MALAYALAM LETTER E..MALAYALAM LETT...
1097 | 0xE0 0xB4 0x92..0xBA #Lo [41] MALAYALAM LETTER O..MALAYALAM LETT...
1098 | 0xE0 0xB4 0xBD #Lo MALAYALAM SIGN AVAGRAHA
1099 | 0xE0 0xB4 0xBE..0xFF #Mc [3] MALAYALAM VOWEL SIGN AA..MALAYALAM...
1100 | 0xE0 0xB5 0x00..0x80 #
1101 | 0xE0 0xB5 0x81..0x84 #Mn [4] MALAYALAM VOWEL SIGN U..MALAYALAM ...
1102 | 0xE0 0xB5 0x86..0x88 #Mc [3] MALAYALAM VOWEL SIGN E..MALAYALAM ...
1103 | 0xE0 0xB5 0x8A..0x8C #Mc [3] MALAYALAM VOWEL SIGN O..MALAYALAM ...
1104 | 0xE0 0xB5 0x8D #Mn MALAYALAM SIGN VIRAMA
1105 | 0xE0 0xB5 0x8E #Lo MALAYALAM LETTER DOT REPH
1106 | 0xE0 0xB5 0x94..0x96 #Lo [3] MALAYALAM LETTER CHILLU M..MALAYAL...
1107 | 0xE0 0xB5 0x97 #Mc MALAYALAM AU LENGTH MARK
1108 | 0xE0 0xB5 0x9F..0xA1 #Lo [3] MALAYALAM LETTER ARCHAIC II..MALAY...
1109 | 0xE0 0xB5 0xA2..0xA3 #Mn [2] MALAYALAM VOWEL SIGN VOCALIC L..MA...
1110 | 0xE0 0xB5 0xA6..0xAF #Nd [10] MALAYALAM DIGIT ZERO..MALAYALAM DI...
1111 | 0xE0 0xB5 0xBA..0xBF #Lo [6] MALAYALAM LETTER CHILLU NN..MALAYA...
1112 | 0xE0 0xB6 0x82..0x83 #Mc [2] SINHALA SIGN ANUSVARAYA..SINHALA S...
1113 | 0xE0 0xB6 0x85..0x96 #Lo [18] SINHALA LETTER AYANNA..SINHALA LET...
1114 | 0xE0 0xB6 0x9A..0xB1 #Lo [24] SINHALA LETTER ALPAPRAANA KAYANNA....
1115 | 0xE0 0xB6 0xB3..0xBB #Lo [9] SINHALA LETTER SANYAKA DAYANNA..SI...
1116 | 0xE0 0xB6 0xBD #Lo SINHALA LETTER DANTAJA LAYANNA
1117 | 0xE0 0xB7 0x80..0x86 #Lo [7] SINHALA LETTER VAYANNA..SINHALA LE...
1118 | 0xE0 0xB7 0x8A #Mn SINHALA SIGN AL-LAKUNA
1119 | 0xE0 0xB7 0x8F..0x91 #Mc [3] SINHALA VOWEL SIGN AELA-PILLA..SIN...
1120 | 0xE0 0xB7 0x92..0x94 #Mn [3] SINHALA VOWEL SIGN KETTI IS-PILLA....
1121 | 0xE0 0xB7 0x96 #Mn SINHALA VOWEL SIGN DIGA PAA-PILLA
1122 | 0xE0 0xB7 0x98..0x9F #Mc [8] SINHALA VOWEL SIGN GAETTA-PILLA..S...
1123 | 0xE0 0xB7 0xA6..0xAF #Nd [10] SINHALA LITH DIGIT ZERO..SINHALA L...
1124 | 0xE0 0xB7 0xB2..0xB3 #Mc [2] SINHALA VOWEL SIGN DIGA GAETTA-PIL...
1125 | 0xE0 0xB8 0x81..0xB0 #Lo [48] THAI CHARACTER KO KAI..THAI CHARAC...
1126 | 0xE0 0xB8 0xB1 #Mn THAI CHARACTER MAI HAN-AKAT
1127 | 0xE0 0xB8 0xB2..0xB3 #Lo [2] THAI CHARACTER SARA AA..THAI CHARA...
1128 | 0xE0 0xB8 0xB4..0xBA #Mn [7] THAI CHARACTER SARA I..THAI CHARAC...
1129 | 0xE0 0xB9 0x80..0x85 #Lo [6] THAI CHARACTER SARA E..THAI CHARAC...
1130 | 0xE0 0xB9 0x86 #Lm THAI CHARACTER MAIYAMOK
1131 | 0xE0 0xB9 0x87..0x8E #Mn [8] THAI CHARACTER MAITAIKHU..THAI CHA...
1132 | 0xE0 0xB9 0x90..0x99 #Nd [10] THAI DIGIT ZERO..THAI DIGIT NINE
1133 | 0xE0 0xBA 0x81..0x82 #Lo [2] LAO LETTER KO..LAO LETTER KHO SUNG
1134 | 0xE0 0xBA 0x84 #Lo LAO LETTER KHO TAM
1135 | 0xE0 0xBA 0x87..0x88 #Lo [2] LAO LETTER NGO..LAO LETTER CO
1136 | 0xE0 0xBA 0x8A #Lo LAO LETTER SO TAM
1137 | 0xE0 0xBA 0x8D #Lo LAO LETTER NYO
1138 | 0xE0 0xBA 0x94..0x97 #Lo [4] LAO LETTER DO..LAO LETTER THO TAM
1139 | 0xE0 0xBA 0x99..0x9F #Lo [7] LAO LETTER NO..LAO LETTER FO SUNG
1140 | 0xE0 0xBA 0xA1..0xA3 #Lo [3] LAO LETTER MO..LAO LETTER LO LING
1141 | 0xE0 0xBA 0xA5 #Lo LAO LETTER LO LOOT
1142 | 0xE0 0xBA 0xA7 #Lo LAO LETTER WO
1143 | 0xE0 0xBA 0xAA..0xAB #Lo [2] LAO LETTER SO SUNG..LAO LETTER HO ...
1144 | 0xE0 0xBA 0xAD..0xB0 #Lo [4] LAO LETTER O..LAO VOWEL SIGN A
1145 | 0xE0 0xBA 0xB1 #Mn LAO VOWEL SIGN MAI KAN
1146 | 0xE0 0xBA 0xB2..0xB3 #Lo [2] LAO VOWEL SIGN AA..LAO VOWEL SIGN AM
1147 | 0xE0 0xBA 0xB4..0xB9 #Mn [6] LAO VOWEL SIGN I..LAO VOWEL SIGN UU
1148 | 0xE0 0xBA 0xBB..0xBC #Mn [2] LAO VOWEL SIGN MAI KON..LAO SEMIVO...
1149 | 0xE0 0xBA 0xBD #Lo LAO SEMIVOWEL SIGN NYO
1150 | 0xE0 0xBB 0x80..0x84 #Lo [5] LAO VOWEL SIGN E..LAO VOWEL SIGN AI
1151 | 0xE0 0xBB 0x86 #Lm LAO KO LA
1152 | 0xE0 0xBB 0x88..0x8D #Mn [6] LAO TONE MAI EK..LAO NIGGAHITA
1153 | 0xE0 0xBB 0x90..0x99 #Nd [10] LAO DIGIT ZERO..LAO DIGIT NINE
1154 | 0xE0 0xBB 0x9C..0x9F #Lo [4] LAO HO NO..LAO LETTER KHMU NYO
1155 | 0xE0 0xBC 0x80 #Lo TIBETAN SYLLABLE OM
1156 | 0xE0 0xBC 0x98..0x99 #Mn [2] TIBETAN ASTROLOGICAL SIGN -KHYUD P...
1157 | 0xE0 0xBC 0xA0..0xA9 #Nd [10] TIBETAN DIGIT ZERO..TIBETAN DIGIT ...
1158 | 0xE0 0xBC 0xB5 #Mn TIBETAN MARK NGAS BZUNG NYI ZLA
1159 | 0xE0 0xBC 0xB7 #Mn TIBETAN MARK NGAS BZUNG SGOR RTAGS
1160 | 0xE0 0xBC 0xB9 #Mn TIBETAN MARK TSA -PHRU
1161 | 0xE0 0xBC 0xBE..0xBF #Mc [2] TIBETAN SIGN YAR TSHES..TIBETAN SI...
1162 | 0xE0 0xBD 0x80..0x87 #Lo [8] TIBETAN LETTER KA..TIBETAN LETTER JA
1163 | 0xE0 0xBD 0x89..0xAC #Lo [36] TIBETAN LETTER NYA..TIBETAN LETTER...
1164 | 0xE0 0xBD 0xB1..0xBE #Mn [14] TIBETAN VOWEL SIGN AA..TIBETAN SIG...
1165 | 0xE0 0xBD 0xBF #Mc TIBETAN SIGN RNAM BCAD
1166 | 0xE0 0xBE 0x80..0x84 #Mn [5] TIBETAN VOWEL SIGN REVERSED I..TIB...
1167 | 0xE0 0xBE 0x86..0x87 #Mn [2] TIBETAN SIGN LCI RTAGS..TIBETAN SI...
1168 | 0xE0 0xBE 0x88..0x8C #Lo [5] TIBETAN SIGN LCE TSA CAN..TIBETAN ...
1169 | 0xE0 0xBE 0x8D..0x97 #Mn [11] TIBETAN SUBJOINED SIGN LCE TSA CAN...
1170 | 0xE0 0xBE 0x99..0xBC #Mn [36] TIBETAN SUBJOINED LETTER NYA..TIBE...
1171 | 0xE0 0xBF 0x86 #Mn TIBETAN SYMBOL PADMA GDAN
1172 | 0xE1 0x80 0x80..0xAA #Lo [43] MYANMAR LETTER KA..MYANMAR LETTER AU
1173 | 0xE1 0x80 0xAB..0xAC #Mc [2] MYANMAR VOWEL SIGN TALL AA..MYANMA...
1174 | 0xE1 0x80 0xAD..0xB0 #Mn [4] MYANMAR VOWEL SIGN I..MYANMAR VOWE...
1175 | 0xE1 0x80 0xB1 #Mc MYANMAR VOWEL SIGN E
1176 | 0xE1 0x80 0xB2..0xB7 #Mn [6] MYANMAR VOWEL SIGN AI..MYANMAR SIG...
1177 | 0xE1 0x80 0xB8 #Mc MYANMAR SIGN VISARGA
1178 | 0xE1 0x80 0xB9..0xBA #Mn [2] MYANMAR SIGN VIRAMA..MYANMAR SIGN ...
1179 | 0xE1 0x80 0xBB..0xBC #Mc [2] MYANMAR CONSONANT SIGN MEDIAL YA.....
1180 | 0xE1 0x80 0xBD..0xBE #Mn [2] MYANMAR CONSONANT SIGN MEDIAL WA.....
1181 | 0xE1 0x80 0xBF #Lo MYANMAR LETTER GREAT SA
1182 | 0xE1 0x81 0x80..0x89 #Nd [10] MYANMAR DIGIT ZERO..MYANMAR DIGIT ...
1183 | 0xE1 0x81 0x90..0x95 #Lo [6] MYANMAR LETTER SHA..MYANMAR LETTER...
1184 | 0xE1 0x81 0x96..0x97 #Mc [2] MYANMAR VOWEL SIGN VOCALIC R..MYAN...
1185 | 0xE1 0x81 0x98..0x99 #Mn [2] MYANMAR VOWEL SIGN VOCALIC L..MYAN...
1186 | 0xE1 0x81 0x9A..0x9D #Lo [4] MYANMAR LETTER MON NGA..MYANMAR LE...
1187 | 0xE1 0x81 0x9E..0xA0 #Mn [3] MYANMAR CONSONANT SIGN MON MEDIAL ...
1188 | 0xE1 0x81 0xA1 #Lo MYANMAR LETTER SGAW KAREN SHA
1189 | 0xE1 0x81 0xA2..0xA4 #Mc [3] MYANMAR VOWEL SIGN SGAW KAREN EU.....
1190 | 0xE1 0x81 0xA5..0xA6 #Lo [2] MYANMAR LETTER WESTERN PWO KAREN T...
1191 | 0xE1 0x81 0xA7..0xAD #Mc [7] MYANMAR VOWEL SIGN WESTERN PWO KAR...
1192 | 0xE1 0x81 0xAE..0xB0 #Lo [3] MYANMAR LETTER EASTERN PWO KAREN N...
1193 | 0xE1 0x81 0xB1..0xB4 #Mn [4] MYANMAR VOWEL SIGN GEBA KAREN I..M...
1194 | 0xE1 0x81 0xB5..0xFF #Lo [13] MYANMAR LETTER SHAN KA..MYANMAR LE...
1195 | 0xE1 0x82 0x00..0x81 #
1196 | 0xE1 0x82 0x82 #Mn MYANMAR CONSONANT SIGN SHAN MEDIAL WA
1197 | 0xE1 0x82 0x83..0x84 #Mc [2] MYANMAR VOWEL SIGN SHAN AA..MYANMA...
1198 | 0xE1 0x82 0x85..0x86 #Mn [2] MYANMAR VOWEL SIGN SHAN E ABOVE..M...
1199 | 0xE1 0x82 0x87..0x8C #Mc [6] MYANMAR SIGN SHAN TONE-2..MYANMAR ...
1200 | 0xE1 0x82 0x8D #Mn MYANMAR SIGN SHAN COUNCIL EMPHATIC...
1201 | 0xE1 0x82 0x8E #Lo MYANMAR LETTER RUMAI PALAUNG FA
1202 | 0xE1 0x82 0x8F #Mc MYANMAR SIGN RUMAI PALAUNG TONE-5
1203 | 0xE1 0x82 0x90..0x99 #Nd [10] MYANMAR SHAN DIGIT ZERO..MYANMAR S...
1204 | 0xE1 0x82 0x9A..0x9C #Mc [3] MYANMAR SIGN KHAMTI TONE-1..MYANMA...
1205 | 0xE1 0x82 0x9D #Mn MYANMAR VOWEL SIGN AITON AI
1206 | 0xE1 0x82 0xA0..0xFF #L& [38] GEORGIAN CAPITAL LETTER AN..GEORGI...
1207 | 0xE1 0x83 0x00..0x85 #
1208 | 0xE1 0x83 0x87 #L& GEORGIAN CAPITAL LETTER YN
1209 | 0xE1 0x83 0x8D #L& GEORGIAN CAPITAL LETTER AEN
1210 | 0xE1 0x83 0x90..0xBA #Lo [43] GEORGIAN LETTER AN..GEORGIAN LETTE...
1211 | 0xE1 0x83 0xBC #Lm MODIFIER LETTER GEORGIAN NAR
1212 | 0xE1 0x83 0xBD..0xFF #Lo [332] GEORGIAN LETTER AEN..ETHIOPIC ...
1213 | 0xE1 0x84..0x88 0x00..0xFF #
1214 | 0xE1 0x89 0x00..0x88 #
1215 | 0xE1 0x89 0x8A..0x8D #Lo [4] ETHIOPIC SYLLABLE QWI..ETHIOPIC SY...
1216 | 0xE1 0x89 0x90..0x96 #Lo [7] ETHIOPIC SYLLABLE QHA..ETHIOPIC SY...
1217 | 0xE1 0x89 0x98 #Lo ETHIOPIC SYLLABLE QHWA
1218 | 0xE1 0x89 0x9A..0x9D #Lo [4] ETHIOPIC SYLLABLE QHWI..ETHIOPIC S...
1219 | 0xE1 0x89 0xA0..0xFF #Lo [41] ETHIOPIC SYLLABLE BA..ETHIOPIC SYL...
1220 | 0xE1 0x8A 0x00..0x88 #
1221 | 0xE1 0x8A 0x8A..0x8D #Lo [4] ETHIOPIC SYLLABLE XWI..ETHIOPIC SY...
1222 | 0xE1 0x8A 0x90..0xB0 #Lo [33] ETHIOPIC SYLLABLE NA..ETHIOPIC SYL...
1223 | 0xE1 0x8A 0xB2..0xB5 #Lo [4] ETHIOPIC SYLLABLE KWI..ETHIOPIC SY...
1224 | 0xE1 0x8A 0xB8..0xBE #Lo [7] ETHIOPIC SYLLABLE KXA..ETHIOPIC SY...
1225 | 0xE1 0x8B 0x80 #Lo ETHIOPIC SYLLABLE KXWA
1226 | 0xE1 0x8B 0x82..0x85 #Lo [4] ETHIOPIC SYLLABLE KXWI..ETHIOPIC S...
1227 | 0xE1 0x8B 0x88..0x96 #Lo [15] ETHIOPIC SYLLABLE WA..ETHIOPIC SYL...
1228 | 0xE1 0x8B 0x98..0xFF #Lo [57] ETHIOPIC SYLLABLE ZA..ETHIOPIC SYL...
1229 | 0xE1 0x8C 0x00..0x90 #
1230 | 0xE1 0x8C 0x92..0x95 #Lo [4] ETHIOPIC SYLLABLE GWI..ETHIOPIC SY...
1231 | 0xE1 0x8C 0x98..0xFF #Lo [67] ETHIOPIC SYLLABLE GGA..ETHIOPIC SY...
1232 | 0xE1 0x8D 0x00..0x9A #
1233 | 0xE1 0x8D 0x9D..0x9F #Mn [3] ETHIOPIC COMBINING GEMINATION AND ...
1234 | 0xE1 0x8D 0xA9..0xB1 #No [9] ETHIOPIC DIGIT ONE..ETHIOPIC DIGIT...
1235 | 0xE1 0x8E 0x80..0x8F #Lo [16] ETHIOPIC SYLLABLE SEBATBEIT MWA..E...
1236 | 0xE1 0x8E 0xA0..0xFF #L& [86] CHEROKEE LETTER A..CHEROKEE LETTER MV
1237 | 0xE1 0x8F 0x00..0xB5 #
1238 | 0xE1 0x8F 0xB8..0xBD #L& [6] CHEROKEE SMALL LETTER YE..CHEROKEE...
1239 | 0xE1 0x90 0x81..0xFF #Lo [620] CANADIAN SYLLABICS E..CANADIAN...
1240 | 0xE1 0x91..0x98 0x00..0xFF #
1241 | 0xE1 0x99 0x00..0xAC #
1242 | 0xE1 0x99 0xAF..0xBF #Lo [17] CANADIAN SYLLABICS QAI..CANADIAN S...
1243 | 0xE1 0x9A 0x81..0x9A #Lo [26] OGHAM LETTER BEITH..OGHAM LETTER P...
1244 | 0xE1 0x9A 0xA0..0xFF #Lo [75] RUNIC LETTER FEHU FEOH FE F..RUNIC...
1245 | 0xE1 0x9B 0x00..0xAA #
1246 | 0xE1 0x9B 0xAE..0xB0 #Nl [3] RUNIC ARLAUG SYMBOL..RUNIC BELGTHO...
1247 | 0xE1 0x9B 0xB1..0xB8 #Lo [8] RUNIC LETTER K..RUNIC LETTER FRANK...
1248 | 0xE1 0x9C 0x80..0x8C #Lo [13] TAGALOG LETTER A..TAGALOG LETTER YA
1249 | 0xE1 0x9C 0x8E..0x91 #Lo [4] TAGALOG LETTER LA..TAGALOG LETTER HA
1250 | 0xE1 0x9C 0x92..0x94 #Mn [3] TAGALOG VOWEL SIGN I..TAGALOG SIGN...
1251 | 0xE1 0x9C 0xA0..0xB1 #Lo [18] HANUNOO LETTER A..HANUNOO LETTER HA
1252 | 0xE1 0x9C 0xB2..0xB4 #Mn [3] HANUNOO VOWEL SIGN I..HANUNOO SIGN...
1253 | 0xE1 0x9D 0x80..0x91 #Lo [18] BUHID LETTER A..BUHID LETTER HA
1254 | 0xE1 0x9D 0x92..0x93 #Mn [2] BUHID VOWEL SIGN I..BUHID VOWEL SI...
1255 | 0xE1 0x9D 0xA0..0xAC #Lo [13] TAGBANWA LETTER A..TAGBANWA LETTER YA
1256 | 0xE1 0x9D 0xAE..0xB0 #Lo [3] TAGBANWA LETTER LA..TAGBANWA LETTE...
1257 | 0xE1 0x9D 0xB2..0xB3 #Mn [2] TAGBANWA VOWEL SIGN I..TAGBANWA VO...
1258 | 0xE1 0x9E 0x80..0xB3 #Lo [52] KHMER LETTER KA..KHMER INDEPENDENT...
1259 | 0xE1 0x9E 0xB4..0xB5 #Mn [2] KHMER VOWEL INHERENT AQ..KHMER VOW...
1260 | 0xE1 0x9E 0xB6 #Mc KHMER VOWEL SIGN AA
1261 | 0xE1 0x9E 0xB7..0xBD #Mn [7] KHMER VOWEL SIGN I..KHMER VOWEL SI...
1262 | 0xE1 0x9E 0xBE..0xFF #Mc [8] KHMER VOWEL SIGN OE..KHMER VOWEL S...
1263 | 0xE1 0x9F 0x00..0x85 #
1264 | 0xE1 0x9F 0x86 #Mn KHMER SIGN NIKAHIT
1265 | 0xE1 0x9F 0x87..0x88 #Mc [2] KHMER SIGN REAHMUK..KHMER SIGN YUU...
1266 | 0xE1 0x9F 0x89..0x93 #Mn [11] KHMER SIGN MUUSIKATOAN..KHMER SIGN...
1267 | 0xE1 0x9F 0x97 #Lm KHMER SIGN LEK TOO
1268 | 0xE1 0x9F 0x9C #Lo KHMER SIGN AVAKRAHASANYA
1269 | 0xE1 0x9F 0x9D #Mn KHMER SIGN ATTHACAN
1270 | 0xE1 0x9F 0xA0..0xA9 #Nd [10] KHMER DIGIT ZERO..KHMER DIGIT NINE
1271 | 0xE1 0xA0 0x8B..0x8D #Mn [3] MONGOLIAN FREE VARIATION SELECTOR ...
1272 | 0xE1 0xA0 0x90..0x99 #Nd [10] MONGOLIAN DIGIT ZERO..MONGOLIAN DI...
1273 | 0xE1 0xA0 0xA0..0xFF #Lo [35] MONGOLIAN LETTER A..MONGOLIAN LETT...
1274 | 0xE1 0xA1 0x00..0x82 #
1275 | 0xE1 0xA1 0x83 #Lm MONGOLIAN LETTER TODO LONG VOWEL SIGN
1276 | 0xE1 0xA1 0x84..0xB7 #Lo [52] MONGOLIAN LETTER TODO E..MONGOLIAN...
1277 | 0xE1 0xA2 0x80..0x84 #Lo [5] MONGOLIAN LETTER ALI GALI ANUSVARA...
1278 | 0xE1 0xA2 0x85..0x86 #Mn [2] MONGOLIAN LETTER ALI GALI BALUDA.....
1279 | 0xE1 0xA2 0x87..0xA8 #Lo [34] MONGOLIAN LETTER ALI GALI A..MONGO...
1280 | 0xE1 0xA2 0xA9 #Mn MONGOLIAN LETTER ALI GALI DAGALGA
1281 | 0xE1 0xA2 0xAA #Lo MONGOLIAN LETTER MANCHU ALI GALI LHA
1282 | 0xE1 0xA2 0xB0..0xFF #Lo [70] CANADIAN SYLLABICS OY..CANADIAN SY...
1283 | 0xE1 0xA3 0x00..0xB5 #
1284 | 0xE1 0xA4 0x80..0x9E #Lo [31] LIMBU VOWEL-CARRIER LETTER..LIMBU ...
1285 | 0xE1 0xA4 0xA0..0xA2 #Mn [3] LIMBU VOWEL SIGN A..LIMBU VOWEL SI...
1286 | 0xE1 0xA4 0xA3..0xA6 #Mc [4] LIMBU VOWEL SIGN EE..LIMBU VOWEL S...
1287 | 0xE1 0xA4 0xA7..0xA8 #Mn [2] LIMBU VOWEL SIGN E..LIMBU VOWEL SI...
1288 | 0xE1 0xA4 0xA9..0xAB #Mc [3] LIMBU SUBJOINED LETTER YA..LIMBU S...
1289 | 0xE1 0xA4 0xB0..0xB1 #Mc [2] LIMBU SMALL LETTER KA..LIMBU SMALL...
1290 | 0xE1 0xA4 0xB2 #Mn LIMBU SMALL LETTER ANUSVARA
1291 | 0xE1 0xA4 0xB3..0xB8 #Mc [6] LIMBU SMALL LETTER TA..LIMBU SMALL...
1292 | 0xE1 0xA4 0xB9..0xBB #Mn [3] LIMBU SIGN MUKPHRENG..LIMBU SIGN SA-I
1293 | 0xE1 0xA5 0x86..0x8F #Nd [10] LIMBU DIGIT ZERO..LIMBU DIGIT NINE
1294 | 0xE1 0xA5 0x90..0xAD #Lo [30] TAI LE LETTER KA..TAI LE LETTER AI
1295 | 0xE1 0xA5 0xB0..0xB4 #Lo [5] TAI LE LETTER TONE-2..TAI LE LETTE...
1296 | 0xE1 0xA6 0x80..0xAB #Lo [44] NEW TAI LUE LETTER HIGH QA..NEW TA...
1297 | 0xE1 0xA6 0xB0..0xFF #Lo [26] NEW TAI LUE VOWEL SIGN VOWEL SHORT...
1298 | 0xE1 0xA7 0x00..0x89 #
1299 | 0xE1 0xA7 0x90..0x99 #Nd [10] NEW TAI LUE DIGIT ZERO..NEW TAI LU...
1300 | 0xE1 0xA7 0x9A #No NEW TAI LUE THAM DIGIT ONE
1301 | 0xE1 0xA8 0x80..0x96 #Lo [23] BUGINESE LETTER KA..BUGINESE LETTE...
1302 | 0xE1 0xA8 0x97..0x98 #Mn [2] BUGINESE VOWEL SIGN I..BUGINESE VO...
1303 | 0xE1 0xA8 0x99..0x9A #Mc [2] BUGINESE VOWEL SIGN E..BUGINESE VO...
1304 | 0xE1 0xA8 0x9B #Mn BUGINESE VOWEL SIGN AE
1305 | 0xE1 0xA8 0xA0..0xFF #Lo [53] TAI THAM LETTER HIGH KA..TAI THAM ...
1306 | 0xE1 0xA9 0x00..0x94 #
1307 | 0xE1 0xA9 0x95 #Mc TAI THAM CONSONANT SIGN MEDIAL RA
1308 | 0xE1 0xA9 0x96 #Mn TAI THAM CONSONANT SIGN MEDIAL LA
1309 | 0xE1 0xA9 0x97 #Mc TAI THAM CONSONANT SIGN LA TANG LAI
1310 | 0xE1 0xA9 0x98..0x9E #Mn [7] TAI THAM SIGN MAI KANG LAI..TAI TH...
1311 | 0xE1 0xA9 0xA0 #Mn TAI THAM SIGN SAKOT
1312 | 0xE1 0xA9 0xA1 #Mc TAI THAM VOWEL SIGN A
1313 | 0xE1 0xA9 0xA2 #Mn TAI THAM VOWEL SIGN MAI SAT
1314 | 0xE1 0xA9 0xA3..0xA4 #Mc [2] TAI THAM VOWEL SIGN AA..TAI THAM V...
1315 | 0xE1 0xA9 0xA5..0xAC #Mn [8] TAI THAM VOWEL SIGN I..TAI THAM VO...
1316 | 0xE1 0xA9 0xAD..0xB2 #Mc [6] TAI THAM VOWEL SIGN OY..TAI THAM V...
1317 | 0xE1 0xA9 0xB3..0xBC #Mn [10] TAI THAM VOWEL SIGN OA ABOVE..TAI ...
1318 | 0xE1 0xA9 0xBF #Mn TAI THAM COMBINING CRYPTOGRAMMIC DOT
1319 | 0xE1 0xAA 0x80..0x89 #Nd [10] TAI THAM HORA DIGIT ZERO..TAI THAM...
1320 | 0xE1 0xAA 0x90..0x99 #Nd [10] TAI THAM THAM DIGIT ZERO..TAI THAM...
1321 | 0xE1 0xAA 0xA7 #Lm TAI THAM SIGN MAI YAMOK
1322 | 0xE1 0xAA 0xB0..0xBD #Mn [14] COMBINING DOUBLED CIRCUMFLEX ACCEN...
1323 | 0xE1 0xAC 0x80..0x83 #Mn [4] BALINESE SIGN ULU RICEM..BALINESE ...
1324 | 0xE1 0xAC 0x84 #Mc BALINESE SIGN BISAH
1325 | 0xE1 0xAC 0x85..0xB3 #Lo [47] BALINESE LETTER AKARA..BALINESE LE...
1326 | 0xE1 0xAC 0xB4 #Mn BALINESE SIGN REREKAN
1327 | 0xE1 0xAC 0xB5 #Mc BALINESE VOWEL SIGN TEDUNG
1328 | 0xE1 0xAC 0xB6..0xBA #Mn [5] BALINESE VOWEL SIGN ULU..BALINESE ...
1329 | 0xE1 0xAC 0xBB #Mc BALINESE VOWEL SIGN RA REPA TEDUNG
1330 | 0xE1 0xAC 0xBC #Mn BALINESE VOWEL SIGN LA LENGA
1331 | 0xE1 0xAC 0xBD..0xFF #Mc [5] BALINESE VOWEL SIGN LA LENGA TEDUN...
1332 | 0xE1 0xAD 0x00..0x81 #
1333 | 0xE1 0xAD 0x82 #Mn BALINESE VOWEL SIGN PEPET
1334 | 0xE1 0xAD 0x83..0x84 #Mc [2] BALINESE VOWEL SIGN PEPET TEDUNG.....
1335 | 0xE1 0xAD 0x85..0x8B #Lo [7] BALINESE LETTER KAF SASAK..BALINES...
1336 | 0xE1 0xAD 0x90..0x99 #Nd [10] BALINESE DIGIT ZERO..BALINESE DIGI...
1337 | 0xE1 0xAD 0xAB..0xB3 #Mn [9] BALINESE MUSICAL SYMBOL COMBINING ...
1338 | 0xE1 0xAE 0x80..0x81 #Mn [2] SUNDANESE SIGN PANYECEK..SUNDANESE...
1339 | 0xE1 0xAE 0x82 #Mc SUNDANESE SIGN PANGWISAD
1340 | 0xE1 0xAE 0x83..0xA0 #Lo [30] SUNDANESE LETTER A..SUNDANESE LETT...
1341 | 0xE1 0xAE 0xA1 #Mc SUNDANESE CONSONANT SIGN PAMINGKAL
1342 | 0xE1 0xAE 0xA2..0xA5 #Mn [4] SUNDANESE CONSONANT SIGN PANYAKRA....
1343 | 0xE1 0xAE 0xA6..0xA7 #Mc [2] SUNDANESE VOWEL SIGN PANAELAENG..S...
1344 | 0xE1 0xAE 0xA8..0xA9 #Mn [2] SUNDANESE VOWEL SIGN PAMEPET..SUND...
1345 | 0xE1 0xAE 0xAA #Mc SUNDANESE SIGN PAMAAEH
1346 | 0xE1 0xAE 0xAB..0xAD #Mn [3] SUNDANESE SIGN VIRAMA..SUNDANESE C...
1347 | 0xE1 0xAE 0xAE..0xAF #Lo [2] SUNDANESE LETTER KHA..SUNDANESE LE...
1348 | 0xE1 0xAE 0xB0..0xB9 #Nd [10] SUNDANESE DIGIT ZERO..SUNDANESE DI...
1349 | 0xE1 0xAE 0xBA..0xFF #Lo [44] SUNDANESE AVAGRAHA..BATAK LETTER U
1350 | 0xE1 0xAF 0x00..0xA5 #
1351 | 0xE1 0xAF 0xA6 #Mn BATAK SIGN TOMPI
1352 | 0xE1 0xAF 0xA7 #Mc BATAK VOWEL SIGN E
1353 | 0xE1 0xAF 0xA8..0xA9 #Mn [2] BATAK VOWEL SIGN PAKPAK E..BATAK V...
1354 | 0xE1 0xAF 0xAA..0xAC #Mc [3] BATAK VOWEL SIGN I..BATAK VOWEL SI...
1355 | 0xE1 0xAF 0xAD #Mn BATAK VOWEL SIGN KARO O
1356 | 0xE1 0xAF 0xAE #Mc BATAK VOWEL SIGN U
1357 | 0xE1 0xAF 0xAF..0xB1 #Mn [3] BATAK VOWEL SIGN U FOR SIMALUNGUN ...
1358 | 0xE1 0xAF 0xB2..0xB3 #Mc [2] BATAK PANGOLAT..BATAK PANONGONAN
1359 | 0xE1 0xB0 0x80..0xA3 #Lo [36] LEPCHA LETTER KA..LEPCHA LETTER A
1360 | 0xE1 0xB0 0xA4..0xAB #Mc [8] LEPCHA SUBJOINED LETTER YA..LEPCHA...
1361 | 0xE1 0xB0 0xAC..0xB3 #Mn [8] LEPCHA VOWEL SIGN E..LEPCHA CONSON...
1362 | 0xE1 0xB0 0xB4..0xB5 #Mc [2] LEPCHA CONSONANT SIGN NYIN-DO..LEP...
1363 | 0xE1 0xB0 0xB6..0xB7 #Mn [2] LEPCHA SIGN RAN..LEPCHA SIGN NUKTA
1364 | 0xE1 0xB1 0x80..0x89 #Nd [10] LEPCHA DIGIT ZERO..LEPCHA DIGIT NINE
1365 | 0xE1 0xB1 0x8D..0x8F #Lo [3] LEPCHA LETTER TTA..LEPCHA LETTER DDA
1366 | 0xE1 0xB1 0x90..0x99 #Nd [10] OL CHIKI DIGIT ZERO..OL CHIKI DIGI...
1367 | 0xE1 0xB1 0x9A..0xB7 #Lo [30] OL CHIKI LETTER LA..OL CHIKI LETTE...
1368 | 0xE1 0xB1 0xB8..0xBD #Lm [6] OL CHIKI MU TTUDDAG..OL CHIKI AHAD
1369 | 0xE1 0xB2 0x80..0x88 #L& [9] CYRILLIC SMALL LETTER ROUNDED VE.....
1370 | 0xE1 0xB3 0x90..0x92 #Mn [3] VEDIC TONE KARSHANA..VEDIC TONE PR...
1371 | 0xE1 0xB3 0x94..0xA0 #Mn [13] VEDIC SIGN YAJURVEDIC MIDLINE SVAR...
1372 | 0xE1 0xB3 0xA1 #Mc VEDIC TONE ATHARVAVEDIC INDEPENDEN...
1373 | 0xE1 0xB3 0xA2..0xA8 #Mn [7] VEDIC SIGN VISARGA SVARITA..VEDIC ...
1374 | 0xE1 0xB3 0xA9..0xAC #Lo [4] VEDIC SIGN ANUSVARA ANTARGOMUKHA.....
1375 | 0xE1 0xB3 0xAD #Mn VEDIC SIGN TIRYAK
1376 | 0xE1 0xB3 0xAE..0xB1 #Lo [4] VEDIC SIGN HEXIFORM LONG ANUSVARA....
1377 | 0xE1 0xB3 0xB2..0xB3 #Mc [2] VEDIC SIGN ARDHAVISARGA..VEDIC SIG...
1378 | 0xE1 0xB3 0xB4 #Mn VEDIC TONE CANDRA ABOVE
1379 | 0xE1 0xB3 0xB5..0xB6 #Lo [2] VEDIC SIGN JIHVAMULIYA..VEDIC SIGN...
1380 | 0xE1 0xB3 0xB8..0xB9 #Mn [2] VEDIC TONE RING ABOVE..VEDIC TONE ...
1381 | 0xE1 0xB4 0x80..0xAB #L& [44] LATIN LETTER SMALL CAPITAL A..CYRI...
1382 | 0xE1 0xB4 0xAC..0xFF #Lm [63] MODIFIER LETTER CAPITAL A..GREEK S...
1383 | 0xE1 0xB5 0x00..0xAA #
1384 | 0xE1 0xB5 0xAB..0xB7 #L& [13] LATIN SMALL LETTER UE..LATIN SMALL...
1385 | 0xE1 0xB5 0xB8 #Lm MODIFIER LETTER CYRILLIC EN
1386 | 0xE1 0xB5 0xB9..0xFF #L& [34] LATIN SMALL LETTER INSULAR G..LATI...
1387 | 0xE1 0xB6 0x00..0x9A #
1388 | 0xE1 0xB6 0x9B..0xBF #Lm [37] MODIFIER LETTER SMALL TURNED ALPHA...
1389 | 0xE1 0xB7 0x80..0xB5 #Mn [54] COMBINING DOTTED GRAVE ACCENT..COM...
1390 | 0xE1 0xB7 0xBB..0xBF #Mn [5] COMBINING DELETION MARK..COMBINING...
1391 | 0xE1 0xB8 0x80..0xFF #L& [278] LATIN CAPITAL LETTER A WITH RI...
1392 | 0xE1 0xB9..0xBB 0x00..0xFF #
1393 | 0xE1 0xBC 0x00..0x95 #
1394 | 0xE1 0xBC 0x98..0x9D #L& [6] GREEK CAPITAL LETTER EPSILON WITH ...
1395 | 0xE1 0xBC 0xA0..0xFF #L& [38] GREEK SMALL LETTER ETA WITH PSILI....
1396 | 0xE1 0xBD 0x00..0x85 #
1397 | 0xE1 0xBD 0x88..0x8D #L& [6] GREEK CAPITAL LETTER OMICRON WITH ...
1398 | 0xE1 0xBD 0x90..0x97 #L& [8] GREEK SMALL LETTER UPSILON WITH PS...
1399 | 0xE1 0xBD 0x99 #L& GREEK CAPITAL LETTER UPSILON WITH ...
1400 | 0xE1 0xBD 0x9B #L& GREEK CAPITAL LETTER UPSILON WITH ...
1401 | 0xE1 0xBD 0x9D #L& GREEK CAPITAL LETTER UPSILON WITH ...
1402 | 0xE1 0xBD 0x9F..0xBD #L& [31] GREEK CAPITAL LETTER UPSILON WITH ...
1403 | 0xE1 0xBE 0x80..0xB4 #L& [53] GREEK SMALL LETTER ALPHA WITH PSIL...
1404 | 0xE1 0xBE 0xB6..0xBC #L& [7] GREEK SMALL LETTER ALPHA WITH PERI...
1405 | 0xE1 0xBE 0xBE #L& GREEK PROSGEGRAMMENI
1406 | 0xE1 0xBF 0x82..0x84 #L& [3] GREEK SMALL LETTER ETA WITH VARIA ...
1407 | 0xE1 0xBF 0x86..0x8C #L& [7] GREEK SMALL LETTER ETA WITH PERISP...
1408 | 0xE1 0xBF 0x90..0x93 #L& [4] GREEK SMALL LETTER IOTA WITH VRACH...
1409 | 0xE1 0xBF 0x96..0x9B #L& [6] GREEK SMALL LETTER IOTA WITH PERIS...
1410 | 0xE1 0xBF 0xA0..0xAC #L& [13] GREEK SMALL LETTER UPSILON WITH VR...
1411 | 0xE1 0xBF 0xB2..0xB4 #L& [3] GREEK SMALL LETTER OMEGA WITH VARI...
1412 | 0xE1 0xBF 0xB6..0xBC #L& [7] GREEK SMALL LETTER OMEGA WITH PERI...
1413 | 0xE2 0x80 0xBF..0xFF #Pc [2] UNDERTIE..CHARACTER TIE
1414 | 0xE2 0x81 0x00..0x80 #
1415 | 0xE2 0x81 0x94 #Pc INVERTED UNDERTIE
1416 | 0xE2 0x81 0xB1 #Lm SUPERSCRIPT LATIN SMALL LETTER I
1417 | 0xE2 0x81 0xBF #Lm SUPERSCRIPT LATIN SMALL LETTER N
1418 | 0xE2 0x82 0x90..0x9C #Lm [13] LATIN SUBSCRIPT SMALL LETTER A..LA...
1419 | 0xE2 0x83 0x90..0x9C #Mn [13] COMBINING LEFT HARPOON ABOVE..COMB...
1420 | 0xE2 0x83 0xA1 #Mn COMBINING LEFT RIGHT ARROW ABOVE
1421 | 0xE2 0x83 0xA5..0xB0 #Mn [12] COMBINING REVERSE SOLIDUS OVERLAY....
1422 | 0xE2 0x84 0x82 #L& DOUBLE-STRUCK CAPITAL C
1423 | 0xE2 0x84 0x87 #L& EULER CONSTANT
1424 | 0xE2 0x84 0x8A..0x93 #L& [10] SCRIPT SMALL G..SCRIPT SMALL L
1425 | 0xE2 0x84 0x95 #L& DOUBLE-STRUCK CAPITAL N
1426 | 0xE2 0x84 0x98 #Sm SCRIPT CAPITAL P
1427 | 0xE2 0x84 0x99..0x9D #L& [5] DOUBLE-STRUCK CAPITAL P..DOUBLE-ST...
1428 | 0xE2 0x84 0xA4 #L& DOUBLE-STRUCK CAPITAL Z
1429 | 0xE2 0x84 0xA6 #L& OHM SIGN
1430 | 0xE2 0x84 0xA8 #L& BLACK-LETTER CAPITAL Z
1431 | 0xE2 0x84 0xAA..0xAD #L& [4] KELVIN SIGN..BLACK-LETTER CAPITAL C
1432 | 0xE2 0x84 0xAE #So ESTIMATED SYMBOL
1433 | 0xE2 0x84 0xAF..0xB4 #L& [6] SCRIPT SMALL E..SCRIPT SMALL O
1434 | 0xE2 0x84 0xB5..0xB8 #Lo [4] ALEF SYMBOL..DALET SYMBOL
1435 | 0xE2 0x84 0xB9 #L& INFORMATION SOURCE
1436 | 0xE2 0x84 0xBC..0xBF #L& [4] DOUBLE-STRUCK SMALL PI..DOUBLE-STR...
1437 | 0xE2 0x85 0x85..0x89 #L& [5] DOUBLE-STRUCK ITALIC CAPITAL D..DO...
1438 | 0xE2 0x85 0x8E #L& TURNED SMALL F
1439 | 0xE2 0x85 0xA0..0xFF #Nl [35] ROMAN NUMERAL ONE..ROMAN NUMERAL T...
1440 | 0xE2 0x86 0x00..0x82 #
1441 | 0xE2 0x86 0x83..0x84 #L& [2] ROMAN NUMERAL REVERSED ONE HUNDRED...
1442 | 0xE2 0x86 0x85..0x88 #Nl [4] ROMAN NUMERAL SIX LATE FORM..ROMAN...
1443 | 0xE2 0xB0 0x80..0xAE #L& [47] GLAGOLITIC CAPITAL LETTER AZU..GLA...
1444 | 0xE2 0xB0 0xB0..0xFF #L& [47] GLAGOLITIC SMALL LETTER AZU..GLAGO...
1445 | 0xE2 0xB1 0x00..0x9E #
1446 | 0xE2 0xB1 0xA0..0xBB #L& [28] LATIN CAPITAL LETTER L WITH DOUBLE...
1447 | 0xE2 0xB1 0xBC..0xBD #Lm [2] LATIN SUBSCRIPT SMALL LETTER J..MO...
1448 | 0xE2 0xB1 0xBE..0xFF #L& [103] LATIN CAPITAL LETTER S WITH SW...
1449 | 0xE2 0xB2..0xB2 0x00..0xFF #
1450 | 0xE2 0xB3 0x00..0xA4 #
1451 | 0xE2 0xB3 0xAB..0xAE #L& [4] COPTIC CAPITAL LETTER CRYPTOGRAMMI...
1452 | 0xE2 0xB3 0xAF..0xB1 #Mn [3] COPTIC COMBINING NI ABOVE..COPTIC ...
1453 | 0xE2 0xB3 0xB2..0xB3 #L& [2] COPTIC CAPITAL LETTER BOHAIRIC KHE...
1454 | 0xE2 0xB4 0x80..0xA5 #L& [38] GEORGIAN SMALL LETTER AN..GEORGIAN...
1455 | 0xE2 0xB4 0xA7 #L& GEORGIAN SMALL LETTER YN
1456 | 0xE2 0xB4 0xAD #L& GEORGIAN SMALL LETTER AEN
1457 | 0xE2 0xB4 0xB0..0xFF #Lo [56] TIFINAGH LETTER YA..TIFINAGH LETTE...
1458 | 0xE2 0xB5 0x00..0xA7 #
1459 | 0xE2 0xB5 0xAF #Lm TIFINAGH MODIFIER LETTER LABIALIZA...
1460 | 0xE2 0xB5 0xBF #Mn TIFINAGH CONSONANT JOINER
1461 | 0xE2 0xB6 0x80..0x96 #Lo [23] ETHIOPIC SYLLABLE LOA..ETHIOPIC SY...
1462 | 0xE2 0xB6 0xA0..0xA6 #Lo [7] ETHIOPIC SYLLABLE SSA..ETHIOPIC SY...
1463 | 0xE2 0xB6 0xA8..0xAE #Lo [7] ETHIOPIC SYLLABLE CCA..ETHIOPIC SY...
1464 | 0xE2 0xB6 0xB0..0xB6 #Lo [7] ETHIOPIC SYLLABLE ZZA..ETHIOPIC SY...
1465 | 0xE2 0xB6 0xB8..0xBE #Lo [7] ETHIOPIC SYLLABLE CCHA..ETHIOPIC S...
1466 | 0xE2 0xB7 0x80..0x86 #Lo [7] ETHIOPIC SYLLABLE QYA..ETHIOPIC SY...
1467 | 0xE2 0xB7 0x88..0x8E #Lo [7] ETHIOPIC SYLLABLE KYA..ETHIOPIC SY...
1468 | 0xE2 0xB7 0x90..0x96 #Lo [7] ETHIOPIC SYLLABLE XYA..ETHIOPIC SY...
1469 | 0xE2 0xB7 0x98..0x9E #Lo [7] ETHIOPIC SYLLABLE GYA..ETHIOPIC SY...
1470 | 0xE2 0xB7 0xA0..0xBF #Mn [32] COMBINING CYRILLIC LETTER BE..COMB...
1471 | 0xE3 0x80 0x85 #Lm IDEOGRAPHIC ITERATION MARK
1472 | 0xE3 0x80 0x86 #Lo IDEOGRAPHIC CLOSING MARK
1473 | 0xE3 0x80 0x87 #Nl IDEOGRAPHIC NUMBER ZERO
1474 | 0xE3 0x80 0xA1..0xA9 #Nl [9] HANGZHOU NUMERAL ONE..HANGZHOU NUM...
1475 | 0xE3 0x80 0xAA..0xAD #Mn [4] IDEOGRAPHIC LEVEL TONE MARK..IDEOG...
1476 | 0xE3 0x80 0xAE..0xAF #Mc [2] HANGUL SINGLE DOT TONE MARK..HANGU...
1477 | 0xE3 0x80 0xB1..0xB5 #Lm [5] VERTICAL KANA REPEAT MARK..VERTICA...
1478 | 0xE3 0x80 0xB8..0xBA #Nl [3] HANGZHOU NUMERAL TEN..HANGZHOU NUM...
1479 | 0xE3 0x80 0xBB #Lm VERTICAL IDEOGRAPHIC ITERATION MARK
1480 | 0xE3 0x80 0xBC #Lo MASU MARK
1481 | 0xE3 0x81 0x81..0xFF #Lo [86] HIRAGANA LETTER SMALL A..HIRAGANA ...
1482 | 0xE3 0x82 0x00..0x96 #
1483 | 0xE3 0x82 0x99..0x9A #Mn [2] COMBINING KATAKANA-HIRAGANA VOICED...
1484 | 0xE3 0x82 0x9B..0x9C #Sk [2] KATAKANA-HIRAGANA VOICED SOUND MAR...
1485 | 0xE3 0x82 0x9D..0x9E #Lm [2] HIRAGANA ITERATION MARK..HIRAGANA ...
1486 | 0xE3 0x82 0x9F #Lo HIRAGANA DIGRAPH YORI
1487 | 0xE3 0x82 0xA1..0xFF #Lo [90] KATAKANA LETTER SMALL A..KATAKANA ...
1488 | 0xE3 0x83 0x00..0xBA #
1489 | 0xE3 0x83 0xBC..0xBE #Lm [3] KATAKANA-HIRAGANA PROLONGED SOUND ...
1490 | 0xE3 0x83 0xBF #Lo KATAKANA DIGRAPH KOTO
1491 | 0xE3 0x84 0x85..0xAD #Lo [41] BOPOMOFO LETTER B..BOPOMOFO LETTER IH
1492 | 0xE3 0x84 0xB1..0xFF #Lo [94] HANGUL LETTER KIYEOK..HANGUL L...
1493 | 0xE3 0x85..0x85 0x00..0xFF #
1494 | 0xE3 0x86 0x00..0x8E #
1495 | 0xE3 0x86 0xA0..0xBA #Lo [27] BOPOMOFO LETTER BU..BOPOMOFO LETTE...
1496 | 0xE3 0x87 0xB0..0xBF #Lo [16] KATAKANA LETTER SMALL KU..KATAKANA...
1497 | 0xE3 0x90 0x80..0xFF #Lo [6582] CJK UNIFIED IDEOGRAPH-3400..C...
1498 | 0xE3 0x91..0xFF 0x00..0xFF #
1499 | 0xE4 0x00 0x00..0xFF #
1500 | 0xE4 0x01..0xB5 0x00..0xFF #
1501 | 0xE4 0xB6 0x00..0xB5 #
1502 | 0xE4 0xB8 0x80..0xFF #Lo [20950] CJK UNIFIED IDEOGRAPH-...
1503 | 0xE4 0xB9..0xFF 0x00..0xFF #
1504 | 0xE5..0xE8 0x00..0xFF 0x00..0xFF #
1505 | 0xE9 0x00 0x00..0xFF #
1506 | 0xE9 0x01..0xBE 0x00..0xFF #
1507 | 0xE9 0xBF 0x00..0x95 #
1508 | 0xEA 0x80 0x80..0x94 #Lo [21] YI SYLLABLE IT..YI SYLLABLE E
1509 | 0xEA 0x80 0x95 #Lm YI SYLLABLE WU
1510 | 0xEA 0x80 0x96..0xFF #Lo [1143] YI SYLLABLE BIT..YI SYLLABLE YYR
1511 | 0xEA 0x81..0x91 0x00..0xFF #
1512 | 0xEA 0x92 0x00..0x8C #
1513 | 0xEA 0x93 0x90..0xB7 #Lo [40] LISU LETTER BA..LISU LETTER OE
1514 | 0xEA 0x93 0xB8..0xBD #Lm [6] LISU LETTER TONE MYA TI..LISU LETT...
1515 | 0xEA 0x94 0x80..0xFF #Lo [268] VAI SYLLABLE EE..VAI SYLLABLE NG
1516 | 0xEA 0x95..0x97 0x00..0xFF #
1517 | 0xEA 0x98 0x00..0x8B #
1518 | 0xEA 0x98 0x8C #Lm VAI SYLLABLE LENGTHENER
1519 | 0xEA 0x98 0x90..0x9F #Lo [16] VAI SYLLABLE NDOLE FA..VAI SYMBOL ...
1520 | 0xEA 0x98 0xA0..0xA9 #Nd [10] VAI DIGIT ZERO..VAI DIGIT NINE
1521 | 0xEA 0x98 0xAA..0xAB #Lo [2] VAI SYLLABLE NDOLE MA..VAI SYLLABL...
1522 | 0xEA 0x99 0x80..0xAD #L& [46] CYRILLIC CAPITAL LETTER ZEMLYA..CY...
1523 | 0xEA 0x99 0xAE #Lo CYRILLIC LETTER MULTIOCULAR O
1524 | 0xEA 0x99 0xAF #Mn COMBINING CYRILLIC VZMET
1525 | 0xEA 0x99 0xB4..0xBD #Mn [10] COMBINING CYRILLIC LETTER UKRAINIA...
1526 | 0xEA 0x99 0xBF #Lm CYRILLIC PAYEROK
1527 | 0xEA 0x9A 0x80..0x9B #L& [28] CYRILLIC CAPITAL LETTER DWE..CYRIL...
1528 | 0xEA 0x9A 0x9C..0x9D #Lm [2] MODIFIER LETTER CYRILLIC HARD SIGN...
1529 | 0xEA 0x9A 0x9E..0x9F #Mn [2] COMBINING CYRILLIC LETTER EF..COMB...
1530 | 0xEA 0x9A 0xA0..0xFF #Lo [70] BAMUM LETTER A..BAMUM LETTER KI
1531 | 0xEA 0x9B 0x00..0xA5 #
1532 | 0xEA 0x9B 0xA6..0xAF #Nl [10] BAMUM LETTER MO..BAMUM LETTER KOGHOM
1533 | 0xEA 0x9B 0xB0..0xB1 #Mn [2] BAMUM COMBINING MARK KOQNDON..BAMU...
1534 | 0xEA 0x9C 0x97..0x9F #Lm [9] MODIFIER LETTER DOT VERTICAL BAR.....
1535 | 0xEA 0x9C 0xA2..0xFF #L& [78] LATIN CAPITAL LETTER EGYPTOLOGICAL...
1536 | 0xEA 0x9D 0x00..0xAF #
1537 | 0xEA 0x9D 0xB0 #Lm MODIFIER LETTER US
1538 | 0xEA 0x9D 0xB1..0xFF #L& [23] LATIN SMALL LETTER DUM..LATIN SMAL...
1539 | 0xEA 0x9E 0x00..0x87 #
1540 | 0xEA 0x9E 0x88 #Lm MODIFIER LETTER LOW CIRCUMFLEX ACCENT
1541 | 0xEA 0x9E 0x8B..0x8E #L& [4] LATIN CAPITAL LETTER SALTILLO..LAT...
1542 | 0xEA 0x9E 0x8F #Lo LATIN LETTER SINOLOGICAL DOT
1543 | 0xEA 0x9E 0x90..0xAE #L& [31] LATIN CAPITAL LETTER N WITH DESCEN...
1544 | 0xEA 0x9E 0xB0..0xB7 #L& [8] LATIN CAPITAL LETTER TURNED K..LAT...
1545 | 0xEA 0x9F 0xB7 #Lo LATIN EPIGRAPHIC LETTER SIDEWAYS I
1546 | 0xEA 0x9F 0xB8..0xB9 #Lm [2] MODIFIER LETTER CAPITAL H WITH STR...
1547 | 0xEA 0x9F 0xBA #L& LATIN LETTER SMALL CAPITAL TURNED M
1548 | 0xEA 0x9F 0xBB..0xFF #Lo [7] LATIN EPIGRAPHIC LETTER REVERSED F...
1549 | 0xEA 0xA0 0x00..0x81 #
1550 | 0xEA 0xA0 0x82 #Mn SYLOTI NAGRI SIGN DVISVARA
1551 | 0xEA 0xA0 0x83..0x85 #Lo [3] SYLOTI NAGRI LETTER U..SYLOTI NAGR...
1552 | 0xEA 0xA0 0x86 #Mn SYLOTI NAGRI SIGN HASANTA
1553 | 0xEA 0xA0 0x87..0x8A #Lo [4] SYLOTI NAGRI LETTER KO..SYLOTI NAG...
1554 | 0xEA 0xA0 0x8B #Mn SYLOTI NAGRI SIGN ANUSVARA
1555 | 0xEA 0xA0 0x8C..0xA2 #Lo [23] SYLOTI NAGRI LETTER CO..SYLOTI NAG...
1556 | 0xEA 0xA0 0xA3..0xA4 #Mc [2] SYLOTI NAGRI VOWEL SIGN A..SYLOTI ...
1557 | 0xEA 0xA0 0xA5..0xA6 #Mn [2] SYLOTI NAGRI VOWEL SIGN U..SYLOTI ...
1558 | 0xEA 0xA0 0xA7 #Mc SYLOTI NAGRI VOWEL SIGN OO
1559 | 0xEA 0xA1 0x80..0xB3 #Lo [52] PHAGS-PA LETTER KA..PHAGS-PA LETTE...
1560 | 0xEA 0xA2 0x80..0x81 #Mc [2] SAURASHTRA SIGN ANUSVARA..SAURASHT...
1561 | 0xEA 0xA2 0x82..0xB3 #Lo [50] SAURASHTRA LETTER A..SAURASHTRA LE...
1562 | 0xEA 0xA2 0xB4..0xFF #Mc [16] SAURASHTRA CONSONANT SIGN HAARU..S...
1563 | 0xEA 0xA3 0x00..0x83 #
1564 | 0xEA 0xA3 0x84..0x85 #Mn [2] SAURASHTRA SIGN VIRAMA..SAURASHTRA...
1565 | 0xEA 0xA3 0x90..0x99 #Nd [10] SAURASHTRA DIGIT ZERO..SAURASHTRA ...
1566 | 0xEA 0xA3 0xA0..0xB1 #Mn [18] COMBINING DEVANAGARI DIGIT ZERO..C...
1567 | 0xEA 0xA3 0xB2..0xB7 #Lo [6] DEVANAGARI SIGN SPACING CANDRABIND...
1568 | 0xEA 0xA3 0xBB #Lo DEVANAGARI HEADSTROKE
1569 | 0xEA 0xA3 0xBD #Lo DEVANAGARI JAIN OM
1570 | 0xEA 0xA4 0x80..0x89 #Nd [10] KAYAH LI DIGIT ZERO..KAYAH LI DIGI...
1571 | 0xEA 0xA4 0x8A..0xA5 #Lo [28] KAYAH LI LETTER KA..KAYAH LI LETTE...
1572 | 0xEA 0xA4 0xA6..0xAD #Mn [8] KAYAH LI VOWEL UE..KAYAH LI TONE C...
1573 | 0xEA 0xA4 0xB0..0xFF #Lo [23] REJANG LETTER KA..REJANG LETTER A
1574 | 0xEA 0xA5 0x00..0x86 #
1575 | 0xEA 0xA5 0x87..0x91 #Mn [11] REJANG VOWEL SIGN I..REJANG CONSON...
1576 | 0xEA 0xA5 0x92..0x93 #Mc [2] REJANG CONSONANT SIGN H..REJANG VI...
1577 | 0xEA 0xA5 0xA0..0xBC #Lo [29] HANGUL CHOSEONG TIKEUT-MIEUM..HANG...
1578 | 0xEA 0xA6 0x80..0x82 #Mn [3] JAVANESE SIGN PANYANGGA..JAVANESE ...
1579 | 0xEA 0xA6 0x83 #Mc JAVANESE SIGN WIGNYAN
1580 | 0xEA 0xA6 0x84..0xB2 #Lo [47] JAVANESE LETTER A..JAVANESE LETTER HA
1581 | 0xEA 0xA6 0xB3 #Mn JAVANESE SIGN CECAK TELU
1582 | 0xEA 0xA6 0xB4..0xB5 #Mc [2] JAVANESE VOWEL SIGN TARUNG..JAVANE...
1583 | 0xEA 0xA6 0xB6..0xB9 #Mn [4] JAVANESE VOWEL SIGN WULU..JAVANESE...
1584 | 0xEA 0xA6 0xBA..0xBB #Mc [2] JAVANESE VOWEL SIGN TALING..JAVANE...
1585 | 0xEA 0xA6 0xBC #Mn JAVANESE VOWEL SIGN PEPET
1586 | 0xEA 0xA6 0xBD..0xFF #Mc [4] JAVANESE CONSONANT SIGN KERET..JAV...
1587 | 0xEA 0xA7 0x00..0x80 #
1588 | 0xEA 0xA7 0x8F #Lm JAVANESE PANGRANGKEP
1589 | 0xEA 0xA7 0x90..0x99 #Nd [10] JAVANESE DIGIT ZERO..JAVANESE DIGI...
1590 | 0xEA 0xA7 0xA0..0xA4 #Lo [5] MYANMAR LETTER SHAN GHA..MYANMAR L...
1591 | 0xEA 0xA7 0xA5 #Mn MYANMAR SIGN SHAN SAW
1592 | 0xEA 0xA7 0xA6 #Lm MYANMAR MODIFIER LETTER SHAN REDUP...
1593 | 0xEA 0xA7 0xA7..0xAF #Lo [9] MYANMAR LETTER TAI LAING NYA..MYAN...
1594 | 0xEA 0xA7 0xB0..0xB9 #Nd [10] MYANMAR TAI LAING DIGIT ZERO..MYAN...
1595 | 0xEA 0xA7 0xBA..0xBE #Lo [5] MYANMAR LETTER TAI LAING LLA..MYAN...
1596 | 0xEA 0xA8 0x80..0xA8 #Lo [41] CHAM LETTER A..CHAM LETTER HA
1597 | 0xEA 0xA8 0xA9..0xAE #Mn [6] CHAM VOWEL SIGN AA..CHAM VOWEL SIG...
1598 | 0xEA 0xA8 0xAF..0xB0 #Mc [2] CHAM VOWEL SIGN O..CHAM VOWEL SIGN AI
1599 | 0xEA 0xA8 0xB1..0xB2 #Mn [2] CHAM VOWEL SIGN AU..CHAM VOWEL SIG...
1600 | 0xEA 0xA8 0xB3..0xB4 #Mc [2] CHAM CONSONANT SIGN YA..CHAM CONSO...
1601 | 0xEA 0xA8 0xB5..0xB6 #Mn [2] CHAM CONSONANT SIGN LA..CHAM CONSO...
1602 | 0xEA 0xA9 0x80..0x82 #Lo [3] CHAM LETTER FINAL K..CHAM LETTER F...
1603 | 0xEA 0xA9 0x83 #Mn CHAM CONSONANT SIGN FINAL NG
1604 | 0xEA 0xA9 0x84..0x8B #Lo [8] CHAM LETTER FINAL CH..CHAM LETTER ...
1605 | 0xEA 0xA9 0x8C #Mn CHAM CONSONANT SIGN FINAL M
1606 | 0xEA 0xA9 0x8D #Mc CHAM CONSONANT SIGN FINAL H
1607 | 0xEA 0xA9 0x90..0x99 #Nd [10] CHAM DIGIT ZERO..CHAM DIGIT NINE
1608 | 0xEA 0xA9 0xA0..0xAF #Lo [16] MYANMAR LETTER KHAMTI GA..MYANMAR ...
1609 | 0xEA 0xA9 0xB0 #Lm MYANMAR MODIFIER LETTER KHAMTI RED...
1610 | 0xEA 0xA9 0xB1..0xB6 #Lo [6] MYANMAR LETTER KHAMTI XA..MYANMAR ...
1611 | 0xEA 0xA9 0xBA #Lo MYANMAR LETTER AITON RA
1612 | 0xEA 0xA9 0xBB #Mc MYANMAR SIGN PAO KAREN TONE
1613 | 0xEA 0xA9 0xBC #Mn MYANMAR SIGN TAI LAING TONE-2
1614 | 0xEA 0xA9 0xBD #Mc MYANMAR SIGN TAI LAING TONE-5
1615 | 0xEA 0xA9 0xBE..0xFF #Lo [50] MYANMAR LETTER SHWE PALAUNG CHA..T...
1616 | 0xEA 0xAA 0x00..0xAF #
1617 | 0xEA 0xAA 0xB0 #Mn TAI VIET MAI KANG
1618 | 0xEA 0xAA 0xB1 #Lo TAI VIET VOWEL AA
1619 | 0xEA 0xAA 0xB2..0xB4 #Mn [3] TAI VIET VOWEL I..TAI VIET VOWEL U
1620 | 0xEA 0xAA 0xB5..0xB6 #Lo [2] TAI VIET VOWEL E..TAI VIET VOWEL O
1621 | 0xEA 0xAA 0xB7..0xB8 #Mn [2] TAI VIET MAI KHIT..TAI VIET VOWEL IA
1622 | 0xEA 0xAA 0xB9..0xBD #Lo [5] TAI VIET VOWEL UEA..TAI VIET VOWEL AN
1623 | 0xEA 0xAA 0xBE..0xBF #Mn [2] TAI VIET VOWEL AM..TAI VIET TONE M...
1624 | 0xEA 0xAB 0x80 #Lo TAI VIET TONE MAI NUENG
1625 | 0xEA 0xAB 0x81 #Mn TAI VIET TONE MAI THO
1626 | 0xEA 0xAB 0x82 #Lo TAI VIET TONE MAI SONG
1627 | 0xEA 0xAB 0x9B..0x9C #Lo [2] TAI VIET SYMBOL KON..TAI VIET SYMB...
1628 | 0xEA 0xAB 0x9D #Lm TAI VIET SYMBOL SAM
1629 | 0xEA 0xAB 0xA0..0xAA #Lo [11] MEETEI MAYEK LETTER E..MEETEI MAYE...
1630 | 0xEA 0xAB 0xAB #Mc MEETEI MAYEK VOWEL SIGN II
1631 | 0xEA 0xAB 0xAC..0xAD #Mn [2] MEETEI MAYEK VOWEL SIGN UU..MEETEI...
1632 | 0xEA 0xAB 0xAE..0xAF #Mc [2] MEETEI MAYEK VOWEL SIGN AU..MEETEI...
1633 | 0xEA 0xAB 0xB2 #Lo MEETEI MAYEK ANJI
1634 | 0xEA 0xAB 0xB3..0xB4 #Lm [2] MEETEI MAYEK SYLLABLE REPETITION M...
1635 | 0xEA 0xAB 0xB5 #Mc MEETEI MAYEK VOWEL SIGN VISARGA
1636 | 0xEA 0xAB 0xB6 #Mn MEETEI MAYEK VIRAMA
1637 | 0xEA 0xAC 0x81..0x86 #Lo [6] ETHIOPIC SYLLABLE TTHU..ETHIOPIC S...
1638 | 0xEA 0xAC 0x89..0x8E #Lo [6] ETHIOPIC SYLLABLE DDHU..ETHIOPIC S...
1639 | 0xEA 0xAC 0x91..0x96 #Lo [6] ETHIOPIC SYLLABLE DZU..ETHIOPIC SY...
1640 | 0xEA 0xAC 0xA0..0xA6 #Lo [7] ETHIOPIC SYLLABLE CCHHA..ETHIOPIC ...
1641 | 0xEA 0xAC 0xA8..0xAE #Lo [7] ETHIOPIC SYLLABLE BBA..ETHIOPIC SY...
1642 | 0xEA 0xAC 0xB0..0xFF #L& [43] LATIN SMALL LETTER BARRED ALPHA..L...
1643 | 0xEA 0xAD 0x00..0x9A #
1644 | 0xEA 0xAD 0x9C..0x9F #Lm [4] MODIFIER LETTER SMALL HENG..MODIFI...
1645 | 0xEA 0xAD 0xA0..0xA5 #L& [6] LATIN SMALL LETTER SAKHA YAT..GREE...
1646 | 0xEA 0xAD 0xB0..0xFF #L& [80] CHEROKEE SMALL LETTER A..CHEROKEE ...
1647 | 0xEA 0xAE 0x00..0xBF #
1648 | 0xEA 0xAF 0x80..0xA2 #Lo [35] MEETEI MAYEK LETTER KOK..MEETEI MA...
1649 | 0xEA 0xAF 0xA3..0xA4 #Mc [2] MEETEI MAYEK VOWEL SIGN ONAP..MEET...
1650 | 0xEA 0xAF 0xA5 #Mn MEETEI MAYEK VOWEL SIGN ANAP
1651 | 0xEA 0xAF 0xA6..0xA7 #Mc [2] MEETEI MAYEK VOWEL SIGN YENAP..MEE...
1652 | 0xEA 0xAF 0xA8 #Mn MEETEI MAYEK VOWEL SIGN UNAP
1653 | 0xEA 0xAF 0xA9..0xAA #Mc [2] MEETEI MAYEK VOWEL SIGN CHEINAP..M...
1654 | 0xEA 0xAF 0xAC #Mc MEETEI MAYEK LUM IYEK
1655 | 0xEA 0xAF 0xAD #Mn MEETEI MAYEK APUN IYEK
1656 | 0xEA 0xAF 0xB0..0xB9 #Nd [10] MEETEI MAYEK DIGIT ZERO..MEETEI MA...
1657 | 0xEA 0xB0 0x80..0xFF #Lo [11172] HANGUL SYLLABLE GA..HA...
1658 | 0xEA 0xB1..0xFF 0x00..0xFF #
1659 | 0xEB..0xEC 0x00..0xFF 0x00..0xFF #
1660 | 0xED 0x00 0x00..0xFF #
1661 | 0xED 0x01..0x9D 0x00..0xFF #
1662 | 0xED 0x9E 0x00..0xA3 #
1663 | 0xED 0x9E 0xB0..0xFF #Lo [23] HANGUL JUNGSEONG O-YEO..HANGUL JUN...
1664 | 0xED 0x9F 0x00..0x86 #
1665 | 0xED 0x9F 0x8B..0xBB #Lo [49] HANGUL JONGSEONG NIEUN-RIEUL..HANG...
1666 | 0xEF 0xA4 0x80..0xFF #Lo [366] CJK COMPATIBILITY IDEOGRAPH-F9...
1667 | 0xEF 0xA5..0xA8 0x00..0xFF #
1668 | 0xEF 0xA9 0x00..0xAD #
1669 | 0xEF 0xA9 0xB0..0xFF #Lo [106] CJK COMPATIBILITY IDEOGRAPH-FA...
1670 | 0xEF 0xAA..0xAA 0x00..0xFF #
1671 | 0xEF 0xAB 0x00..0x99 #
1672 | 0xEF 0xAC 0x80..0x86 #L& [7] LATIN SMALL LIGATURE FF..LATIN SMA...
1673 | 0xEF 0xAC 0x93..0x97 #L& [5] ARMENIAN SMALL LIGATURE MEN NOW..A...
1674 | 0xEF 0xAC 0x9D #Lo HEBREW LETTER YOD WITH HIRIQ
1675 | 0xEF 0xAC 0x9E #Mn HEBREW POINT JUDEO-SPANISH VARIKA
1676 | 0xEF 0xAC 0x9F..0xA8 #Lo [10] HEBREW LIGATURE YIDDISH YOD YOD PA...
1677 | 0xEF 0xAC 0xAA..0xB6 #Lo [13] HEBREW LETTER SHIN WITH SHIN DOT.....
1678 | 0xEF 0xAC 0xB8..0xBC #Lo [5] HEBREW LETTER TET WITH DAGESH..HEB...
1679 | 0xEF 0xAC 0xBE #Lo HEBREW LETTER MEM WITH DAGESH
1680 | 0xEF 0xAD 0x80..0x81 #Lo [2] HEBREW LETTER NUN WITH DAGESH..HEB...
1681 | 0xEF 0xAD 0x83..0x84 #Lo [2] HEBREW LETTER FINAL PE WITH DAGESH...
1682 | 0xEF 0xAD 0x86..0xFF #Lo [108] HEBREW LETTER TSADI WITH DAGESH..A...
1683 | 0xEF 0xAE 0x00..0xB1 #
1684 | 0xEF 0xAF 0x93..0xFF #Lo [363] ARABIC LETTER NG ISOLATED FORM...
1685 | 0xEF 0xB0..0xB3 0x00..0xFF #
1686 | 0xEF 0xB4 0x00..0xBD #
1687 | 0xEF 0xB5 0x90..0xFF #Lo [64] ARABIC LIGATURE TEH WITH JEEM WITH...
1688 | 0xEF 0xB6 0x00..0x8F #
1689 | 0xEF 0xB6 0x92..0xFF #Lo [54] ARABIC LIGATURE MEEM WITH JEEM WIT...
1690 | 0xEF 0xB7 0x00..0x87 #
1691 | 0xEF 0xB7 0xB0..0xBB #Lo [12] ARABIC LIGATURE SALLA USED AS KORA...
1692 | 0xEF 0xB8 0x80..0x8F #Mn [16] VARIATION SELECTOR-1..VARIATION SE...
1693 | 0xEF 0xB8 0xA0..0xAF #Mn [16] COMBINING LIGATURE LEFT HALF..COMB...
1694 | 0xEF 0xB8 0xB3..0xB4 #Pc [2] PRESENTATION FORM FOR VERTICAL LOW...
1695 | 0xEF 0xB9 0x8D..0x8F #Pc [3] DASHED LOW LINE..WAVY LOW LINE
1696 | 0xEF 0xB9 0xB0..0xB4 #Lo [5] ARABIC FATHATAN ISOLATED FORM..ARA...
1697 | 0xEF 0xB9 0xB6..0xFF #Lo [135] ARABIC FATHA ISOLATED FORM..AR...
1698 | 0xEF 0xBA..0xBA 0x00..0xFF #
1699 | 0xEF 0xBB 0x00..0xBC #
1700 | 0xEF 0xBC 0x90..0x99 #Nd [10] FULLWIDTH DIGIT ZERO..FULLWIDTH DI...
1701 | 0xEF 0xBC 0xA1..0xBA #L& [26] FULLWIDTH LATIN CAPITAL LETTER A.....
1702 | 0xEF 0xBC 0xBF #Pc FULLWIDTH LOW LINE
1703 | 0xEF 0xBD 0x81..0x9A #L& [26] FULLWIDTH LATIN SMALL LETTER A..FU...
1704 | 0xEF 0xBD 0xA6..0xAF #Lo [10] HALFWIDTH KATAKANA LETTER WO..HALF...
1705 | 0xEF 0xBD 0xB0 #Lm HALFWIDTH KATAKANA-HIRAGANA PROLON...
1706 | 0xEF 0xBD 0xB1..0xFF #Lo [45] HALFWIDTH KATAKANA LETTER A..HALFW...
1707 | 0xEF 0xBE 0x00..0x9D #
1708 | 0xEF 0xBE 0x9E..0x9F #Lm [2] HALFWIDTH KATAKANA VOICED SOUND MA...
1709 | 0xEF 0xBE 0xA0..0xBE #Lo [31] HALFWIDTH HANGUL FILLER..HALFWIDTH...
1710 | 0xEF 0xBF 0x82..0x87 #Lo [6] HALFWIDTH HANGUL LETTER A..HALFWID...
1711 | 0xEF 0xBF 0x8A..0x8F #Lo [6] HALFWIDTH HANGUL LETTER YEO..HALFW...
1712 | 0xEF 0xBF 0x92..0x97 #Lo [6] HALFWIDTH HANGUL LETTER YO..HALFWI...
1713 | 0xEF 0xBF 0x9A..0x9C #Lo [3] HALFWIDTH HANGUL LETTER EU..HALFWI...
1714 | 0xF0 0x90 0x80 0x80..0x8B #Lo [12] LINEAR B SYLLABLE B008 A..LINEA...
1715 | 0xF0 0x90 0x80 0x8D..0xA6 #Lo [26] LINEAR B SYLLABLE B036 JO..LINE...
1716 | 0xF0 0x90 0x80 0xA8..0xBA #Lo [19] LINEAR B SYLLABLE B060 RA..LINE...
1717 | 0xF0 0x90 0x80 0xBC..0xBD #Lo [2] LINEAR B SYLLABLE B017 ZA..LINE...
1718 | 0xF0 0x90 0x80 0xBF..0xFF #Lo [15] LINEAR B SYLLABLE B020 ZO..LINE...
1719 | 0xF0 0x90 0x81 0x00..0x8D #
1720 | 0xF0 0x90 0x81 0x90..0x9D #Lo [14] LINEAR B SYMBOL B018..LINEAR B ...
1721 | 0xF0 0x90 0x82 0x80..0xFF #Lo [123] LINEAR B IDEOGRAM B100 MAN..LIN...
1722 | 0xF0 0x90 0x83 0x00..0xBA #
1723 | 0xF0 0x90 0x85 0x80..0xB4 #Nl [53] GREEK ACROPHONIC ATTIC ONE QUAR...
1724 | 0xF0 0x90 0x87 0xBD #Mn PHAISTOS DISC SIGN COMBINING OBLIQ...
1725 | 0xF0 0x90 0x8A 0x80..0x9C #Lo [29] LYCIAN LETTER A..LYCIAN LETTER X
1726 | 0xF0 0x90 0x8A 0xA0..0xFF #Lo [49] CARIAN LETTER A..CARIAN LETTER ...
1727 | 0xF0 0x90 0x8B 0x00..0x90 #
1728 | 0xF0 0x90 0x8B 0xA0 #Mn COPTIC EPACT THOUSANDS MARK
1729 | 0xF0 0x90 0x8C 0x80..0x9F #Lo [32] OLD ITALIC LETTER A..OLD ITALIC...
1730 | 0xF0 0x90 0x8C 0xB0..0xFF #Lo [17] GOTHIC LETTER AHSA..GOTHIC LETT...
1731 | 0xF0 0x90 0x8D 0x00..0x80 #
1732 | 0xF0 0x90 0x8D 0x81 #Nl GOTHIC LETTER NINETY
1733 | 0xF0 0x90 0x8D 0x82..0x89 #Lo [8] GOTHIC LETTER RAIDA..GOTHIC LET...
1734 | 0xF0 0x90 0x8D 0x8A #Nl GOTHIC LETTER NINE HUNDRED
1735 | 0xF0 0x90 0x8D 0x90..0xB5 #Lo [38] OLD PERMIC LETTER AN..OLD PERMI...
1736 | 0xF0 0x90 0x8D 0xB6..0xBA #Mn [5] COMBINING OLD PERMIC LETTER AN....
1737 | 0xF0 0x90 0x8E 0x80..0x9D #Lo [30] UGARITIC LETTER ALPA..UGARITIC ...
1738 | 0xF0 0x90 0x8E 0xA0..0xFF #Lo [36] OLD PERSIAN SIGN A..OLD PERSIAN...
1739 | 0xF0 0x90 0x8F 0x00..0x83 #
1740 | 0xF0 0x90 0x8F 0x88..0x8F #Lo [8] OLD PERSIAN SIGN AURAMAZDAA..OL...
1741 | 0xF0 0x90 0x8F 0x91..0x95 #Nl [5] OLD PERSIAN NUMBER ONE..OLD PER...
1742 | 0xF0 0x90 0x90 0x80..0xFF #L& [80] DESERET CAPITAL LETTER LONG I.....
1743 | 0xF0 0x90 0x91 0x00..0x8F #
1744 | 0xF0 0x90 0x91 0x90..0xFF #Lo [78] SHAVIAN LETTER PEEP..OSMANYA LE...
1745 | 0xF0 0x90 0x92 0x00..0x9D #
1746 | 0xF0 0x90 0x92 0xA0..0xA9 #Nd [10] OSMANYA DIGIT ZERO..OSMANYA DIG...
1747 | 0xF0 0x90 0x92 0xB0..0xFF #L& [36] OSAGE CAPITAL LETTER A..OSAGE C...
1748 | 0xF0 0x90 0x93 0x00..0x93 #
1749 | 0xF0 0x90 0x93 0x98..0xBB #L& [36] OSAGE SMALL LETTER A..OSAGE SMA...
1750 | 0xF0 0x90 0x94 0x80..0xA7 #Lo [40] ELBASAN LETTER A..ELBASAN LETTE...
1751 | 0xF0 0x90 0x94 0xB0..0xFF #Lo [52] CAUCASIAN ALBANIAN LETTER ALT.....
1752 | 0xF0 0x90 0x95 0x00..0xA3 #
1753 | 0xF0 0x90 0x98 0x80..0xFF #Lo [311] LINEAR A SIGN AB001..LINE...
1754 | 0xF0 0x90 0x99..0x9B 0x00..0xFF #
1755 | 0xF0 0x90 0x9C 0x00..0xB6 #
1756 | 0xF0 0x90 0x9D 0x80..0x95 #Lo [22] LINEAR A SIGN A701 A..LINEAR A ...
1757 | 0xF0 0x90 0x9D 0xA0..0xA7 #Lo [8] LINEAR A SIGN A800..LINEAR A SI...
1758 | 0xF0 0x90 0xA0 0x80..0x85 #Lo [6] CYPRIOT SYLLABLE A..CYPRIOT SYL...
1759 | 0xF0 0x90 0xA0 0x88 #Lo CYPRIOT SYLLABLE JO
1760 | 0xF0 0x90 0xA0 0x8A..0xB5 #Lo [44] CYPRIOT SYLLABLE KA..CYPRIOT SY...
1761 | 0xF0 0x90 0xA0 0xB7..0xB8 #Lo [2] CYPRIOT SYLLABLE XA..CYPRIOT SY...
1762 | 0xF0 0x90 0xA0 0xBC #Lo CYPRIOT SYLLABLE ZA
1763 | 0xF0 0x90 0xA0 0xBF..0xFF #Lo [23] CYPRIOT SYLLABLE ZO..IMPERIAL A...
1764 | 0xF0 0x90 0xA1 0x00..0x95 #
1765 | 0xF0 0x90 0xA1 0xA0..0xB6 #Lo [23] PALMYRENE LETTER ALEPH..PALMYRE...
1766 | 0xF0 0x90 0xA2 0x80..0x9E #Lo [31] NABATAEAN LETTER FINAL ALEPH..N...
1767 | 0xF0 0x90 0xA3 0xA0..0xB2 #Lo [19] HATRAN LETTER ALEPH..HATRAN LET...
1768 | 0xF0 0x90 0xA3 0xB4..0xB5 #Lo [2] HATRAN LETTER SHIN..HATRAN LETT...
1769 | 0xF0 0x90 0xA4 0x80..0x95 #Lo [22] PHOENICIAN LETTER ALF..PHOENICI...
1770 | 0xF0 0x90 0xA4 0xA0..0xB9 #Lo [26] LYDIAN LETTER A..LYDIAN LETTER C
1771 | 0xF0 0x90 0xA6 0x80..0xB7 #Lo [56] MEROITIC HIEROGLYPHIC LETTER A....
1772 | 0xF0 0x90 0xA6 0xBE..0xBF #Lo [2] MEROITIC CURSIVE LOGOGRAM RMT.....
1773 | 0xF0 0x90 0xA8 0x80 #Lo KHAROSHTHI LETTER A
1774 | 0xF0 0x90 0xA8 0x81..0x83 #Mn [3] KHAROSHTHI VOWEL SIGN I..KHAROS...
1775 | 0xF0 0x90 0xA8 0x85..0x86 #Mn [2] KHAROSHTHI VOWEL SIGN E..KHAROS...
1776 | 0xF0 0x90 0xA8 0x8C..0x8F #Mn [4] KHAROSHTHI VOWEL LENGTH MARK..K...
1777 | 0xF0 0x90 0xA8 0x90..0x93 #Lo [4] KHAROSHTHI LETTER KA..KHAROSHTH...
1778 | 0xF0 0x90 0xA8 0x95..0x97 #Lo [3] KHAROSHTHI LETTER CA..KHAROSHTH...
1779 | 0xF0 0x90 0xA8 0x99..0xB3 #Lo [27] KHAROSHTHI LETTER NYA..KHAROSHT...
1780 | 0xF0 0x90 0xA8 0xB8..0xBA #Mn [3] KHAROSHTHI SIGN BAR ABOVE..KHAR...
1781 | 0xF0 0x90 0xA8 0xBF #Mn KHAROSHTHI VIRAMA
1782 | 0xF0 0x90 0xA9 0xA0..0xBC #Lo [29] OLD SOUTH ARABIAN LETTER HE..OL...
1783 | 0xF0 0x90 0xAA 0x80..0x9C #Lo [29] OLD NORTH ARABIAN LETTER HEH..O...
1784 | 0xF0 0x90 0xAB 0x80..0x87 #Lo [8] MANICHAEAN LETTER ALEPH..MANICH...
1785 | 0xF0 0x90 0xAB 0x89..0xA4 #Lo [28] MANICHAEAN LETTER ZAYIN..MANICH...
1786 | 0xF0 0x90 0xAB 0xA5..0xA6 #Mn [2] MANICHAEAN ABBREVIATION MARK AB...
1787 | 0xF0 0x90 0xAC 0x80..0xB5 #Lo [54] AVESTAN LETTER A..AVESTAN LETTE...
1788 | 0xF0 0x90 0xAD 0x80..0x95 #Lo [22] INSCRIPTIONAL PARTHIAN LETTER A...
1789 | 0xF0 0x90 0xAD 0xA0..0xB2 #Lo [19] INSCRIPTIONAL PAHLAVI LETTER AL...
1790 | 0xF0 0x90 0xAE 0x80..0x91 #Lo [18] PSALTER PAHLAVI LETTER ALEPH..P...
1791 | 0xF0 0x90 0xB0 0x80..0xFF #Lo [73] OLD TURKIC LETTER ORKHON A..OLD...
1792 | 0xF0 0x90 0xB1 0x00..0x88 #
1793 | 0xF0 0x90 0xB2 0x80..0xB2 #L& [51] OLD HUNGARIAN CAPITAL LETTER A....
1794 | 0xF0 0x90 0xB3 0x80..0xB2 #L& [51] OLD HUNGARIAN SMALL LETTER A..O...
1795 | 0xF0 0x91 0x80 0x80 #Mc BRAHMI SIGN CANDRABINDU
1796 | 0xF0 0x91 0x80 0x81 #Mn BRAHMI SIGN ANUSVARA
1797 | 0xF0 0x91 0x80 0x82 #Mc BRAHMI SIGN VISARGA
1798 | 0xF0 0x91 0x80 0x83..0xB7 #Lo [53] BRAHMI SIGN JIHVAMULIYA..BRAHMI...
1799 | 0xF0 0x91 0x80 0xB8..0xFF #Mn [15] BRAHMI VOWEL SIGN AA..BRAHMI VI...
1800 | 0xF0 0x91 0x81 0x00..0x86 #
1801 | 0xF0 0x91 0x81 0xA6..0xAF #Nd [10] BRAHMI DIGIT ZERO..BRAHMI DIGIT...
1802 | 0xF0 0x91 0x81 0xBF..0xFF #Mn [3] BRAHMI NUMBER JOINER..KAITHI SI...
1803 | 0xF0 0x91 0x82 0x00..0x81 #
1804 | 0xF0 0x91 0x82 0x82 #Mc KAITHI SIGN VISARGA
1805 | 0xF0 0x91 0x82 0x83..0xAF #Lo [45] KAITHI LETTER A..KAITHI LETTER HA
1806 | 0xF0 0x91 0x82 0xB0..0xB2 #Mc [3] KAITHI VOWEL SIGN AA..KAITHI VO...
1807 | 0xF0 0x91 0x82 0xB3..0xB6 #Mn [4] KAITHI VOWEL SIGN U..KAITHI VOW...
1808 | 0xF0 0x91 0x82 0xB7..0xB8 #Mc [2] KAITHI VOWEL SIGN O..KAITHI VOW...
1809 | 0xF0 0x91 0x82 0xB9..0xBA #Mn [2] KAITHI SIGN VIRAMA..KAITHI SIGN...
1810 | 0xF0 0x91 0x83 0x90..0xA8 #Lo [25] SORA SOMPENG LETTER SAH..SORA S...
1811 | 0xF0 0x91 0x83 0xB0..0xB9 #Nd [10] SORA SOMPENG DIGIT ZERO..SORA S...
1812 | 0xF0 0x91 0x84 0x80..0x82 #Mn [3] CHAKMA SIGN CANDRABINDU..CHAKMA...
1813 | 0xF0 0x91 0x84 0x83..0xA6 #Lo [36] CHAKMA LETTER AA..CHAKMA LETTER...
1814 | 0xF0 0x91 0x84 0xA7..0xAB #Mn [5] CHAKMA VOWEL SIGN A..CHAKMA VOW...
1815 | 0xF0 0x91 0x84 0xAC #Mc CHAKMA VOWEL SIGN E
1816 | 0xF0 0x91 0x84 0xAD..0xB4 #Mn [8] CHAKMA VOWEL SIGN AI..CHAKMA MA...
1817 | 0xF0 0x91 0x84 0xB6..0xBF #Nd [10] CHAKMA DIGIT ZERO..CHAKMA DIGIT...
1818 | 0xF0 0x91 0x85 0x90..0xB2 #Lo [35] MAHAJANI LETTER A..MAHAJANI LET...
1819 | 0xF0 0x91 0x85 0xB3 #Mn MAHAJANI SIGN NUKTA
1820 | 0xF0 0x91 0x85 0xB6 #Lo MAHAJANI LIGATURE SHRI
1821 | 0xF0 0x91 0x86 0x80..0x81 #Mn [2] SHARADA SIGN CANDRABINDU..SHARA...
1822 | 0xF0 0x91 0x86 0x82 #Mc SHARADA SIGN VISARGA
1823 | 0xF0 0x91 0x86 0x83..0xB2 #Lo [48] SHARADA LETTER A..SHARADA LETTE...
1824 | 0xF0 0x91 0x86 0xB3..0xB5 #Mc [3] SHARADA VOWEL SIGN AA..SHARADA ...
1825 | 0xF0 0x91 0x86 0xB6..0xBE #Mn [9] SHARADA VOWEL SIGN U..SHARADA V...
1826 | 0xF0 0x91 0x86 0xBF..0xFF #Mc [2] SHARADA VOWEL SIGN AU..SHARADA ...
1827 | 0xF0 0x91 0x87 0x00..0x80 #
1828 | 0xF0 0x91 0x87 0x81..0x84 #Lo [4] SHARADA SIGN AVAGRAHA..SHARADA OM
1829 | 0xF0 0x91 0x87 0x8A..0x8C #Mn [3] SHARADA SIGN NUKTA..SHARADA EXT...
1830 | 0xF0 0x91 0x87 0x90..0x99 #Nd [10] SHARADA DIGIT ZERO..SHARADA DIG...
1831 | 0xF0 0x91 0x87 0x9A #Lo SHARADA EKAM
1832 | 0xF0 0x91 0x87 0x9C #Lo SHARADA HEADSTROKE
1833 | 0xF0 0x91 0x88 0x80..0x91 #Lo [18] KHOJKI LETTER A..KHOJKI LETTER JJA
1834 | 0xF0 0x91 0x88 0x93..0xAB #Lo [25] KHOJKI LETTER NYA..KHOJKI LETTE...
1835 | 0xF0 0x91 0x88 0xAC..0xAE #Mc [3] KHOJKI VOWEL SIGN AA..KHOJKI VO...
1836 | 0xF0 0x91 0x88 0xAF..0xB1 #Mn [3] KHOJKI VOWEL SIGN U..KHOJKI VOW...
1837 | 0xF0 0x91 0x88 0xB2..0xB3 #Mc [2] KHOJKI VOWEL SIGN O..KHOJKI VOW...
1838 | 0xF0 0x91 0x88 0xB4 #Mn KHOJKI SIGN ANUSVARA
1839 | 0xF0 0x91 0x88 0xB5 #Mc KHOJKI SIGN VIRAMA
1840 | 0xF0 0x91 0x88 0xB6..0xB7 #Mn [2] KHOJKI SIGN NUKTA..KHOJKI SIGN ...
1841 | 0xF0 0x91 0x88 0xBE #Mn KHOJKI SIGN SUKUN
1842 | 0xF0 0x91 0x8A 0x80..0x86 #Lo [7] MULTANI LETTER A..MULTANI LETTE...
1843 | 0xF0 0x91 0x8A 0x88 #Lo MULTANI LETTER GHA
1844 | 0xF0 0x91 0x8A 0x8A..0x8D #Lo [4] MULTANI LETTER CA..MULTANI LETT...
1845 | 0xF0 0x91 0x8A 0x8F..0x9D #Lo [15] MULTANI LETTER NYA..MULTANI LET...
1846 | 0xF0 0x91 0x8A 0x9F..0xA8 #Lo [10] MULTANI LETTER BHA..MULTANI LET...
1847 | 0xF0 0x91 0x8A 0xB0..0xFF #Lo [47] KHUDAWADI LETTER A..KHUDAWADI L...
1848 | 0xF0 0x91 0x8B 0x00..0x9E #
1849 | 0xF0 0x91 0x8B 0x9F #Mn KHUDAWADI SIGN ANUSVARA
1850 | 0xF0 0x91 0x8B 0xA0..0xA2 #Mc [3] KHUDAWADI VOWEL SIGN AA..KHUDAW...
1851 | 0xF0 0x91 0x8B 0xA3..0xAA #Mn [8] KHUDAWADI VOWEL SIGN U..KHUDAWA...
1852 | 0xF0 0x91 0x8B 0xB0..0xB9 #Nd [10] KHUDAWADI DIGIT ZERO..KHUDAWADI...
1853 | 0xF0 0x91 0x8C 0x80..0x81 #Mn [2] GRANTHA SIGN COMBINING ANUSVARA...
1854 | 0xF0 0x91 0x8C 0x82..0x83 #Mc [2] GRANTHA SIGN ANUSVARA..GRANTHA ...
1855 | 0xF0 0x91 0x8C 0x85..0x8C #Lo [8] GRANTHA LETTER A..GRANTHA LETTE...
1856 | 0xF0 0x91 0x8C 0x8F..0x90 #Lo [2] GRANTHA LETTER EE..GRANTHA LETT...
1857 | 0xF0 0x91 0x8C 0x93..0xA8 #Lo [22] GRANTHA LETTER OO..GRANTHA LETT...
1858 | 0xF0 0x91 0x8C 0xAA..0xB0 #Lo [7] GRANTHA LETTER PA..GRANTHA LETT...
1859 | 0xF0 0x91 0x8C 0xB2..0xB3 #Lo [2] GRANTHA LETTER LA..GRANTHA LETT...
1860 | 0xF0 0x91 0x8C 0xB5..0xB9 #Lo [5] GRANTHA LETTER VA..GRANTHA LETT...
1861 | 0xF0 0x91 0x8C 0xBC #Mn GRANTHA SIGN NUKTA
1862 | 0xF0 0x91 0x8C 0xBD #Lo GRANTHA SIGN AVAGRAHA
1863 | 0xF0 0x91 0x8C 0xBE..0xBF #Mc [2] GRANTHA VOWEL SIGN AA..GRANTHA ...
1864 | 0xF0 0x91 0x8D 0x80 #Mn GRANTHA VOWEL SIGN II
1865 | 0xF0 0x91 0x8D 0x81..0x84 #Mc [4] GRANTHA VOWEL SIGN U..GRANTHA V...
1866 | 0xF0 0x91 0x8D 0x87..0x88 #Mc [2] GRANTHA VOWEL SIGN EE..GRANTHA ...
1867 | 0xF0 0x91 0x8D 0x8B..0x8D #Mc [3] GRANTHA VOWEL SIGN OO..GRANTHA ...
1868 | 0xF0 0x91 0x8D 0x90 #Lo GRANTHA OM
1869 | 0xF0 0x91 0x8D 0x97 #Mc GRANTHA AU LENGTH MARK
1870 | 0xF0 0x91 0x8D 0x9D..0xA1 #Lo [5] GRANTHA SIGN PLUTA..GRANTHA LET...
1871 | 0xF0 0x91 0x8D 0xA2..0xA3 #Mc [2] GRANTHA VOWEL SIGN VOCALIC L..G...
1872 | 0xF0 0x91 0x8D 0xA6..0xAC #Mn [7] COMBINING GRANTHA DIGIT ZERO..C...
1873 | 0xF0 0x91 0x8D 0xB0..0xB4 #Mn [5] COMBINING GRANTHA LETTER A..COM...
1874 | 0xF0 0x91 0x90 0x80..0xB4 #Lo [53] NEWA LETTER A..NEWA LETTER HA
1875 | 0xF0 0x91 0x90 0xB5..0xB7 #Mc [3] NEWA VOWEL SIGN AA..NEWA VOWEL ...
1876 | 0xF0 0x91 0x90 0xB8..0xBF #Mn [8] NEWA VOWEL SIGN U..NEWA VOWEL S...
1877 | 0xF0 0x91 0x91 0x80..0x81 #Mc [2] NEWA VOWEL SIGN O..NEWA VOWEL S...
1878 | 0xF0 0x91 0x91 0x82..0x84 #Mn [3] NEWA SIGN VIRAMA..NEWA SIGN ANU...
1879 | 0xF0 0x91 0x91 0x85 #Mc NEWA SIGN VISARGA
1880 | 0xF0 0x91 0x91 0x86 #Mn NEWA SIGN NUKTA
1881 | 0xF0 0x91 0x91 0x87..0x8A #Lo [4] NEWA SIGN AVAGRAHA..NEWA SIDDHI
1882 | 0xF0 0x91 0x91 0x90..0x99 #Nd [10] NEWA DIGIT ZERO..NEWA DIGIT NINE
1883 | 0xF0 0x91 0x92 0x80..0xAF #Lo [48] TIRHUTA ANJI..TIRHUTA LETTER HA
1884 | 0xF0 0x91 0x92 0xB0..0xB2 #Mc [3] TIRHUTA VOWEL SIGN AA..TIRHUTA ...
1885 | 0xF0 0x91 0x92 0xB3..0xB8 #Mn [6] TIRHUTA VOWEL SIGN U..TIRHUTA V...
1886 | 0xF0 0x91 0x92 0xB9 #Mc TIRHUTA VOWEL SIGN E
1887 | 0xF0 0x91 0x92 0xBA #Mn TIRHUTA VOWEL SIGN SHORT E
1888 | 0xF0 0x91 0x92 0xBB..0xBE #Mc [4] TIRHUTA VOWEL SIGN AI..TIRHUTA ...
1889 | 0xF0 0x91 0x92 0xBF..0xFF #Mn [2] TIRHUTA SIGN CANDRABINDU..TIRHU...
1890 | 0xF0 0x91 0x93 0x00..0x80 #
1891 | 0xF0 0x91 0x93 0x81 #Mc TIRHUTA SIGN VISARGA
1892 | 0xF0 0x91 0x93 0x82..0x83 #Mn [2] TIRHUTA SIGN VIRAMA..TIRHUTA SI...
1893 | 0xF0 0x91 0x93 0x84..0x85 #Lo [2] TIRHUTA SIGN AVAGRAHA..TIRHUTA ...
1894 | 0xF0 0x91 0x93 0x87 #Lo TIRHUTA OM
1895 | 0xF0 0x91 0x93 0x90..0x99 #Nd [10] TIRHUTA DIGIT ZERO..TIRHUTA DIG...
1896 | 0xF0 0x91 0x96 0x80..0xAE #Lo [47] SIDDHAM LETTER A..SIDDHAM LETTE...
1897 | 0xF0 0x91 0x96 0xAF..0xB1 #Mc [3] SIDDHAM VOWEL SIGN AA..SIDDHAM ...
1898 | 0xF0 0x91 0x96 0xB2..0xB5 #Mn [4] SIDDHAM VOWEL SIGN U..SIDDHAM V...
1899 | 0xF0 0x91 0x96 0xB8..0xBB #Mc [4] SIDDHAM VOWEL SIGN E..SIDDHAM V...
1900 | 0xF0 0x91 0x96 0xBC..0xBD #Mn [2] SIDDHAM SIGN CANDRABINDU..SIDDH...
1901 | 0xF0 0x91 0x96 0xBE #Mc SIDDHAM SIGN VISARGA
1902 | 0xF0 0x91 0x96 0xBF..0xFF #Mn [2] SIDDHAM SIGN VIRAMA..SIDDHAM SI...
1903 | 0xF0 0x91 0x97 0x00..0x80 #
1904 | 0xF0 0x91 0x97 0x98..0x9B #Lo [4] SIDDHAM LETTER THREE-CIRCLE ALT...
1905 | 0xF0 0x91 0x97 0x9C..0x9D #Mn [2] SIDDHAM VOWEL SIGN ALTERNATE U....
1906 | 0xF0 0x91 0x98 0x80..0xAF #Lo [48] MODI LETTER A..MODI LETTER LLA
1907 | 0xF0 0x91 0x98 0xB0..0xB2 #Mc [3] MODI VOWEL SIGN AA..MODI VOWEL ...
1908 | 0xF0 0x91 0x98 0xB3..0xBA #Mn [8] MODI VOWEL SIGN U..MODI VOWEL S...
1909 | 0xF0 0x91 0x98 0xBB..0xBC #Mc [2] MODI VOWEL SIGN O..MODI VOWEL S...
1910 | 0xF0 0x91 0x98 0xBD #Mn MODI SIGN ANUSVARA
1911 | 0xF0 0x91 0x98 0xBE #Mc MODI SIGN VISARGA
1912 | 0xF0 0x91 0x98 0xBF..0xFF #Mn [2] MODI SIGN VIRAMA..MODI SIGN ARD...
1913 | 0xF0 0x91 0x99 0x00..0x80 #
1914 | 0xF0 0x91 0x99 0x84 #Lo MODI SIGN HUVA
1915 | 0xF0 0x91 0x99 0x90..0x99 #Nd [10] MODI DIGIT ZERO..MODI DIGIT NINE
1916 | 0xF0 0x91 0x9A 0x80..0xAA #Lo [43] TAKRI LETTER A..TAKRI LETTER RRA
1917 | 0xF0 0x91 0x9A 0xAB #Mn TAKRI SIGN ANUSVARA
1918 | 0xF0 0x91 0x9A 0xAC #Mc TAKRI SIGN VISARGA
1919 | 0xF0 0x91 0x9A 0xAD #Mn TAKRI VOWEL SIGN AA
1920 | 0xF0 0x91 0x9A 0xAE..0xAF #Mc [2] TAKRI VOWEL SIGN I..TAKRI VOWEL...
1921 | 0xF0 0x91 0x9A 0xB0..0xB5 #Mn [6] TAKRI VOWEL SIGN U..TAKRI VOWEL...
1922 | 0xF0 0x91 0x9A 0xB6 #Mc TAKRI SIGN VIRAMA
1923 | 0xF0 0x91 0x9A 0xB7 #Mn TAKRI SIGN NUKTA
1924 | 0xF0 0x91 0x9B 0x80..0x89 #Nd [10] TAKRI DIGIT ZERO..TAKRI DIGIT NINE
1925 | 0xF0 0x91 0x9C 0x80..0x99 #Lo [26] AHOM LETTER KA..AHOM LETTER JHA
1926 | 0xF0 0x91 0x9C 0x9D..0x9F #Mn [3] AHOM CONSONANT SIGN MEDIAL LA.....
1927 | 0xF0 0x91 0x9C 0xA0..0xA1 #Mc [2] AHOM VOWEL SIGN A..AHOM VOWEL S...
1928 | 0xF0 0x91 0x9C 0xA2..0xA5 #Mn [4] AHOM VOWEL SIGN I..AHOM VOWEL S...
1929 | 0xF0 0x91 0x9C 0xA6 #Mc AHOM VOWEL SIGN E
1930 | 0xF0 0x91 0x9C 0xA7..0xAB #Mn [5] AHOM VOWEL SIGN AW..AHOM SIGN K...
1931 | 0xF0 0x91 0x9C 0xB0..0xB9 #Nd [10] AHOM DIGIT ZERO..AHOM DIGIT NINE
1932 | 0xF0 0x91 0xA2 0xA0..0xFF #L& [64] WARANG CITI CAPITAL LETTER NGAA...
1933 | 0xF0 0x91 0xA3 0x00..0x9F #
1934 | 0xF0 0x91 0xA3 0xA0..0xA9 #Nd [10] WARANG CITI DIGIT ZERO..WARANG ...
1935 | 0xF0 0x91 0xA3 0xBF #Lo WARANG CITI OM
1936 | 0xF0 0x91 0xAB 0x80..0xB8 #Lo [57] PAU CIN HAU LETTER PA..PAU CIN ...
1937 | 0xF0 0x91 0xB0 0x80..0x88 #Lo [9] BHAIKSUKI LETTER A..BHAIKSUKI L...
1938 | 0xF0 0x91 0xB0 0x8A..0xAE #Lo [37] BHAIKSUKI LETTER E..BHAIKSUKI L...
1939 | 0xF0 0x91 0xB0 0xAF #Mc BHAIKSUKI VOWEL SIGN AA
1940 | 0xF0 0x91 0xB0 0xB0..0xB6 #Mn [7] BHAIKSUKI VOWEL SIGN I..BHAIKSU...
1941 | 0xF0 0x91 0xB0 0xB8..0xBD #Mn [6] BHAIKSUKI VOWEL SIGN E..BHAIKSU...
1942 | 0xF0 0x91 0xB0 0xBE #Mc BHAIKSUKI SIGN VISARGA
1943 | 0xF0 0x91 0xB0 0xBF #Mn BHAIKSUKI SIGN VIRAMA
1944 | 0xF0 0x91 0xB1 0x80 #Lo BHAIKSUKI SIGN AVAGRAHA
1945 | 0xF0 0x91 0xB1 0x90..0x99 #Nd [10] BHAIKSUKI DIGIT ZERO..BHAIKSUKI...
1946 | 0xF0 0x91 0xB1 0xB2..0xFF #Lo [30] MARCHEN LETTER KA..MARCHEN LETT...
1947 | 0xF0 0x91 0xB2 0x00..0x8F #
1948 | 0xF0 0x91 0xB2 0x92..0xA7 #Mn [22] MARCHEN SUBJOINED LETTER KA..MA...
1949 | 0xF0 0x91 0xB2 0xA9 #Mc MARCHEN SUBJOINED LETTER YA
1950 | 0xF0 0x91 0xB2 0xAA..0xB0 #Mn [7] MARCHEN SUBJOINED LETTER RA..MA...
1951 | 0xF0 0x91 0xB2 0xB1 #Mc MARCHEN VOWEL SIGN I
1952 | 0xF0 0x91 0xB2 0xB2..0xB3 #Mn [2] MARCHEN VOWEL SIGN U..MARCHEN V...
1953 | 0xF0 0x91 0xB2 0xB4 #Mc MARCHEN VOWEL SIGN O
1954 | 0xF0 0x91 0xB2 0xB5..0xB6 #Mn [2] MARCHEN SIGN ANUSVARA..MARCHEN ...
1955 | 0xF0 0x92 0x80 0x80..0xFF #Lo [922] CUNEIFORM SIGN A..CUNEIFO...
1956 | 0xF0 0x92 0x81..0x8D 0x00..0xFF #
1957 | 0xF0 0x92 0x8E 0x00..0x99 #
1958 | 0xF0 0x92 0x90 0x80..0xFF #Nl [111] CUNEIFORM NUMERIC SIGN TWO ASH....
1959 | 0xF0 0x92 0x91 0x00..0xAE #
1960 | 0xF0 0x92 0x92 0x80..0xFF #Lo [196] CUNEIFORM SIGN AB TIMES N...
1961 | 0xF0 0x92 0x93..0x94 0x00..0xFF #
1962 | 0xF0 0x92 0x95 0x00..0x83 #
1963 | 0xF0 0x93 0x80 0x80..0xFF #Lo [1071] EGYPTIAN HIEROGLYPH A001...
1964 | 0xF0 0x93 0x81..0x8F 0x00..0xFF #
1965 | 0xF0 0x93 0x90 0x00..0xAE #
1966 | 0xF0 0x94 0x90 0x80..0xFF #Lo [583] ANATOLIAN HIEROGLYPH A001...
1967 | 0xF0 0x94 0x91..0x98 0x00..0xFF #
1968 | 0xF0 0x94 0x99 0x00..0x86 #
1969 | 0xF0 0x96 0xA0 0x80..0xFF #Lo [569] BAMUM LETTER PHASE-A NGKU...
1970 | 0xF0 0x96 0xA1..0xA7 0x00..0xFF #
1971 | 0xF0 0x96 0xA8 0x00..0xB8 #
1972 | 0xF0 0x96 0xA9 0x80..0x9E #Lo [31] MRO LETTER TA..MRO LETTER TEK
1973 | 0xF0 0x96 0xA9 0xA0..0xA9 #Nd [10] MRO DIGIT ZERO..MRO DIGIT NINE
1974 | 0xF0 0x96 0xAB 0x90..0xAD #Lo [30] BASSA VAH LETTER ENNI..BASSA VA...
1975 | 0xF0 0x96 0xAB 0xB0..0xB4 #Mn [5] BASSA VAH COMBINING HIGH TONE.....
1976 | 0xF0 0x96 0xAC 0x80..0xAF #Lo [48] PAHAWH HMONG VOWEL KEEB..PAHAWH...
1977 | 0xF0 0x96 0xAC 0xB0..0xB6 #Mn [7] PAHAWH HMONG MARK CIM TUB..PAHA...
1978 | 0xF0 0x96 0xAD 0x80..0x83 #Lm [4] PAHAWH HMONG SIGN VOS SEEV..PAH...
1979 | 0xF0 0x96 0xAD 0x90..0x99 #Nd [10] PAHAWH HMONG DIGIT ZERO..PAHAWH...
1980 | 0xF0 0x96 0xAD 0xA3..0xB7 #Lo [21] PAHAWH HMONG SIGN VOS LUB..PAHA...
1981 | 0xF0 0x96 0xAD 0xBD..0xFF #Lo [19] PAHAWH HMONG CLAN SIGN TSHEEJ.....
1982 | 0xF0 0x96 0xAE 0x00..0x8F #
1983 | 0xF0 0x96 0xBC 0x80..0xFF #Lo [69] MIAO LETTER PA..MIAO LETTER HHA
1984 | 0xF0 0x96 0xBD 0x00..0x84 #
1985 | 0xF0 0x96 0xBD 0x90 #Lo MIAO LETTER NASALIZATION
1986 | 0xF0 0x96 0xBD 0x91..0xBE #Mc [46] MIAO SIGN ASPIRATION..MIAO VOWE...
1987 | 0xF0 0x96 0xBE 0x8F..0x92 #Mn [4] MIAO TONE RIGHT..MIAO TONE BELOW
1988 | 0xF0 0x96 0xBE 0x93..0x9F #Lm [13] MIAO LETTER TONE-2..MIAO LETTER...
1989 | 0xF0 0x96 0xBF 0xA0 #Lm TANGUT ITERATION MARK
1990 | 0xF0 0x97 0x80 0x80..0xFF #Lo [6125] TANGUT IDEOGRAPH-17000.....
1991 | 0xF0 0x97 0x81..0xFF 0x00..0xFF #
1992 | 0xF0 0x98 0x00 0x00..0xFF #
1993 | 0xF0 0x98 0x01..0x9E 0x00..0xFF #
1994 | 0xF0 0x98 0x9F 0x00..0xAC #
1995 | 0xF0 0x98 0xA0 0x80..0xFF #Lo [755] TANGUT COMPONENT-001..TAN...
1996 | 0xF0 0x98 0xA1..0xAA 0x00..0xFF #
1997 | 0xF0 0x98 0xAB 0x00..0xB2 #
1998 | 0xF0 0x9B 0x80 0x80..0x81 #Lo [2] KATAKANA LETTER ARCHAIC E..HIRA...
1999 | 0xF0 0x9B 0xB0 0x80..0xFF #Lo [107] DUPLOYAN LETTER H..DUPLOYAN LET...
2000 | 0xF0 0x9B 0xB1 0x00..0xAA #
2001 | 0xF0 0x9B 0xB1 0xB0..0xBC #Lo [13] DUPLOYAN AFFIX LEFT HORIZONTAL ...
2002 | 0xF0 0x9B 0xB2 0x80..0x88 #Lo [9] DUPLOYAN AFFIX HIGH ACUTE..DUPL...
2003 | 0xF0 0x9B 0xB2 0x90..0x99 #Lo [10] DUPLOYAN AFFIX LOW ACUTE..DUPLO...
2004 | 0xF0 0x9B 0xB2 0x9D..0x9E #Mn [2] DUPLOYAN THICK LETTER SELECTOR....
2005 | 0xF0 0x9D 0x85 0xA5..0xA6 #Mc [2] MUSICAL SYMBOL COMBINING STEM.....
2006 | 0xF0 0x9D 0x85 0xA7..0xA9 #Mn [3] MUSICAL SYMBOL COMBINING TREMOL...
2007 | 0xF0 0x9D 0x85 0xAD..0xB2 #Mc [6] MUSICAL SYMBOL COMBINING AUGMEN...
2008 | 0xF0 0x9D 0x85 0xBB..0xFF #Mn [8] MUSICAL SYMBOL COMBINING ACCENT...
2009 | 0xF0 0x9D 0x86 0x00..0x82 #
2010 | 0xF0 0x9D 0x86 0x85..0x8B #Mn [7] MUSICAL SYMBOL COMBINING DOIT.....
2011 | 0xF0 0x9D 0x86 0xAA..0xAD #Mn [4] MUSICAL SYMBOL COMBINING DOWN B...
2012 | 0xF0 0x9D 0x89 0x82..0x84 #Mn [3] COMBINING GREEK MUSICAL TRISEME...
2013 | 0xF0 0x9D 0x90 0x80..0xFF #L& [85] MATHEMATICAL BOLD CAPITAL A..MA...
2014 | 0xF0 0x9D 0x91 0x00..0x94 #
2015 | 0xF0 0x9D 0x91 0x96..0xFF #L& [71] MATHEMATICAL ITALIC SMALL I..MA...
2016 | 0xF0 0x9D 0x92 0x00..0x9C #
2017 | 0xF0 0x9D 0x92 0x9E..0x9F #L& [2] MATHEMATICAL SCRIPT CAPITAL C.....
2018 | 0xF0 0x9D 0x92 0xA2 #L& MATHEMATICAL SCRIPT CAPITAL G
2019 | 0xF0 0x9D 0x92 0xA5..0xA6 #L& [2] MATHEMATICAL SCRIPT CAPITAL J.....
2020 | 0xF0 0x9D 0x92 0xA9..0xAC #L& [4] MATHEMATICAL SCRIPT CAPITAL N.....
2021 | 0xF0 0x9D 0x92 0xAE..0xB9 #L& [12] MATHEMATICAL SCRIPT CAPITAL S.....
2022 | 0xF0 0x9D 0x92 0xBB #L& MATHEMATICAL SCRIPT SMALL F
2023 | 0xF0 0x9D 0x92 0xBD..0xFF #L& [7] MATHEMATICAL SCRIPT SMALL H..MA...
2024 | 0xF0 0x9D 0x93 0x00..0x83 #
2025 | 0xF0 0x9D 0x93 0x85..0xFF #L& [65] MATHEMATICAL SCRIPT SMALL P..MA...
2026 | 0xF0 0x9D 0x94 0x00..0x85 #
2027 | 0xF0 0x9D 0x94 0x87..0x8A #L& [4] MATHEMATICAL FRAKTUR CAPITAL D....
2028 | 0xF0 0x9D 0x94 0x8D..0x94 #L& [8] MATHEMATICAL FRAKTUR CAPITAL J....
2029 | 0xF0 0x9D 0x94 0x96..0x9C #L& [7] MATHEMATICAL FRAKTUR CAPITAL S....
2030 | 0xF0 0x9D 0x94 0x9E..0xB9 #L& [28] MATHEMATICAL FRAKTUR SMALL A..M...
2031 | 0xF0 0x9D 0x94 0xBB..0xBE #L& [4] MATHEMATICAL DOUBLE-STRUCK CAPI...
2032 | 0xF0 0x9D 0x95 0x80..0x84 #L& [5] MATHEMATICAL DOUBLE-STRUCK CAPI...
2033 | 0xF0 0x9D 0x95 0x86 #L& MATHEMATICAL DOUBLE-STRUCK CAPITAL O
2034 | 0xF0 0x9D 0x95 0x8A..0x90 #L& [7] MATHEMATICAL DOUBLE-STRUCK CAPI...
2035 | 0xF0 0x9D 0x95 0x92..0xFF #L& [340] MATHEMATICAL DOUBLE-STRUC...
2036 | 0xF0 0x9D 0x96..0x99 0x00..0xFF #
2037 | 0xF0 0x9D 0x9A 0x00..0xA5 #
2038 | 0xF0 0x9D 0x9A 0xA8..0xFF #L& [25] MATHEMATICAL BOLD CAPITAL ALPHA...
2039 | 0xF0 0x9D 0x9B 0x00..0x80 #
2040 | 0xF0 0x9D 0x9B 0x82..0x9A #L& [25] MATHEMATICAL BOLD SMALL ALPHA.....
2041 | 0xF0 0x9D 0x9B 0x9C..0xBA #L& [31] MATHEMATICAL BOLD EPSILON SYMBO...
2042 | 0xF0 0x9D 0x9B 0xBC..0xFF #L& [25] MATHEMATICAL ITALIC SMALL ALPHA...
2043 | 0xF0 0x9D 0x9C 0x00..0x94 #
2044 | 0xF0 0x9D 0x9C 0x96..0xB4 #L& [31] MATHEMATICAL ITALIC EPSILON SYM...
2045 | 0xF0 0x9D 0x9C 0xB6..0xFF #L& [25] MATHEMATICAL BOLD ITALIC SMALL ...
2046 | 0xF0 0x9D 0x9D 0x00..0x8E #
2047 | 0xF0 0x9D 0x9D 0x90..0xAE #L& [31] MATHEMATICAL BOLD ITALIC EPSILO...
2048 | 0xF0 0x9D 0x9D 0xB0..0xFF #L& [25] MATHEMATICAL SANS-SERIF BOLD SM...
2049 | 0xF0 0x9D 0x9E 0x00..0x88 #
2050 | 0xF0 0x9D 0x9E 0x8A..0xA8 #L& [31] MATHEMATICAL SANS-SERIF BOLD EP...
2051 | 0xF0 0x9D 0x9E 0xAA..0xFF #L& [25] MATHEMATICAL SANS-SERIF BOLD IT...
2052 | 0xF0 0x9D 0x9F 0x00..0x82 #
2053 | 0xF0 0x9D 0x9F 0x84..0x8B #L& [8] MATHEMATICAL SANS-SERIF BOLD IT...
2054 | 0xF0 0x9D 0x9F 0x8E..0xBF #Nd [50] MATHEMATICAL BOLD DIGIT ZERO..M...
2055 | 0xF0 0x9D 0xA8 0x80..0xB6 #Mn [55] SIGNWRITING HEAD RIM..SIGNWRITI...
2056 | 0xF0 0x9D 0xA8 0xBB..0xFF #Mn [50] SIGNWRITING MOUTH CLOSED NEUTRA...
2057 | 0xF0 0x9D 0xA9 0x00..0xAC #
2058 | 0xF0 0x9D 0xA9 0xB5 #Mn SIGNWRITING UPPER BODY TILTING FRO...
2059 | 0xF0 0x9D 0xAA 0x84 #Mn SIGNWRITING LOCATION HEAD NECK
2060 | 0xF0 0x9D 0xAA 0x9B..0x9F #Mn [5] SIGNWRITING FILL MODIFIER-2..SI...
2061 | 0xF0 0x9D 0xAA 0xA1..0xAF #Mn [15] SIGNWRITING ROTATION MODIFIER-2...
2062 | 0xF0 0x9E 0x80 0x80..0x86 #Mn [7] COMBINING GLAGOLITIC LETTER AZU...
2063 | 0xF0 0x9E 0x80 0x88..0x98 #Mn [17] COMBINING GLAGOLITIC LETTER ZEM...
2064 | 0xF0 0x9E 0x80 0x9B..0xA1 #Mn [7] COMBINING GLAGOLITIC LETTER SHT...
2065 | 0xF0 0x9E 0x80 0xA3..0xA4 #Mn [2] COMBINING GLAGOLITIC LETTER YU....
2066 | 0xF0 0x9E 0x80 0xA6..0xAA #Mn [5] COMBINING GLAGOLITIC LETTER YO....
2067 | 0xF0 0x9E 0xA0 0x80..0xFF #Lo [197] MENDE KIKAKUI SYLLABLE M0...
2068 | 0xF0 0x9E 0xA1..0xA2 0x00..0xFF #
2069 | 0xF0 0x9E 0xA3 0x00..0x84 #
2070 | 0xF0 0x9E 0xA3 0x90..0x96 #Mn [7] MENDE KIKAKUI COMBINING NUMBER ...
2071 | 0xF0 0x9E 0xA4 0x80..0xFF #L& [68] ADLAM CAPITAL LETTER ALIF..ADLA...
2072 | 0xF0 0x9E 0xA5 0x00..0x83 #
2073 | 0xF0 0x9E 0xA5 0x84..0x8A #Mn [7] ADLAM ALIF LENGTHENER..ADLAM NUKTA
2074 | 0xF0 0x9E 0xA5 0x90..0x99 #Nd [10] ADLAM DIGIT ZERO..ADLAM DIGIT NINE
2075 | 0xF0 0x9E 0xB8 0x80..0x83 #Lo [4] ARABIC MATHEMATICAL ALEF..ARABI...
2076 | 0xF0 0x9E 0xB8 0x85..0x9F #Lo [27] ARABIC MATHEMATICAL WAW..ARABIC...
2077 | 0xF0 0x9E 0xB8 0xA1..0xA2 #Lo [2] ARABIC MATHEMATICAL INITIAL BEH...
2078 | 0xF0 0x9E 0xB8 0xA4 #Lo ARABIC MATHEMATICAL INITIAL HEH
2079 | 0xF0 0x9E 0xB8 0xA7 #Lo ARABIC MATHEMATICAL INITIAL HAH
2080 | 0xF0 0x9E 0xB8 0xA9..0xB2 #Lo [10] ARABIC MATHEMATICAL INITIAL YEH...
2081 | 0xF0 0x9E 0xB8 0xB4..0xB7 #Lo [4] ARABIC MATHEMATICAL INITIAL SHE...
2082 | 0xF0 0x9E 0xB8 0xB9 #Lo ARABIC MATHEMATICAL INITIAL DAD
2083 | 0xF0 0x9E 0xB8 0xBB #Lo ARABIC MATHEMATICAL INITIAL GHAIN
2084 | 0xF0 0x9E 0xB9 0x82 #Lo ARABIC MATHEMATICAL TAILED JEEM
2085 | 0xF0 0x9E 0xB9 0x87 #Lo ARABIC MATHEMATICAL TAILED HAH
2086 | 0xF0 0x9E 0xB9 0x89 #Lo ARABIC MATHEMATICAL TAILED YEH
2087 | 0xF0 0x9E 0xB9 0x8B #Lo ARABIC MATHEMATICAL TAILED LAM
2088 | 0xF0 0x9E 0xB9 0x8D..0x8F #Lo [3] ARABIC MATHEMATICAL TAILED NOON...
2089 | 0xF0 0x9E 0xB9 0x91..0x92 #Lo [2] ARABIC MATHEMATICAL TAILED SAD....
2090 | 0xF0 0x9E 0xB9 0x94 #Lo ARABIC MATHEMATICAL TAILED SHEEN
2091 | 0xF0 0x9E 0xB9 0x97 #Lo ARABIC MATHEMATICAL TAILED KHAH
2092 | 0xF0 0x9E 0xB9 0x99 #Lo ARABIC MATHEMATICAL TAILED DAD
2093 | 0xF0 0x9E 0xB9 0x9B #Lo ARABIC MATHEMATICAL TAILED GHAIN
2094 | 0xF0 0x9E 0xB9 0x9D #Lo ARABIC MATHEMATICAL TAILED DOTLESS...
2095 | 0xF0 0x9E 0xB9 0x9F #Lo ARABIC MATHEMATICAL TAILED DOTLESS...
2096 | 0xF0 0x9E 0xB9 0xA1..0xA2 #Lo [2] ARABIC MATHEMATICAL STRETCHED B...
2097 | 0xF0 0x9E 0xB9 0xA4 #Lo ARABIC MATHEMATICAL STRETCHED HEH
2098 | 0xF0 0x9E 0xB9 0xA7..0xAA #Lo [4] ARABIC MATHEMATICAL STRETCHED H...
2099 | 0xF0 0x9E 0xB9 0xAC..0xB2 #Lo [7] ARABIC MATHEMATICAL STRETCHED M...
2100 | 0xF0 0x9E 0xB9 0xB4..0xB7 #Lo [4] ARABIC MATHEMATICAL STRETCHED S...
2101 | 0xF0 0x9E 0xB9 0xB9..0xBC #Lo [4] ARABIC MATHEMATICAL STRETCHED D...
2102 | 0xF0 0x9E 0xB9 0xBE #Lo ARABIC MATHEMATICAL STRETCHED DOTL...
2103 | 0xF0 0x9E 0xBA 0x80..0x89 #Lo [10] ARABIC MATHEMATICAL LOOPED ALEF...
2104 | 0xF0 0x9E 0xBA 0x8B..0x9B #Lo [17] ARABIC MATHEMATICAL LOOPED LAM....
2105 | 0xF0 0x9E 0xBA 0xA1..0xA3 #Lo [3] ARABIC MATHEMATICAL DOUBLE-STRU...
2106 | 0xF0 0x9E 0xBA 0xA5..0xA9 #Lo [5] ARABIC MATHEMATICAL DOUBLE-STRU...
2107 | 0xF0 0x9E 0xBA 0xAB..0xBB #Lo [17] ARABIC MATHEMATICAL DOUBLE-STRU...
2108 | 0xF0 0xA0 0x80 0x80..0xFF #Lo [42711] CJK UNIFIED IDEOG...
2109 | 0xF0 0xA0 0x81..0xFF 0x00..0xFF #
2110 | 0xF0 0xA1..0xA9 0x00..0xFF 0x00..0xFF #
2111 | 0xF0 0xAA 0x00 0x00..0xFF #
2112 | 0xF0 0xAA 0x01..0x9A 0x00..0xFF #
2113 | 0xF0 0xAA 0x9B 0x00..0x96 #
2114 | 0xF0 0xAA 0x9C 0x80..0xFF #Lo [4149] CJK UNIFIED IDEOGRAPH-2A...
2115 | 0xF0 0xAA 0x9D..0xFF 0x00..0xFF #
2116 | 0xF0 0xAB 0x00 0x00..0xFF #
2117 | 0xF0 0xAB 0x01..0x9B 0x00..0xFF #
2118 | 0xF0 0xAB 0x9C 0x00..0xB4 #
2119 | 0xF0 0xAB 0x9D 0x80..0xFF #Lo [222] CJK UNIFIED IDEOGRAPH-2B7...
2120 | 0xF0 0xAB 0x9E..0x9F 0x00..0xFF #
2121 | 0xF0 0xAB 0xA0 0x00..0x9D #
2122 | 0xF0 0xAB 0xA0 0xA0..0xFF #Lo [5762] CJK UNIFIED IDEOGRAPH-2B...
2123 | 0xF0 0xAB 0xA1..0xFF 0x00..0xFF #
2124 | 0xF0 0xAC 0x00 0x00..0xFF #
2125 | 0xF0 0xAC 0x01..0xB9 0x00..0xFF #
2126 | 0xF0 0xAC 0xBA 0x00..0xA1 #
2127 | 0xF0 0xAF 0xA0 0x80..0xFF #Lo [542] CJK COMPATIBILITY IDEOGRA...
2128 | 0xF0 0xAF 0xA1..0xA7 0x00..0xFF #
2129 | 0xF0 0xAF 0xA8 0x00..0x9D #
2130 | 0xF3 0xA0 0x84 0x80..0xFF #Mn [240] VARIATION SELECTOR-17..VA...
2131 | 0xF3 0xA0 0x85..0x86 0x00..0xFF #
2132 | 0xF3 0xA0 0x87 0x00..0xAF #
2133 ;
2134
2135}%%
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/variables.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/variables.go
new file mode 100644
index 0000000..eeee1a5
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/variables.go
@@ -0,0 +1,86 @@
1package hclsyntax
2
3import (
4 "github.com/hashicorp/hcl2/hcl"
5)
6
7// Variables returns all of the variables referenced within a given expression.
8//
9// This is the implementation of the "Variables" method on every native
10// expression.
11func Variables(expr Expression) []hcl.Traversal {
12 var vars []hcl.Traversal
13
14 walker := &variablesWalker{
15 Callback: func(t hcl.Traversal) {
16 vars = append(vars, t)
17 },
18 }
19
20 Walk(expr, walker)
21
22 return vars
23}
24
25// variablesWalker is a Walker implementation that calls its callback for any
26// root scope traversal found while walking.
27type variablesWalker struct {
28 Callback func(hcl.Traversal)
29 localScopes []map[string]struct{}
30}
31
32func (w *variablesWalker) Enter(n Node) hcl.Diagnostics {
33 switch tn := n.(type) {
34 case *ScopeTraversalExpr:
35 t := tn.Traversal
36
37 // Check if the given root name appears in any of the active
38 // local scopes. We don't want to return local variables here, since
39 // the goal of walking variables is to tell the calling application
40 // which names it needs to populate in the _root_ scope.
41 name := t.RootName()
42 for _, names := range w.localScopes {
43 if _, localized := names[name]; localized {
44 return nil
45 }
46 }
47
48 w.Callback(t)
49 case ChildScope:
50 w.localScopes = append(w.localScopes, tn.LocalNames)
51 }
52 return nil
53}
54
55func (w *variablesWalker) Exit(n Node) hcl.Diagnostics {
56 switch n.(type) {
57 case ChildScope:
58 // pop the latest local scope, assuming that the walker will
59 // behave symmetrically as promised.
60 w.localScopes = w.localScopes[:len(w.localScopes)-1]
61 }
62 return nil
63}
64
65// ChildScope is a synthetic AST node that is visited during a walk to
66// indicate that its descendent will be evaluated in a child scope, which
67// may mask certain variables from the parent scope as locals.
68//
69// ChildScope nodes don't really exist in the AST, but are rather synthesized
70// on the fly during walk. Therefore it doesn't do any good to transform them;
71// instead, transform either the parent node that created a scope or the expression
72// that the child scope struct wraps.
73type ChildScope struct {
74 LocalNames map[string]struct{}
75 Expr *Expression // pointer because it can be replaced on walk
76}
77
78func (e ChildScope) walkChildNodes(w internalWalkFunc) {
79 *(e.Expr) = w(*(e.Expr)).(Expression)
80}
81
82// Range returns the range of the expression that the ChildScope is
83// encapsulating. It isn't really very useful to call Range on a ChildScope.
84func (e ChildScope) Range() hcl.Range {
85 return (*e.Expr).Range()
86}
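A minimal usage sketch for the Variables entry point above (not part of the vendored diff); it assumes hclsyntax.ParseExpression from the same package, which this hunk does not show:

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	// Parse a template expression that refers to two root-scope names.
	expr, diags := hclsyntax.ParseExpression(
		[]byte(`"${var.region}-${count.index}"`),
		"example.hcl",
		hcl.Pos{Line: 1, Column: 1},
	)
	if diags.HasErrors() {
		panic(diags.Error())
	}

	// Variables reports the traversals whose root names the calling
	// application must supply in the root evaluation scope.
	for _, traversal := range hclsyntax.Variables(expr) {
		fmt.Println(traversal.RootName()) // "var", then "count"
	}
}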
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/walk.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/walk.go
new file mode 100644
index 0000000..3405d26
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/walk.go
@@ -0,0 +1,77 @@
1package hclsyntax
2
3import (
4 "github.com/hashicorp/hcl2/hcl"
5)
6
7// VisitFunc is the callback signature for VisitAll.
8type VisitFunc func(node Node) hcl.Diagnostics
9
10// VisitAll is a basic way to traverse the AST beginning with a particular
11// node. The given function will be called once for each AST node in
12// depth-first order, but no context is provided about the shape of the tree.
13//
14// The VisitFunc may return diagnostics, in which case they will be accumulated
15// and returned as a single set.
16func VisitAll(node Node, f VisitFunc) hcl.Diagnostics {
17 diags := f(node)
18 node.walkChildNodes(func(node Node) Node {
19 diags = append(diags, VisitAll(node, f)...)
20 return node
21 })
22 return diags
23}
24
25// Walker is an interface used with Walk.
26type Walker interface {
27 Enter(node Node) hcl.Diagnostics
28 Exit(node Node) hcl.Diagnostics
29}
30
31// Walk is a more complex way to traverse the AST starting with a particular
32// node, which provides information about the tree structure via separate
33// Enter and Exit functions.
34func Walk(node Node, w Walker) hcl.Diagnostics {
35 diags := w.Enter(node)
36 node.walkChildNodes(func(node Node) Node {
37 diags = append(diags, Walk(node, w)...)
38 return node
39 })
40 return diags
41}
42
43// Transformer is an interface used with Transform
44type Transformer interface {
45 // Transform accepts a node and returns a replacement node along with
46 // a flag for whether to also visit child nodes. If the flag is false,
47 // none of the child nodes will be visited and the TransformExit method
48 // will not be called for the node.
49 //
50 // It is acceptable and appropriate for Transform to return the same node
51 // it was given, for situations where no transform is needed.
52 Transform(node Node) (Node, bool, hcl.Diagnostics)
53
54 // TransformExit signals the end of transformations of child nodes of the
55 // given node. If Transform returned a new node, the given node is the
56 // node that was returned, rather than the node that was originally
57 // encountered.
58 TransformExit(node Node) hcl.Diagnostics
59}
60
61// Transform allows for in-place transformations of an AST starting with a
62// particular node. The provided Transformer implementation drives the
63// transformation process. The return value is the node that replaced the
64// given top-level node.
65func Transform(node Node, t Transformer) (Node, hcl.Diagnostics) {
66 newNode, descend, diags := t.Transform(node)
67 if !descend {
68 return newNode, diags
69 }
70 node.walkChildNodes(func(node Node) Node {
71 newNode, newDiags := Transform(node, t)
72 diags = append(diags, newDiags...)
73 return newNode
74 })
75 diags = append(diags, t.TransformExit(newNode)...)
76 return newNode, diags
77}
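To illustrate the Walker contract, here is a rough sketch of a walker that just counts nodes; it again leans on hclsyntax.ParseExpression, an assumption outside this hunk:

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

// nodeCounter is a minimal Walker: Enter increments a counter for every node
// visited and Exit does nothing; neither step can fail, so both return nil.
type nodeCounter struct {
	count int
}

func (c *nodeCounter) Enter(n hclsyntax.Node) hcl.Diagnostics { c.count++; return nil }
func (c *nodeCounter) Exit(n hclsyntax.Node) hcl.Diagnostics  { return nil }

func main() {
	expr, diags := hclsyntax.ParseExpression(
		[]byte(`upper(var.name)`), "example.hcl", hcl.Pos{Line: 1, Column: 1},
	)
	if diags.HasErrors() {
		panic(diags.Error())
	}

	counter := &nodeCounter{}
	walkDiags := hclsyntax.Walk(expr, counter)
	fmt.Printf("visited %d nodes, %d diagnostics\n", counter.count, len(walkDiags))
}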
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/ast.go b/vendor/github.com/hashicorp/hcl2/hcl/json/ast.go
new file mode 100644
index 0000000..753bfa0
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/json/ast.go
@@ -0,0 +1,121 @@
1package json
2
3import (
4 "math/big"
5
6 "github.com/hashicorp/hcl2/hcl"
7)
8
9type node interface {
10 Range() hcl.Range
11 StartRange() hcl.Range
12}
13
14type objectVal struct {
15 Attrs []*objectAttr
16 SrcRange hcl.Range // range of the entire object, brace-to-brace
17 OpenRange hcl.Range // range of the opening brace
18 CloseRange hcl.Range // range of the closing brace
19}
20
21func (n *objectVal) Range() hcl.Range {
22 return n.SrcRange
23}
24
25func (n *objectVal) StartRange() hcl.Range {
26 return n.OpenRange
27}
28
29type objectAttr struct {
30 Name string
31 Value node
32 NameRange hcl.Range // range of the name string
33}
34
35func (n *objectAttr) Range() hcl.Range {
36 return n.NameRange
37}
38
39func (n *objectAttr) StartRange() hcl.Range {
40 return n.NameRange
41}
42
43type arrayVal struct {
44 Values []node
45 SrcRange hcl.Range // range of the entire object, bracket-to-bracket
46 OpenRange hcl.Range // range of the opening bracket
47}
48
49func (n *arrayVal) Range() hcl.Range {
50 return n.SrcRange
51}
52
53func (n *arrayVal) StartRange() hcl.Range {
54 return n.OpenRange
55}
56
57type booleanVal struct {
58 Value bool
59 SrcRange hcl.Range
60}
61
62func (n *booleanVal) Range() hcl.Range {
63 return n.SrcRange
64}
65
66func (n *booleanVal) StartRange() hcl.Range {
67 return n.SrcRange
68}
69
70type numberVal struct {
71 Value *big.Float
72 SrcRange hcl.Range
73}
74
75func (n *numberVal) Range() hcl.Range {
76 return n.SrcRange
77}
78
79func (n *numberVal) StartRange() hcl.Range {
80 return n.SrcRange
81}
82
83type stringVal struct {
84 Value string
85 SrcRange hcl.Range
86}
87
88func (n *stringVal) Range() hcl.Range {
89 return n.SrcRange
90}
91
92func (n *stringVal) StartRange() hcl.Range {
93 return n.SrcRange
94}
95
96type nullVal struct {
97 SrcRange hcl.Range
98}
99
100func (n *nullVal) Range() hcl.Range {
101 return n.SrcRange
102}
103
104func (n *nullVal) StartRange() hcl.Range {
105 return n.SrcRange
106}
107
108// invalidVal is used as a placeholder where a value is needed for a valid
109// parse tree but the input was invalid enough to prevent one from being
110// created.
111type invalidVal struct {
112 SrcRange hcl.Range
113}
114
115func (n invalidVal) Range() hcl.Range {
116 return n.SrcRange
117}
118
119func (n invalidVal) StartRange() hcl.Range {
120 return n.SrcRange
121}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/didyoumean.go b/vendor/github.com/hashicorp/hcl2/hcl/json/didyoumean.go
new file mode 100644
index 0000000..fbdd8bf
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/json/didyoumean.go
@@ -0,0 +1,33 @@
1package json
2
3import (
4 "github.com/agext/levenshtein"
5)
6
7var keywords = []string{"false", "true", "null"}
8
9// keywordSuggestion tries to find a valid JSON keyword that is close to the
10// given string and returns it if found. If no keyword is close enough, returns
11// the empty string.
12func keywordSuggestion(given string) string {
13 return nameSuggestion(given, keywords)
14}
15
16// nameSuggestion tries to find a name from the given slice of suggested names
17// that is close to the given name and returns it if found. If no suggestion
18// is close enough, returns the empty string.
19//
20// The suggestions are tried in order, so earlier suggestions take precedence
21// if the given string is similar to two or more suggestions.
22//
23// This function is intended to be used with a relatively-small number of
24// suggestions. It's not optimized for hundreds or thousands of them.
25func nameSuggestion(given string, suggestions []string) string {
26 for _, suggestion := range suggestions {
27 dist := levenshtein.Distance(given, suggestion, nil)
28 if dist < 3 { // threshold determined experimentally
29 return suggestion
30 }
31 }
32 return ""
33}
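Because these helpers are unexported, they can only be exercised from inside the package; a minimal in-package sketch (for example in a _test.go file), assuming the dist < 3 threshold above, might look like this:

package json

import "testing"

// A near-miss such as "flase" is within the distance threshold and suggests
// "false", while an unrelated word produces no suggestion at all.
func TestKeywordSuggestion(t *testing.T) {
	if got := keywordSuggestion("flase"); got != "false" {
		t.Errorf("keywordSuggestion(%q) = %q, want %q", "flase", got, "false")
	}
	if got := keywordSuggestion("banana"); got != "" {
		t.Errorf("keywordSuggestion(%q) = %q, want empty string", "banana", got)
	}
}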
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/doc.go b/vendor/github.com/hashicorp/hcl2/hcl/json/doc.go
new file mode 100644
index 0000000..4943f9b
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/json/doc.go
@@ -0,0 +1,8 @@
1// Package json is the JSON parser for HCL. It parses JSON files and returns
2// implementations of the core HCL structural interfaces in terms of the
3// JSON data inside.
4//
5// This is not a generic JSON parser. Instead, it deals with the mapping from
6// the JSON information model to the HCL information model, using a number
7// of hard-coded structural conventions.
8package json
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/navigation.go b/vendor/github.com/hashicorp/hcl2/hcl/json/navigation.go
new file mode 100644
index 0000000..bc8a97f
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/json/navigation.go
@@ -0,0 +1,70 @@
1package json
2
3import (
4 "fmt"
5 "strings"
6)
7
8type navigation struct {
9 root node
10}
11
12// Implementation of hcled.ContextString
13func (n navigation) ContextString(offset int) string {
14 steps := navigationStepsRev(n.root, offset)
15 if steps == nil {
16 return ""
17 }
18
19 // We built our slice backwards, so we'll reverse it in-place now.
20 half := len(steps) / 2 // integer division
21 for i := 0; i < half; i++ {
22 steps[i], steps[len(steps)-1-i] = steps[len(steps)-1-i], steps[i]
23 }
24
25 ret := strings.Join(steps, "")
26 if len(ret) > 0 && ret[0] == '.' {
27 ret = ret[1:]
28 }
29 return ret
30}
31
32func navigationStepsRev(v node, offset int) []string {
33 switch tv := v.(type) {
34 case *objectVal:
35 // Do any of our properties have an object that contains the target
36 // offset?
37 for _, attr := range tv.Attrs {
38 k := attr.Name
39 av := attr.Value
40
41 switch av.(type) {
42 case *objectVal, *arrayVal:
43 // okay
44 default:
45 continue
46 }
47
48 if av.Range().ContainsOffset(offset) {
49 return append(navigationStepsRev(av, offset), "."+k)
50 }
51 }
52 case *arrayVal:
53 // Do any of our elements contain the target offset?
54 for i, elem := range tv.Values {
55
56 switch elem.(type) {
57 case *objectVal, *arrayVal:
58 // okay
59 default:
60 continue
61 }
62
63 if elem.Range().ContainsOffset(offset) {
64 return append(navigationStepsRev(elem, offset), fmt.Sprintf("[%d]", i))
65 }
66 }
67 }
68
69 return nil
70}
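A rough in-package sketch of what ContextString yields; it relies on parseFileContent, which is defined in parser.go later in this diff:

package json

import "fmt"

// For an offset that lands inside the number 8080, ContextString reports the
// path of the containing value: the steps descend only into objects and
// arrays, so the result here is "servers[0]".
func exampleContextString() {
	src := []byte(`{"servers": [{"port": 8080}]}`)
	root, _ := parseFileContent(src, "example.tf.json")
	nav := navigation{root}
	fmt.Println(nav.ContextString(23)) // prints: servers[0]
}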
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/parser.go b/vendor/github.com/hashicorp/hcl2/hcl/json/parser.go
new file mode 100644
index 0000000..246fd1c
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/json/parser.go
@@ -0,0 +1,491 @@
1package json
2
3import (
4 "encoding/json"
5 "fmt"
6 "math/big"
7
8 "github.com/hashicorp/hcl2/hcl"
9)
10
11func parseFileContent(buf []byte, filename string) (node, hcl.Diagnostics) {
12 tokens := scan(buf, pos{
13 Filename: filename,
14 Pos: hcl.Pos{
15 Byte: 0,
16 Line: 1,
17 Column: 1,
18 },
19 })
20 p := newPeeker(tokens)
21 node, diags := parseValue(p)
22 if len(diags) == 0 && p.Peek().Type != tokenEOF {
23 diags = diags.Append(&hcl.Diagnostic{
24 Severity: hcl.DiagError,
25 Summary: "Extraneous data after value",
26 Detail: "Extra characters appear after the JSON value.",
27 Subject: p.Peek().Range.Ptr(),
28 })
29 }
30 return node, diags
31}
32
33func parseValue(p *peeker) (node, hcl.Diagnostics) {
34 tok := p.Peek()
35
36 wrapInvalid := func(n node, diags hcl.Diagnostics) (node, hcl.Diagnostics) {
37 if n != nil {
38 return n, diags
39 }
40 return invalidVal{tok.Range}, diags
41 }
42
43 switch tok.Type {
44 case tokenBraceO:
45 return wrapInvalid(parseObject(p))
46 case tokenBrackO:
47 return wrapInvalid(parseArray(p))
48 case tokenNumber:
49 return wrapInvalid(parseNumber(p))
50 case tokenString:
51 return wrapInvalid(parseString(p))
52 case tokenKeyword:
53 return wrapInvalid(parseKeyword(p))
54 case tokenBraceC:
55 return wrapInvalid(nil, hcl.Diagnostics{
56 {
57 Severity: hcl.DiagError,
58 Summary: "Missing attribute value",
59 Detail: "A JSON value must start with a brace, a bracket, a number, a string, or a keyword.",
60 Subject: &tok.Range,
61 },
62 })
63 case tokenBrackC:
64 return wrapInvalid(nil, hcl.Diagnostics{
65 {
66 Severity: hcl.DiagError,
67 Summary: "Missing array element value",
68 Detail: "A JSON value must start with a brace, a bracket, a number, a string, or a keyword.",
69 Subject: &tok.Range,
70 },
71 })
72 case tokenEOF:
73 return wrapInvalid(nil, hcl.Diagnostics{
74 {
75 Severity: hcl.DiagError,
76 Summary: "Missing value",
77 Detail: "The JSON data ends prematurely.",
78 Subject: &tok.Range,
79 },
80 })
81 default:
82 return wrapInvalid(nil, hcl.Diagnostics{
83 {
84 Severity: hcl.DiagError,
85 Summary: "Invalid start of value",
86 Detail: "A JSON value must start with a brace, a bracket, a number, a string, or a keyword.",
87 Subject: &tok.Range,
88 },
89 })
90 }
91}
92
93func tokenCanStartValue(tok token) bool {
94 switch tok.Type {
95 case tokenBraceO, tokenBrackO, tokenNumber, tokenString, tokenKeyword:
96 return true
97 default:
98 return false
99 }
100}
101
102func parseObject(p *peeker) (node, hcl.Diagnostics) {
103 var diags hcl.Diagnostics
104
105 open := p.Read()
106 attrs := []*objectAttr{}
107
108 // recover is used to shift the peeker to what seems to be the end of
109 // our object, so that when we encounter an error we leave the peeker
110 // at a reasonable point in the token stream to continue parsing.
111 recover := func(tok token) {
112 open := 1
113 for {
114 switch tok.Type {
115 case tokenBraceO:
116 open++
117 case tokenBraceC:
118 open--
119 if open <= 1 {
120 return
121 }
122 case tokenEOF:
123 // Ran out of source before we were able to recover,
124 // so we'll bail here and let the caller deal with it.
125 return
126 }
127 tok = p.Read()
128 }
129 }
130
131Token:
132 for {
133 if p.Peek().Type == tokenBraceC {
134 break Token
135 }
136
137 keyNode, keyDiags := parseValue(p)
138 diags = diags.Extend(keyDiags)
139 if keyNode == nil {
140 return nil, diags
141 }
142
143 keyStrNode, ok := keyNode.(*stringVal)
144 if !ok {
145 return nil, diags.Append(&hcl.Diagnostic{
146 Severity: hcl.DiagError,
147 Summary: "Invalid object attribute name",
148 Detail: "A JSON object attribute name must be a string",
149 Subject: keyNode.StartRange().Ptr(),
150 })
151 }
152
153 key := keyStrNode.Value
154
155 colon := p.Read()
156 if colon.Type != tokenColon {
157 recover(colon)
158
159 if colon.Type == tokenBraceC || colon.Type == tokenComma {
160 // Catch common mistake of using braces instead of brackets
161 // for an object.
162 return nil, diags.Append(&hcl.Diagnostic{
163 Severity: hcl.DiagError,
164 Summary: "Missing object value",
165 Detail: "A JSON object attribute must have a value, introduced by a colon.",
166 Subject: &colon.Range,
167 })
168 }
169
170 if colon.Type == tokenEquals {
171 // Possible confusion with native HCL syntax.
172 return nil, diags.Append(&hcl.Diagnostic{
173 Severity: hcl.DiagError,
174 Summary: "Missing attribute value colon",
175 Detail: "JSON uses a colon as its name/value delimiter, not an equals sign.",
176 Subject: &colon.Range,
177 })
178 }
179
180 return nil, diags.Append(&hcl.Diagnostic{
181 Severity: hcl.DiagError,
182 Summary: "Missing attribute value colon",
183 Detail: "A colon must appear between an object attribute's name and its value.",
184 Subject: &colon.Range,
185 })
186 }
187
188 valNode, valDiags := parseValue(p)
189 diags = diags.Extend(valDiags)
190 if valNode == nil {
191 return nil, diags
192 }
193
194 attrs = append(attrs, &objectAttr{
195 Name: key,
196 Value: valNode,
197 NameRange: keyStrNode.SrcRange,
198 })
199
200 switch p.Peek().Type {
201 case tokenComma:
202 comma := p.Read()
203 if p.Peek().Type == tokenBraceC {
204 // Special error message for this common mistake
205 return nil, diags.Append(&hcl.Diagnostic{
206 Severity: hcl.DiagError,
207 Summary: "Trailing comma in object",
208 Detail: "JSON does not permit a trailing comma after the final attribute in an object.",
209 Subject: &comma.Range,
210 })
211 }
212 continue Token
213 case tokenEOF:
214 return nil, diags.Append(&hcl.Diagnostic{
215 Severity: hcl.DiagError,
216 Summary: "Unclosed object",
217 Detail: "No closing brace was found for this JSON object.",
218 Subject: &open.Range,
219 })
220 case tokenBrackC:
221 // Consume the bracket anyway, so that we don't return with the peeker
222 // at a strange place.
223 p.Read()
224 return nil, diags.Append(&hcl.Diagnostic{
225 Severity: hcl.DiagError,
226 Summary: "Mismatched braces",
227 Detail: "A JSON object must be closed with a brace, not a bracket.",
228 Subject: p.Peek().Range.Ptr(),
229 })
230 case tokenBraceC:
231 break Token
232 default:
233 recover(p.Read())
234 return nil, diags.Append(&hcl.Diagnostic{
235 Severity: hcl.DiagError,
236 Summary: "Missing attribute separator comma",
237 Detail: "A comma must appear between each attribute declaration in an object.",
238 Subject: p.Peek().Range.Ptr(),
239 })
240 }
241
242 }
243
244 close := p.Read()
245 return &objectVal{
246 Attrs: attrs,
247 SrcRange: hcl.RangeBetween(open.Range, close.Range),
248 OpenRange: open.Range,
249 CloseRange: close.Range,
250 }, diags
251}
252
253func parseArray(p *peeker) (node, hcl.Diagnostics) {
254 var diags hcl.Diagnostics
255
256 open := p.Read()
257 vals := []node{}
258
259 // recover is used to shift the peeker to what seems to be the end of
260 // our array, so that when we encounter an error we leave the peeker
261 // at a reasonable point in the token stream to continue parsing.
262 recover := func(tok token) {
263 open := 1
264 for {
265 switch tok.Type {
266 case tokenBrackO:
267 open++
268 case tokenBrackC:
269 open--
270 if open <= 1 {
271 return
272 }
273 case tokenEOF:
274 // Ran out of source before we were able to recover,
275 // so we'll bail here and let the caller deal with it.
276 return
277 }
278 tok = p.Read()
279 }
280 }
281
282Token:
283 for {
284 if p.Peek().Type == tokenBrackC {
285 break Token
286 }
287
288 valNode, valDiags := parseValue(p)
289 diags = diags.Extend(valDiags)
290 if valNode == nil {
291 return nil, diags
292 }
293
294 vals = append(vals, valNode)
295
296 switch p.Peek().Type {
297 case tokenComma:
298 comma := p.Read()
299 if p.Peek().Type == tokenBrackC {
300 // Special error message for this common mistake
301 return nil, diags.Append(&hcl.Diagnostic{
302 Severity: hcl.DiagError,
303 Summary: "Trailing comma in array",
304 Detail: "JSON does not permit a trailing comma after the final value in an array.",
305 Subject: &comma.Range,
306 })
307 }
308 continue Token
309 case tokenColon:
310 recover(p.Read())
311 return nil, diags.Append(&hcl.Diagnostic{
312 Severity: hcl.DiagError,
313 Summary: "Invalid array value",
314 Detail: "A colon is not used to introduce values in a JSON array.",
315 Subject: p.Peek().Range.Ptr(),
316 })
317 case tokenEOF:
318 recover(p.Read())
319 return nil, diags.Append(&hcl.Diagnostic{
320 Severity: hcl.DiagError,
321 Summary: "Unclosed array",
322 Detail: "No closing bracket was found for this JSON array.",
323 Subject: &open.Range,
324 })
325 case tokenBraceC:
326 recover(p.Read())
327 return nil, diags.Append(&hcl.Diagnostic{
328 Severity: hcl.DiagError,
329 Summary: "Mismatched brackets",
330 Detail: "A JSON array must be closed with a bracket, not a brace.",
331 Subject: p.Peek().Range.Ptr(),
332 })
333 case tokenBrackC:
334 break Token
335 default:
336 recover(p.Read())
337 return nil, diags.Append(&hcl.Diagnostic{
338 Severity: hcl.DiagError,
339 Summary: "Missing attribute separator comma",
340 Detail: "A comma must appear between each value in an array.",
341 Subject: p.Peek().Range.Ptr(),
342 })
343 }
344
345 }
346
347 close := p.Read()
348 return &arrayVal{
349 Values: vals,
350 SrcRange: hcl.RangeBetween(open.Range, close.Range),
351 OpenRange: open.Range,
352 }, diags
353}
354
355func parseNumber(p *peeker) (node, hcl.Diagnostics) {
356 tok := p.Read()
357
358 // Use encoding/json to validate the number syntax.
359 // TODO: Do this more directly to produce better diagnostics.
360 var num json.Number
361 err := json.Unmarshal(tok.Bytes, &num)
362 if err != nil {
363 return nil, hcl.Diagnostics{
364 {
365 Severity: hcl.DiagError,
366 Summary: "Invalid JSON number",
367 Detail: fmt.Sprintf("There is a syntax error in the given JSON number."),
368 Subject: &tok.Range,
369 },
370 }
371 }
372
373 f, _, err := big.ParseFloat(string(num), 10, 512, big.ToNearestEven)
374 if err != nil {
375 // Should never happen if above passed, since JSON numbers are a subset
376 // of what big.Float can parse...
377 return nil, hcl.Diagnostics{
378 {
379 Severity: hcl.DiagError,
380 Summary: "Invalid JSON number",
381 Detail: fmt.Sprintf("There is a syntax error in the given JSON number."),
382 Subject: &tok.Range,
383 },
384 }
385 }
386
387 return &numberVal{
388 Value: f,
389 SrcRange: tok.Range,
390 }, nil
391}
392
393func parseString(p *peeker) (node, hcl.Diagnostics) {
394 tok := p.Read()
395 var str string
396 err := json.Unmarshal(tok.Bytes, &str)
397
398 if err != nil {
399 var errRange hcl.Range
400 if serr, ok := err.(*json.SyntaxError); ok {
401 errOfs := serr.Offset
402 errPos := tok.Range.Start
403 errPos.Byte += int(errOfs)
404
405 // TODO: Use the byte offset to properly count unicode
406 // characters for the column, and mark the whole of the
407 // character that was wrong as part of our range.
408 errPos.Column += int(errOfs)
409
410 errEndPos := errPos
411 errEndPos.Byte++
412 errEndPos.Column++
413
414 errRange = hcl.Range{
415 Filename: tok.Range.Filename,
416 Start: errPos,
417 End: errEndPos,
418 }
419 } else {
420 errRange = tok.Range
421 }
422
423 var contextRange *hcl.Range
424 if errRange != tok.Range {
425 contextRange = &tok.Range
426 }
427
428 // FIXME: Eventually we should parse strings directly here so
429 // we can produce a more useful error message in the face of things
430 // such as invalid escapes, etc.
431 return nil, hcl.Diagnostics{
432 {
433 Severity: hcl.DiagError,
434 Summary: "Invalid JSON string",
435 Detail: fmt.Sprintf("There is a syntax error in the given JSON string."),
436 Subject: &errRange,
437 Context: contextRange,
438 },
439 }
440 }
441
442 return &stringVal{
443 Value: str,
444 SrcRange: tok.Range,
445 }, nil
446}
447
448func parseKeyword(p *peeker) (node, hcl.Diagnostics) {
449 tok := p.Read()
450 s := string(tok.Bytes)
451
452 switch s {
453 case "true":
454 return &booleanVal{
455 Value: true,
456 SrcRange: tok.Range,
457 }, nil
458 case "false":
459 return &booleanVal{
460 Value: false,
461 SrcRange: tok.Range,
462 }, nil
463 case "null":
464 return &nullVal{
465 SrcRange: tok.Range,
466 }, nil
467 case "undefined", "NaN", "Infinity":
468 return nil, hcl.Diagnostics{
469 {
470 Severity: hcl.DiagError,
471 Summary: "Invalid JSON keyword",
472 Detail: fmt.Sprintf("The JavaScript identifier %q cannot be used in JSON.", s),
473 Subject: &tok.Range,
474 },
475 }
476 default:
477 var dym string
478 if suggest := keywordSuggestion(s); suggest != "" {
479 dym = fmt.Sprintf(" Did you mean %q?", suggest)
480 }
481
482 return nil, hcl.Diagnostics{
483 {
484 Severity: hcl.DiagError,
485 Summary: "Invalid JSON keyword",
486 Detail: fmt.Sprintf("%q is not a valid JSON keyword.%s", s, dym),
487 Subject: &tok.Range,
488 },
489 }
490 }
491}
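One consequence of the special-cased diagnostics above is that HCL-native syntax fed to the JSON parser yields a targeted message rather than a generic error; a small in-package sketch of that behaviour:

package json

import "fmt"

// Using an equals sign instead of a colon (a native HCL habit) trips the
// tokenEquals branch in parseObject and produces its dedicated diagnostic.
func exampleEqualsDiagnostic() {
	src := []byte(`{"region" = "us-east-1"}`)
	_, diags := parseFileContent(src, "example.tf.json")
	for _, diag := range diags {
		fmt.Println(diag.Summary) // "Missing attribute value colon"
	}
}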
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/peeker.go b/vendor/github.com/hashicorp/hcl2/hcl/json/peeker.go
new file mode 100644
index 0000000..fc7bbf5
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/json/peeker.go
@@ -0,0 +1,25 @@
1package json
2
3type peeker struct {
4 tokens []token
5 pos int
6}
7
8func newPeeker(tokens []token) *peeker {
9 return &peeker{
10 tokens: tokens,
11 pos: 0,
12 }
13}
14
15func (p *peeker) Peek() token {
16 return p.tokens[p.pos]
17}
18
19func (p *peeker) Read() token {
20 ret := p.tokens[p.pos]
21 if ret.Type != tokenEOF {
22 p.pos++
23 }
24 return ret
25}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/public.go b/vendor/github.com/hashicorp/hcl2/hcl/json/public.go
new file mode 100644
index 0000000..2728aa1
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/json/public.go
@@ -0,0 +1,94 @@
1package json
2
3import (
4 "fmt"
5 "io/ioutil"
6 "os"
7
8 "github.com/hashicorp/hcl2/hcl"
9)
10
11// Parse attempts to parse the given buffer as JSON and, if successful, returns
12// a hcl.File for the HCL configuration represented by it.
13//
14// This is not a generic JSON parser. Instead, it deals only with the profile
15// of JSON used to express HCL configuration.
16//
17// The returned file is valid only if the returned diagnostics returns false
18// from its HasErrors method. If HasErrors returns true, the file represents
19// the subset of data that was able to be parsed, which may be none.
20func Parse(src []byte, filename string) (*hcl.File, hcl.Diagnostics) {
21 rootNode, diags := parseFileContent(src, filename)
22
23 switch rootNode.(type) {
24 case *objectVal, *arrayVal:
25 // okay
26 default:
27 diags = diags.Append(&hcl.Diagnostic{
28 Severity: hcl.DiagError,
29 Summary: "Root value must be object",
30 Detail: "The root value in a JSON-based configuration must be either a JSON object or a JSON array of objects.",
31 Subject: rootNode.StartRange().Ptr(),
32 })
33
34 // Since we've already produced an error message for this being
35 // invalid, we'll return an empty placeholder here so that trying to
36 // extract content from our root body won't produce a redundant
37 // error saying the same thing again in more general terms.
38 fakePos := hcl.Pos{
39 Byte: 0,
40 Line: 1,
41 Column: 1,
42 }
43 fakeRange := hcl.Range{
44 Filename: filename,
45 Start: fakePos,
46 End: fakePos,
47 }
48 rootNode = &objectVal{
49 Attrs: []*objectAttr{},
50 SrcRange: fakeRange,
51 OpenRange: fakeRange,
52 }
53 }
54
55 file := &hcl.File{
56 Body: &body{
57 val: rootNode,
58 },
59 Bytes: src,
60 Nav: navigation{rootNode},
61 }
62 return file, diags
63}
64
65// ParseFile is a convenience wrapper around Parse that first attempts to load
66// data from the given filename, passing the result to Parse if successful.
67//
68// If the file cannot be read, an error diagnostic with nil context is returned.
69func ParseFile(filename string) (*hcl.File, hcl.Diagnostics) {
70 f, err := os.Open(filename)
71 if err != nil {
72 return nil, hcl.Diagnostics{
73 {
74 Severity: hcl.DiagError,
75 Summary: "Failed to open file",
76 Detail: fmt.Sprintf("The file %q could not be opened.", filename),
77 },
78 }
79 }
80 defer f.Close()
81
82 src, err := ioutil.ReadAll(f)
83 if err != nil {
84 return nil, hcl.Diagnostics{
85 {
86 Severity: hcl.DiagError,
87 Summary: "Failed to read file",
88 Detail: fmt.Sprintf("The file %q was opened, but an error occurred while reading it.", filename),
89 },
90 }
91 }
92
93 return Parse(src, filename)
94}
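
As a rough sketch of how a caller might use the Parse entry point above (the attribute name, file name, and schema here are hypothetical, not part of the vendored code):

```go
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/json"
)

func main() {
	src := []byte(`{"greeting": "hello"}`)

	// Parse the buffer as JSON-flavored HCL.
	file, diags := json.Parse(src, "example.hcl.json")
	if diags.HasErrors() {
		log.Fatalf("parse errors: %s", diags.Error())
	}

	// Ask the body for a single, hypothetical attribute named "greeting".
	schema := &hcl.BodySchema{
		Attributes: []hcl.AttributeSchema{{Name: "greeting", Required: true}},
	}
	content, diags := file.Body.Content(schema)
	if diags.HasErrors() {
		log.Fatalf("decode errors: %s", diags.Error())
	}

	// A nil EvalContext means literal-only mode: no template interpolation.
	val, diags := content.Attributes["greeting"].Expr.Value(nil)
	if diags.HasErrors() {
		log.Fatalf("eval errors: %s", diags.Error())
	}
	fmt.Println(val.AsString()) // hello
}
```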
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/scanner.go b/vendor/github.com/hashicorp/hcl2/hcl/json/scanner.go
new file mode 100644
index 0000000..0a8378b
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/json/scanner.go
@@ -0,0 +1,293 @@
1package json
2
3import (
4 "fmt"
5
6 "github.com/apparentlymart/go-textseg/textseg"
7 "github.com/hashicorp/hcl2/hcl"
8)
9
10//go:generate stringer -type tokenType scanner.go
11type tokenType rune
12
13const (
14 tokenBraceO tokenType = '{'
15 tokenBraceC tokenType = '}'
16 tokenBrackO tokenType = '['
17 tokenBrackC tokenType = ']'
18 tokenComma tokenType = ','
19 tokenColon tokenType = ':'
20 tokenKeyword tokenType = 'K'
21 tokenString tokenType = 'S'
22 tokenNumber tokenType = 'N'
23 tokenEOF tokenType = '␄'
24 tokenInvalid tokenType = 0
25 tokenEquals tokenType = '=' // used only for reminding the user of JSON syntax
26)
27
28type token struct {
29 Type tokenType
30 Bytes []byte
31 Range hcl.Range
32}
33
34// scan returns the primary tokens for the given JSON buffer in sequence.
35//
36// The responsibility of this pass is to just mark the slices of the buffer
37// as being of various types. It is lax in how it interprets the multi-byte
38// token types keyword, string and number, preferring to capture erroneous
39// extra bytes that we presume the user intended to be part of the token
40// so that we can generate more helpful diagnostics in the parser.
41func scan(buf []byte, start pos) []token {
42 var tokens []token
43 p := start
44 for {
45 if len(buf) == 0 {
46 tokens = append(tokens, token{
47 Type: tokenEOF,
48 Bytes: nil,
49 Range: posRange(p, p),
50 })
51 return tokens
52 }
53
54 buf, p = skipWhitespace(buf, p)
55
56 if len(buf) == 0 {
57 tokens = append(tokens, token{
58 Type: tokenEOF,
59 Bytes: nil,
60 Range: posRange(p, p),
61 })
62 return tokens
63 }
64
65 start = p
66
67 first := buf[0]
68 switch {
69 case first == '{' || first == '}' || first == '[' || first == ']' || first == ',' || first == ':' || first == '=':
70 p.Pos.Column++
71 p.Pos.Byte++
72 tokens = append(tokens, token{
73 Type: tokenType(first),
74 Bytes: buf[0:1],
75 Range: posRange(start, p),
76 })
77 buf = buf[1:]
78 case first == '"':
79 var tokBuf []byte
80 tokBuf, buf, p = scanString(buf, p)
81 tokens = append(tokens, token{
82 Type: tokenString,
83 Bytes: tokBuf,
84 Range: posRange(start, p),
85 })
86 case byteCanStartNumber(first):
87 var tokBuf []byte
88 tokBuf, buf, p = scanNumber(buf, p)
89 tokens = append(tokens, token{
90 Type: tokenNumber,
91 Bytes: tokBuf,
92 Range: posRange(start, p),
93 })
94 case byteCanStartKeyword(first):
95 var tokBuf []byte
96 tokBuf, buf, p = scanKeyword(buf, p)
97 tokens = append(tokens, token{
98 Type: tokenKeyword,
99 Bytes: tokBuf,
100 Range: posRange(start, p),
101 })
102 default:
103 tokens = append(tokens, token{
104 Type: tokenInvalid,
105 Bytes: buf[:1],
106 Range: start.Range(1, 1),
107 })
108 // If we've encountered an invalid byte then we might as well stop
109 // scanning since the parser won't proceed beyond this point.
110 return tokens
111 }
112 }
113}
114
115func byteCanStartNumber(b byte) bool {
116 switch b {
117 // We are slightly more tolerant than JSON requires here since we
118 // expect the parser will make a stricter interpretation of the
119 // number bytes, but we specifically don't allow 'e' or 'E' here
120 // since we want the scanner to treat that as the start of an
121 // invalid keyword instead, to produce more intelligible error messages.
122 case '-', '+', '.', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
123 return true
124 default:
125 return false
126 }
127}
128
129func scanNumber(buf []byte, start pos) ([]byte, []byte, pos) {
130 // The scanner doesn't check that the sequence of digit-ish bytes is
131 // in a valid order. The parser must do this when decoding a number
132 // token.
133 var i int
134 p := start
135Byte:
136 for i = 0; i < len(buf); i++ {
137 switch buf[i] {
138 case '-', '+', '.', 'e', 'E', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
139 p.Pos.Byte++
140 p.Pos.Column++
141 default:
142 break Byte
143 }
144 }
145 return buf[:i], buf[i:], p
146}
147
148func byteCanStartKeyword(b byte) bool {
149 switch {
150 // We allow any sequence of alphabetical characters here, even though
151 // JSON is more constrained, so that we can collect what we presume
152 // the user intended to be a single keyword and then check its validity
153 // in the parser, where we can generate better diagnostics.
154 // So e.g. we want to be able to say:
155 // unrecognized keyword "True". Did you mean "true"?
156 case (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z'):
157 return true
158 default:
159 return false
160 }
161}
162
163func scanKeyword(buf []byte, start pos) ([]byte, []byte, pos) {
164 var i int
165 p := start
166Byte:
167 for i = 0; i < len(buf); i++ {
168 b := buf[i]
169 switch {
170 case (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || b == '_':
171 p.Pos.Byte++
172 p.Pos.Column++
173 default:
174 break Byte
175 }
176 }
177 return buf[:i], buf[i:], p
178}
179
180func scanString(buf []byte, start pos) ([]byte, []byte, pos) {
181 // The scanner doesn't validate correct use of escapes, etc. It pays
182 // attention to escapes only for the purpose of identifying the closing
183 // quote character. It's the parser's responsibility to do proper
184 // validation.
185 //
186 // The scanner also doesn't specifically detect unterminated string
187 // literals, though they can be identified in the parser by checking if
188 // the final byte in a string token is the double-quote character.
189
190 // Skip the opening quote symbol
191 i := 1
192 p := start
193 p.Pos.Byte++
194 p.Pos.Column++
195 escaping := false
196Byte:
197 for i < len(buf) {
198 b := buf[i]
199
200 switch {
201 case b == '\\':
202 escaping = !escaping
203 p.Pos.Byte++
204 p.Pos.Column++
205 i++
206 case b == '"':
207 p.Pos.Byte++
208 p.Pos.Column++
209 i++
210 if !escaping {
211 break Byte
212 }
213 escaping = false
214 case b < 32:
215 break Byte
216 default:
217 // Advance by one grapheme cluster, so that we consider each
218 // grapheme to be a "column".
219 // Ignoring error because this scanner cannot produce errors.
220 advance, _, _ := textseg.ScanGraphemeClusters(buf[i:], true)
221
222 p.Pos.Byte += advance
223 p.Pos.Column++
224 i += advance
225
226 escaping = false
227 }
228 }
229 return buf[:i], buf[i:], p
230}
231
232func skipWhitespace(buf []byte, start pos) ([]byte, pos) {
233 var i int
234 p := start
235Byte:
236 for i = 0; i < len(buf); i++ {
237 switch buf[i] {
238 case ' ':
239 p.Pos.Byte++
240 p.Pos.Column++
241 case '\n':
242 p.Pos.Byte++
243 p.Pos.Column = 1
244 p.Pos.Line++
245 case '\r':
246 // For the purpose of line/column counting we consider a
247 // carriage return to take up no space, assuming that it will
248 // be paired up with a newline (on Windows, for example) that
249 // will account for both of them.
250 p.Pos.Byte++
251 case '\t':
252 // We arbitrarily count a tab as if it were two spaces, because
253 // we need to choose _some_ number here. This means any system
254 // that renders code on-screen with markers must itself treat
255 // tabs as a pair of spaces for rendering purposes, or instead
256 // use the byte offset and back into its own column position.
257 p.Pos.Byte++
258 p.Pos.Column += 2
259 default:
260 break Byte
261 }
262 }
263 return buf[i:], p
264}
265
266type pos struct {
267 Filename string
268 Pos hcl.Pos
269}
270
271func (p *pos) Range(byteLen, charLen int) hcl.Range {
272 start := p.Pos
273 end := p.Pos
274 end.Byte += byteLen
275 end.Column += charLen
276 return hcl.Range{
277 Filename: p.Filename,
278 Start: start,
279 End: end,
280 }
281}
282
283func posRange(start, end pos) hcl.Range {
284 return hcl.Range{
285 Filename: start.Filename,
286 Start: start.Pos,
287 End: end.Pos,
288 }
289}
290
291func (t token) GoString() string {
292 return fmt.Sprintf("json.token{json.%s, []byte(%q), %#v}", t.Type, t.Bytes, t.Range)
293}
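
Since scan, token, and pos are unexported, a usage illustration has to live in the same package; the following hypothetical in-package test sketch (not part of the vendored file) shows the token stream produced for a small input:

```go
package json

import (
	"testing"

	"github.com/hashicorp/hcl2/hcl"
)

func TestScanSketch(t *testing.T) {
	src := []byte(`{"a": true}`)
	start := pos{
		Filename: "sketch.json",
		Pos:      hcl.Pos{Line: 1, Column: 1, Byte: 0},
	}

	toks := scan(src, start)

	// Expected token types in order; the scanner always ends with tokenEOF.
	want := []tokenType{
		tokenBraceO, tokenString, tokenColon, tokenKeyword, tokenBraceC, tokenEOF,
	}
	if len(toks) != len(want) {
		t.Fatalf("got %d tokens, want %d", len(toks), len(want))
	}
	for i, tok := range toks {
		if tok.Type != want[i] {
			t.Errorf("token %d: got %s, want %s", i, tok.Type, want[i])
		}
	}
}
```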
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/spec.md b/vendor/github.com/hashicorp/hcl2/hcl/json/spec.md
new file mode 100644
index 0000000..9b33c7f
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/json/spec.md
@@ -0,0 +1,405 @@
1# HCL JSON Syntax Specification
2
3This is the specification for the JSON serialization for hcl. HCL is a system
4for defining configuration languages for applications. The HCL information
5model is designed to support multiple concrete syntaxes for configuration,
6and this JSON-based format complements [the native syntax](../hclsyntax/spec.md)
7by being easy to machine-generate, whereas the native syntax is oriented
8towards human authoring and maintenance.
9
10This syntax is defined in terms of JSON as defined in
11[RFC7159](https://tools.ietf.org/html/rfc7159). As such it inherits the JSON
12grammar as-is, and merely defines a specific methodology for interpreting
13JSON constructs into HCL structural elements and expressions.
14
15This mapping is defined such that valid JSON-serialized HCL input can be
16_produced_ using standard JSON implementations in various programming languages.
17_Parsing_ such JSON has some additional constraints beyond what is normally
18supported by JSON parsers, so a specialized parser may be required that
19is able to:
20
21* Preserve the relative ordering of properties defined in an object.
22* Preserve multiple definitions of the same property name.
23* Preserve numeric values to the precision required by the number type
24 in [the HCL syntax-agnostic information model](../spec.md).
25* Retain source location information for parsed tokens/constructs in order
26 to produce good error messages.
27
28## Structural Elements
29
30[The HCL syntax-agnostic information model](../spec.md) defines a _body_ as an
31abstract container for attribute definitions and child blocks. A body is
32represented in JSON as either a single JSON object or a JSON array of objects.
33
34Body processing is in terms of JSON object properties, visited in the order
35they appear in the input. Where a body is represented by a single JSON object,
36the properties of that object are visited in order. Where a body is
37represented by a JSON array, each of its elements are visited in order and
38each element has its properties visited in order. If any element of the array
39is not a JSON object then the input is erroneous.
40
41When a body is being processed in the _dynamic attributes_ mode, the allowance
42of a JSON array in the previous paragraph does not apply and instead a single
43JSON object is always required.
44
45As defined in the language-agnostic model, body processing is in terms
46of a schema which provides context for interpreting the body's content. For
47JSON bodies, the schema is crucial to allow differentiation of attribute
48definitions and block definitions, both of which are represented via object
49properties.
50
51The special property name `"//"`, when used in an object representing a HCL
52body, is parsed and ignored. A property with this name can be used to
53include human-readable comments. (This special property name is _not_
54processed in this way for any _other_ HCL constructs that are represented as
55JSON objects.)
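
For example, a body-level comment might look like this (an illustrative snippet; the `io_mode` attribute name is hypothetical and not defined by this specification):

```json
{
  "//": "This property is ignored, so it can carry a human-readable note.",
  "io_mode": "async"
}
```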
56
57### Attributes
58
59Where the given schema describes an attribute with a given name, the object
60property with the matching name — if present — serves as the attribute's
61definition.
62
63When a body is being processed in the _dynamic attributes_ mode, each object
64property serves as an attribute definition for the attribute whose name
65matches the property name.
66
67The value of an attribute definition property is interpreted as an _expression_,
68as described in a later section.
69
70Given a schema that calls for an attribute named "foo", a JSON object like
71the following provides a definition for that attribute:
72
73```json
74{
75 "foo": "bar baz"
76}
77```
78
79### Blocks
80
81Where the given schema describes a block with a given type name, each object
82property with the matching name serves as a definition of zero or more blocks
83of that type.
84
85Processing of child blocks is in terms of nested JSON objects and arrays.
86If the schema defines one or more _labels_ for the block type, a nested JSON
87object or JSON array of objects is required for each labelling level. These
88are flattened to a single ordered sequence of object properties using the
89same algorithm as for body content as defined above. Each object property
90serves as a label value at the corresponding level.
91
92After any labelling levels, the next nested value is either a JSON object
93representing a single block body, or a JSON array of JSON objects that each
94represent a single block body. Use of an array accommodates the definition
95of multiple blocks that have identical type and labels.
96
97Given a schema that calls for a block type named "foo" with no labels, the
98following JSON objects are all valid definitions of zero or more blocks of this
99type:
100
101```json
102{
103 "foo": {
104 "child_attr": "baz"
105 }
106}
107```
108
109```json
110{
111 "foo": [
112 {
113 "child_attr": "baz"
114 },
115 {
116 "child_attr": "boz"
117 }
118 ]
119}
120```
121```json
122{
123 "foo": []
124}
125```
126
127The first of these defines a single child block of type "foo". The second
128defines _two_ such blocks. The final example shows a degenerate definition
129of zero blocks, though generators should prefer to omit the property entirely
130in this scenario.
131
132Given a schema that calls for a block type named "foo" with _two_ labels, the
133extra label levels must be represented as objects or arrays of objects as in
134the following examples:
135
136```json
137{
138 "foo": {
139 "bar": {
140 "baz": {
141 "child_attr": "baz"
142 },
143 "boz": {
144 "child_attr": "baz"
145 }
146 },
147 "boz": {
148 "baz": {
149 "child_attr": "baz"
150 }
151 }
152 }
153}
154```
155
156```json
157{
158 "foo": {
159 "bar": {
160 "baz": {
161 "child_attr": "baz"
162 },
163 "boz": {
164 "child_attr": "baz"
165 }
166 },
167 "boz": {
168 "baz": [
169 {
170 "child_attr": "baz"
171 },
172 {
173 "child_attr": "boz"
174 }
175 ]
176 }
177 }
178}
179```
180
181```json
182{
183 "foo": [
184 {
185 "bar": {
186 "baz": {
187 "child_attr": "baz"
188 },
189 "boz": {
190 "child_attr": "baz"
191 }
192 }
193 },
194 {
195 "bar": {
196 "baz": [
197 {
198 "child_attr": "baz"
199 },
200 {
201 "child_attr": "boz"
202 }
203 ]
204 }
205 }
206 ]
207}
208```
209
210```json
211{
212 "foo": {
213 "bar": {
214 "baz": {
215 "child_attr": "baz"
216 },
217 "boz": {
218 "child_attr": "baz"
219 }
220 },
221 "bar": {
222 "baz": [
223 {
224 "child_attr": "baz"
225 },
226 {
227 "child_attr": "boz"
228 }
229 ]
230 }
231 }
232}
233```
234
235Arrays can be introduced at either the label definition or block body
236definition levels to define multiple definitions of the same block type
237or labels while preserving order.
238
239A JSON HCL parser _must_ support duplicate definitions of the same property
240name within a single object, preserving all of them and the relative ordering
241between them. The array-based forms are also required so that JSON HCL
242configurations can be produced with JSON producing libraries that are not
243able to preserve property definition order and multiple definitions of
244the same property.
245
246## Expressions
247
248JSON lacks a native expression syntax, so the HCL JSON syntax instead defines
249a mapping for each of the JSON value types, including a special mapping for
250strings that allows optional use of arbitrary expressions.
251
252### Objects
253
254When interpreted as an expression, a JSON object represents a value of a HCL
255object type.
256
257Each property of the JSON object represents an attribute of the HCL object type.
258The property name string given in the JSON input is interpreted as a string
259expression as described below, and its result is converted to string as defined
260by the syntax-agnostic information model. If such a conversion is not possible,
261an error is produced and evaluation fails.
262
263An instance of the constructed object type is then created, whose values
264are interpreted by again recursively applying the mapping rules defined in
265this section to each of the property values.
266
267If any evaluated property name strings produce null values, an error is
268produced and evaluation fails. If any produce _unknown_ values, the _entire
269object's_ result is an unknown value of the dynamic pseudo-type, signalling
270that the type of the object cannot be determined.
271
272It is an error to define the same property name multiple times within a single
273JSON object interpreted as an expression. In full expression mode, this
274constraint applies to the name expression results after conversion to string,
275rather than the raw string that may contain interpolation expressions.
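
As an illustration (not taken from the specification itself), in full expression mode an object key may itself contain a template; the variable name `env` below is hypothetical:

```json
{
  "${env}-replicas": 3
}
```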
276
277### Arrays
278
279When interpreted as an expression, a JSON array represents a value of a HCL
280tuple type.
281
282Each element of the JSON array represents an element of the HCL tuple type.
283The tuple type is constructed by enumerating the JSON array elements, creating
284for each an element whose type is the result of recursively applying the
285expression mapping rules. Correspondence is preserved between the array element
286indices and the tuple element indices.
287
288An instance of the constructed tuple type is then created, whose values are
289interpreted by again recursively applying the mapping rules defined in this
290section.
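
For example (an illustrative value, not drawn from the specification), the following array expression produces a tuple whose element types are string, number, and bool respectively:

```json
["a", 1, true]
```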
291
292### Numbers
293
294When interpreted as an expression, a JSON number represents a HCL number value.
295
296HCL numbers are arbitrary-precision decimal values, so a JSON HCL parser must
297be able to translate exactly the value given to a number of corresponding
298precision, within the constraints set by the HCL syntax-agnostic information
299model.
300
301In practice, off-the-shelf JSON serializers often do not support customizing the
302processing of numbers, and instead force processing as 32-bit or 64-bit
303floating point values.
304
305A _producer_ of JSON HCL that uses such a serializer can provide numeric values
306as JSON strings where they have precision too great for representation in the
307serializer's chosen numeric type in situations where the result will be
308converted to number (using the standard conversion rules) by a calling
309application.
310
311Alternatively, for expressions that are evaluated in full expression mode an
312embedded template interpolation can be used to faithfully represent a number,
313such as `"${1e150}"`, which will then be evaluated by the underlying HCL native
314syntax expression evaluator.
315
316### Boolean Values
317
318The JSON boolean values `true` and `false`, when interpreted as expressions,
319represent the corresponding HCL boolean values.
320
321### The Null Value
322
323The JSON value `null`, when interpreted as an expression, represents a
324HCL null value of the dynamic pseudo-type.
325
326### Strings
327
328When interpreted as an expression, a JSON string may be interpreted in one of
329two ways depending on the evaluation mode.
330
331If evaluating in literal-only mode (as defined by the syntax-agnostic
332information model) the literal string is interpreted directly as a HCL string
333value, by directly using the exact sequence of unicode characters represented.
334Template interpolations and directives MUST NOT be processed in this mode,
335allowing any characters that appear as introduction sequences to pass through
336literally:
337
338```json
339"Hello world! Template sequences like ${ are not intepreted here."
340```
341
342When evaluating in full expression mode (again, as defined by the syntax-
343agnostic information model) the literal string is instead interpreted as a
344_standalone template_ in the HCL Native Syntax. The expression evaluation
345result is then the direct result of evaluating that template with the current
346variable scope and function table.
347
348```json
349"Hello, ${name}! Template sequences are interpreted in full expression mode."
350```
351
352In particular the _Template Interpolation Unwrapping_ requirement from the
353HCL native syntax specification must be implemented, allowing the use of
354single-interpolation templates to represent expressions that would not
355otherwise be representable in JSON, such as the following example where
356the result must be a number, rather than a string representation of a number:
357
358```json
359"${ a + b }"
360```
361
362## Static Analysis
363
364The HCL static analysis operations are implemented for JSON values that
365represent expressions, as described in the following sections.
366
367Due to the limited expressive power of the JSON syntax alone, use of these
368static analysis functions rather than normal expression evaluation serves
369as additional context for how a JSON value is to be interpreted, which means
370that static analysis can result in a different interpretation of a given
371expression than normal evaluation.
372
373### Static List
374
375An expression interpreted as a static list must be a JSON array. Each of the
376values in the array is interpreted as an expression and returned.
377
378### Static Map
379
380An expression interpreted as a static map must be a JSON object. Each of the
381key/value pairs in the object is presented as a pair of expressions. Since
382object property names are always strings, evaluating the key expression with
383a non-`nil` evaluation context will evaluate any template sequences given
384in the property name.
385
386### Static Call
387
388An expression interpreted as a static call must be a string. The content of
389the string is interpreted as a native syntax expression (not a _template_,
390unlike normal evaluation) and then the static call analysis is delegated to
391that expression.
392
393If the original expression is not a string or its contents cannot be parsed
394as a native syntax expression then static call analysis is not supported.
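
For example (an illustrative snippet; the `concat` function and its arguments are hypothetical), the following string would be analyzed as a call to `concat` with two arguments:

```json
"concat(a, b)"
```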
395
396### Static Traversal
397
398An expression interpreted as a static traversal must be a string. The content
399of the string is interpreted as a native syntax expression (not a _template_,
400unlike normal evaluation) and then static traversal analysis is delegated
401to that expression.
402
403If the original expression is not a string or its contents cannot be parsed
404as a native syntax expression then static traversal analysis is not supported.
405
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/structure.go b/vendor/github.com/hashicorp/hcl2/hcl/json/structure.go
new file mode 100644
index 0000000..28dcf52
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/json/structure.go
@@ -0,0 +1,616 @@
1package json
2
3import (
4 "fmt"
5
6 "github.com/hashicorp/hcl2/hcl"
7 "github.com/hashicorp/hcl2/hcl/hclsyntax"
8 "github.com/zclconf/go-cty/cty"
9 "github.com/zclconf/go-cty/cty/convert"
10)
11
12// body is the implementation of "Body" used for files processed with the JSON
13// parser.
14type body struct {
15 val node
16
17 // If non-nil, the keys of this map cause the corresponding attributes to
18 // be treated as non-existing. This is used when Body.PartialContent is
19 // called, to produce the "remaining content" Body.
20 hiddenAttrs map[string]struct{}
21}
22
23// expression is the implementation of "Expression" used for files processed
24// with the JSON parser.
25type expression struct {
26 src node
27}
28
29func (b *body) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostics) {
30 content, newBody, diags := b.PartialContent(schema)
31
32 hiddenAttrs := newBody.(*body).hiddenAttrs
33
34 var nameSuggestions []string
35 for _, attrS := range schema.Attributes {
36 if _, ok := hiddenAttrs[attrS.Name]; !ok {
37 // Only suggest an attribute name if we didn't use it already.
38 nameSuggestions = append(nameSuggestions, attrS.Name)
39 }
40 }
41 for _, blockS := range schema.Blocks {
42 // Blocks can appear multiple times, so we'll suggest their type
43 // names regardless of whether they've already been used.
44 nameSuggestions = append(nameSuggestions, blockS.Type)
45 }
46
47 jsonAttrs, attrDiags := b.collectDeepAttrs(b.val, nil)
48 diags = append(diags, attrDiags...)
49
50 for _, attr := range jsonAttrs {
51 k := attr.Name
52 if k == "//" {
53 // Ignore "//" keys in objects representing bodies, to allow
54 // their use as comments.
55 continue
56 }
57
58 if _, ok := hiddenAttrs[k]; !ok {
59 suggestion := nameSuggestion(k, nameSuggestions)
60 if suggestion != "" {
61 suggestion = fmt.Sprintf(" Did you mean %q?", suggestion)
62 }
63
64 diags = append(diags, &hcl.Diagnostic{
65 Severity: hcl.DiagError,
66 Summary: "Extraneous JSON object property",
67 Detail: fmt.Sprintf("No attribute or block type is named %q.%s", k, suggestion),
68 Subject: &attr.NameRange,
69 Context: attr.Range().Ptr(),
70 })
71 }
72 }
73
74 return content, diags
75}
76
77func (b *body) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) {
78 var diags hcl.Diagnostics
79
80 jsonAttrs, attrDiags := b.collectDeepAttrs(b.val, nil)
81 diags = append(diags, attrDiags...)
82
83 usedNames := map[string]struct{}{}
84 if b.hiddenAttrs != nil {
85 for k := range b.hiddenAttrs {
86 usedNames[k] = struct{}{}
87 }
88 }
89
90 content := &hcl.BodyContent{
91 Attributes: map[string]*hcl.Attribute{},
92 Blocks: nil,
93
94 MissingItemRange: b.MissingItemRange(),
95 }
96
97 // Create some more convenient data structures for our work below.
98 attrSchemas := map[string]hcl.AttributeSchema{}
99 blockSchemas := map[string]hcl.BlockHeaderSchema{}
100 for _, attrS := range schema.Attributes {
101 attrSchemas[attrS.Name] = attrS
102 }
103 for _, blockS := range schema.Blocks {
104 blockSchemas[blockS.Type] = blockS
105 }
106
107 for _, jsonAttr := range jsonAttrs {
108 attrName := jsonAttr.Name
109 if _, used := b.hiddenAttrs[attrName]; used {
110 continue
111 }
112
113 if attrS, defined := attrSchemas[attrName]; defined {
114 if existing, exists := content.Attributes[attrName]; exists {
115 diags = append(diags, &hcl.Diagnostic{
116 Severity: hcl.DiagError,
117 Summary: "Duplicate attribute definition",
118 Detail: fmt.Sprintf("The attribute %q was already defined at %s.", attrName, existing.Range),
119 Subject: &jsonAttr.NameRange,
120 Context: jsonAttr.Range().Ptr(),
121 })
122 continue
123 }
124
125 content.Attributes[attrS.Name] = &hcl.Attribute{
126 Name: attrS.Name,
127 Expr: &expression{src: jsonAttr.Value},
128 Range: hcl.RangeBetween(jsonAttr.NameRange, jsonAttr.Value.Range()),
129 NameRange: jsonAttr.NameRange,
130 }
131 usedNames[attrName] = struct{}{}
132
133 } else if blockS, defined := blockSchemas[attrName]; defined {
134 bv := jsonAttr.Value
135 blockDiags := b.unpackBlock(bv, blockS.Type, &jsonAttr.NameRange, blockS.LabelNames, nil, nil, &content.Blocks)
136 diags = append(diags, blockDiags...)
137 usedNames[attrName] = struct{}{}
138 }
139
140 // We ignore anything that isn't defined because that's the
141 // PartialContent contract. The Content method will catch leftovers.
142 }
143
144 // Make sure we got all the required attributes.
145 for _, attrS := range schema.Attributes {
146 if !attrS.Required {
147 continue
148 }
149 if _, defined := content.Attributes[attrS.Name]; !defined {
150 diags = append(diags, &hcl.Diagnostic{
151 Severity: hcl.DiagError,
152 Summary: "Missing required attribute",
153 Detail: fmt.Sprintf("The attribute %q is required, but no definition was found.", attrS.Name),
154 Subject: b.MissingItemRange().Ptr(),
155 })
156 }
157 }
158
159 unusedBody := &body{
160 val: b.val,
161 hiddenAttrs: usedNames,
162 }
163
164 return content, unusedBody, diags
165}
166
167// JustAttributes for JSON bodies interprets all properties of the wrapped
168// JSON object as attributes and returns them.
169func (b *body) JustAttributes() (hcl.Attributes, hcl.Diagnostics) {
170 var diags hcl.Diagnostics
171 attrs := make(map[string]*hcl.Attribute)
172
173 obj, ok := b.val.(*objectVal)
174 if !ok {
175 diags = append(diags, &hcl.Diagnostic{
176 Severity: hcl.DiagError,
177 Summary: "Incorrect JSON value type",
178 Detail: "A JSON object is required here, defining the attributes for this block.",
179 Subject: b.val.StartRange().Ptr(),
180 })
181 return attrs, diags
182 }
183
184 for _, jsonAttr := range obj.Attrs {
185 name := jsonAttr.Name
186 if name == "//" {
187 // Ignore "//" keys in objects representing bodies, to allow
188 // their use as comments.
189 continue
190 }
191
192 if _, hidden := b.hiddenAttrs[name]; hidden {
193 continue
194 }
195
196 if existing, exists := attrs[name]; exists {
197 diags = append(diags, &hcl.Diagnostic{
198 Severity: hcl.DiagError,
199 Summary: "Duplicate attribute definition",
200 Detail: fmt.Sprintf("The attribute %q was already defined at %s.", name, existing.Range),
201 Subject: &jsonAttr.NameRange,
202 })
203 continue
204 }
205
206 attrs[name] = &hcl.Attribute{
207 Name: name,
208 Expr: &expression{src: jsonAttr.Value},
209 Range: hcl.RangeBetween(jsonAttr.NameRange, jsonAttr.Value.Range()),
210 NameRange: jsonAttr.NameRange,
211 }
212 }
213
214 // No diagnostics possible here, since the parser already took care of
215 // finding duplicates and every JSON value can be a valid attribute value.
216 return attrs, diags
217}
218
219func (b *body) MissingItemRange() hcl.Range {
220 switch tv := b.val.(type) {
221 case *objectVal:
222 return tv.CloseRange
223 case *arrayVal:
224 return tv.OpenRange
225 default:
226 // Should not happen in correct operation, but might show up if the
227 // input is invalid and we are producing partial results.
228 return tv.StartRange()
229 }
230}
231
232func (b *body) unpackBlock(v node, typeName string, typeRange *hcl.Range, labelsLeft []string, labelsUsed []string, labelRanges []hcl.Range, blocks *hcl.Blocks) (diags hcl.Diagnostics) {
233 if len(labelsLeft) > 0 {
234 labelName := labelsLeft[0]
235 jsonAttrs, attrDiags := b.collectDeepAttrs(v, &labelName)
236 diags = append(diags, attrDiags...)
237
238 if len(jsonAttrs) == 0 {
239 diags = diags.Append(&hcl.Diagnostic{
240 Severity: hcl.DiagError,
241 Summary: "Missing block label",
242 Detail: fmt.Sprintf("At least one object property is required, whose name represents the %s block's %s.", typeName, labelName),
243 Subject: v.StartRange().Ptr(),
244 })
245 return
246 }
247 labelsUsed := append(labelsUsed, "")
248 labelRanges := append(labelRanges, hcl.Range{})
249 for _, p := range jsonAttrs {
250 pk := p.Name
251 labelsUsed[len(labelsUsed)-1] = pk
252 labelRanges[len(labelRanges)-1] = p.NameRange
253 diags = append(diags, b.unpackBlock(p.Value, typeName, typeRange, labelsLeft[1:], labelsUsed, labelRanges, blocks)...)
254 }
255 return
256 }
257
258 // By the time we get here, we've peeled off all the labels and we're ready
259 // to deal with the block's actual content.
260
261 // need to copy the label slices because their underlying arrays will
262 // continue to be mutated after we return.
263 labels := make([]string, len(labelsUsed))
264 copy(labels, labelsUsed)
265 labelR := make([]hcl.Range, len(labelRanges))
266 copy(labelR, labelRanges)
267
268 switch tv := v.(type) {
269 case *objectVal:
270 // Single instance of the block
271 *blocks = append(*blocks, &hcl.Block{
272 Type: typeName,
273 Labels: labels,
274 Body: &body{
275 val: tv,
276 },
277
278 DefRange: tv.OpenRange,
279 TypeRange: *typeRange,
280 LabelRanges: labelR,
281 })
282 case *arrayVal:
283 // Multiple instances of the block
284 for _, av := range tv.Values {
285 *blocks = append(*blocks, &hcl.Block{
286 Type: typeName,
287 Labels: labels,
288 Body: &body{
289 val: av, // might be mistyped; we'll find out when content is requested for this body
290 },
291
292 DefRange: tv.OpenRange,
293 TypeRange: *typeRange,
294 LabelRanges: labelR,
295 })
296 }
297 default:
298 diags = diags.Append(&hcl.Diagnostic{
299 Severity: hcl.DiagError,
300 Summary: "Incorrect JSON value type",
301 Detail: fmt.Sprintf("Either a JSON object or a JSON array is required, representing the contents of one or more %q blocks.", typeName),
302 Subject: v.StartRange().Ptr(),
303 })
304 }
305 return
306}
307
308// collectDeepAttrs takes either a single object or an array of objects and
309// flattens it into a list of object attributes, collecting attributes from
310// all of the objects in a given array.
311//
312// Ordering is preserved, so a list of objects that each have one property
313// will result in those properties being returned in the same order as the
314// objects appeared in the array.
315//
316// This is appropriate for use only for objects representing bodies or labels
317// within a block.
318//
319// The labelName argument, if non-null, is used to tailor returned error
320// messages to refer to block labels rather than attributes and child blocks.
321// It has no other effect.
322func (b *body) collectDeepAttrs(v node, labelName *string) ([]*objectAttr, hcl.Diagnostics) {
323 var diags hcl.Diagnostics
324 var attrs []*objectAttr
325
326 switch tv := v.(type) {
327
328 case *objectVal:
329 attrs = append(attrs, tv.Attrs...)
330
331 case *arrayVal:
332 for _, ev := range tv.Values {
333 switch tev := ev.(type) {
334 case *objectVal:
335 attrs = append(attrs, tev.Attrs...)
336 default:
337 if labelName != nil {
338 diags = append(diags, &hcl.Diagnostic{
339 Severity: hcl.DiagError,
340 Summary: "Incorrect JSON value type",
341 Detail: fmt.Sprintf("A JSON object is required here, to specify %s labels for this block.", *labelName),
342 Subject: ev.StartRange().Ptr(),
343 })
344 } else {
345 diags = append(diags, &hcl.Diagnostic{
346 Severity: hcl.DiagError,
347 Summary: "Incorrect JSON value type",
348 Detail: "A JSON object is required here, to define attributes and child blocks.",
349 Subject: ev.StartRange().Ptr(),
350 })
351 }
352 }
353 }
354
355 default:
356 if labelName != nil {
357 diags = append(diags, &hcl.Diagnostic{
358 Severity: hcl.DiagError,
359 Summary: "Incorrect JSON value type",
360 Detail: fmt.Sprintf("Either a JSON object or JSON array of objects is required here, to specify %s labels for this block.", *labelName),
361 Subject: v.StartRange().Ptr(),
362 })
363 } else {
364 diags = append(diags, &hcl.Diagnostic{
365 Severity: hcl.DiagError,
366 Summary: "Incorrect JSON value type",
367 Detail: "Either a JSON object or JSON array of objects is required here, to define attributes and child blocks.",
368 Subject: v.StartRange().Ptr(),
369 })
370 }
371 }
372
373 return attrs, diags
374}
375
376func (e *expression) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
377 switch v := e.src.(type) {
378 case *stringVal:
379 if ctx != nil {
380 // Parse string contents as a HCL native language expression.
381 // We only do this if we have a context, so passing a nil context
382 // is how the caller specifies that interpolations are not allowed
383 // and that the string should just be returned verbatim.
384 templateSrc := v.Value
385 expr, diags := hclsyntax.ParseTemplate(
386 []byte(templateSrc),
387 v.SrcRange.Filename,
388
389 // This won't produce _exactly_ the right result, since
390 // the hclsyntax parser can't "see" any escapes we removed
391 // while parsing JSON, but it's better than nothing.
392 hcl.Pos{
393 Line: v.SrcRange.Start.Line,
394
395 // skip over the opening quote mark
396 Byte: v.SrcRange.Start.Byte + 1,
397 Column: v.SrcRange.Start.Column + 1,
398 },
399 )
400 if diags.HasErrors() {
401 return cty.DynamicVal, diags
402 }
403 val, evalDiags := expr.Value(ctx)
404 diags = append(diags, evalDiags...)
405 return val, diags
406 }
407
408 return cty.StringVal(v.Value), nil
409 case *numberVal:
410 return cty.NumberVal(v.Value), nil
411 case *booleanVal:
412 return cty.BoolVal(v.Value), nil
413 case *arrayVal:
414 vals := []cty.Value{}
415 for _, jsonVal := range v.Values {
416 val, _ := (&expression{src: jsonVal}).Value(ctx)
417 vals = append(vals, val)
418 }
419 return cty.TupleVal(vals), nil
420 case *objectVal:
421 var diags hcl.Diagnostics
422 attrs := map[string]cty.Value{}
423 attrRanges := map[string]hcl.Range{}
424 known := true
425 for _, jsonAttr := range v.Attrs {
426 // In this one context we allow keys to contain interpolation
427 // expressions too, assuming we're evaluating in interpolation
428 // mode. This achieves parity with the native syntax where
429 // object expressions can have dynamic keys, while block contents
430 // may not.
431 name, nameDiags := (&expression{src: &stringVal{
432 Value: jsonAttr.Name,
433 SrcRange: jsonAttr.NameRange,
434 }}).Value(ctx)
435 val, valDiags := (&expression{src: jsonAttr.Value}).Value(ctx)
436 diags = append(diags, nameDiags...)
437 diags = append(diags, valDiags...)
438
439 var err error
440 name, err = convert.Convert(name, cty.String)
441 if err != nil {
442 diags = append(diags, &hcl.Diagnostic{
443 Severity: hcl.DiagError,
444 Summary: "Invalid object key expression",
445 Detail: fmt.Sprintf("Cannot use this expression as an object key: %s.", err),
446 Subject: &jsonAttr.NameRange,
447 })
448 continue
449 }
450 if name.IsNull() {
451 diags = append(diags, &hcl.Diagnostic{
452 Severity: hcl.DiagError,
453 Summary: "Invalid object key expression",
454 Detail: "Cannot use null value as an object key.",
455 Subject: &jsonAttr.NameRange,
456 })
457 continue
458 }
459 if !name.IsKnown() {
460 // This is a bit of a weird case, since our usual rules require
461 // us to tolerate unknowns and just represent the result as
462 // best we can but if we don't know the key then we can't
463 // know the type of our object at all, and thus we must turn
464 // the whole thing into cty.DynamicVal. This is consistent with
465 // how this situation is handled in the native syntax.
466 // We'll keep iterating so we can collect other errors in
467 // subsequent attributes.
468 known = false
469 continue
470 }
471 nameStr := name.AsString()
472 if _, defined := attrs[nameStr]; defined {
473 diags = append(diags, &hcl.Diagnostic{
474 Severity: hcl.DiagError,
475 Summary: "Duplicate object attribute",
476 Detail: fmt.Sprintf("An attribute named %q was already defined at %s.", nameStr, attrRanges[nameStr]),
477 Subject: &jsonAttr.NameRange,
478 })
479 continue
480 }
481 attrs[nameStr] = val
482 attrRanges[nameStr] = jsonAttr.NameRange
483 }
484 if !known {
485 // We encountered an unknown key somewhere along the way, so
486 // we can't know what our type will eventually be.
487 return cty.DynamicVal, diags
488 }
489 return cty.ObjectVal(attrs), diags
490 default:
491 // Default to DynamicVal so that ASTs containing invalid nodes can
492 // still be partially-evaluated.
493 return cty.DynamicVal, nil
494 }
495}
496
497func (e *expression) Variables() []hcl.Traversal {
498 var vars []hcl.Traversal
499
500 switch v := e.src.(type) {
501 case *stringVal:
502 templateSrc := v.Value
503 expr, diags := hclsyntax.ParseTemplate(
504 []byte(templateSrc),
505 v.SrcRange.Filename,
506
507 // This won't produce _exactly_ the right result, since
508 // the hclsyntax parser can't "see" any escapes we removed
509 // while parsing JSON, but it's better than nothing.
510 hcl.Pos{
511 Line: v.SrcRange.Start.Line,
512
513 // skip over the opening quote mark
514 Byte: v.SrcRange.Start.Byte + 1,
515 Column: v.SrcRange.Start.Column + 1,
516 },
517 )
518 if diags.HasErrors() {
519 return vars
520 }
521 return expr.Variables()
522
523 case *arrayVal:
524 for _, jsonVal := range v.Values {
525 vars = append(vars, (&expression{src: jsonVal}).Variables()...)
526 }
527 case *objectVal:
528 for _, jsonAttr := range v.Attrs {
529 vars = append(vars, (&expression{src: jsonAttr.Value}).Variables()...)
530 }
531 }
532
533 return vars
534}
535
536func (e *expression) Range() hcl.Range {
537 return e.src.Range()
538}
539
540func (e *expression) StartRange() hcl.Range {
541 return e.src.StartRange()
542}
543
544// Implementation for hcl.AbsTraversalForExpr.
545func (e *expression) AsTraversal() hcl.Traversal {
546 // In JSON-based syntax a traversal is given as a string containing
547 // traversal syntax as defined by hclsyntax.ParseTraversalAbs.
548
549 switch v := e.src.(type) {
550 case *stringVal:
551 traversal, diags := hclsyntax.ParseTraversalAbs([]byte(v.Value), v.SrcRange.Filename, v.SrcRange.Start)
552 if diags.HasErrors() {
553 return nil
554 }
555 return traversal
556 default:
557 return nil
558 }
559}
560
561// Implementation for hcl.ExprCall.
562func (e *expression) ExprCall() *hcl.StaticCall {
563 // In JSON-based syntax a static call is given as a string containing
564 // an expression in the native syntax that also supports ExprCall.
565
566 switch v := e.src.(type) {
567 case *stringVal:
568 expr, diags := hclsyntax.ParseExpression([]byte(v.Value), v.SrcRange.Filename, v.SrcRange.Start)
569 if diags.HasErrors() {
570 return nil
571 }
572
573 call, diags := hcl.ExprCall(expr)
574 if diags.HasErrors() {
575 return nil
576 }
577
578 return call
579 default:
580 return nil
581 }
582}
583
584// Implementation for hcl.ExprList.
585func (e *expression) ExprList() []hcl.Expression {
586 switch v := e.src.(type) {
587 case *arrayVal:
588 ret := make([]hcl.Expression, len(v.Values))
589 for i, node := range v.Values {
590 ret[i] = &expression{src: node}
591 }
592 return ret
593 default:
594 return nil
595 }
596}
597
598// Implementation for hcl.ExprMap.
599func (e *expression) ExprMap() []hcl.KeyValuePair {
600 switch v := e.src.(type) {
601 case *objectVal:
602 ret := make([]hcl.KeyValuePair, len(v.Attrs))
603 for i, jsonAttr := range v.Attrs {
604 ret[i] = hcl.KeyValuePair{
605 Key: &expression{src: &stringVal{
606 Value: jsonAttr.Name,
607 SrcRange: jsonAttr.NameRange,
608 }},
609 Value: &expression{src: jsonAttr.Value},
610 }
611 }
612 return ret
613 default:
614 return nil
615 }
616}
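
A minimal sketch of how PartialContent hides consumed properties from the remaining body, which is the behavior the hiddenAttrs field above exists to support; the attribute names and file name are hypothetical:

```go
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/json"
)

func main() {
	src := []byte(`{"name": "web", "count": 2}`)

	file, diags := json.Parse(src, "partial.hcl.json")
	if diags.HasErrors() {
		log.Fatal(diags.Error())
	}

	// Consume only "name" in this pass; "count" is left for a later consumer.
	schema := &hcl.BodySchema{
		Attributes: []hcl.AttributeSchema{{Name: "name"}},
	}
	_, remain, diags := file.Body.PartialContent(schema)
	if diags.HasErrors() {
		log.Fatal(diags.Error())
	}

	// The remaining body no longer exposes "name"; only "count" is left.
	attrs, diags := remain.JustAttributes()
	if diags.HasErrors() {
		log.Fatal(diags.Error())
	}
	for name := range attrs {
		fmt.Println(name) // count
	}
}
```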
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/tokentype_string.go b/vendor/github.com/hashicorp/hcl2/hcl/json/tokentype_string.go
new file mode 100644
index 0000000..bbcce5b
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/json/tokentype_string.go
@@ -0,0 +1,29 @@
1// Code generated by "stringer -type tokenType scanner.go"; DO NOT EDIT.
2
3package json
4
5import "strconv"
6
7const _tokenType_name = "tokenInvalidtokenCommatokenColontokenEqualstokenKeywordtokenNumbertokenStringtokenBrackOtokenBrackCtokenBraceOtokenBraceCtokenEOF"
8
9var _tokenType_map = map[tokenType]string{
10 0: _tokenType_name[0:12],
11 44: _tokenType_name[12:22],
12 58: _tokenType_name[22:32],
13 61: _tokenType_name[32:43],
14 75: _tokenType_name[43:55],
15 78: _tokenType_name[55:66],
16 83: _tokenType_name[66:77],
17 91: _tokenType_name[77:88],
18 93: _tokenType_name[88:99],
19 123: _tokenType_name[99:110],
20 125: _tokenType_name[110:121],
21 9220: _tokenType_name[121:129],
22}
23
24func (i tokenType) String() string {
25 if str, ok := _tokenType_map[i]; ok {
26 return str
27 }
28 return "tokenType(" + strconv.FormatInt(int64(i), 10) + ")"
29}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/merged.go b/vendor/github.com/hashicorp/hcl2/hcl/merged.go
new file mode 100644
index 0000000..ca2b728
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/merged.go
@@ -0,0 +1,226 @@
1package hcl
2
3import (
4 "fmt"
5)
6
7// MergeFiles combines the given files to produce a single body that contains
8// configuration from all of the given files.
9//
10// The ordering of the given files decides the order in which contained
11// elements will be returned. If any top-level attributes are defined with
12// the same name across multiple files, a diagnostic will be produced from
13// the Content and PartialContent methods describing this error in a
14// user-friendly way.
15func MergeFiles(files []*File) Body {
16 var bodies []Body
17 for _, file := range files {
18 bodies = append(bodies, file.Body)
19 }
20 return MergeBodies(bodies)
21}
22
23// MergeBodies is like MergeFiles except it deals directly with bodies, rather
24// than with entire files.
25func MergeBodies(bodies []Body) Body {
26 if len(bodies) == 0 {
27 // Swap out for our singleton empty body, to reduce the number of
28 // empty slices we have hanging around.
29 return emptyBody
30 }
31
32 // If any of the given bodies are already merged bodies, we'll unpack
33 // to flatten to a single mergedBodies, since that's conceptually simpler.
34 // This also, as a side-effect, eliminates any empty bodies, since
35 // empties are merged bodies with no inner bodies.
36 var newLen int
37 var flatten bool
38 for _, body := range bodies {
39 if children, merged := body.(mergedBodies); merged {
40 newLen += len(children)
41 flatten = true
42 } else {
43 newLen++
44 }
45 }
46
47 if !flatten { // not just newLen == len, because we might have mergedBodies with single bodies inside
48 return mergedBodies(bodies)
49 }
50
51 if newLen == 0 {
52 // Don't allocate a new empty when we already have one
53 return emptyBody
54 }
55
56 new := make([]Body, 0, newLen)
57 for _, body := range bodies {
58 if children, merged := body.(mergedBodies); merged {
59 new = append(new, children...)
60 } else {
61 new = append(new, body)
62 }
63 }
64 return mergedBodies(new)
65}
66
67var emptyBody = mergedBodies([]Body{})
68
69// EmptyBody returns a body with no content. This body can be used as a
70// placeholder when a body is required but no body content is available.
71func EmptyBody() Body {
72 return emptyBody
73}
74
75type mergedBodies []Body
76
77// Content returns the content produced by applying the given schema to all
78// of the merged bodies and merging the result.
79//
80// Although required attributes _are_ supported, they should be used sparingly
81// with merged bodies since in this case there is no contextual information
82// with which to return good diagnostics. Applications working with merged
83// bodies may wish to mark all attributes as optional and then check for
84// required attributes afterwards, to produce better diagnostics.
85func (mb mergedBodies) Content(schema *BodySchema) (*BodyContent, Diagnostics) {
86 // the returned body will always be empty in this case, because mergedContent
87 // will only ever call Content on the child bodies.
88 content, _, diags := mb.mergedContent(schema, false)
89 return content, diags
90}
91
92func (mb mergedBodies) PartialContent(schema *BodySchema) (*BodyContent, Body, Diagnostics) {
93 return mb.mergedContent(schema, true)
94}
95
96func (mb mergedBodies) JustAttributes() (Attributes, Diagnostics) {
97 attrs := make(map[string]*Attribute)
98 var diags Diagnostics
99
100 for _, body := range mb {
101 thisAttrs, thisDiags := body.JustAttributes()
102
103 if len(thisDiags) != 0 {
104 diags = append(diags, thisDiags...)
105 }
106
107 if thisAttrs != nil {
108 for name, attr := range thisAttrs {
109 if existing := attrs[name]; existing != nil {
110 diags = diags.Append(&Diagnostic{
111 Severity: DiagError,
112 Summary: "Duplicate attribute",
113 Detail: fmt.Sprintf(
114 "Attribute %q was already assigned at %s",
115 name, existing.NameRange.String(),
116 ),
117 Subject: &attr.NameRange,
118 })
119 continue
120 }
121
122 attrs[name] = attr
123 }
124 }
125 }
126
127 return attrs, diags
128}
129
130func (mb mergedBodies) MissingItemRange() Range {
131 if len(mb) == 0 {
132 // Nothing useful to return here, so we'll return some garbage.
133 return Range{
134 Filename: "<empty>",
135 }
136 }
137
138 // arbitrarily use the first body's missing item range
139 return mb[0].MissingItemRange()
140}
141
142func (mb mergedBodies) mergedContent(schema *BodySchema, partial bool) (*BodyContent, Body, Diagnostics) {
143 // We need to produce a new schema with none of the attributes marked as
144 // required, since _any one_ of our bodies can contribute an attribute value.
145 // We'll separately check that all required attributes are present at
146 // the end.
147 mergedSchema := &BodySchema{
148 Blocks: schema.Blocks,
149 }
150 for _, attrS := range schema.Attributes {
151 mergedAttrS := attrS
152 mergedAttrS.Required = false
153 mergedSchema.Attributes = append(mergedSchema.Attributes, mergedAttrS)
154 }
155
156 var mergedLeftovers []Body
157 content := &BodyContent{
158 Attributes: map[string]*Attribute{},
159 }
160
161 var diags Diagnostics
162 for _, body := range mb {
163 var thisContent *BodyContent
164 var thisLeftovers Body
165 var thisDiags Diagnostics
166
167 if partial {
168 thisContent, thisLeftovers, thisDiags = body.PartialContent(mergedSchema)
169 } else {
170 thisContent, thisDiags = body.Content(mergedSchema)
171 }
172
173 if thisLeftovers != nil {
174 mergedLeftovers = append(mergedLeftovers, thisLeftovers)
175 }
176 if len(thisDiags) != 0 {
177 diags = append(diags, thisDiags...)
178 }
179
180 if thisContent.Attributes != nil {
181 for name, attr := range thisContent.Attributes {
182 if existing := content.Attributes[name]; existing != nil {
183 diags = diags.Append(&Diagnostic{
184 Severity: DiagError,
185 Summary: "Duplicate attribute",
186 Detail: fmt.Sprintf(
187 "Attribute %q was already assigned at %s",
188 name, existing.NameRange.String(),
189 ),
190 Subject: &attr.NameRange,
191 })
192 continue
193 }
194 content.Attributes[name] = attr
195 }
196 }
197
198 if len(thisContent.Blocks) != 0 {
199 content.Blocks = append(content.Blocks, thisContent.Blocks...)
200 }
201 }
202
203 // Finally, we check for required attributes.
204 for _, attrS := range schema.Attributes {
205 if !attrS.Required {
206 continue
207 }
208
209 if content.Attributes[attrS.Name] == nil {
210 // We don't have any context here to produce a good diagnostic,
211 // which is why we warn in the Content docstring to minimize the
212 // use of required attributes on merged bodies.
213 diags = diags.Append(&Diagnostic{
214 Severity: DiagError,
215 Summary: "Missing required attribute",
216 Detail: fmt.Sprintf(
217 "The attribute %q is required, but was not assigned.",
218 attrS.Name,
219 ),
220 })
221 }
222 }
223
224 leftoverBody := MergeBodies(mergedLeftovers)
225 return content, leftoverBody, diags
226}
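
A minimal sketch of the MergeFiles helper defined above; the file names and attributes are hypothetical:

```go
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/json"
)

func main() {
	srcA := []byte(`{"region": "eu-west-1"}`)
	srcB := []byte(`{"replicas": 3}`)

	fileA, diagsA := json.Parse(srcA, "a.hcl.json")
	fileB, diagsB := json.Parse(srcB, "b.hcl.json")
	if diagsA.HasErrors() || diagsB.HasErrors() {
		log.Fatal("parse failed")
	}

	// The merged body exposes content from both files, visited in order.
	merged := hcl.MergeFiles([]*hcl.File{fileA, fileB})

	attrs, diags := merged.JustAttributes()
	if diags.HasErrors() {
		log.Fatal(diags.Error())
	}
	fmt.Println(len(attrs)) // 2, i.e. "region" and "replicas"
}
```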
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/ops.go b/vendor/github.com/hashicorp/hcl2/hcl/ops.go
new file mode 100644
index 0000000..f4e30b0
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/ops.go
@@ -0,0 +1,147 @@
1package hcl
2
3import (
4 "fmt"
5
6 "github.com/zclconf/go-cty/cty"
7 "github.com/zclconf/go-cty/cty/convert"
8)
9
10// Index is a helper function that performs the same operation as the index
11// operator in the HCL expression language. That is, the result is the
12// same as it would be for collection[key] in a configuration expression.
13//
14// This is exported so that applications can perform indexing in a manner
15// consistent with how the language does it, including handling of null and
16// unknown values, etc.
17//
18// Diagnostics are produced if the given combination of values is not valid.
19// Therefore a pointer to a source range must be provided to use in diagnostics,
20// though nil can be provided if the calling application is going to
21// ignore the subject of the returned diagnostics anyway.
22func Index(collection, key cty.Value, srcRange *Range) (cty.Value, Diagnostics) {
23 if collection.IsNull() {
24 return cty.DynamicVal, Diagnostics{
25 {
26 Severity: DiagError,
27 Summary: "Attempt to index null value",
28 Detail: "This value is null, so it does not have any indices.",
29 Subject: srcRange,
30 },
31 }
32 }
33 if key.IsNull() {
34 return cty.DynamicVal, Diagnostics{
35 {
36 Severity: DiagError,
37 Summary: "Invalid index",
38 Detail: "Can't use a null value as an indexing key.",
39 Subject: srcRange,
40 },
41 }
42 }
43 ty := collection.Type()
44 kty := key.Type()
45 if kty == cty.DynamicPseudoType || ty == cty.DynamicPseudoType {
46 return cty.DynamicVal, nil
47 }
48
49 switch {
50
51 case ty.IsListType() || ty.IsTupleType() || ty.IsMapType():
52 var wantType cty.Type
53 switch {
54 case ty.IsListType() || ty.IsTupleType():
55 wantType = cty.Number
56 case ty.IsMapType():
57 wantType = cty.String
58 default:
59 // should never happen
60 panic("don't know what key type we want")
61 }
62
63 key, keyErr := convert.Convert(key, wantType)
64 if keyErr != nil {
65 return cty.DynamicVal, Diagnostics{
66 {
67 Severity: DiagError,
68 Summary: "Invalid index",
69 Detail: fmt.Sprintf(
70 "The given key does not identify an element in this collection value: %s.",
71 keyErr.Error(),
72 ),
73 Subject: srcRange,
74 },
75 }
76 }
77
78 has := collection.HasIndex(key)
79 if !has.IsKnown() {
80 if ty.IsTupleType() {
81 return cty.DynamicVal, nil
82 } else {
83 return cty.UnknownVal(ty.ElementType()), nil
84 }
85 }
86 if has.False() {
87 return cty.DynamicVal, Diagnostics{
88 {
89 Severity: DiagError,
90 Summary: "Invalid index",
91 Detail: "The given key does not identify an element in this collection value.",
92 Subject: srcRange,
93 },
94 }
95 }
96
97 return collection.Index(key), nil
98
99 case ty.IsObjectType():
100 key, keyErr := convert.Convert(key, cty.String)
101 if keyErr != nil {
102 return cty.DynamicVal, Diagnostics{
103 {
104 Severity: DiagError,
105 Summary: "Invalid index",
106 Detail: fmt.Sprintf(
107 "The given key does not identify an element in this collection value: %s.",
108 keyErr.Error(),
109 ),
110 Subject: srcRange,
111 },
112 }
113 }
114 if !collection.IsKnown() {
115 return cty.DynamicVal, nil
116 }
117 if !key.IsKnown() {
118 return cty.DynamicVal, nil
119 }
120
121 attrName := key.AsString()
122
123 if !ty.HasAttribute(attrName) {
124 return cty.DynamicVal, Diagnostics{
125 {
126 Severity: DiagError,
127 Summary: "Invalid index",
128 Detail: "The given key does not identify an element in this collection value.",
129 Subject: srcRange,
130 },
131 }
132 }
133
134 return collection.GetAttr(attrName), nil
135
136 default:
137 return cty.DynamicVal, Diagnostics{
138 {
139 Severity: DiagError,
140 Summary: "Invalid index",
141 Detail: "This value does not have any indices.",
142 Subject: srcRange,
143 },
144 }
145 }
146
147}
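
A minimal sketch of Index applied to a cty map value; the map contents are hypothetical, and passing a nil range simply leaves the diagnostics without a subject:

```go
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	collection := cty.MapVal(map[string]cty.Value{
		"alpha": cty.NumberIntVal(1),
		"beta":  cty.NumberIntVal(2),
	})

	// Equivalent to writing collection["beta"] in an HCL expression.
	val, diags := hcl.Index(collection, cty.StringVal("beta"), nil)
	if diags.HasErrors() {
		log.Fatal(diags.Error())
	}
	fmt.Println(val.AsBigFloat()) // 2
}
```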
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/pos.go b/vendor/github.com/hashicorp/hcl2/hcl/pos.go
new file mode 100644
index 0000000..1a4b329
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/pos.go
@@ -0,0 +1,262 @@
1package hcl
2
3import "fmt"
4
5// Pos represents a single position in a source file, by addressing the
6// start byte of a unicode character encoded in UTF-8.
7//
8// Pos is generally used only in the context of a Range, which then defines
9// which source file the position is within.
10type Pos struct {
11 // Line is the source code line where this position points. Lines are
12 // counted starting at 1 and incremented for each newline character
13 // encountered.
14 Line int
15
16 // Column is the source code column where this position points, in
17 // unicode characters, with counting starting at 1.
18 //
19 // Column counts characters as they appear visually, so for example a
20 // latin letter with a combining diacritic mark counts as one character.
21 // This is intended for rendering visual markers against source code in
22 // contexts where these diacritics would be rendered in a single character
23 // cell. Technically speaking, Column is counting grapheme clusters as
24 // used in unicode normalization.
25 Column int
26
27 // Byte is the byte offset into the file where the indicated character
28 // begins. This is a zero-based offset to the first byte of the first
29 // UTF-8 codepoint sequence in the character, and thus gives a position
30 // that can be resolved _without_ awareness of Unicode characters.
31 Byte int
32}
33
34// Range represents a span of characters between two positions in a source
35// file.
36//
37// This struct is usually used by value in types that represent AST nodes,
38// but by pointer in types that refer to the positions of other objects,
39// such as in diagnostics.
40type Range struct {
41 // Filename is the name of the file into which this range's positions
42 // point.
43 Filename string
44
45 // Start and End represent the bounds of this range. Start is inclusive
46 // and End is exclusive.
47 Start, End Pos
48}
49
50// RangeBetween returns a new range that spans from the beginning of the
51// start range to the end of the end range.
52//
53// The result is meaningless if the two ranges do not belong to the same
54// source file or if the end range appears before the start range.
55func RangeBetween(start, end Range) Range {
56 return Range{
57 Filename: start.Filename,
58 Start: start.Start,
59 End: end.End,
60 }
61}
62
63// RangeOver returns a new range that covers both of the given ranges and
64// possibly additional content between them if the two ranges do not overlap.
65//
66// If either range is empty then it is ignored. The result is empty if both
67// given ranges are empty.
68//
69// The result is meaningless if the two ranges to not belong to the same
70// source file.
71func RangeOver(a, b Range) Range {
72 if a.Empty() {
73 return b
74 }
75 if b.Empty() {
76 return a
77 }
78
79 var start, end Pos
80 if a.Start.Byte < b.Start.Byte {
81 start = a.Start
82 } else {
83 start = b.Start
84 }
85 if a.End.Byte > b.End.Byte {
86 end = a.End
87 } else {
88 end = b.End
89 }
90 return Range{
91 Filename: a.Filename,
92 Start: start,
93 End: end,
94 }
95}
96
97// ContainsOffset returns true if and only if the given byte offset is within
98// the receiving Range.
99func (r Range) ContainsOffset(offset int) bool {
100 return offset >= r.Start.Byte && offset < r.End.Byte
101}
102
103// Ptr returns a pointer to a copy of the receiver. This is a convenience for
104// using ranges in places where pointers are required, such as in Diagnostic,
105// when the range in question is returned from a method. Go would otherwise not
106// allow one to take the address of a function call.
107func (r Range) Ptr() *Range {
108 return &r
109}
110
111// String returns a compact string representation of the receiver.
112// Callers should generally prefer to present a range more visually,
113// e.g. via markers directly on the relevant portion of source code.
114func (r Range) String() string {
115 if r.Start.Line == r.End.Line {
116 return fmt.Sprintf(
117 "%s:%d,%d-%d",
118 r.Filename,
119 r.Start.Line, r.Start.Column,
120 r.End.Column,
121 )
122 } else {
123 return fmt.Sprintf(
124 "%s:%d,%d-%d,%d",
125 r.Filename,
126 r.Start.Line, r.Start.Column,
127 r.End.Line, r.End.Column,
128 )
129 }
130}
131
132func (r Range) Empty() bool {
133 return r.Start.Byte == r.End.Byte
134}
135
136// CanSliceBytes returns true if SliceBytes could return an accurate
137// sub-slice of the given slice.
138//
139// This effectively tests whether the start and end offsets of the range
140// are within the bounds of the slice, and thus whether SliceBytes can be
141// trusted to produce an accurate start and end position within that slice.
142func (r Range) CanSliceBytes(b []byte) bool {
143 switch {
144 case r.Start.Byte < 0 || r.Start.Byte > len(b):
145 return false
146 case r.End.Byte < 0 || r.End.Byte > len(b):
147 return false
148 case r.End.Byte < r.Start.Byte:
149 return false
150 default:
151 return true
152 }
153}
154
155// SliceBytes returns a sub-slice of the given slice that is covered by the
156// receiving range, assuming that the given slice is the source code of the
157// file indicated by r.Filename.
158//
159// If the receiver refers to any byte offsets that are outside of the slice
160// then the result is constrained to the overlapping portion only, to avoid
161// a panic. Use CanSliceBytes to determine if the result is guaranteed to
162// be an accurate span of the requested range.
163func (r Range) SliceBytes(b []byte) []byte {
164 start := r.Start.Byte
165 end := r.End.Byte
166 if start < 0 {
167 start = 0
168 } else if start > len(b) {
169 start = len(b)
170 }
171 if end < 0 {
172 end = 0
173 } else if end > len(b) {
174 end = len(b)
175 }
176 if end < start {
177 end = start
178 }
179 return b[start:end]
180}
181
182// Overlaps returns true if the receiver and the other given range share any
183// characters in common.
184func (r Range) Overlaps(other Range) bool {
185 switch {
186 case r.Filename != other.Filename:
187 // If the ranges are in different files then they can't possibly overlap
188 return false
189 case r.Empty() || other.Empty():
190 // Empty ranges can never overlap
191 return false
192 case r.ContainsOffset(other.Start.Byte) || r.ContainsOffset(other.End.Byte):
193 return true
194 case other.ContainsOffset(r.Start.Byte) || other.ContainsOffset(r.End.Byte):
195 return true
196 default:
197 return false
198 }
199}
200
201// Overlap finds a range that is either identical to or a sub-range of both
202// the receiver and the other given range. It returns an empty range
203// within the receiver if there is no overlap between the two ranges.
204//
205// A non-empty result is either identical to or a subset of the receiver.
206func (r Range) Overlap(other Range) Range {
207 if !r.Overlaps(other) {
208 // Start == End indicates an empty range
209 return Range{
210 Filename: r.Filename,
211 Start: r.Start,
212 End: r.Start,
213 }
214 }
215
216 var start, end Pos
217 if r.Start.Byte > other.Start.Byte {
218 start = r.Start
219 } else {
220 start = other.Start
221 }
222 if r.End.Byte < other.End.Byte {
223 end = r.End
224 } else {
225 end = other.End
226 }
227
228 return Range{
229 Filename: r.Filename,
230 Start: start,
231 End: end,
232 }
233}
234
235// PartitionAround finds the portion of the given range that overlaps with
236// the receiver and returns three ranges: the portion of the receiver that
237// precedes the overlap, the overlap itself, and then the portion of the
238// receiver that comes after the overlap.
239//
240// If the two ranges do not overlap then all three returned ranges are empty.
241//
242// If the given range aligns with or extends beyond either extent of the
243// receiver then the corresponding outer range will be empty.
244func (r Range) PartitionAround(other Range) (before, overlap, after Range) {
245 overlap = r.Overlap(other)
246 if overlap.Empty() {
247 return overlap, overlap, overlap
248 }
249
250 before = Range{
251 Filename: r.Filename,
252 Start: r.Start,
253 End: overlap.Start,
254 }
255 after = Range{
256 Filename: r.Filename,
257 Start: overlap.End,
258 End: r.End,
259 }
260
261 return before, overlap, after
262}
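
The helpers above compose straightforwardly. Below is a minimal sketch of a hypothetical caller, assuming only the vendored import path github.com/hashicorp/hcl2/hcl and the functions defined in this file; the filename and source text are made up for illustration.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
)

func main() {
	src := []byte(`foo = "bar"` + "\n")

	// Two ranges within the same notional file: the identifier and the
	// quoted string in the source line above.
	ident := hcl.Range{
		Filename: "example.tf",
		Start:    hcl.Pos{Line: 1, Column: 1, Byte: 0},
		End:      hcl.Pos{Line: 1, Column: 4, Byte: 3},
	}
	str := hcl.Range{
		Filename: "example.tf",
		Start:    hcl.Pos{Line: 1, Column: 7, Byte: 6},
		End:      hcl.Pos{Line: 1, Column: 12, Byte: 11},
	}

	// RangeOver covers both ranges plus the bytes between them.
	over := hcl.RangeOver(ident, str)
	fmt.Println(over.String())               // example.tf:1,1-12
	fmt.Println(over.ContainsOffset(5))      // true
	fmt.Printf("%s\n", over.SliceBytes(src)) // foo = "bar"
}
```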
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/pos_scanner.go b/vendor/github.com/hashicorp/hcl2/hcl/pos_scanner.go
new file mode 100644
index 0000000..7c8f2df
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/pos_scanner.go
@@ -0,0 +1,148 @@
1package hcl
2
3import (
4 "bufio"
5 "bytes"
6
7 "github.com/apparentlymart/go-textseg/textseg"
8)
9
10// RangeScanner is a helper that will scan over a buffer using a bufio.SplitFunc
11// and visit a source range for each token matched.
12//
13// For example, this can be used with bufio.ScanLines to find the source range
14// for each line in the file, skipping over the actual newline characters, which
15// may be useful when printing source code snippets as part of diagnostic
16// messages.
17//
18// The line and column information in the returned ranges is produced by
19// counting newline characters and grapheme clusters respectively, which
20// mimics the behavior we expect from a parser when producing ranges.
21type RangeScanner struct {
22 filename string
23 b []byte
24 cb bufio.SplitFunc
25
26 pos Pos // position of next byte to process in b
27 cur Range // latest range
28 tok []byte // slice of b that is covered by cur
29 err error // error from last scan, if any
30}
31
32// Create a new RangeScanner for the given buffer, producing ranges for the
33// given filename.
34//
35// Since ranges have grapheme-cluster granularity rather than byte granularity,
36// the scanner will produce incorrect results if the given SplitFunc creates
37// tokens between grapheme cluster boundaries. In particular, it is incorrect
38// to use RangeScanner with bufio.ScanRunes because it will produce tokens
39// around individual UTF-8 sequences, which will split any multi-sequence
40// grapheme clusters.
41func NewRangeScanner(b []byte, filename string, cb bufio.SplitFunc) *RangeScanner {
42 return &RangeScanner{
43 filename: filename,
44 b: b,
45 cb: cb,
46 pos: Pos{
47 Byte: 0,
48 Line: 1,
49 Column: 1,
50 },
51 }
52}
53
54func (sc *RangeScanner) Scan() bool {
55 if sc.pos.Byte >= len(sc.b) || sc.err != nil {
56 // All done
57 return false
58 }
59
60 // Since we're operating on an in-memory buffer, we always pass the whole
61 // remainder of the buffer to our SplitFunc and set isEOF to let it know
62 // that it has the whole thing.
63 advance, token, err := sc.cb(sc.b[sc.pos.Byte:], true)
64
65 // Since we are setting isEOF to true this should never happen, but
66 // if it does we will just abort and assume the SplitFunc is misbehaving.
67 if advance == 0 && token == nil && err == nil {
68 return false
69 }
70
71 if err != nil {
72 sc.err = err
73 sc.cur = Range{
74 Filename: sc.filename,
75 Start: sc.pos,
76 End: sc.pos,
77 }
78 sc.tok = nil
79 return false
80 }
81
82 sc.tok = token
83 start := sc.pos
84 end := sc.pos
85 new := sc.pos
86
87 // adv is similar to token but it also includes any subsequent characters
88 // we're being asked to skip over by the SplitFunc.
89 // adv is a slice covering any additional bytes we are skipping over, based
90 // on what the SplitFunc told us to do with advance.
91 adv := sc.b[sc.pos.Byte : sc.pos.Byte+advance]
92
93 // We now need to scan over our token to count the grapheme clusters
94 // so we can correctly advance Column, and count the newlines so we
95 // can correctly advance Line.
96 advR := bytes.NewReader(adv)
97 gsc := bufio.NewScanner(advR)
98 advanced := 0
99 gsc.Split(textseg.ScanGraphemeClusters)
100 for gsc.Scan() {
101 gr := gsc.Bytes()
102 new.Byte += len(gr)
103 new.Column++
104
105 // We rely here on the fact that \r\n is considered a grapheme cluster
106 // and so we don't need to worry about miscounting additional lines
107 // on files with Windows-style line endings.
108 if len(gr) != 0 && (gr[0] == '\r' || gr[0] == '\n') {
109 new.Column = 1
110 new.Line++
111 }
112
113 if advanced < len(token) {
114 // If we've not yet found the end of our token then we'll
115 // also push our "end" marker along.
116 // (if advance > len(token) then we'll stop moving "end" early
117 // so that the caller only sees the range covered by token.)
118 end = new
119 }
120 advanced += len(gr)
121 }
122
123 sc.cur = Range{
124 Filename: sc.filename,
125 Start: start,
126 End: end,
127 }
128 sc.pos = new
129 return true
130}
131
132// Range returns a range that covers the latest token obtained after a call
133// to Scan returns true.
134func (sc *RangeScanner) Range() Range {
135 return sc.cur
136}
137
138// Bytes returns the slice of the input buffer that is covered by the range
139// that would be returned by Range.
140func (sc *RangeScanner) Bytes() []byte {
141 return sc.tok
142}
143
144// Err can be called after Scan returns false to determine if the latest read
145// resulted in an error, and obtain that error if so.
146func (sc *RangeScanner) Err() error {
147 return sc.err
148}
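
As the doc comment above suggests, RangeScanner pairs naturally with bufio.ScanLines. The following is a rough sketch of such a caller; the buffer contents and filename are illustrative.

```go
package main

import (
	"bufio"
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
)

func main() {
	src := []byte("first = 1\nsecond = 2\n")

	// Visit the range of each line, excluding the newline characters
	// themselves, as described in the doc comment above.
	sc := hcl.NewRangeScanner(src, "example.tf", bufio.ScanLines)
	for sc.Scan() {
		fmt.Printf("%s: %q\n", sc.Range(), sc.Bytes())
	}
	if err := sc.Err(); err != nil {
		fmt.Println("scan error:", err)
	}
}
```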
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/schema.go b/vendor/github.com/hashicorp/hcl2/hcl/schema.go
new file mode 100644
index 0000000..891257a
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/schema.go
@@ -0,0 +1,21 @@
1package hcl
2
3// BlockHeaderSchema represents the shape of a block header, and is
4// used for matching blocks within bodies.
5type BlockHeaderSchema struct {
6 Type string
7 LabelNames []string
8}
9
10// AttributeSchema represents the requirements for an attribute, and is used
11// for matching attributes within bodies.
12type AttributeSchema struct {
13 Name string
14 Required bool
15}
16
17// BodySchema represents the desired shallow structure of a body.
18type BodySchema struct {
19 Attributes []AttributeSchema
20 Blocks []BlockHeaderSchema
21}
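
Since these schema types are plain values, constructing one is just a matter of filling in the fields. A small illustrative sketch follows; the attribute and block names are hypothetical.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
)

func main() {
	// A schema for a body expected to hold one required attribute and any
	// number of blocks of a single, labelled type.
	schema := &hcl.BodySchema{
		Attributes: []hcl.AttributeSchema{
			{Name: "name", Required: true},
		},
		Blocks: []hcl.BlockHeaderSchema{
			{Type: "rule", LabelNames: []string{"id"}},
		},
	}
	fmt.Printf("%d attribute(s), %d block type(s)\n",
		len(schema.Attributes), len(schema.Blocks))
}
```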
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/spec.md b/vendor/github.com/hashicorp/hcl2/hcl/spec.md
new file mode 100644
index 0000000..58257bf
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/spec.md
@@ -0,0 +1,691 @@
1# HCL Syntax-Agnostic Information Model
2
3This is the specification for the general information model (abstract types and
4semantics) for HCL. HCL is a system for defining configuration languages for
5applications. The HCL information model is designed to support multiple
6concrete syntaxes for configuration, each with a mapping to the model defined
7in this specification.
8
9The two primary syntaxes intended for use in conjunction with this model are
10[the HCL native syntax](./hclsyntax/spec.md) and [the JSON syntax](./json/spec.md).
11In principle other syntaxes are possible as long as either their language model
12is sufficiently rich to express the concepts described in this specification
13or the language targets a well-defined subset of the specification.
14
15## Structural Elements
16
17The primary structural element is the _body_, which is a container representing
18a set of zero or more _attributes_ and a set of zero or more _blocks_.
19
20A _configuration file_ is the top-level object, and will usually be produced
21by reading a file from disk and parsing it as a particular syntax. A
22configuration file has its own _body_, representing the top-level attributes
23and blocks.
24
25An _attribute_ is a name and value pair associated with a body. Attribute names
26are unique within a given body. Attribute values are provided as _expressions_,
27which are discussed in detail in a later section.
28
29A _block_ is a nested structure that has a _type name_, zero or more string
30_labels_ (e.g. identifiers), and a nested body.
31
32Together the structural elements create a hierarchical data structure, with
33attributes intended to represent the direct properties of a particular object
34in the calling application, and blocks intended to represent child objects
35of a particular object.
36
37## Body Content
38
39To support the expression of the HCL concepts in languages whose information
40model is a subset of HCL's, such as JSON, a _body_ is an opaque container
41whose content can only be accessed by providing information on the expected
42structure of the content.
43
44The specification for each syntax must describe how its physical constructs
45are mapped on to body content given a schema. For syntaxes that have
46first-class syntax distinguishing attributes and bodies this can be relatively
47straightforward, while more detailed mapping rules may be required in syntaxes
48where the representation of attributes vs. blocks is ambiguous.
49
50### Schema-driven Processing
51
52Schema-driven processing is the primary way to access body content.
53A _body schema_ is a description of what is expected within a particular body,
54which can then be used to extract the _body content_, which then provides
55access to the specific attributes and blocks requested.
56
57A _body schema_ consists of a list of _attribute schemata_ and
58_block header schemata_:
59
60* An _attribute schema_ provides the name of an attribute and whether its
61 presence is required.
62
63* A _block header schema_ provides a block type name and the semantic names
64 assigned to each of the labels of that block type, if any.
65
66Within a schema, it is an error to request the same attribute name twice or
67to request a block type whose name is also an attribute name. While this can
68in principle be supported in some syntaxes, in other syntaxes the attribute
69and block namespaces are combined and so an attribute cannot coexist with
70a block whose type name is identical to the attribute name.
71
72The result of applying a body schema to a body is _body content_, which
73consists of an _attribute map_ and a _block sequence_:
74
75* The _attribute map_ is a map data structure whose keys are attribute names
76 and whose values are _expressions_ that represent the corresponding attribute
77 values.
78
79* The _block sequence_ is an ordered sequence of blocks, with each specifying
80 a block _type name_, the sequence of _labels_ specified for the block,
81 and the body object (not body _content_) representing the block's own body.
82
83After obtaining _body content_, the calling application may continue processing
84by evaluating attribute expressions and/or recursively applying further
85schema-driven processing to the child block bodies.
86
87**Note:** The _body schema_ is intentionally minimal, to reduce the set of
88mapping rules that must be defined for each syntax. Higher-level utility
89libraries may be provided to assist in the construction of a schema and
90perform additional processing, such as automatically evaluating attribute
91expressions and assigning their result values into a data structure, or
92recursively applying a schema to child blocks. Such utilities are not part of
93this core specification and will vary depending on the capabilities and idiom
94of the implementation language.
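
For concreteness, in the Go implementation vendored in this change, schema-driven processing corresponds to the `Content` method of `Body` (see `structure.go` later in this change). A rough sketch of a caller, assuming a `hcl.Body` already produced by one of the concrete-syntax parsers and using illustrative attribute and block names:

```go
package example

import "github.com/hashicorp/hcl2/hcl"

// decodeService extracts one required attribute and the labelled "service"
// blocks from an already-parsed body. The schema contents are illustrative,
// not part of any real application.
func decodeService(body hcl.Body) (hcl.Blocks, hcl.Diagnostics) {
	schema := &hcl.BodySchema{
		Attributes: []hcl.AttributeSchema{
			{Name: "region", Required: true},
		},
		Blocks: []hcl.BlockHeaderSchema{
			{Type: "service", LabelNames: []string{"name"}},
		},
	}

	content, diags := body.Content(schema)
	if diags.HasErrors() {
		return nil, diags
	}

	// content.Attributes["region"] holds an Expression to be evaluated
	// later; content.Blocks holds the blocks matched by the schema.
	return content.Blocks.OfType("service"), diags
}
```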
95
96### _Dynamic Attributes_ Processing
97
98The _schema-driven_ processing model is useful when the expected structure
99of a body is known a priori by the calling application. Some blocks are
100instead more free-form, such as a user-provided set of arbitrary key/value
101pairs.
102
103The alternative _dynamic attributes_ processing mode allows for this more
104ad-hoc approach. Processing in this mode behaves as if a schema had been
105constructed without any _block header schemata_ and with an attribute
106schema for each distinct key provided within the physical representation
107of the body.
108
109The means by which _distinct keys_ are identified is dependent on the
110physical syntax; this processing mode assumes that the syntax has a way
111to enumerate keys provided by the author and identify expressions that
112correspond with those keys, but does not define the means by which this is
113done.
114
115The result of _dynamic attributes_ processing is an _attribute map_ as
116defined in the previous section. No _block sequence_ is produced in this
117processing mode.
118
119### Partial Processing of Body Content
120
121Under _schema-driven processing_, by default the given schema is assumed
122to be exhaustive, such that any attribute or block not matched by schema
123elements is considered an error. This allows feedback about unsupported
124attributes and blocks (such as typos) to be provided.
125
126An alternative is _partial processing_, where any additional elements within
127the body are not considered an error.
128
129Under partial processing, the result is both body content as described
130above _and_ a new body that represents any body elements that remain after
131the schema has been processed.
132
133Specifically:
134
135* Any attribute whose name is specified in the schema is returned in body
136 content and elided from the new body.
137
138* Any block whose type is specified in the schema is returned in body content
139 and elided from the new body.
140
141* Any attribute or block _not_ meeting the above conditions is placed into
142 the new body, unmodified.
143
144The new body can then be recursively processed using any of the body
145processing models. This facility allows different subsets of body content
146to be processed by different parts of the calling application.
147
148Processing a body in two steps — first partial processing of a source body,
149then exhaustive processing of the returned body — is equivalent to single-step
150processing with a schema that is the union of the schemata used
151across the two steps.
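
In the vendored Go API this mode corresponds to `PartialContent`, which returns both the matched content and a new `Body` holding everything else. A hedged sketch, with a hypothetical block type name:

```go
package example

import "github.com/hashicorp/hcl2/hcl"

// splitMeta pulls a hypothetical "meta" block type out of a body and hands
// back the remaining body for another component to process exhaustively.
func splitMeta(body hcl.Body) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) {
	schema := &hcl.BodySchema{
		Blocks: []hcl.BlockHeaderSchema{
			{Type: "meta"},
		},
	}
	// Anything not named in the schema is returned in rest rather than
	// being reported as an error.
	content, rest, diags := body.PartialContent(schema)
	return content, rest, diags
}
```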
152
153## Expressions
154
155Attribute values are represented by _expressions_. Depending on the concrete
156syntax in use, an expression may just be a literal value or it may describe
157a computation in terms of literal values, variables, and functions.
158
159Each syntax defines its own representation of expressions. For syntaxes based
160in languages that do not have any non-literal expression syntax, it is
161recommended to embed the template language from
162[the native syntax](./hclsyntax/spec.md) e.g. as a post-processing step on
163string literals.
164
165### Expression Evaluation
166
167In order to obtain a concrete value, each expression must be _evaluated_.
168Evaluation is performed in terms of an evaluation context, which
169consists of the following:
170
171* An _evaluation mode_, which is defined below.
172* A _variable scope_, which provides a set of named variables for use in
173 expressions.
174* A _function table_, which provides a set of named functions for use in
175 expressions.
176
177The _evaluation mode_ allows for two different interpretations of an
178expression:
179
180* In _literal-only mode_, variables and functions are not available and it
181 is assumed that the calling application's intent is to treat the attribute
182 value as a literal.
183
184* In _full expression mode_, variables and functions are defined and it is
185 assumed that the calling application wishes to provide a full expression
186 language for definition of the attribute value.
187
188The actual behavior of these two modes depends on the syntax in use. For
189languages with first-class expression syntax, these two modes may be considered
190equivalent, with _literal-only mode_ simply not defining any variables or
191functions. For languages that embed arbitrary expressions via string templates,
192_literal-only mode_ may disable such processing, allowing literal strings to
193pass through without interpretation as templates.
194
195Since literal-only mode does not support variables and functions, it is an
196error for the calling application to enable this mode and yet provide a
197variable scope and/or function table.
198
199## Values and Value Types
200
201The result of expression evaluation is a _value_. Each value has a _type_,
202which is dynamically determined during evaluation. The _variable scope_ in
203the evaluation context is a map from variable name to value, using the same
204definition of value.
205
206The type system for HCL values is intended to be at a level of abstraction
207suitable for configuration of various applications. A well-defined,
208implementation-language-agnostic type system is defined to allow for
209consistent processing of configuration across many implementation languages.
210Concrete implementations may provide additional functionality to lower
211HCL values and types to corresponding native language types, which may then
212impose additional constraints on the values outside of the scope of this
213specification.
214
215Two values are _equal_ if and only if they have identical types and their
216values are equal according to the rules of their shared type.
217
218### Primitive Types
219
220The primitive types are _string_, _bool_, and _number_.
221
222A _string_ is a sequence of unicode characters. Two strings are equal if
223NFC normalization ([UAX#15](http://unicode.org/reports/tr15/))
224of each string produces two identical sequences of characters.
225NFC normalization ensures that, for example, a precomposed combination of a
226latin letter and a diacritic compares equal with the letter followed by
227a combining diacritic.
228
229The _bool_ type has only two non-null values: _true_ and _false_. Two bool
230values are equal if and only if they are either both true or both false.
231
232A _number_ is an arbitrary-precision floating point value. An implementation
233_must_ make the full-precision values available to the calling application
234for interpretation into any suitable number representation. An implementation
235may in practice implement numbers with limited precision so long as the
236following constraints are met:
237
238* Integers are represented with at least 256 bits.
239* Non-integer numbers are represented as floating point values with a
240 mantissa of at least 256 bits and a signed binary exponent of at least
241 16 bits.
242* An error is produced if an integer value given in source cannot be
243 represented precisely.
244* An error is produced if a non-integer value cannot be represented due to
245 overflow.
246* A non-integer number is rounded to the nearest possible value when a
247 value is of too high a precision to be represented.
248
249The _number_ type also requires representation of both positive and negative
250infinity. A "not a number" (NaN) value is _not_ provided nor used.
251
252Two number values are equal if they are numerically equal to the precision
253associated with the number. Positive infinity and negative infinity are
254equal to themselves but not to each other. Positive infinity is greater than
255any other number value, and negative infinity is less than any other number
256value.
257
258Some syntaxes may be unable to represent numeric literals of arbitrary
259precision. This must be defined in the syntax specification as part of its
260description of mapping numeric literals to HCL values.
261
262### Structural Types
263
264_Structural types_ are types that are constructed by combining other types.
265Each distinct combination of other types is itself a distinct type. There
266are two structural type _kinds_:
267
268* _Object types_ are constructed of a set of named attributes, each of which
269 has a type. Attribute names are always strings. (_Object_ attributes are a
270 distinct idea from _body_ attributes, though calling applications
271 may choose to blur the distinction by use of common naming schemes.)
272* _Tuple types_ are constructed of a sequence of elements, each of which
273 has a type.
274
275Values of structural types are compared for equality in terms of their
276attributes or elements. A structural type value is equal to another if and
277only if all of the corresponding attributes or elements are equal.
278
279Two structural types are identical if they are of the same kind and
280have attributes or elements with identical types.
281
282### Collection Types
283
284_Collection types_ are types that combine together an arbitrary number of
285values of some other single type. There are three collection type _kinds_:
286
287* _List types_ represent ordered sequences of values of their element type.
288* _Map types_ represent values of their element type accessed via string keys.
289* _Set types_ represent unordered sets of distinct values of their element type.
290
291For each of these kinds and each distinct element type there is a distinct
292collection type. For example, "list of string" is a distinct type from
293"set of string", and "list of number" is a distinct type from "list of string".
294
295Values of collection types are compared for equality in terms of their
296elements. A collection type value is equal to another if and only if both
297have the same number of elements and their corresponding elements are equal.
298
299Two collection types are identical if they are of the same kind and have
300the same element type.
301
302### Null values
303
304Each type has a null value. The null value of a type represents the absence
305of a value, but with type information retained to allow for type checking.
306
307Null values are used primarily to represent the conditional absence of a
308body attribute. In a syntax with a conditional operator, one of the result
309values of that conditional may be null to indicate that the attribute should be
310considered not present in that case.
311
312Calling applications _should_ consider an attribute with a null value as
313equivalent to the value not being present at all.
314
315A null value of a particular type is equal to itself.
316
317### Unknown Values and the Dynamic Pseudo-type
318
319An _unknown value_ is a placeholder for a value that is not yet known.
320Operations on unknown values themselves return unknown values that have a
321type appropriate to the operation. For example, adding together two unknown
322numbers yields an unknown number, while comparing two unknown values of any
323type for equality yields an unknown bool.
324
325Each type has a distinct unknown value. For example, an unknown _number_ is
326a distinct value from an unknown _string_.
327
328_The dynamic pseudo-type_ is a placeholder for a type that is not yet known.
329The only values of this type are its null value and its unknown value. It is
330referred to as a _pseudo-type_ because it should not be considered a type in
331its own right, but rather as a placeholder for a type yet to be established.
332The unknown value of the dynamic pseudo-type is referred to as _the dynamic
333value_.
334
335Operations on values of the dynamic pseudo-type behave as if it is a value
336of the expected type, optimistically assuming that once the value and type
337are known they will be valid for the operation. For example, adding together
338a number and the dynamic value produces an unknown number.
339
340Unknown values and the dynamic pseudo-type can be used as a mechanism for
341partial type checking and semantic checking: by evaluating an expression with
342all variables set to an unknown value, the expression can be evaluated to
343produce an unknown value of a given type, or produce an error if any operation
344is provably invalid with only type information.
345
346Unknown values and the dynamic pseudo-type must never be returned from
347operations unless at least one operand is unknown or dynamic. Calling
348applications are guaranteed that unless the global scope includes unknown
349values, or the function table includes functions that return unknown values,
350no expression will evaluate to an unknown value. The calling application is
351thus in total control over the use and meaning of unknown values.
352
353The dynamic pseudo-type is identical only to itself.
354
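In the Go implementation, values and types come from the go-cty library that this package already imports. As a rough illustration of the propagation rules described above (assuming the usual cty API):

```go
package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
)

func main() {
	known := cty.NumberIntVal(2)
	unknown := cty.UnknownVal(cty.Number)

	// Adding a known number to an unknown number yields an unknown number.
	sum := known.Add(unknown)
	fmt.Println(sum.IsKnown(), sum.Type().Equals(cty.Number)) // false true

	// cty.DynamicVal is the unknown value of the dynamic pseudo-type; it is
	// used throughout this package as a placeholder result.
	fmt.Println(cty.DynamicVal.IsKnown()) // false
}
```
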
355### Capsule Types
356
357A _capsule type_ is a custom type defined by the calling application. A value
358of a capsule type is considered opaque to HCL, but may be accepted
359by functions provided by the calling application.
360
361A particular capsule type is identical only to itself. The equality of two
362values of the same capsule type is defined by the calling application. No
363other operations are supported for values of capsule types.
364
365Support for capsule types in an HCL implementation is optional. Capsule types
366are intended to allow calling applications to pass through values that are
367not part of the standard type system. For example, an application that
368deals with raw binary data may define a capsule type representing a byte
369array, and provide functions that produce or operate on byte arrays.
370
371### Type Specifications
372
373In certain situations it is necessary to define expectations about the expected
374type of a value. Whereas two _types_ have a commutative _identity_ relationship,
375a type has a non-commutative _matches_ relationship with a _type specification_.
376A type specification is, in practice, just a different interpretation of a
377type such that:
378
379* Any type _matches_ any type that it is identical to.
380
381* Any type _matches_ the dynamic pseudo-type.
382
383For example, given a type specification "list of dynamic pseudo-type", the
384concrete types "list of string" and "list of map" match, but the
385type "set of string" does not.
386
387## Functions and Function Calls
388
389The evaluation context used to evaluate an expression includes a function
390table, which represents an application-defined set of named functions
391available for use in expressions.
392
393Each syntax defines whether function calls are supported and how they are
394physically represented in source code, but the semantics of function calls are
395defined here to ensure consistent results across syntaxes and to allow
396applications to provide functions that are interoperable with all syntaxes.
397
398A _function_ is defined from the following elements:
399
400* Zero or more _positional parameters_, each with a name used for documentation,
401 a type specification for expected argument values, and a flag for whether
402 each of null values, unknown values, and values of the dynamic pseudo-type
403 are accepted.
404
405* Zero or one _variadic parameters_, with the same structure as the _positional_
406 parameters, which if present collects any additional arguments provided at
407 the function call site.
408
409* A _result type definition_, which specifies the value type returned for each
410 valid sequence of argument values.
411
412* A _result value definition_, which specifies the value returned for each
413 valid sequence of argument values.
414
415A _function call_, regardless of source syntax, consists of a sequence of
416argument values. The argument values are each mapped to a corresponding
417parameter as follows:
418
419* For each of the function's positional parameters in sequence, take the next
420 argument. If there are no more arguments, the call is erroneous.
421
422* If the function has a variadic parameter, take all remaining arguments that
423 were not yet assigned to a positional parameter and collect them into
424 a sequence of variadic arguments that each correspond to the variadic
425 parameter.
426
427* If the function has _no_ variadic parameter, it is an error if any arguments
428 remain after taking one argument for each positional parameter.
429
430After mapping each argument to a parameter, semantic checking proceeds
431for each argument:
432
433* If the argument value corresponding to a parameter does not match the
434 parameter's type specification, the call is erroneous.
435
436* If the argument value corresponding to a parameter is null and the parameter
437 is not specified as accepting nulls, the call is erroneous.
438
439* If the argument value corresponding to a parameter is the dynamic value
440 and the parameter is not specified as accepting values of the dynamic
441 pseudo-type, the call is valid but its _result type_ is forced to be the
442 dynamic pseudo-type.
443
444* If neither of the above conditions holds for any argument, the call is
445 valid and the function's value type definition is used to determine the
446 call's _result type_. A function _may_ vary its result type depending on
447 the argument _values_ as well as the argument _types_; for example, a
448 function that decodes a JSON value will return a different result type
449 depending on the data structure described by the given JSON source code.
450
451If semantic checking succeeds without error, the call is _executed_:
452
453* For each argument, if its value is unknown and its corresponding parameter
454 is not specified as accepting unknowns, the _result value_ is forced to be an
455 unknown value of the result type.
456
457* If the previous condition does not apply, the function's result value
458 definition is used to determine the call's _result value_.
459
460The result of a function call expression is either an error, if one of the
461erroneous conditions above applies, or the _result value_.
462
463## Type Conversions and Unification
464
465Values given in configuration may not always match the expectations of the
466operations applied to them or to the calling application. In such situations,
467automatic type conversion is attempted as a convenience to the user.
468
469Along with conversions to a _specified_ type, it is sometimes necessary to
470ensure that a selection of values are all of the _same_ type, without any
471constraint on which type that is. This is the process of _type unification_,
472which attempts to find the most general type that all of the given types can
473be converted to.
474
475Both type conversions and unification are defined in the syntax-agnostic
476model to ensure consistency of behavior between syntaxes.
477
478Type conversions are broadly characterized into two categories: _safe_ and
479_unsafe_. A conversion is "safe" if any distinct value of the source type
480has a corresponding distinct value in the target type. A conversion is
481"unsafe" if either the target type values are _not_ distinct (information
482may be lost in conversion) or if some values of the source type do not have
483any corresponding value in the target type. An unsafe conversion may result
484in an error.
485
486A given type can always be converted to itself, which is a no-op.
487
488### Conversion of Null Values
489
490All null values are safely convertible to a null value of any other type,
491regardless of other type-specific rules specified in the sections below.
492
493### Conversion to and from the Dynamic Pseudo-type
494
495Conversion _from_ the dynamic pseudo-type _to_ any other type always succeeds,
496producing an unknown value of the target type.
497
498Conversion of any value _to_ the dynamic pseudo-type is a no-op. The result
499is the input value, verbatim. This is the only situation where the conversion
500result value is not of the given target type.
501
502### Primitive Type Conversions
503
504Bidirectional conversions are available between the string and number types,
505and between the string and boolean types.
506
507The bool value true corresponds to the string containing the characters "true",
508while the bool value false corresponds to the string containing the characters
509"false". Conversion from bool to string is safe, while the converse is
510unsafe. The strings "1" and "0" are alternative string representations
511of true and false respectively. It is an error to convert a string other than
512the four in this paragraph to type bool.
513
514A number value is converted to string by translating its integer portion
515into a sequence of decimal digits (`0` through `9`), and then if it has a
516non-zero fractional part, a period `.` followed by a sequence of decimal
517digits representing its fractional part. No exponent portion is included.
518The number is converted at its full precision. Conversion from number to
519string is safe.
520
521A string is converted to a number value by reversing the above mapping.
522No exponent portion is allowed. Conversion from string to number is unsafe.
523It is an error to convert a string that does not comply with the expected
524syntax to type number.
525
526No direct conversion is available between the bool and number types.
527
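In the Go implementation these conversions are provided by the go-cty `convert` package, which is already used earlier in this change. A small sketch of the string/number and string/bool rules described above:

```go
package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/convert"
)

func main() {
	// Number to string: a safe conversion, per the rules above.
	s, err := convert.Convert(cty.NumberIntVal(5), cty.String)
	fmt.Println(s.AsString(), err) // 5 <nil>

	// String to number: unsafe, since malformed strings are an error.
	n, err := convert.Convert(cty.StringVal("12"), cty.Number)
	fmt.Println(n.AsBigFloat(), err) // 12 <nil>

	// String to bool: only the spellings described above are accepted.
	b, err := convert.Convert(cty.StringVal("true"), cty.Bool)
	fmt.Println(b.True(), err) // true <nil>
}
```
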
528### Collection and Structural Type Conversions
529
530Conversion from set types to list types is _safe_, as long as their
531element types are safely convertible. If the element types are _unsafely_
532convertible, then the collection conversion is also unsafe. Each set element
533becomes a corresponding list element, in an undefined order. Although no
534particular ordering is required, implementations _should_ produce list
535elements in a consistent order for a given input set, as a convenience
536to calling applications.
537
538Conversion from list types to set types is _unsafe_, as long as their element
539types are convertible. Each distinct list item becomes a distinct set item.
540If two list items are equal, one of the two is lost in the conversion.
541
542Conversion from tuple types to list types is permitted if all of the
543tuple element types are convertible to the target list element type.
544The safety of the conversion depends on the safety of each of the element
545conversions. Each element in turn is converted to the list element type,
546producing a list of identical length.
547
548Conversion from tuple types to set types is permitted, behaving as if the
549tuple type was first converted to a list of the same element type and then
550that list converted to the target set type.
551
552Conversion from object types to map types is permitted if all of the object
553attribute types are convertible to the target map element type. The safety
554of the conversion depends on the safety of each of the attribute conversions.
555Each attribute in turn is converted to the map element type, and map element
556keys are set to the name of each corresponding object attribute.
557
558Conversion from list and set types to tuple types is permitted, following
559the opposite steps as the converse conversions. Such conversions are _unsafe_.
560It is an error to convert a list or set to a tuple type whose number of
561elements does not match the list or set length.
562
563Conversion from map types to object types is permitted if each map key
564corresponds to an attribute in the target object type. It is an error to
565convert from a map value whose set of keys does not exactly match the target
566type's attributes. The conversion takes the opposite steps of the converse
567conversion.
568
569Conversion from one object type to another is permitted as long as the
570common attribute names have convertible types. Any attribute present in the
571target type but not in the source type is populated with a null value of
572the appropriate type.
573
574Conversion from one tuple type to another is permitted as long as the
575tuples have the same length and the elements have convertible types.
576
577### Type Unification
578
579Type unification is an operation that takes a list of types and attempts
580to find a single type to which they can all be converted. Since some
581type pairs have bidirectional conversions, preference is given to _safe_
582conversions. In technical terms, all possible types are arranged into
583a lattice, from which a most general supertype is selected where possible.
584
585The type resulting from type unification may be one of the input types, or
586it may be an entirely new type produced by combination of two or more
587input types.
588
589The following rules do not guarantee a valid result. In addition to these
590rules, unification fails if any of the given types are not convertible
591(per the above rules) to the selected result type.
592
593The following unification rules apply transitively. That is, if a rule is
594defined from A to B, and one from B to C, then A can unify to C.
595
596Number and bool types both unify with string by preferring string.
597
598Two collection types of the same kind unify according to the unification
599of their element types.
600
601List and set types unify by preferring the list type.
602
603Map and object types unify by preferring the object type.
604
605List, set and tuple types unify by preferring the tuple type.
606
607The dynamic pseudo-type unifies with any other type by selecting that other
608type. The dynamic pseudo-type is the result type only if _all_ input types
609are the dynamic pseudo-type.
610
611Two object types unify by constructing a new type whose attributes are
612the union of those of the two input types. Any common attributes themselves
613have their types unified.
614
615Two tuple types of the same length unify by constructing a new type of the
616same length whose elements are the unification of the corresponding elements
617in the two input types.
618
619## Static Analysis
620
621In most applications, full expression evaluation is sufficient for understanding
622the provided configuration. However, some specialized applications require more
623direct access to the physical structures in the expressions, which can for
624example allow the construction of new language constructs in terms of the
625existing syntax elements.
626
627Since static analysis analyses the physical structure of configuration, the
628details will vary depending on syntax. Each syntax must decide which of its
629physical structures corresponds to the following analyses, producing error
630diagnostics if they are applied to inappropriate expressions.
631
632The following are the required static analysis functions:
633
634* **Static List**: Require list/tuple construction syntax to be used and
635 return a list of expressions for each of the elements given.
636
637* **Static Map**: Require map/object construction syntax to be used and
638 return a list of key/value pairs -- both expressions -- for each of
639 the elements given. The usual constraint that a map key must be a string
640 must not apply to this analysis, thus allowing applications to interpret
641 arbitrary keys as they see fit.
642
643* **Static Call**: Require function call syntax to be used and return an
644 object describing the called function name and a list of expressions
645 representing each of the call arguments.
646
647* **Static Traversal**: Require a reference to a symbol in the variable
648 scope and return a description of the path from the root scope to the
649 accessed attribute or index.
650
651The intent of a calling application using these features is to require a more
652rigid interpretation of the configuration than in expression evaluation.
653Syntax implementations should make use of the extra contextual information
654provided in order to make an intuitive mapping onto the constructs of the
655underlying syntax, possibly interpreting the expression slightly differently
656than it would be interpreted in normal evaluation.
657
658Each syntax must define which of its expression elements each of the analyses
659above applies to, and how those analyses behave given those expression elements.
660
661## Implementation Considerations
662
663Implementations of this specification are free to adopt any strategy that
664produces behavior consistent with the specification. This non-normative
665section describes some possible implementation strategies that are consistent
666with the goals of this specification.
667
668### Language-agnosticism
669
670The language-agnosticism of this specification assumes that certain behaviors
671are implemented separately for each syntax:
672
673* Matching of a body schema with the physical elements of a body in the
674 source language, to determine correspondence between physical constructs
675 and schema elements.
676
677* Implementing the _dynamic attributes_ body processing mode by either
678 interpreting all physical constructs as attributes or producing an error
679 if non-attribute constructs are present.
680
681* Providing an evaluation function for all possible expressions that produces
682 a value given an evaluation context.
683
684* Providing the static analysis functionality described above in a manner that
685 makes sense within the convention of the syntax.
686
687The suggested implementation strategy is to use an implementation language's
688closest concept to an _abstract type_, _virtual type_ or _interface type_
689to represent both Body and Expression. Each language-specific implementation
690can then provide an implementation of each of these types wrapping AST nodes
691or other physical constructs from the language parser.
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/static_expr.go b/vendor/github.com/hashicorp/hcl2/hcl/static_expr.go
new file mode 100644
index 0000000..98ada87
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/static_expr.go
@@ -0,0 +1,40 @@
1package hcl
2
3import (
4 "github.com/zclconf/go-cty/cty"
5)
6
7type staticExpr struct {
8 val cty.Value
9 rng Range
10}
11
12// StaticExpr returns an Expression that always evaluates to the given value.
13//
14// This is useful to substitute default values for expressions that are
15// not explicitly given in configuration and thus would otherwise have no
16// Expression to return.
17//
18// Since expressions are expected to have a source range, the caller must
19// provide one. Ideally this should be a real source range, but it can
20// be a synthetic one (with an empty-string filename) if no suitable range
21// is available.
22func StaticExpr(val cty.Value, rng Range) Expression {
23 return staticExpr{val, rng}
24}
25
26func (e staticExpr) Value(ctx *EvalContext) (cty.Value, Diagnostics) {
27 return e.val, nil
28}
29
30func (e staticExpr) Variables() []Traversal {
31 return nil
32}
33
34func (e staticExpr) Range() Range {
35 return e.rng
36}
37
38func (e staticExpr) StartRange() Range {
39 return e.rng
40}
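
A typical use, as the doc comment describes, is substituting a default when an attribute is absent from configuration. The helper below is purely illustrative; the function name and defaulting policy are hypothetical.

```go
package example

import (
	"github.com/hashicorp/hcl2/hcl"
	"github.com/zclconf/go-cty/cty"
)

// exprOrDefault returns the attribute's expression if it was present in the
// extracted body content, or a synthetic expression yielding def otherwise.
func exprOrDefault(content *hcl.BodyContent, name string, def cty.Value) hcl.Expression {
	if attr, ok := content.Attributes[name]; ok {
		return attr.Expr
	}
	// No source range exists for an absent attribute, so use the body's
	// missing-item range as a plausible synthetic location.
	return hcl.StaticExpr(def, content.MissingItemRange)
}
```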
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/structure.go b/vendor/github.com/hashicorp/hcl2/hcl/structure.go
new file mode 100644
index 0000000..b336f30
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/structure.go
@@ -0,0 +1,151 @@
1package hcl
2
3import (
4 "github.com/zclconf/go-cty/cty"
5)
6
7// File is the top-level node that results from parsing an HCL file.
8type File struct {
9 Body Body
10 Bytes []byte
11
12 // Nav is used to integrate with the "hcled" editor integration package,
13 // and with diagnostic information formatters. It is not for direct use
14 // by a calling application.
15 Nav interface{}
16}
17
18// Block represents a nested block within a Body.
19type Block struct {
20 Type string
21 Labels []string
22 Body Body
23
24 DefRange Range // Range that can be considered the "definition" for seeking in an editor
25 TypeRange Range // Range for the block type declaration specifically.
26 LabelRanges []Range // Ranges for the label values specifically.
27}
28
29// Blocks is a sequence of Block.
30type Blocks []*Block
31
32// Attributes is a set of attributes keyed by their names.
33type Attributes map[string]*Attribute
34
35// Body is a container for attributes and blocks. It serves as the primary
36// unit of hierarchical structure within configuration.
37//
38// The content of a body cannot be meaningfully interpreted without a schema,
39// so Body represents the raw body content and has methods that allow the
40// content to be extracted in terms of a given schema.
41type Body interface {
42 // Content verifies that the entire body content conforms to the given
43 // schema and then returns it, and/or returns diagnostics. The returned
44 // body content is valid if non-nil, regardless of whether Diagnostics
45 // are provided, but diagnostics should still be eventually shown to
46 // the user.
47 Content(schema *BodySchema) (*BodyContent, Diagnostics)
48
49 // PartialContent is like Content except that it permits the configuration
50 // to contain additional blocks or attributes not specified in the
51 // schema. If any are present, the returned Body is non-nil and contains
52 // the remaining items from the body that were not selected by the schema.
53 PartialContent(schema *BodySchema) (*BodyContent, Body, Diagnostics)
54
55 // JustAttributes attempts to interpret all of the contents of the body
56 // as attributes, allowing for the contents to be accessed without a priori
57 // knowledge of the structure.
58 //
59 // The behavior of this method depends on the body's source language.
60 // Some languages, like JSON, can't distinguish between attributes and
61 // blocks without schema hints, but for languages that _can_ error
62 // diagnostics will be generated if any blocks are present in the body.
63 //
64 // Diagnostics may be produced for other reasons too, such as duplicate
65 // declarations of the same attribute.
66 JustAttributes() (Attributes, Diagnostics)
67
68 // MissingItemRange returns a range that represents where a missing item
69 // might hypothetically be inserted. This is used when producing
70 // diagnostics about missing required attributes or blocks. Not all bodies
71 // will have an obvious single insertion point, so the result here may
72 // be rather arbitrary.
73 MissingItemRange() Range
74}
75
76// BodyContent is the result of applying a BodySchema to a Body.
77type BodyContent struct {
78 Attributes Attributes
79 Blocks Blocks
80
81 MissingItemRange Range
82}
83
84// Attribute represents an attribute from within a body.
85type Attribute struct {
86 Name string
87 Expr Expression
88
89 Range Range
90 NameRange Range
91}
92
93// Expression is a literal value or an expression provided in the
94// configuration, which can be evaluated within a scope to produce a value.
95type Expression interface {
96 // Value returns the value resulting from evaluating the expression
97 // in the given evaluation context.
98 //
99 // The context may be nil, in which case the expression may contain
100 // only constants and diagnostics will be produced for any non-constant
101 // sub-expressions. (The exact definition of this depends on the source
102 // language.)
103 //
104 // The context may instead be set but have either its Variables or
105 // Functions maps set to nil, in which case only use of these features
106 // will return diagnostics.
107 //
108 // Different diagnostics are provided depending on whether the given
109 // context maps are nil or empty. In the former case, the message
110 // tells the user that variables/functions are not permitted at all,
111 // while in the latter case usage will produce a "not found" error for
112 // the specific symbol in question.
113 Value(ctx *EvalContext) (cty.Value, Diagnostics)
114
115 // Variables returns a list of variables referenced in the receiving
116 // expression. These are expressed as absolute Traversals, so may include
117 // additional information about how the variable is used, such as
118 // attribute lookups, which the calling application can potentially use
119 // to only selectively populate the scope.
120 Variables() []Traversal
121
122 Range() Range
123 StartRange() Range
124}
125
126// OfType filters the receiving block sequence by block type name,
127// returning a new block sequence including only the blocks of the
128// requested type.
129func (els Blocks) OfType(typeName string) Blocks {
130 ret := make(Blocks, 0)
131 for _, el := range els {
132 if el.Type == typeName {
133 ret = append(ret, el)
134 }
135 }
136 return ret
137}
138
139// ByType transforms the receiving block sequence into a map from type
140// name to block sequences of only that type.
141func (els Blocks) ByType() map[string]Blocks {
142 ret := make(map[string]Blocks)
143 for _, el := range els {
144 ty := el.Type
145 if ret[ty] == nil {
146 ret[ty] = make(Blocks, 0, 1)
147 }
148 ret[ty] = append(ret[ty], el)
149 }
150 return ret
151}
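
OfType and ByType are convenience filters over the blocks returned in a BodyContent. A short illustrative sketch follows; the "resource" type name is hypothetical.

```go
package example

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
)

// summarizeBlocks groups the blocks of an already-extracted BodyContent by
// type and prints how many of each were found. Purely illustrative.
func summarizeBlocks(content *hcl.BodyContent) {
	for typeName, blocks := range content.Blocks.ByType() {
		fmt.Printf("%d %q block(s)\n", len(blocks), typeName)
	}

	// OfType narrows to a single type, preserving order of appearance.
	for _, b := range content.Blocks.OfType("resource") {
		fmt.Println("resource labels:", b.Labels)
	}
}
```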
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/traversal.go b/vendor/github.com/hashicorp/hcl2/hcl/traversal.go
new file mode 100644
index 0000000..24f4c91
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/traversal.go
@@ -0,0 +1,352 @@
1package hcl
2
3import (
4 "fmt"
5
6 "github.com/zclconf/go-cty/cty"
7)
8
9// A Traversal is a description of traversing through a value via a
10// series of operations such as attribute lookup, index lookup, etc.
11//
12// It is used to look up values in scopes, for example.
13//
14// The traversal operations are implementations of interface Traverser.
15// This is a closed set of implementations, so the interface cannot be
16// implemented from outside this package.
17//
18// A traversal can be absolute (its first value is a symbol name) or relative
19// (starts from an existing value).
20type Traversal []Traverser
21
22// TraversalJoin appends a relative traversal to an absolute traversal to
23// produce a new absolute traversal.
24func TraversalJoin(abs Traversal, rel Traversal) Traversal {
25 if abs.IsRelative() {
26 panic("first argument to TraversalJoin must be absolute")
27 }
28 if !rel.IsRelative() {
29 panic("second argument to TraversalJoin must be relative")
30 }
31
32 ret := make(Traversal, len(abs)+len(rel))
33 copy(ret, abs)
34 copy(ret[len(abs):], rel)
35 return ret
36}
37
38// TraverseRel applies the receiving traversal to the given value, returning
39// the resulting value. This is supported only for relative traversals,
40// and will panic if applied to an absolute traversal.
41func (t Traversal) TraverseRel(val cty.Value) (cty.Value, Diagnostics) {
42 if !t.IsRelative() {
43 panic("can't use TraverseRel on an absolute traversal")
44 }
45
46 current := val
47 var diags Diagnostics
48 for _, tr := range t {
49 var newDiags Diagnostics
50 current, newDiags = tr.TraversalStep(current)
51 diags = append(diags, newDiags...)
52 if newDiags.HasErrors() {
53 return cty.DynamicVal, diags
54 }
55 }
56 return current, diags
57}
58
59// TraverseAbs applies the receiving traversal to the given eval context,
60// returning the resulting value. This is supported only for absolute
61// traversals, and will panic if applied to a relative traversal.
62func (t Traversal) TraverseAbs(ctx *EvalContext) (cty.Value, Diagnostics) {
63 if t.IsRelative() {
64 panic("can't use TraverseAbs on a relative traversal")
65 }
66
67 split := t.SimpleSplit()
68 root := split.Abs[0].(TraverseRoot)
69 name := root.Name
70
71 thisCtx := ctx
72 hasNonNil := false
73 for thisCtx != nil {
74 if thisCtx.Variables == nil {
75 thisCtx = thisCtx.parent
76 continue
77 }
78 hasNonNil = true
79 val, exists := thisCtx.Variables[name]
80 if exists {
81 return split.Rel.TraverseRel(val)
82 }
83 thisCtx = thisCtx.parent
84 }
85
86 if !hasNonNil {
87 return cty.DynamicVal, Diagnostics{
88 {
89 Severity: DiagError,
90 Summary: "Variables not allowed",
91 Detail: "Variables may not be used here.",
92 Subject: &root.SrcRange,
93 },
94 }
95 }
96
97 suggestions := make([]string, 0, len(ctx.Variables))
98 thisCtx = ctx
99 for thisCtx != nil {
100 for k := range thisCtx.Variables {
101 suggestions = append(suggestions, k)
102 }
103 thisCtx = thisCtx.parent
104 }
105 suggestion := nameSuggestion(name, suggestions)
106 if suggestion != "" {
107 suggestion = fmt.Sprintf(" Did you mean %q?", suggestion)
108 }
109
110 return cty.DynamicVal, Diagnostics{
111 {
112 Severity: DiagError,
113 Summary: "Unknown variable",
114 Detail: fmt.Sprintf("There is no variable named %q.%s", name, suggestion),
115 Subject: &root.SrcRange,
116 },
117 }
118}
119
120// IsRelative returns true if the receiver is a relative traversal, or false
121// otherwise.
122func (t Traversal) IsRelative() bool {
123 if len(t) == 0 {
124 return true
125 }
126 if _, firstIsRoot := t[0].(TraverseRoot); firstIsRoot {
127 return false
128 }
129 return true
130}
131
132// SimpleSplit returns a TraversalSplit where the name lookup is the absolute
133// part and the remainder is the relative part. Supported only for
134// absolute traversals, and will panic if applied to a relative traversal.
135//
136// This can be used by applications that have a relatively-simple variable
137// namespace where only the top-level is directly populated in the scope, with
138// everything else handled by relative lookups from those initial values.
139func (t Traversal) SimpleSplit() TraversalSplit {
140 if t.IsRelative() {
141 panic("can't use SimpleSplit on a relative traversal")
142 }
143 return TraversalSplit{
144 Abs: t[0:1],
145 Rel: t[1:],
146 }
147}
148
149// RootName returns the root name for an absolute traversal. Will panic if
150// called on a relative traversal.
151func (t Traversal) RootName() string {
152 if t.IsRelative() {
153 panic("can't use RootName on a relative traversal")
154
155 }
156 return t[0].(TraverseRoot).Name
157}
158
159// SourceRange returns the source range for the traversal.
160func (t Traversal) SourceRange() Range {
161 if len(t) == 0 {
162 // Nothing useful to return here, but we'll return something
163 // that's correctly-typed at least.
164 return Range{}
165 }
166
167 return RangeBetween(t[0].SourceRange(), t[len(t)-1].SourceRange())
168}
169
170// TraversalSplit represents a pair of traversals, the first of which is
171// an absolute traversal and the second of which is relative to the first.
172//
173// This is used by calling applications that only populate prefixes of the
174// traversals in the scope, with Abs representing the part coming from the
175// scope and Rel representing the remaining steps once that part is
176// retrieved.
177type TraversalSplit struct {
178 Abs Traversal
179 Rel Traversal
180}
181
182// TraverseAbs traverses from a scope to the value resulting from the
183// absolute traversal.
184func (t TraversalSplit) TraverseAbs(ctx *EvalContext) (cty.Value, Diagnostics) {
185 return t.Abs.TraverseAbs(ctx)
186}
187
188// TraverseRel traverses from a given value, assumed to be the result of
189// TraverseAbs on some scope, to a final result for the entire split traversal.
190func (t TraversalSplit) TraverseRel(val cty.Value) (cty.Value, Diagnostics) {
191 return t.Rel.TraverseRel(val)
192}
193
194// Traverse is a convenience function to apply TraverseAbs followed by
195// TraverseRel.
196func (t TraversalSplit) Traverse(ctx *EvalContext) (cty.Value, Diagnostics) {
197 v1, diags := t.TraverseAbs(ctx)
198 if diags.HasErrors() {
199 return cty.DynamicVal, diags
200 }
201 v2, newDiags := t.TraverseRel(v1)
202 diags = append(diags, newDiags...)
203 return v2, diags
204}
205
206// Join concatenates together the Abs and Rel parts to produce a single
207// absolute traversal.
208func (t TraversalSplit) Join() Traversal {
209 return TraversalJoin(t.Abs, t.Rel)
210}
211
212// RootName returns the root name for the absolute part of the split.
213func (t TraversalSplit) RootName() string {
214 return t.Abs.RootName()
215}
216
217// A Traverser is a step within a Traversal.
218type Traverser interface {
219 TraversalStep(cty.Value) (cty.Value, Diagnostics)
220 SourceRange() Range
221 isTraverserSigil() isTraverser
222}
223
224// Embed this in a struct to declare it as a Traverser
225type isTraverser struct {
226}
227
228func (tr isTraverser) isTraverserSigil() isTraverser {
229 return isTraverser{}
230}
231
232// TraverseRoot looks up a root name in a scope. It is used as the first step
233// of an absolute Traversal, and cannot itself be traversed directly.
234type TraverseRoot struct {
235 isTraverser
236 Name string
237 SrcRange Range
238}
239
240// TraversalStep on a TraverseRoot immediately panics, because absolute
241// traversals cannot be directly traversed.
242func (tn TraverseRoot) TraversalStep(cty.Value) (cty.Value, Diagnostics) {
243 panic("Cannot traverse an absolute traversal")
244}
245
246func (tn TraverseRoot) SourceRange() Range {
247 return tn.SrcRange
248}
249
250// TraverseAttr looks up an attribute in its initial value.
251type TraverseAttr struct {
252 isTraverser
253 Name string
254 SrcRange Range
255}
256
257func (tn TraverseAttr) TraversalStep(val cty.Value) (cty.Value, Diagnostics) {
258 if val.IsNull() {
259 return cty.DynamicVal, Diagnostics{
260 {
261 Severity: DiagError,
262 Summary: "Attempt to get attribute from null value",
263 Detail: "This value is null, so it does not have any attributes.",
264 Subject: &tn.SrcRange,
265 },
266 }
267 }
268
269 ty := val.Type()
270 switch {
271 case ty.IsObjectType():
272 if !ty.HasAttribute(tn.Name) {
273 return cty.DynamicVal, Diagnostics{
274 {
275 Severity: DiagError,
276 Summary: "Unsupported attribute",
277 Detail: fmt.Sprintf("This object does not have an attribute named %q.", tn.Name),
278 Subject: &tn.SrcRange,
279 },
280 }
281 }
282
283 if !val.IsKnown() {
284 return cty.UnknownVal(ty.AttributeType(tn.Name)), nil
285 }
286
287 return val.GetAttr(tn.Name), nil
288 case ty.IsMapType():
289 if !val.IsKnown() {
290 return cty.UnknownVal(ty.ElementType()), nil
291 }
292
293 idx := cty.StringVal(tn.Name)
294 if val.HasIndex(idx).False() {
295 return cty.DynamicVal, Diagnostics{
296 {
297 Severity: DiagError,
298 Summary: "Missing map element",
299 Detail: fmt.Sprintf("This map does not have an element with the key %q.", tn.Name),
300 Subject: &tn.SrcRange,
301 },
302 }
303 }
304
305 return val.Index(idx), nil
306 case ty == cty.DynamicPseudoType:
307 return cty.DynamicVal, nil
308 default:
309 return cty.DynamicVal, Diagnostics{
310 {
311 Severity: DiagError,
312 Summary: "Unsupported attribute",
313 Detail: "This value does not have any attributes.",
314 Subject: &tn.SrcRange,
315 },
316 }
317 }
318}
319
320func (tn TraverseAttr) SourceRange() Range {
321 return tn.SrcRange
322}
323
324// TraverseIndex applies the index operation to its initial value.
325type TraverseIndex struct {
326 isTraverser
327 Key cty.Value
328 SrcRange Range
329}
330
331func (tn TraverseIndex) TraversalStep(val cty.Value) (cty.Value, Diagnostics) {
332 return Index(val, tn.Key, &tn.SrcRange)
333}
334
335func (tn TraverseIndex) SourceRange() Range {
336 return tn.SrcRange
337}
338
339// TraverseSplat applies the splat operation to its initial value.
340type TraverseSplat struct {
341 isTraverser
342 Each Traversal
343 SrcRange Range
344}
345
346func (tn TraverseSplat) TraversalStep(val cty.Value) (cty.Value, Diagnostics) {
347 panic("TraverseSplat not yet implemented")
348}
349
350func (tn TraverseSplat) SourceRange() Range {
351 return tn.SrcRange
352}
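As a rough, non-vendored illustration of the traversal API above, the following sketch builds an absolute traversal by hand and resolves it against a scope. The EvalContext type is defined elsewhere in this package; the symbol names are hypothetical.

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	// Equivalent to the reference expression: user.name
	trav := hcl.Traversal{
		hcl.TraverseRoot{Name: "user"},
		hcl.TraverseAttr{Name: "name"},
	}

	// Scope defining the root symbol "user".
	ctx := &hcl.EvalContext{
		Variables: map[string]cty.Value{
			"user": cty.ObjectVal(map[string]cty.Value{
				"name": cty.StringVal("example"),
			}),
		},
	}

	val, diags := trav.TraverseAbs(ctx)
	if diags.HasErrors() {
		fmt.Println(diags.Error())
		return
	}
	fmt.Println(val.AsString()) // example
}

SimpleSplit would divide the same traversal into the root lookup ("user") and the relative remainder (".name"), which is how TraverseAbs itself resolves it.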
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/traversal_for_expr.go b/vendor/github.com/hashicorp/hcl2/hcl/traversal_for_expr.go
new file mode 100644
index 0000000..5f52946
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcl/traversal_for_expr.go
@@ -0,0 +1,121 @@
1package hcl
2
3// AbsTraversalForExpr attempts to interpret the given expression as
4// an absolute traversal, or returns error diagnostic(s) if that is
5// not possible for the given expression.
6//
7// A particular Expression implementation can support this function by
8// offering a method called AsTraversal that takes no arguments and
9// returns either a valid absolute traversal or nil to indicate that
10// no traversal is possible. Alternatively, an implementation can support
11// UnwrapExpression to delegate handling of this function to a wrapped
12// Expression object.
13//
14// In most cases the calling application is interested in the value
15// that results from an expression, but in rarer cases the application
16// needs to see the name of the variable and subsequent
17// attributes/indexes itself, for example to allow users to give references
18// to the variables themselves rather than to their values. An implementer
19// of this function should at least support attribute and index steps.
20func AbsTraversalForExpr(expr Expression) (Traversal, Diagnostics) {
21 type asTraversal interface {
22 AsTraversal() Traversal
23 }
24
25 physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool {
26 _, supported := expr.(asTraversal)
27 return supported
28 })
29
30 if asT, supported := physExpr.(asTraversal); supported {
31 if traversal := asT.AsTraversal(); traversal != nil {
32 return traversal, nil
33 }
34 }
35 return nil, Diagnostics{
36 &Diagnostic{
37 Severity: DiagError,
38 Summary: "Invalid expression",
39 Detail: "A static variable reference is required.",
40 Subject: expr.Range().Ptr(),
41 },
42 }
43}
44
45// RelTraversalForExpr is similar to AbsTraversalForExpr but it returns
46// a relative traversal instead. Due to the nature of HCL expressions, the
47// first element of the returned traversal is always a TraverseAttr, and
48// then it will be followed by zero or more other traversal steps.
49//
50// Any expression accepted by AbsTraversalForExpr is also accepted by
51// RelTraversalForExpr.
52func RelTraversalForExpr(expr Expression) (Traversal, Diagnostics) {
53 traversal, diags := AbsTraversalForExpr(expr)
54 if len(traversal) > 0 {
55 root := traversal[0].(TraverseRoot)
56 traversal[0] = TraverseAttr{
57 Name: root.Name,
58 SrcRange: root.SrcRange,
59 }
60 }
61 return traversal, diags
62}
63
64// ExprAsKeyword attempts to interpret the given expression as a static keyword,
65// returning the keyword string if possible, and the empty string if not.
66//
67// A static keyword, for the sake of this function, is a single identifier.
68// For example, the following attribute has an expression that would produce
69// the keyword "foo":
70//
71// example = foo
72//
73// This function is a variant of AbsTraversalForExpr, which uses the same
74// interface on the given expression. This helper constrains the result
75// further by requiring only a single root identifier.
76//
77// This function is intended to be used with the following idiom, to recognize
78// situations where one of a fixed set of keywords is required and arbitrary
79// expressions are not allowed:
80//
81// switch hcl.ExprAsKeyword(expr) {
82// case "allow":
83// // (take suitable action for keyword "allow")
84// case "deny":
85// // (take suitable action for keyword "deny")
86// default:
87// diags = append(diags, &hcl.Diagnostic{
88// // ... "invalid keyword" diagnostic message ...
89// })
90// }
91//
92// The above approach will generate the same message for both the use of an
93// unrecognized keyword and for not using a keyword at all, which is usually
94// reasonable if the message specifies that the given value must be a keyword
95// from that fixed list.
96//
97// Note that in the native syntax the keywords "true", "false", and "null" are
98// recognized as literal values during parsing and so these reserved words
99// cannot be accepted as keywords by this function.
100//
101// Since interpreting an expression as a keyword bypasses usual expression
102// evaluation, it should be used sparingly for situations where e.g. one of
103// a fixed set of keywords is used in a structural way in a special attribute
104// to affect the further processing of a block.
105func ExprAsKeyword(expr Expression) string {
106 type asTraversal interface {
107 AsTraversal() Traversal
108 }
109
110 physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool {
111 _, supported := expr.(asTraversal)
112 return supported
113 })
114
115 if asT, supported := physExpr.(asTraversal); supported {
116 if traversal := asT.AsTraversal(); len(traversal) == 1 {
117 return traversal.RootName()
118 }
119 }
120 return ""
121}
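The functions above work on any Expression whose implementation offers AsTraversal. A minimal sketch, assuming the companion hclsyntax parser (not shown in this diff) to produce expressions, and hypothetical input strings:

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	// A static reference expression can be recovered as a traversal.
	expr, diags := hclsyntax.ParseExpression(
		[]byte("aws_instance.web.id"), "example.hcl", hcl.Pos{Line: 1, Column: 1},
	)
	if diags.HasErrors() {
		fmt.Println(diags.Error())
		return
	}

	trav, travDiags := hcl.AbsTraversalForExpr(expr)
	if travDiags.HasErrors() {
		fmt.Println(travDiags.Error())
		return
	}
	fmt.Println(trav.RootName()) // aws_instance

	// A single bare identifier can instead be treated as a keyword.
	kwExpr, _ := hclsyntax.ParseExpression([]byte("allow"), "example.hcl", hcl.Pos{Line: 1, Column: 1})
	fmt.Println(hcl.ExprAsKeyword(kwExpr)) // allow
}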
diff --git a/vendor/github.com/hashicorp/hcl2/hcldec/block_labels.go b/vendor/github.com/hashicorp/hcl2/hcldec/block_labels.go
new file mode 100644
index 0000000..7e652e9
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcldec/block_labels.go
@@ -0,0 +1,21 @@
1package hcldec
2
3import (
4 "github.com/hashicorp/hcl2/hcl"
5)
6
7type blockLabel struct {
8 Value string
9 Range hcl.Range
10}
11
12func labelsForBlock(block *hcl.Block) []blockLabel {
13 ret := make([]blockLabel, len(block.Labels))
14 for i := range block.Labels {
15 ret[i] = blockLabel{
16 Value: block.Labels[i],
17 Range: block.LabelRanges[i],
18 }
19 }
20 return ret
21}
diff --git a/vendor/github.com/hashicorp/hcl2/hcldec/decode.go b/vendor/github.com/hashicorp/hcl2/hcldec/decode.go
new file mode 100644
index 0000000..6cf93fe
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcldec/decode.go
@@ -0,0 +1,36 @@
1package hcldec
2
3import (
4 "github.com/hashicorp/hcl2/hcl"
5 "github.com/zclconf/go-cty/cty"
6)
7
8func decode(body hcl.Body, blockLabels []blockLabel, ctx *hcl.EvalContext, spec Spec, partial bool) (cty.Value, hcl.Body, hcl.Diagnostics) {
9 schema := ImpliedSchema(spec)
10
11 var content *hcl.BodyContent
12 var diags hcl.Diagnostics
13 var leftovers hcl.Body
14
15 if partial {
16 content, leftovers, diags = body.PartialContent(schema)
17 } else {
18 content, diags = body.Content(schema)
19 }
20
21 val, valDiags := spec.decode(content, blockLabels, ctx)
22 diags = append(diags, valDiags...)
23
24 return val, leftovers, diags
25}
26
27func impliedType(spec Spec) cty.Type {
28 return spec.impliedType()
29}
30
31func sourceRange(body hcl.Body, blockLabels []blockLabel, spec Spec) hcl.Range {
32 schema := ImpliedSchema(spec)
33 content, _, _ := body.PartialContent(schema)
34
35 return spec.sourceRange(content, blockLabels)
36}
diff --git a/vendor/github.com/hashicorp/hcl2/hcldec/doc.go b/vendor/github.com/hashicorp/hcl2/hcldec/doc.go
new file mode 100644
index 0000000..23bfe54
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcldec/doc.go
@@ -0,0 +1,12 @@
1// Package hcldec provides a higher-level API for unpacking the content of
2// HCL bodies, implemented in terms of the low-level "Content" API exposed
3// by the bodies themselves.
4//
5// It allows decoding an entire nested configuration in a single operation
6// by providing a description of the intended structure.
7//
8// For some applications it may be more convenient to use the "gohcl"
9// package, which has a similar purpose but decodes directly into native
10// Go data types. hcldec instead targets the cty type system, and thus allows
11// a cty-driven application to remain within that type system.
12package hcldec
diff --git a/vendor/github.com/hashicorp/hcl2/hcldec/gob.go b/vendor/github.com/hashicorp/hcl2/hcldec/gob.go
new file mode 100644
index 0000000..e2027cf
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcldec/gob.go
@@ -0,0 +1,23 @@
1package hcldec
2
3import (
4 "encoding/gob"
5)
6
7func init() {
8 // Every Spec implementation should be registered with gob, so that
9 // specs can be sent over gob channels, such as using
10 // github.com/hashicorp/go-plugin with plugins that need to describe
11 // what shape of configuration they are expecting.
12 gob.Register(ObjectSpec(nil))
13 gob.Register(TupleSpec(nil))
14 gob.Register((*AttrSpec)(nil))
15 gob.Register((*LiteralSpec)(nil))
16 gob.Register((*ExprSpec)(nil))
17 gob.Register((*BlockSpec)(nil))
18 gob.Register((*BlockListSpec)(nil))
19 gob.Register((*BlockSetSpec)(nil))
20 gob.Register((*BlockMapSpec)(nil))
21 gob.Register((*BlockLabelSpec)(nil))
22 gob.Register((*DefaultSpec)(nil))
23}
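Because every Spec implementation is registered in init, a composed spec can round-trip through encoding/gob, which is what allows specs to be handed to plugins over go-plugin channels. A minimal sketch, assuming go-cty's gob support for cty.Type and a hypothetical spec:

package main

import (
	"bytes"
	"encoding/gob"
	"fmt"

	"github.com/hashicorp/hcl2/hcldec"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	spec := hcldec.ObjectSpec{
		"name": &hcldec.AttrSpec{Name: "name", Type: cty.String, Required: true},
	}

	var buf bytes.Buffer
	if err := gob.NewEncoder(&buf).Encode(spec); err != nil {
		fmt.Println("encode:", err)
		return
	}

	var decoded hcldec.ObjectSpec
	if err := gob.NewDecoder(&buf).Decode(&decoded); err != nil {
		fmt.Println("decode:", err)
		return
	}
	fmt.Println(len(decoded)) // 1
}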
diff --git a/vendor/github.com/hashicorp/hcl2/hcldec/public.go b/vendor/github.com/hashicorp/hcl2/hcldec/public.go
new file mode 100644
index 0000000..5d1f10a
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcldec/public.go
@@ -0,0 +1,78 @@
1package hcldec
2
3import (
4 "github.com/hashicorp/hcl2/hcl"
5 "github.com/zclconf/go-cty/cty"
6)
7
8// Decode interprets the given body using the given specification and returns
9// the resulting value. If the given body is not valid per the spec, error
10// diagnostics are returned and the returned value is likely to be incomplete.
11//
12// The ctx argument may be nil, in which case any references to variables or
13// functions will produce error diagnostics.
14func Decode(body hcl.Body, spec Spec, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
15 val, _, diags := decode(body, nil, ctx, spec, false)
16 return val, diags
17}
18
19// PartialDecode is like Decode except that it permits "leftover" items in
20// the top-level body, which are returned as a new body to allow for
21// further processing.
22//
23// Any descendent block bodies are _not_ decoded partially and thus must
24// be fully described by the given specification.
25func PartialDecode(body hcl.Body, spec Spec, ctx *hcl.EvalContext) (cty.Value, hcl.Body, hcl.Diagnostics) {
26 return decode(body, nil, ctx, spec, true)
27}
28
29// ImpliedType returns the value type that should result from decoding the
30// given spec.
31func ImpliedType(spec Spec) cty.Type {
32 return impliedType(spec)
33}
34
35// SourceRange interprets the given body using the given specification and
36// then returns the source range of the value that would be used to
37// fulfill the spec.
38//
39// This can be used if application-level validation detects value errors, to
40// obtain a reasonable SourceRange to use for generated diagnostics. It works
41// best when applied to specific body items (e.g. using AttrSpec, BlockSpec, ...)
42// as opposed to entire bodies using ObjectSpec, TupleSpec. The result will
43// be less useful the broader the specification, so e.g. a spec that returns
44// the entirety of all of the blocks of a given type is likely to be
45// _particularly_ arbitrary and useless.
46//
47// If the given body is not valid per the given spec, the result is best-effort
48// and may not actually be something ideal. It's expected that an application
49// will already have used Decode or PartialDecode earlier and thus had an
50// opportunity to detect and report spec violations.
51func SourceRange(body hcl.Body, spec Spec) hcl.Range {
52 return sourceRange(body, nil, spec)
53}
54
55// ChildBlockTypes returns a map of all of the child block types declared
56// by the given spec, with block type names as keys and the associated
57// nested body specs as values.
58func ChildBlockTypes(spec Spec) map[string]Spec {
59 ret := map[string]Spec{}
60
61 // visitSameBodyChildren walks through the spec structure, calling
62 // the given callback for each descendent spec encountered. We are
63 // interested in the specs that reference attributes and blocks.
64 var visit visitFunc
65 visit = func(s Spec) {
66 if bs, ok := s.(blockSpec); ok {
67 for _, blockS := range bs.blockHeaderSchemata() {
68 ret[blockS.Type] = bs.nestedSpec()
69 }
70 }
71
72 s.visitSameBodyChildren(visit)
73 }
74
75 visit(spec)
76
77 return ret
78}
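Putting Decode together with a spec and a parsed body: a minimal sketch, assuming the hclparse package added later in this diff and a hypothetical "service" block schema.

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcldec"
	"github.com/hashicorp/hcl2/hclparse"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	parser := hclparse.NewParser()
	f, diags := parser.ParseHCL([]byte(`
service "web" {
  listen_addr = "127.0.0.1:8080"
}
`), "example.hcl")
	if diags.HasErrors() {
		fmt.Println(diags.Error())
		return
	}

	spec := &hcldec.BlockSpec{
		TypeName: "service",
		Required: true,
		Nested: hcldec.ObjectSpec{
			"name":        &hcldec.BlockLabelSpec{Index: 0, Name: "name"},
			"listen_addr": &hcldec.AttrSpec{Name: "listen_addr", Type: cty.String, Required: true},
		},
	}

	// ctx is nil because this configuration uses no variables or functions.
	val, decDiags := hcldec.Decode(f.Body, spec, nil)
	if decDiags.HasErrors() {
		fmt.Println(decDiags.Error())
		return
	}
	fmt.Println(val.GetAttr("name").AsString())        // web
	fmt.Println(val.GetAttr("listen_addr").AsString()) // 127.0.0.1:8080
}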
diff --git a/vendor/github.com/hashicorp/hcl2/hcldec/schema.go b/vendor/github.com/hashicorp/hcl2/hcldec/schema.go
new file mode 100644
index 0000000..b57bd96
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcldec/schema.go
@@ -0,0 +1,36 @@
1package hcldec
2
3import (
4 "github.com/hashicorp/hcl2/hcl"
5)
6
7// ImpliedSchema returns the *hcl.BodySchema implied by the given specification.
8// This is the schema that the Decode function will use internally to
9// access the content of a given body.
10func ImpliedSchema(spec Spec) *hcl.BodySchema {
11 var attrs []hcl.AttributeSchema
12 var blocks []hcl.BlockHeaderSchema
13
14 // visitSameBodyChildren walks through the spec structure, calling
15 // the given callback for each descendent spec encountered. We are
16 // interested in the specs that reference attributes and blocks.
17 var visit visitFunc
18 visit = func(s Spec) {
19 if as, ok := s.(attrSpec); ok {
20 attrs = append(attrs, as.attrSchemata()...)
21 }
22
23 if bs, ok := s.(blockSpec); ok {
24 blocks = append(blocks, bs.blockHeaderSchemata()...)
25 }
26
27 s.visitSameBodyChildren(visit)
28 }
29
30 visit(spec)
31
32 return &hcl.BodySchema{
33 Attributes: attrs,
34 Blocks: blocks,
35 }
36}
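ImpliedSchema is what ties a spec to the low-level Content/PartialContent API: it collects one attribute schema per attrSpec and one block header schema per blockSpec. A small sketch with a hypothetical spec:

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcldec"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	spec := hcldec.ObjectSpec{
		"name":    &hcldec.AttrSpec{Name: "name", Type: cty.String, Required: true},
		"service": &hcldec.BlockSpec{TypeName: "service", Nested: hcldec.ObjectSpec{}},
	}

	schema := hcldec.ImpliedSchema(spec)
	for _, a := range schema.Attributes {
		fmt.Println("attribute:", a.Name, "required:", a.Required)
	}
	for _, b := range schema.Blocks {
		fmt.Println("block:", b.Type, "labels:", b.LabelNames)
	}
}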
diff --git a/vendor/github.com/hashicorp/hcl2/hcldec/spec.go b/vendor/github.com/hashicorp/hcl2/hcldec/spec.go
new file mode 100644
index 0000000..25cafcd
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcldec/spec.go
@@ -0,0 +1,998 @@
1package hcldec
2
3import (
4 "bytes"
5 "fmt"
6
7 "github.com/hashicorp/hcl2/hcl"
8 "github.com/zclconf/go-cty/cty"
9 "github.com/zclconf/go-cty/cty/convert"
10 "github.com/zclconf/go-cty/cty/function"
11)
12
13// A Spec is a description of how to decode a hcl.Body to a cty.Value.
14//
15// The various other types in this package whose names end in "Spec" are
16// the spec implementations. The most common top-level spec is ObjectSpec,
17// which decodes body content into a cty.Value of an object type.
18type Spec interface {
19 // Perform the decode operation on the given body, in the context of
20 // the given block (which might be null), using the given eval context.
21 //
22 // "block" is provided only by the nested calls performed by the spec
23 // types that work on block bodies.
24 decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics)
25
26 // Return the cty.Type that should be returned when decoding a body with
27 // this spec.
28 impliedType() cty.Type
29
30 // Call the given callback once for each of the nested specs that would
31 // get decoded with the same body and block as the receiver. This should
32 // not descend into the nested specs used when decoding blocks.
33 visitSameBodyChildren(cb visitFunc)
34
35 // Determine the source range of the value that would be returned for the
36 // spec in the given content, in the context of the given block
37 // (which might be null). If the corresponding item is missing, return
38 // a place where it might be inserted.
39 sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range
40}
41
42type visitFunc func(spec Spec)
43
44// An ObjectSpec is a Spec that produces a cty.Value of an object type whose
45// attributes correspond to the keys of the spec map.
46type ObjectSpec map[string]Spec
47
48// attrSpec is implemented by specs that require attributes from the body.
49type attrSpec interface {
50 attrSchemata() []hcl.AttributeSchema
51}
52
53// blockSpec is implemented by specs that require blocks from the body.
54type blockSpec interface {
55 blockHeaderSchemata() []hcl.BlockHeaderSchema
56 nestedSpec() Spec
57}
58
59// specNeedingVariables is implemented by specs that can use variables
60// from the EvalContext, to declare which variables they need.
61type specNeedingVariables interface {
62 variablesNeeded(content *hcl.BodyContent) []hcl.Traversal
63}
64
65func (s ObjectSpec) visitSameBodyChildren(cb visitFunc) {
66 for _, c := range s {
67 cb(c)
68 }
69}
70
71func (s ObjectSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
72 vals := make(map[string]cty.Value, len(s))
73 var diags hcl.Diagnostics
74
75 for k, spec := range s {
76 var kd hcl.Diagnostics
77 vals[k], kd = spec.decode(content, blockLabels, ctx)
78 diags = append(diags, kd...)
79 }
80
81 return cty.ObjectVal(vals), diags
82}
83
84func (s ObjectSpec) impliedType() cty.Type {
85 if len(s) == 0 {
86 return cty.EmptyObject
87 }
88
89 attrTypes := make(map[string]cty.Type)
90 for k, childSpec := range s {
91 attrTypes[k] = childSpec.impliedType()
92 }
93 return cty.Object(attrTypes)
94}
95
96func (s ObjectSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range {
97 // This is not great, but the best we can do. In practice, it's rather
98 // strange to ask for the source range of an entire top-level body, since
99 // that's already readily available to the caller.
100 return content.MissingItemRange
101}
102
103// A TupleSpec is a Spec that produces a cty.Value of a tuple type whose
104// elements correspond to the elements of the spec slice.
105type TupleSpec []Spec
106
107func (s TupleSpec) visitSameBodyChildren(cb visitFunc) {
108 for _, c := range s {
109 cb(c)
110 }
111}
112
113func (s TupleSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
114 vals := make([]cty.Value, len(s))
115 var diags hcl.Diagnostics
116
117 for i, spec := range s {
118 var ed hcl.Diagnostics
119 vals[i], ed = spec.decode(content, blockLabels, ctx)
120 diags = append(diags, ed...)
121 }
122
123 return cty.TupleVal(vals), diags
124}
125
126func (s TupleSpec) impliedType() cty.Type {
127 if len(s) == 0 {
128 return cty.EmptyTuple
129 }
130
131 attrTypes := make([]cty.Type, len(s))
132 for i, childSpec := range s {
133 attrTypes[i] = childSpec.impliedType()
134 }
135 return cty.Tuple(attrTypes)
136}
137
138func (s TupleSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range {
139 // This is not great, but the best we can do. In practice, it's rather
140 // strange to ask for the source range of an entire top-level body, since
141 // that's already readily available to the caller.
142 return content.MissingItemRange
143}
144
145// An AttrSpec is a Spec that evaluates a particular attribute expression in
146// the body and returns its resulting value converted to the requested type,
147// or produces a diagnostic if the type is incorrect.
148type AttrSpec struct {
149 Name string
150 Type cty.Type
151 Required bool
152}
153
154func (s *AttrSpec) visitSameBodyChildren(cb visitFunc) {
155 // leaf node
156}
157
158// specNeedingVariables implementation
159func (s *AttrSpec) variablesNeeded(content *hcl.BodyContent) []hcl.Traversal {
160 attr, exists := content.Attributes[s.Name]
161 if !exists {
162 return nil
163 }
164
165 return attr.Expr.Variables()
166}
167
168// attrSpec implementation
169func (s *AttrSpec) attrSchemata() []hcl.AttributeSchema {
170 return []hcl.AttributeSchema{
171 {
172 Name: s.Name,
173 Required: s.Required,
174 },
175 }
176}
177
178func (s *AttrSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range {
179 attr, exists := content.Attributes[s.Name]
180 if !exists {
181 return content.MissingItemRange
182 }
183
184 return attr.Expr.Range()
185}
186
187func (s *AttrSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
188 attr, exists := content.Attributes[s.Name]
189 if !exists {
190 // We don't need to check required and emit a diagnostic here, because
191 // that would already have happened when building "content".
192 return cty.NullVal(s.Type), nil
193 }
194
195 val, diags := attr.Expr.Value(ctx)
196
197 convVal, err := convert.Convert(val, s.Type)
198 if err != nil {
199 diags = append(diags, &hcl.Diagnostic{
200 Severity: hcl.DiagError,
201 Summary: "Incorrect attribute value type",
202 Detail: fmt.Sprintf(
203 "Inappropriate value for attribute %q: %s.",
204 s.Name, err.Error(),
205 ),
206 Subject: attr.Expr.StartRange().Ptr(),
207 Context: hcl.RangeBetween(attr.NameRange, attr.Expr.StartRange()).Ptr(),
208 })
209 // We'll return an unknown value of the _correct_ type so that the
210 // incomplete result can still be used for some analysis use-cases.
211 val = cty.UnknownVal(s.Type)
212 } else {
213 val = convVal
214 }
215
216 return val, diags
217}
218
219func (s *AttrSpec) impliedType() cty.Type {
220 return s.Type
221}
222
223// A LiteralSpec is a Spec that produces the given literal value, ignoring
224// the given body.
225type LiteralSpec struct {
226 Value cty.Value
227}
228
229func (s *LiteralSpec) visitSameBodyChildren(cb visitFunc) {
230 // leaf node
231}
232
233func (s *LiteralSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
234 return s.Value, nil
235}
236
237func (s *LiteralSpec) impliedType() cty.Type {
238 return s.Value.Type()
239}
240
241func (s *LiteralSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range {
242 // No sensible range to return for a literal, so the caller had better
243 // ensure it doesn't cause any diagnostics.
244 return hcl.Range{
245 Filename: "<unknown>",
246 }
247}
248
249// An ExprSpec is a Spec that evaluates the given expression, ignoring the
250// given body.
251type ExprSpec struct {
252 Expr hcl.Expression
253}
254
255func (s *ExprSpec) visitSameBodyChildren(cb visitFunc) {
256 // leaf node
257}
258
259// specNeedingVariables implementation
260func (s *ExprSpec) variablesNeeded(content *hcl.BodyContent) []hcl.Traversal {
261 return s.Expr.Variables()
262}
263
264func (s *ExprSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
265 return s.Expr.Value(ctx)
266}
267
268func (s *ExprSpec) impliedType() cty.Type {
269 // We can't know the type of our expression until we evaluate it
270 return cty.DynamicPseudoType
271}
272
273func (s *ExprSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range {
274 return s.Expr.Range()
275}
276
277// A BlockSpec is a Spec that produces a cty.Value by decoding the contents
278// of a single nested block of a given type, using a nested spec.
279//
280// If the Required flag is not set, the nested block may be omitted, in which
281// case a null value is produced. If it _is_ set, an error diagnostic is
282// produced if there are no nested blocks of the given type.
283type BlockSpec struct {
284 TypeName string
285 Nested Spec
286 Required bool
287}
288
289func (s *BlockSpec) visitSameBodyChildren(cb visitFunc) {
290 // leaf node ("Nested" does not use the same body)
291}
292
293// blockSpec implementation
294func (s *BlockSpec) blockHeaderSchemata() []hcl.BlockHeaderSchema {
295 return []hcl.BlockHeaderSchema{
296 {
297 Type: s.TypeName,
298 LabelNames: findLabelSpecs(s.Nested),
299 },
300 }
301}
302
303// blockSpec implementation
304func (s *BlockSpec) nestedSpec() Spec {
305 return s.Nested
306}
307
308// specNeedingVariables implementation
309func (s *BlockSpec) variablesNeeded(content *hcl.BodyContent) []hcl.Traversal {
310 var childBlock *hcl.Block
311 for _, candidate := range content.Blocks {
312 if candidate.Type != s.TypeName {
313 continue
314 }
315
316 childBlock = candidate
317 break
318 }
319
320 if childBlock == nil {
321 return nil
322 }
323
324 return Variables(childBlock.Body, s.Nested)
325}
326
327func (s *BlockSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
328 var diags hcl.Diagnostics
329
330 var childBlock *hcl.Block
331 for _, candidate := range content.Blocks {
332 if candidate.Type != s.TypeName {
333 continue
334 }
335
336 if childBlock != nil {
337 diags = append(diags, &hcl.Diagnostic{
338 Severity: hcl.DiagError,
339 Summary: fmt.Sprintf("Duplicate %s block", s.TypeName),
340 Detail: fmt.Sprintf(
341 "Only one block of type %q is allowed. Previous definition was at %s.",
342 s.TypeName, childBlock.DefRange.String(),
343 ),
344 Subject: &candidate.DefRange,
345 })
346 break
347 }
348
349 childBlock = candidate
350 }
351
352 if childBlock == nil {
353 if s.Required {
354 diags = append(diags, &hcl.Diagnostic{
355 Severity: hcl.DiagError,
356 Summary: fmt.Sprintf("Missing %s block", s.TypeName),
357 Detail: fmt.Sprintf(
358 "A block of type %q is required here.", s.TypeName,
359 ),
360 Subject: &content.MissingItemRange,
361 })
362 }
363 return cty.NullVal(s.Nested.impliedType()), diags
364 }
365
366 if s.Nested == nil {
367 panic("BlockSpec with no Nested Spec")
368 }
369 val, _, childDiags := decode(childBlock.Body, labelsForBlock(childBlock), ctx, s.Nested, false)
370 diags = append(diags, childDiags...)
371 return val, diags
372}
373
374func (s *BlockSpec) impliedType() cty.Type {
375 return s.Nested.impliedType()
376}
377
378func (s *BlockSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range {
379 var childBlock *hcl.Block
380 for _, candidate := range content.Blocks {
381 if candidate.Type != s.TypeName {
382 continue
383 }
384
385 childBlock = candidate
386 break
387 }
388
389 if childBlock == nil {
390 return content.MissingItemRange
391 }
392
393 return sourceRange(childBlock.Body, labelsForBlock(childBlock), s.Nested)
394}
395
396// A BlockListSpec is a Spec that produces a cty list of the results of
397// decoding all of the nested blocks of a given type, using a nested spec.
398type BlockListSpec struct {
399 TypeName string
400 Nested Spec
401 MinItems int
402 MaxItems int
403}
404
405func (s *BlockListSpec) visitSameBodyChildren(cb visitFunc) {
406 // leaf node ("Nested" does not use the same body)
407}
408
409// blockSpec implementation
410func (s *BlockListSpec) blockHeaderSchemata() []hcl.BlockHeaderSchema {
411 return []hcl.BlockHeaderSchema{
412 {
413 Type: s.TypeName,
414 LabelNames: findLabelSpecs(s.Nested),
415 },
416 }
417}
418
419// blockSpec implementation
420func (s *BlockListSpec) nestedSpec() Spec {
421 return s.Nested
422}
423
424// specNeedingVariables implementation
425func (s *BlockListSpec) variablesNeeded(content *hcl.BodyContent) []hcl.Traversal {
426 var ret []hcl.Traversal
427
428 for _, childBlock := range content.Blocks {
429 if childBlock.Type != s.TypeName {
430 continue
431 }
432
433 ret = append(ret, Variables(childBlock.Body, s.Nested)...)
434 }
435
436 return ret
437}
438
439func (s *BlockListSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
440 var diags hcl.Diagnostics
441
442 if s.Nested == nil {
443 panic("BlockListSpec with no Nested Spec")
444 }
445
446 var elems []cty.Value
447 var sourceRanges []hcl.Range
448 for _, childBlock := range content.Blocks {
449 if childBlock.Type != s.TypeName {
450 continue
451 }
452
453 val, _, childDiags := decode(childBlock.Body, labelsForBlock(childBlock), ctx, s.Nested, false)
454 diags = append(diags, childDiags...)
455 elems = append(elems, val)
456 sourceRanges = append(sourceRanges, sourceRange(childBlock.Body, labelsForBlock(childBlock), s.Nested))
457 }
458
459 if len(elems) < s.MinItems {
460 diags = append(diags, &hcl.Diagnostic{
461 Severity: hcl.DiagError,
462 Summary: fmt.Sprintf("Insufficient %s blocks", s.TypeName),
463 Detail: fmt.Sprintf("At least %d %q blocks are required.", s.MinItems, s.TypeName),
464 Subject: &content.MissingItemRange,
465 })
466 } else if s.MaxItems > 0 && len(elems) > s.MaxItems {
467 diags = append(diags, &hcl.Diagnostic{
468 Severity: hcl.DiagError,
469 Summary: fmt.Sprintf("Too many %s blocks", s.TypeName),
470 Detail: fmt.Sprintf("No more than %d %q blocks are allowed.", s.MaxItems, s.TypeName),
471 Subject: &sourceRanges[s.MaxItems],
472 })
473 }
474
475 var ret cty.Value
476
477 if len(elems) == 0 {
478 ret = cty.ListValEmpty(s.Nested.impliedType())
479 } else {
480 ret = cty.ListVal(elems)
481 }
482
483 return ret, diags
484}
485
486func (s *BlockListSpec) impliedType() cty.Type {
487 return cty.List(s.Nested.impliedType())
488}
489
490func (s *BlockListSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range {
491 // We return the source range of the _first_ block of the given type,
492 // since they are not guaranteed to form a contiguous range.
493
494 var childBlock *hcl.Block
495 for _, candidate := range content.Blocks {
496 if candidate.Type != s.TypeName {
497 continue
498 }
499
500 childBlock = candidate
501 break
502 }
503
504 if childBlock == nil {
505 return content.MissingItemRange
506 }
507
508 return sourceRange(childBlock.Body, labelsForBlock(childBlock), s.Nested)
509}
510
511// A BlockSetSpec is a Spec that produces a cty set of the results of
512// decoding all of the nested blocks of a given type, using a nested spec.
513type BlockSetSpec struct {
514 TypeName string
515 Nested Spec
516 MinItems int
517 MaxItems int
518}
519
520func (s *BlockSetSpec) visitSameBodyChildren(cb visitFunc) {
521 // leaf node ("Nested" does not use the same body)
522}
523
524// blockSpec implementation
525func (s *BlockSetSpec) blockHeaderSchemata() []hcl.BlockHeaderSchema {
526 return []hcl.BlockHeaderSchema{
527 {
528 Type: s.TypeName,
529 LabelNames: findLabelSpecs(s.Nested),
530 },
531 }
532}
533
534// blockSpec implementation
535func (s *BlockSetSpec) nestedSpec() Spec {
536 return s.Nested
537}
538
539// specNeedingVariables implementation
540func (s *BlockSetSpec) variablesNeeded(content *hcl.BodyContent) []hcl.Traversal {
541 var ret []hcl.Traversal
542
543 for _, childBlock := range content.Blocks {
544 if childBlock.Type != s.TypeName {
545 continue
546 }
547
548 ret = append(ret, Variables(childBlock.Body, s.Nested)...)
549 }
550
551 return ret
552}
553
554func (s *BlockSetSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
555 var diags hcl.Diagnostics
556
557 if s.Nested == nil {
558 panic("BlockSetSpec with no Nested Spec")
559 }
560
561 var elems []cty.Value
562 var sourceRanges []hcl.Range
563 for _, childBlock := range content.Blocks {
564 if childBlock.Type != s.TypeName {
565 continue
566 }
567
568 val, _, childDiags := decode(childBlock.Body, labelsForBlock(childBlock), ctx, s.Nested, false)
569 diags = append(diags, childDiags...)
570 elems = append(elems, val)
571 sourceRanges = append(sourceRanges, sourceRange(childBlock.Body, labelsForBlock(childBlock), s.Nested))
572 }
573
574 if len(elems) < s.MinItems {
575 diags = append(diags, &hcl.Diagnostic{
576 Severity: hcl.DiagError,
577 Summary: fmt.Sprintf("Insufficient %s blocks", s.TypeName),
578 Detail: fmt.Sprintf("At least %d %q blocks are required.", s.MinItems, s.TypeName),
579 Subject: &content.MissingItemRange,
580 })
581 } else if s.MaxItems > 0 && len(elems) > s.MaxItems {
582 diags = append(diags, &hcl.Diagnostic{
583 Severity: hcl.DiagError,
584 Summary: fmt.Sprintf("Too many %s blocks", s.TypeName),
585 Detail: fmt.Sprintf("No more than %d %q blocks are allowed.", s.MaxItems, s.TypeName),
586 Subject: &sourceRanges[s.MaxItems],
587 })
588 }
589
590 var ret cty.Value
591
592 if len(elems) == 0 {
593 ret = cty.SetValEmpty(s.Nested.impliedType())
594 } else {
595 ret = cty.SetVal(elems)
596 }
597
598 return ret, diags
599}
600
601func (s *BlockSetSpec) impliedType() cty.Type {
602 return cty.Set(s.Nested.impliedType())
603}
604
605func (s *BlockSetSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range {
606 // We return the source range of the _first_ block of the given type,
607 // since they are not guaranteed to form a contiguous range.
608
609 var childBlock *hcl.Block
610 for _, candidate := range content.Blocks {
611 if candidate.Type != s.TypeName {
612 continue
613 }
614
615 childBlock = candidate
616 break
617 }
618
619 if childBlock == nil {
620 return content.MissingItemRange
621 }
622
623 return sourceRange(childBlock.Body, labelsForBlock(childBlock), s.Nested)
624}
625
626// A BlockMapSpec is a Spec that produces a cty map of the results of
627// decoding all of the nested blocks of a given type, using a nested spec.
628//
629// One level of map structure is created for each of the given label names.
630// There must be at least one given label name.
631type BlockMapSpec struct {
632 TypeName string
633 LabelNames []string
634 Nested Spec
635}
636
637func (s *BlockMapSpec) visitSameBodyChildren(cb visitFunc) {
638 // leaf node ("Nested" does not use the same body)
639}
640
641// blockSpec implementation
642func (s *BlockMapSpec) blockHeaderSchemata() []hcl.BlockHeaderSchema {
643 return []hcl.BlockHeaderSchema{
644 {
645 Type: s.TypeName,
646 LabelNames: append(s.LabelNames, findLabelSpecs(s.Nested)...),
647 },
648 }
649}
650
651// blockSpec implementation
652func (s *BlockMapSpec) nestedSpec() Spec {
653 return s.Nested
654}
655
656// specNeedingVariables implementation
657func (s *BlockMapSpec) variablesNeeded(content *hcl.BodyContent) []hcl.Traversal {
658 var ret []hcl.Traversal
659
660 for _, childBlock := range content.Blocks {
661 if childBlock.Type != s.TypeName {
662 continue
663 }
664
665 ret = append(ret, Variables(childBlock.Body, s.Nested)...)
666 }
667
668 return ret
669}
670
671func (s *BlockMapSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
672 var diags hcl.Diagnostics
673
674 if s.Nested == nil {
675 panic("BlockMapSpec with no Nested Spec")
676 }
677
678 elems := map[string]interface{}{}
679 for _, childBlock := range content.Blocks {
680 if childBlock.Type != s.TypeName {
681 continue
682 }
683
684 childLabels := labelsForBlock(childBlock)
685 val, _, childDiags := decode(childBlock.Body, childLabels[len(s.LabelNames):], ctx, s.Nested, false)
686 targetMap := elems
687 for _, key := range childBlock.Labels[:len(s.LabelNames)-1] {
688 if _, exists := targetMap[key]; !exists {
689 targetMap[key] = make(map[string]interface{})
690 }
691 targetMap = targetMap[key].(map[string]interface{})
692 }
693
694 diags = append(diags, childDiags...)
695
696 key := childBlock.Labels[len(s.LabelNames)-1]
697 if _, exists := targetMap[key]; exists {
698 labelsBuf := bytes.Buffer{}
699 for _, label := range childBlock.Labels {
700 fmt.Fprintf(&labelsBuf, " %q", label)
701 }
702 diags = append(diags, &hcl.Diagnostic{
703 Severity: hcl.DiagError,
704 Summary: fmt.Sprintf("Duplicate %s block", s.TypeName),
705 Detail: fmt.Sprintf(
706 "A block for %s%s was already defined. The %s labels must be unique.",
707 s.TypeName, labelsBuf.String(), s.TypeName,
708 ),
709 Subject: &childBlock.DefRange,
710 })
711 continue
712 }
713
714 targetMap[key] = val
715 }
716
717 if len(elems) == 0 {
718 return cty.MapValEmpty(s.Nested.impliedType()), diags
719 }
720
721 var ctyMap func(map[string]interface{}, int) cty.Value
722 ctyMap = func(raw map[string]interface{}, depth int) cty.Value {
723 vals := make(map[string]cty.Value, len(raw))
724 if depth == 1 {
725 for k, v := range raw {
726 vals[k] = v.(cty.Value)
727 }
728 } else {
729 for k, v := range raw {
730 vals[k] = ctyMap(v.(map[string]interface{}), depth-1)
731 }
732 }
733 return cty.MapVal(vals)
734 }
735
736 return ctyMap(elems, len(s.LabelNames)), diags
737}
738
739func (s *BlockMapSpec) impliedType() cty.Type {
740 ret := s.Nested.impliedType()
741 for range s.LabelNames {
742 ret = cty.Map(ret)
743 }
744 return ret
745}
746
747func (s *BlockMapSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range {
748 // We return the source range of the _first_ block of the given type,
749 // since they are not guaranteed to form a contiguous range.
750
751 var childBlock *hcl.Block
752 for _, candidate := range content.Blocks {
753 if candidate.Type != s.TypeName {
754 continue
755 }
756
757 childBlock = candidate
758 break
759 }
760
761 if childBlock == nil {
762 return content.MissingItemRange
763 }
764
765 return sourceRange(childBlock.Body, labelsForBlock(childBlock), s.Nested)
766}
767
768// A BlockLabelSpec is a Spec that returns a cty.String representing the
769// label of the block its given body belongs to, if indeed its given body
770// belongs to a block. It is a programming error to use this in a non-block
771// context, so this spec will panic in that case.
772//
773// This spec only works in the nested spec within a BlockSpec, BlockListSpec,
774// BlockSetSpec or BlockMapSpec.
775//
776// The full set of label specs used against a particular block must have a
777// consecutive set of indices starting at zero. The maximum index found
778// defines how many labels the corresponding blocks must have in HCL source.
779type BlockLabelSpec struct {
780 Index int
781 Name string
782}
783
784func (s *BlockLabelSpec) visitSameBodyChildren(cb visitFunc) {
785 // leaf node
786}
787
788func (s *BlockLabelSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
789 if s.Index >= len(blockLabels) {
790 panic("BlockLabelSpec used in non-block context")
791 }
792
793 return cty.StringVal(blockLabels[s.Index].Value), nil
794}
795
796func (s *BlockLabelSpec) impliedType() cty.Type {
797 return cty.String // labels are always strings
798}
799
800func (s *BlockLabelSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range {
801 if s.Index >= len(blockLabels) {
802 panic("BlockLabelSpec used in non-block context")
803 }
804
805 return blockLabels[s.Index].Range
806}
807
808func findLabelSpecs(spec Spec) []string {
809 maxIdx := -1
810 var names map[int]string
811
812 var visit visitFunc
813 visit = func(s Spec) {
814 if ls, ok := s.(*BlockLabelSpec); ok {
815 if maxIdx < ls.Index {
816 maxIdx = ls.Index
817 }
818 if names == nil {
819 names = make(map[int]string)
820 }
821 names[ls.Index] = ls.Name
822 }
823 s.visitSameBodyChildren(visit)
824 }
825
826 visit(spec)
827
828 if maxIdx < 0 {
829 return nil // no labels at all
830 }
831
832 ret := make([]string, maxIdx+1)
833 for i := range ret {
834 name := names[i]
835 if name == "" {
836 // Should never happen if the spec is conformant, since we require
837 // consecutive indices starting at zero.
838 name = fmt.Sprintf("missing%02d", i)
839 }
840 ret[i] = name
841 }
842
843 return ret
844}
845
846// DefaultSpec is a spec that wraps two specs, evaluating the primary first
847// and then evaluating the default if the primary returns a null value.
848//
849// The two specifications must have the same implied result type for correct
850// operation. If not, the result is undefined.
851type DefaultSpec struct {
852 Primary Spec
853 Default Spec
854}
855
856func (s *DefaultSpec) visitSameBodyChildren(cb visitFunc) {
857 cb(s.Primary)
858 cb(s.Default)
859}
860
861func (s *DefaultSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
862 val, diags := s.Primary.decode(content, blockLabels, ctx)
863 if val.IsNull() {
864 var moreDiags hcl.Diagnostics
865 val, moreDiags = s.Default.decode(content, blockLabels, ctx)
866 diags = append(diags, moreDiags...)
867 }
868 return val, diags
869}
870
871func (s *DefaultSpec) impliedType() cty.Type {
872 return s.Primary.impliedType()
873}
874
875func (s *DefaultSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range {
876 // We can't tell from here which of the two specs will ultimately be used
877 // in our result, so we'll just assume the first. This is usually the right
878 // choice because the default is often a literal spec that doesn't have a
879 // reasonable source range to return anyway.
880 return s.Primary.sourceRange(content, blockLabels)
881}
882
883// TransformExprSpec is a spec that wraps another and then evaluates a given
884// hcl.Expression on the result.
885//
886// The implied type of this spec is determined by evaluating the expression
887// with an unknown value of the nested spec's implied type, which may cause
888// the result to be imprecise. This spec should not be used in situations where
889// precise result type information is needed.
890type TransformExprSpec struct {
891 Wrapped Spec
892 Expr hcl.Expression
893 TransformCtx *hcl.EvalContext
894 VarName string
895}
896
897func (s *TransformExprSpec) visitSameBodyChildren(cb visitFunc) {
898 cb(s.Wrapped)
899}
900
901func (s *TransformExprSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
902 wrappedVal, diags := s.Wrapped.decode(content, blockLabels, ctx)
903 if diags.HasErrors() {
904 // We won't try to run our function in this case, because it'll probably
905 // generate confusing additional errors that will distract from the
906 // root cause.
907 return cty.UnknownVal(s.impliedType()), diags
908 }
909
910 chiCtx := s.TransformCtx.NewChild()
911 chiCtx.Variables = map[string]cty.Value{
912 s.VarName: wrappedVal,
913 }
914 resultVal, resultDiags := s.Expr.Value(chiCtx)
915 diags = append(diags, resultDiags...)
916 return resultVal, diags
917}
918
919func (s *TransformExprSpec) impliedType() cty.Type {
920 wrappedTy := s.Wrapped.impliedType()
921 chiCtx := s.TransformCtx.NewChild()
922 chiCtx.Variables = map[string]cty.Value{
923 s.VarName: cty.UnknownVal(wrappedTy),
924 }
925 resultVal, _ := s.Expr.Value(chiCtx)
926 return resultVal.Type()
927}
928
929func (s *TransformExprSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range {
930 // We'll just pass through our wrapped range here, even though that's
931 // not super-accurate, because there's nothing better to return.
932 return s.Wrapped.sourceRange(content, blockLabels)
933}
934
935// TransformFuncSpec is a spec that wraps another and then evaluates a given
936// cty function with the result. The given function must expect exactly one
937// argument, where the result of the wrapped spec will be passed.
938//
939// The implied type of this spec is determined by type-checking the function
940// with an unknown value of the nested spec's implied type, which may cause
941// the result to be imprecise. This spec should not be used in situations where
942// precise result type information is needed.
943//
944// If the given function produces an error when run, this spec will produce
945// a non-user-actionable diagnostic message. It's the caller's responsibility
946// to ensure that the given function cannot fail for any non-error result
947// of the wrapped spec.
948type TransformFuncSpec struct {
949 Wrapped Spec
950 Func function.Function
951}
952
953func (s *TransformFuncSpec) visitSameBodyChildren(cb visitFunc) {
954 cb(s.Wrapped)
955}
956
957func (s *TransformFuncSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
958 wrappedVal, diags := s.Wrapped.decode(content, blockLabels, ctx)
959 if diags.HasErrors() {
960 // We won't try to run our function in this case, because it'll probably
961 // generate confusing additional errors that will distract from the
962 // root cause.
963 return cty.UnknownVal(s.impliedType()), diags
964 }
965
966 resultVal, err := s.Func.Call([]cty.Value{wrappedVal})
967 if err != nil {
968 // This is not a good example of a diagnostic because it is reporting
969 // a programming error in the calling application, rather than something
970 // an end-user could act on.
971 diags = append(diags, &hcl.Diagnostic{
972 Severity: hcl.DiagError,
973 Summary: "Transform function failed",
974 Detail: fmt.Sprintf("Decoder transform returned an error: %s", err),
975 Subject: s.sourceRange(content, blockLabels).Ptr(),
976 })
977 return cty.UnknownVal(s.impliedType()), diags
978 }
979
980 return resultVal, diags
981}
982
983func (s *TransformFuncSpec) impliedType() cty.Type {
984 wrappedTy := s.Wrapped.impliedType()
985 resultTy, err := s.Func.ReturnType([]cty.Type{wrappedTy})
986 if err != nil {
987 // Should never happen with a correctly-configured spec
988 return cty.DynamicPseudoType
989 }
990
991 return resultTy
992}
993
994func (s *TransformFuncSpec) sourceRange(content *hcl.BodyContent, blockLabels []blockLabel) hcl.Range {
995 // We'll just pass through our wrapped range here, even though that's
996 // not super-accurate, because there's nothing better to return.
997 return s.Wrapped.sourceRange(content, blockLabels)
998}
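The spec types above compose freely; for example, DefaultSpec lets an optional attribute fall back to a literal. A minimal sketch, assuming a hypothetical "port" attribute and an empty configuration body:

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcldec"
	"github.com/hashicorp/hcl2/hclparse"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	spec := hcldec.ObjectSpec{
		"port": &hcldec.DefaultSpec{
			Primary: &hcldec.AttrSpec{Name: "port", Type: cty.Number},
			Default: &hcldec.LiteralSpec{Value: cty.NumberIntVal(8080)},
		},
	}
	fmt.Println(hcldec.ImpliedType(spec).FriendlyName()) // object

	parser := hclparse.NewParser()
	f, _ := parser.ParseHCL([]byte(""), "empty.hcl") // no "port" attribute at all

	val, diags := hcldec.Decode(f.Body, spec, nil)
	if diags.HasErrors() {
		fmt.Println(diags.Error())
		return
	}
	port, _ := val.GetAttr("port").AsBigFloat().Int64()
	fmt.Println(port) // 8080, taken from the DefaultSpec
}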
diff --git a/vendor/github.com/hashicorp/hcl2/hcldec/variables.go b/vendor/github.com/hashicorp/hcl2/hcldec/variables.go
new file mode 100644
index 0000000..427b0d0
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hcldec/variables.go
@@ -0,0 +1,34 @@
1package hcldec
2
3import (
4 "github.com/hashicorp/hcl2/hcl"
5)
6
7// Variables processes the given body with the given spec and returns a
8// list of the variable traversals that would be required to decode
9// the same pairing of body and spec.
10//
11// This can be used to conditionally populate the variables in the EvalContext
12// passed to Decode, for applications where a static scope is insufficient.
13//
14// If the given body is not compliant with the given schema, the result may
15// be incomplete, but that's assumed to be okay because the eventual call
16// to Decode will produce error diagnostics anyway.
17func Variables(body hcl.Body, spec Spec) []hcl.Traversal {
18 schema := ImpliedSchema(spec)
19
20 content, _, _ := body.PartialContent(schema)
21
22 var vars []hcl.Traversal
23
24 if vs, ok := spec.(specNeedingVariables); ok {
25 vars = append(vars, vs.variablesNeeded(content)...)
26 }
27 spec.visitSameBodyChildren(func(s Spec) {
28 if vs, ok := s.(specNeedingVariables); ok {
29 vars = append(vars, vs.variablesNeeded(content)...)
30 }
31 })
32
33 return vars
34}
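Variables is typically used to discover which root symbols a configuration refers to, so that only those need to be resolved before calling Decode. A minimal sketch with a hypothetical "name" variable:

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcldec"
	"github.com/hashicorp/hcl2/hclparse"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	parser := hclparse.NewParser()
	f, diags := parser.ParseHCL([]byte("greeting = \"hello, ${name}\"\n"), "example.hcl")
	if diags.HasErrors() {
		fmt.Println(diags.Error())
		return
	}

	spec := hcldec.ObjectSpec{
		"greeting": &hcldec.AttrSpec{Name: "greeting", Type: cty.String, Required: true},
	}

	// Build a scope containing only the variables the body actually needs.
	scope := map[string]cty.Value{}
	for _, traversal := range hcldec.Variables(f.Body, spec) {
		if traversal.RootName() == "name" { // hypothetical resolution rule
			scope["name"] = cty.StringVal("world")
		}
	}

	val, decDiags := hcldec.Decode(f.Body, spec, &hcl.EvalContext{Variables: scope})
	if decDiags.HasErrors() {
		fmt.Println(decDiags.Error())
		return
	}
	fmt.Println(val.GetAttr("greeting").AsString()) // hello, world
}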
diff --git a/vendor/github.com/hashicorp/hcl2/hclparse/parser.go b/vendor/github.com/hashicorp/hcl2/hclparse/parser.go
new file mode 100644
index 0000000..6d47f12
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl2/hclparse/parser.go
@@ -0,0 +1,123 @@
1package hclparse
2
3import (
4 "fmt"
5 "io/ioutil"
6
7 "github.com/hashicorp/hcl2/hcl"
8 "github.com/hashicorp/hcl2/hcl/hclsyntax"
9 "github.com/hashicorp/hcl2/hcl/json"
10)
11
12// NOTE: This is the public interface for parsing. The actual parsers are
13// in other packages alongside this one, with this package just wrapping them
14// to provide a unified interface for the caller across all supported formats.
15
16// Parser is the main interface for parsing configuration files. As well as
17// parsing files, a parser also retains a registry of all of the files it
18// has parsed so that multiple attempts to parse the same file will return
19// the same object and so the collected files can be used when printing
20// diagnostics.
21//
22// Any diagnostics for parsing a file are only returned once on the first
23// call to parse that file. Callers are expected to collect up diagnostics
24// and present them together, so returning diagnostics for the same file
25// multiple times would create a confusing result.
26type Parser struct {
27 files map[string]*hcl.File
28}
29
30// NewParser creates a new parser, ready to parse configuration files.
31func NewParser() *Parser {
32 return &Parser{
33 files: map[string]*hcl.File{},
34 }
35}
36
37// ParseHCL parses the given buffer (which is assumed to have been loaded from
38// the given filename) as a native-syntax configuration file and returns the
39// hcl.File object representing it.
40func (p *Parser) ParseHCL(src []byte, filename string) (*hcl.File, hcl.Diagnostics) {
41 if existing := p.files[filename]; existing != nil {
42 return existing, nil
43 }
44
45 file, diags := hclsyntax.ParseConfig(src, filename, hcl.Pos{Byte: 0, Line: 1, Column: 1})
46 p.files[filename] = file
47 return file, diags
48}
49
50// ParseHCLFile reads the given filename and parses it as a native-syntax HCL
51// configuration file. An error diagnostic is returned if the given file
52// cannot be read.
53func (p *Parser) ParseHCLFile(filename string) (*hcl.File, hcl.Diagnostics) {
54 if existing := p.files[filename]; existing != nil {
55 return existing, nil
56 }
57
58 src, err := ioutil.ReadFile(filename)
59 if err != nil {
60 return nil, hcl.Diagnostics{
61 {
62 Severity: hcl.DiagError,
63 Summary: "Failed to read file",
64 Detail: fmt.Sprintf("The configuration file %q could not be read.", filename),
65 },
66 }
67 }
68
69 return p.ParseHCL(src, filename)
70}
71
72// ParseJSON parses the given JSON buffer (which is assumed to have been loaded
73// from the given filename) and returns the hcl.File object representing it.
74func (p *Parser) ParseJSON(src []byte, filename string) (*hcl.File, hcl.Diagnostics) {
75 if existing := p.files[filename]; existing != nil {
76 return existing, nil
77 }
78
79 file, diags := json.Parse(src, filename)
80 p.files[filename] = file
81 return file, diags
82}
83
84// ParseJSONFile reads the given filename and parses it as JSON, similarly to
85// ParseJSON. An error diagnostic is returned if the given file cannot be read.
86func (p *Parser) ParseJSONFile(filename string) (*hcl.File, hcl.Diagnostics) {
87 if existing := p.files[filename]; existing != nil {
88 return existing, nil
89 }
90
91 file, diags := json.ParseFile(filename)
92 p.files[filename] = file
93 return file, diags
94}
95
96// AddFile allows a caller to record in a parser a file that was parsed some
97// other way, thus allowing it to be included in the registry of sources.
98func (p *Parser) AddFile(filename string, file *hcl.File) {
99 p.files[filename] = file
100}
101
102// Sources returns a map from filenames to the raw source code that was
103// read from them. This is intended to be used, for example, to print
104// diagnostics with contextual information.
105//
106// The arrays underlying the returned slices should not be modified.
107func (p *Parser) Sources() map[string][]byte {
108 ret := make(map[string][]byte)
109 for fn, f := range p.files {
110 ret[fn] = f.Bytes
111 }
112 return ret
113}
114
115// Files returns a map from filenames to the File objects produced from them.
116// This is intended to be used, for example, to print diagnostics with
117// contextual information.
118//
119// The returned map and all of the objects it refers to directly or indirectly
120// must not be modified.
121func (p *Parser) Files() map[string]*hcl.File {
122 return p.files
123}
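
A brief usage sketch (illustrative, not part of the vendored file), assuming it sits in the same package as the Parser above and that "os" is also imported; "main.tf" is a made-up filename. Pairing the diagnostics with Files() lets hcl's text writer quote the offending source lines:

func exampleParse() {
	p := NewParser()
	f, diags := p.ParseHCLFile("main.tf")
	if diags.HasErrors() {
		// Files() supplies the source registry so diagnostics show context.
		wr := hcl.NewDiagnosticTextWriter(os.Stderr, p.Files(), 78, false)
		wr.WriteDiagnostics(diags)
		return
	}
	_ = f // the parsed file is now also cached for later calls
}
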
diff --git a/vendor/github.com/hashicorp/hil/scanner/scanner.go b/vendor/github.com/hashicorp/hil/scanner/scanner.go
index bab86c6..86085de 100644
--- a/vendor/github.com/hashicorp/hil/scanner/scanner.go
+++ b/vendor/github.com/hashicorp/hil/scanner/scanner.go
@@ -395,6 +395,12 @@ func scanLiteral(s string, startPos ast.Pos, nested bool) (string, *Token) {
395 pos.Column = pos.Column + 2 395 pos.Column = pos.Column + 2
396 litLen = litLen + 2 396 litLen = litLen + 2
397 continue 397 continue
398 } else if follow == '\\' {
399 // \\ escapes \
400 // so we will consume both characters here.
401 pos.Column = pos.Column + 2
402 litLen = litLen + 2
403 continue
398 } 404 }
399 } 405 }
400 } 406 }
diff --git a/vendor/github.com/hashicorp/terraform/config/append.go b/vendor/github.com/hashicorp/terraform/config/append.go
index 5f4e89e..9d80c42 100644
--- a/vendor/github.com/hashicorp/terraform/config/append.go
+++ b/vendor/github.com/hashicorp/terraform/config/append.go
@@ -82,5 +82,11 @@ func Append(c1, c2 *Config) (*Config, error) {
82 c.Variables = append(c.Variables, c2.Variables...) 82 c.Variables = append(c.Variables, c2.Variables...)
83 } 83 }
84 84
85 if len(c1.Locals) > 0 || len(c2.Locals) > 0 {
86 c.Locals = make([]*Local, 0, len(c1.Locals)+len(c2.Locals))
87 c.Locals = append(c.Locals, c1.Locals...)
88 c.Locals = append(c.Locals, c2.Locals...)
89 }
90
85 return c, nil 91 return c, nil
86} 92}
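
A hedged sketch of the behaviour added here: locals from both configurations survive the merge (Config, Local, and Append come from this package; the local names are invented, and fmt would need importing):

c1 := &Config{Locals: []*Local{{Name: "a"}}}
c2 := &Config{Locals: []*Local{{Name: "b"}}}
if merged, err := Append(c1, c2); err == nil {
	fmt.Println(len(merged.Locals)) // 2
}
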
diff --git a/vendor/github.com/hashicorp/terraform/config/config.go b/vendor/github.com/hashicorp/terraform/config/config.go
index 3f756dc..1772fd7 100644
--- a/vendor/github.com/hashicorp/terraform/config/config.go
+++ b/vendor/github.com/hashicorp/terraform/config/config.go
@@ -8,11 +8,11 @@ import (
8 "strconv" 8 "strconv"
9 "strings" 9 "strings"
10 10
11 "github.com/hashicorp/go-multierror" 11 hcl2 "github.com/hashicorp/hcl2/hcl"
12 "github.com/hashicorp/hil"
13 "github.com/hashicorp/hil/ast" 12 "github.com/hashicorp/hil/ast"
14 "github.com/hashicorp/terraform/helper/hilmapstructure" 13 "github.com/hashicorp/terraform/helper/hilmapstructure"
15 "github.com/hashicorp/terraform/plugin/discovery" 14 "github.com/hashicorp/terraform/plugin/discovery"
15 "github.com/hashicorp/terraform/tfdiags"
16 "github.com/mitchellh/reflectwalk" 16 "github.com/mitchellh/reflectwalk"
17) 17)
18 18
@@ -34,6 +34,7 @@ type Config struct {
34 ProviderConfigs []*ProviderConfig 34 ProviderConfigs []*ProviderConfig
35 Resources []*Resource 35 Resources []*Resource
36 Variables []*Variable 36 Variables []*Variable
37 Locals []*Local
37 Outputs []*Output 38 Outputs []*Output
38 39
39 // The fields below can be filled in by loaders for validation 40 // The fields below can be filled in by loaders for validation
@@ -55,6 +56,8 @@ type AtlasConfig struct {
55type Module struct { 56type Module struct {
56 Name string 57 Name string
57 Source string 58 Source string
59 Version string
60 Providers map[string]string
58 RawConfig *RawConfig 61 RawConfig *RawConfig
59} 62}
60 63
@@ -147,7 +150,7 @@ func (p *Provisioner) Copy() *Provisioner {
147 } 150 }
148} 151}
149 152
150// Variable is a variable defined within the configuration. 153// Variable is a module argument defined within the configuration.
151type Variable struct { 154type Variable struct {
152 Name string 155 Name string
153 DeclaredType string `mapstructure:"type"` 156 DeclaredType string `mapstructure:"type"`
@@ -155,6 +158,12 @@ type Variable struct {
155 Description string 158 Description string
156} 159}
157 160
161// Local is a local value defined within the configuration.
162type Local struct {
163 Name string
164 RawConfig *RawConfig
165}
166
158// Output is an output defined within the configuration. An output is 167// Output is an output defined within the configuration. An output is
159// resulting data that is highlighted by Terraform when finished. An 168// resulting data that is highlighted by Terraform when finished. An
160// output marked Sensitive will be output in a masked form following 169// output marked Sensitive will be output in a masked form following
@@ -222,7 +231,10 @@ func (r *Resource) Count() (int, error) {
222 231
223 v, err := strconv.ParseInt(count, 0, 0) 232 v, err := strconv.ParseInt(count, 0, 0)
224 if err != nil { 233 if err != nil {
225 return 0, err 234 return 0, fmt.Errorf(
235 "cannot parse %q as an integer",
236 count,
237 )
226 } 238 }
227 239
228 return int(v), nil 240 return int(v), nil
@@ -253,7 +265,9 @@ func (r *Resource) ProviderFullName() string {
253// the provider name is inferred from the resource type name. 265// the provider name is inferred from the resource type name.
254func ResourceProviderFullName(resourceType, explicitProvider string) string { 266func ResourceProviderFullName(resourceType, explicitProvider string) string {
255 if explicitProvider != "" { 267 if explicitProvider != "" {
256 return explicitProvider 268 // check for an explicit provider name, or return the original
269 parts := strings.SplitAfter(explicitProvider, "provider.")
270 return parts[len(parts)-1]
257 } 271 }
258 272
259 idx := strings.IndexRune(resourceType, '_') 273 idx := strings.IndexRune(resourceType, '_')
@@ -268,30 +282,35 @@ func ResourceProviderFullName(resourceType, explicitProvider string) string {
268} 282}
269 283
270// Validate does some basic semantic checking of the configuration. 284// Validate does some basic semantic checking of the configuration.
271func (c *Config) Validate() error { 285func (c *Config) Validate() tfdiags.Diagnostics {
272 if c == nil { 286 if c == nil {
273 return nil 287 return nil
274 } 288 }
275 289
276 var errs []error 290 var diags tfdiags.Diagnostics
277 291
278 for _, k := range c.unknownKeys { 292 for _, k := range c.unknownKeys {
279 errs = append(errs, fmt.Errorf( 293 diags = diags.Append(
280 "Unknown root level key: %s", k)) 294 fmt.Errorf("Unknown root level key: %s", k),
295 )
281 } 296 }
282 297
283 // Validate the Terraform config 298 // Validate the Terraform config
284 if tf := c.Terraform; tf != nil { 299 if tf := c.Terraform; tf != nil {
285 errs = append(errs, c.Terraform.Validate()...) 300 errs := c.Terraform.Validate()
301 for _, err := range errs {
302 diags = diags.Append(err)
303 }
286 } 304 }
287 305
288 vars := c.InterpolatedVariables() 306 vars := c.InterpolatedVariables()
289 varMap := make(map[string]*Variable) 307 varMap := make(map[string]*Variable)
290 for _, v := range c.Variables { 308 for _, v := range c.Variables {
291 if _, ok := varMap[v.Name]; ok { 309 if _, ok := varMap[v.Name]; ok {
292 errs = append(errs, fmt.Errorf( 310 diags = diags.Append(fmt.Errorf(
293 "Variable '%s': duplicate found. Variable names must be unique.", 311 "Variable '%s': duplicate found. Variable names must be unique.",
294 v.Name)) 312 v.Name,
313 ))
295 } 314 }
296 315
297 varMap[v.Name] = v 316 varMap[v.Name] = v
@@ -299,17 +318,19 @@ func (c *Config) Validate() error {
299 318
300 for k, _ := range varMap { 319 for k, _ := range varMap {
301 if !NameRegexp.MatchString(k) { 320 if !NameRegexp.MatchString(k) {
302 errs = append(errs, fmt.Errorf( 321 diags = diags.Append(fmt.Errorf(
303 "variable %q: variable name must match regular expresion %s", 322 "variable %q: variable name must match regular expression %s",
304 k, NameRegexp)) 323 k, NameRegexp,
324 ))
305 } 325 }
306 } 326 }
307 327
308 for _, v := range c.Variables { 328 for _, v := range c.Variables {
309 if v.Type() == VariableTypeUnknown { 329 if v.Type() == VariableTypeUnknown {
310 errs = append(errs, fmt.Errorf( 330 diags = diags.Append(fmt.Errorf(
311 "Variable '%s': must be a string or a map", 331 "Variable '%s': must be a string or a map",
312 v.Name)) 332 v.Name,
333 ))
313 continue 334 continue
314 } 335 }
315 336
@@ -330,9 +351,10 @@ func (c *Config) Validate() error {
330 if v.Default != nil { 351 if v.Default != nil {
331 if err := reflectwalk.Walk(v.Default, w); err == nil { 352 if err := reflectwalk.Walk(v.Default, w); err == nil {
332 if interp { 353 if interp {
333 errs = append(errs, fmt.Errorf( 354 diags = diags.Append(fmt.Errorf(
334 "Variable '%s': cannot contain interpolations", 355 "variable %q: default may not contain interpolations",
335 v.Name)) 356 v.Name,
357 ))
336 } 358 }
337 } 359 }
338 } 360 }
@@ -348,10 +370,11 @@ func (c *Config) Validate() error {
348 } 370 }
349 371
350 if _, ok := varMap[uv.Name]; !ok { 372 if _, ok := varMap[uv.Name]; !ok {
351 errs = append(errs, fmt.Errorf( 373 diags = diags.Append(fmt.Errorf(
352 "%s: unknown variable referenced: '%s'. define it with 'variable' blocks", 374 "%s: unknown variable referenced: '%s'; define it with a 'variable' block",
353 source, 375 source,
354 uv.Name)) 376 uv.Name,
377 ))
355 } 378 }
356 } 379 }
357 } 380 }
@@ -362,17 +385,19 @@ func (c *Config) Validate() error {
362 switch v := rawV.(type) { 385 switch v := rawV.(type) {
363 case *CountVariable: 386 case *CountVariable:
364 if v.Type == CountValueInvalid { 387 if v.Type == CountValueInvalid {
365 errs = append(errs, fmt.Errorf( 388 diags = diags.Append(fmt.Errorf(
366 "%s: invalid count variable: %s", 389 "%s: invalid count variable: %s",
367 source, 390 source,
368 v.FullKey())) 391 v.FullKey(),
392 ))
369 } 393 }
370 case *PathVariable: 394 case *PathVariable:
371 if v.Type == PathValueInvalid { 395 if v.Type == PathValueInvalid {
372 errs = append(errs, fmt.Errorf( 396 diags = diags.Append(fmt.Errorf(
373 "%s: invalid path variable: %s", 397 "%s: invalid path variable: %s",
374 source, 398 source,
375 v.FullKey())) 399 v.FullKey(),
400 ))
376 } 401 }
377 } 402 }
378 } 403 }
@@ -380,27 +405,35 @@ func (c *Config) Validate() error {
380 405
381 // Check that providers aren't declared multiple times and that their 406 // Check that providers aren't declared multiple times and that their
382 // version constraints, where present, are syntactically valid. 407 // version constraints, where present, are syntactically valid.
383 providerSet := make(map[string]struct{}) 408 providerSet := make(map[string]bool)
384 for _, p := range c.ProviderConfigs { 409 for _, p := range c.ProviderConfigs {
385 name := p.FullName() 410 name := p.FullName()
386 if _, ok := providerSet[name]; ok { 411 if _, ok := providerSet[name]; ok {
387 errs = append(errs, fmt.Errorf( 412 diags = diags.Append(fmt.Errorf(
388 "provider.%s: declared multiple times, you can only declare a provider once", 413 "provider.%s: multiple configurations present; only one configuration is allowed per provider",
389 name)) 414 name,
415 ))
390 continue 416 continue
391 } 417 }
392 418
393 if p.Version != "" { 419 if p.Version != "" {
394 _, err := discovery.ConstraintStr(p.Version).Parse() 420 _, err := discovery.ConstraintStr(p.Version).Parse()
395 if err != nil { 421 if err != nil {
396 errs = append(errs, fmt.Errorf( 422 diags = diags.Append(&hcl2.Diagnostic{
397 "provider.%s: invalid version constraint %q: %s", 423 Severity: hcl2.DiagError,
398 name, p.Version, err, 424 Summary: "Invalid provider version constraint",
399 )) 425 Detail: fmt.Sprintf(
426 "The value %q given for provider.%s is not a valid version constraint.",
427 p.Version, name,
428 ),
429 // TODO: include a "Subject" source reference in here,
430 // once the config loader is able to retain source
431 // location information.
432 })
400 } 433 }
401 } 434 }
402 435
403 providerSet[name] = struct{}{} 436 providerSet[name] = true
404 } 437 }
405 438
406 // Check that all references to modules are valid 439 // Check that all references to modules are valid
@@ -412,9 +445,10 @@ func (c *Config) Validate() error {
412 if _, ok := dupped[m.Id()]; !ok { 445 if _, ok := dupped[m.Id()]; !ok {
413 dupped[m.Id()] = struct{}{} 446 dupped[m.Id()] = struct{}{}
414 447
415 errs = append(errs, fmt.Errorf( 448 diags = diags.Append(fmt.Errorf(
416 "%s: module repeated multiple times", 449 "module %q: module repeated multiple times",
417 m.Id())) 450 m.Id(),
451 ))
418 } 452 }
419 453
420 // Already seen this module, just skip it 454 // Already seen this module, just skip it
@@ -428,21 +462,23 @@ func (c *Config) Validate() error {
428 "root": m.Source, 462 "root": m.Source,
429 }) 463 })
430 if err != nil { 464 if err != nil {
431 errs = append(errs, fmt.Errorf( 465 diags = diags.Append(fmt.Errorf(
432 "%s: module source error: %s", 466 "module %q: module source error: %s",
433 m.Id(), err)) 467 m.Id(), err,
468 ))
434 } else if len(rc.Interpolations) > 0 { 469 } else if len(rc.Interpolations) > 0 {
435 errs = append(errs, fmt.Errorf( 470 diags = diags.Append(fmt.Errorf(
436 "%s: module source cannot contain interpolations", 471 "module %q: module source cannot contain interpolations",
437 m.Id())) 472 m.Id(),
473 ))
438 } 474 }
439 475
440 // Check that the name matches our regexp 476 // Check that the name matches our regexp
441 if !NameRegexp.Match([]byte(m.Name)) { 477 if !NameRegexp.Match([]byte(m.Name)) {
442 errs = append(errs, fmt.Errorf( 478 diags = diags.Append(fmt.Errorf(
443 "%s: module name can only contain letters, numbers, "+ 479 "module %q: module name must be a letter or underscore followed by only letters, numbers, dashes, and underscores",
444 "dashes, and underscores", 480 m.Id(),
445 m.Id())) 481 ))
446 } 482 }
447 483
448 // Check that the configuration can all be strings, lists or maps 484 // Check that the configuration can all be strings, lists or maps
@@ -466,30 +502,47 @@ func (c *Config) Validate() error {
466 continue 502 continue
467 } 503 }
468 504
469 errs = append(errs, fmt.Errorf( 505 diags = diags.Append(fmt.Errorf(
470 "%s: variable %s must be a string, list or map value", 506 "module %q: argument %s must have a string, list, or map value",
471 m.Id(), k)) 507 m.Id(), k,
508 ))
472 } 509 }
473 510
474 // Check for invalid count variables 511 // Check for invalid count variables
475 for _, v := range m.RawConfig.Variables { 512 for _, v := range m.RawConfig.Variables {
476 switch v.(type) { 513 switch v.(type) {
477 case *CountVariable: 514 case *CountVariable:
478 errs = append(errs, fmt.Errorf( 515 diags = diags.Append(fmt.Errorf(
479 "%s: count variables are only valid within resources", m.Name)) 516 "module %q: count variables are only valid within resources",
517 m.Name,
518 ))
480 case *SelfVariable: 519 case *SelfVariable:
481 errs = append(errs, fmt.Errorf( 520 diags = diags.Append(fmt.Errorf(
482 "%s: self variables are only valid within resources", m.Name)) 521 "module %q: self variables are only valid within resources",
522 m.Name,
523 ))
483 } 524 }
484 } 525 }
485 526
486 // Update the raw configuration to only contain the string values 527 // Update the raw configuration to only contain the string values
487 m.RawConfig, err = NewRawConfig(raw) 528 m.RawConfig, err = NewRawConfig(raw)
488 if err != nil { 529 if err != nil {
489 errs = append(errs, fmt.Errorf( 530 diags = diags.Append(fmt.Errorf(
490 "%s: can't initialize configuration: %s", 531 "%s: can't initialize configuration: %s",
491 m.Id(), err)) 532 m.Id(), err,
533 ))
492 } 534 }
535
536 // check that all named providers actually exist
537 for _, p := range m.Providers {
538 if !providerSet[p] {
539 diags = diags.Append(fmt.Errorf(
540 "module %q: cannot pass non-existent provider %q",
541 m.Name, p,
542 ))
543 }
544 }
545
493 } 546 }
494 dupped = nil 547 dupped = nil
495 548
@@ -503,10 +556,10 @@ func (c *Config) Validate() error {
503 } 556 }
504 557
505 if _, ok := modules[mv.Name]; !ok { 558 if _, ok := modules[mv.Name]; !ok {
506 errs = append(errs, fmt.Errorf( 559 diags = diags.Append(fmt.Errorf(
507 "%s: unknown module referenced: %s", 560 "%s: unknown module referenced: %s",
508 source, 561 source, mv.Name,
509 mv.Name)) 562 ))
510 } 563 }
511 } 564 }
512 } 565 }
@@ -519,9 +572,10 @@ func (c *Config) Validate() error {
519 if _, ok := dupped[r.Id()]; !ok { 572 if _, ok := dupped[r.Id()]; !ok {
520 dupped[r.Id()] = struct{}{} 573 dupped[r.Id()] = struct{}{}
521 574
522 errs = append(errs, fmt.Errorf( 575 diags = diags.Append(fmt.Errorf(
523 "%s: resource repeated multiple times", 576 "%s: resource repeated multiple times",
524 r.Id())) 577 r.Id(),
578 ))
525 } 579 }
526 } 580 }
527 581
@@ -535,53 +589,42 @@ func (c *Config) Validate() error {
535 for _, v := range r.RawCount.Variables { 589 for _, v := range r.RawCount.Variables {
536 switch v.(type) { 590 switch v.(type) {
537 case *CountVariable: 591 case *CountVariable:
538 errs = append(errs, fmt.Errorf( 592 diags = diags.Append(fmt.Errorf(
539 "%s: resource count can't reference count variable: %s", 593 "%s: resource count can't reference count variable: %s",
540 n, 594 n, v.FullKey(),
541 v.FullKey())) 595 ))
542 case *SimpleVariable: 596 case *SimpleVariable:
543 errs = append(errs, fmt.Errorf( 597 diags = diags.Append(fmt.Errorf(
544 "%s: resource count can't reference variable: %s", 598 "%s: resource count can't reference variable: %s",
545 n, 599 n, v.FullKey(),
546 v.FullKey())) 600 ))
547 601
548 // Good 602 // Good
549 case *ModuleVariable: 603 case *ModuleVariable:
550 case *ResourceVariable: 604 case *ResourceVariable:
551 case *TerraformVariable: 605 case *TerraformVariable:
552 case *UserVariable: 606 case *UserVariable:
607 case *LocalVariable:
553 608
554 default: 609 default:
555 errs = append(errs, fmt.Errorf( 610 diags = diags.Append(fmt.Errorf(
556 "Internal error. Unknown type in count var in %s: %T", 611 "Internal error. Unknown type in count var in %s: %T",
557 n, v)) 612 n, v,
613 ))
558 } 614 }
559 } 615 }
560 616
561 // Interpolate with a fixed number to verify that its a number. 617 if !r.RawCount.couldBeInteger() {
562 r.RawCount.interpolate(func(root ast.Node) (interface{}, error) { 618 diags = diags.Append(fmt.Errorf(
563 // Execute the node but transform the AST so that it returns 619 "%s: resource count must be an integer", n,
564 // a fixed value of "5" for all interpolations. 620 ))
565 result, err := hil.Eval(
566 hil.FixedValueTransform(
567 root, &ast.LiteralNode{Value: "5", Typex: ast.TypeString}),
568 nil)
569 if err != nil {
570 return "", err
571 }
572
573 return result.Value, nil
574 })
575 _, err := strconv.ParseInt(r.RawCount.Value().(string), 0, 0)
576 if err != nil {
577 errs = append(errs, fmt.Errorf(
578 "%s: resource count must be an integer",
579 n))
580 } 621 }
581 r.RawCount.init() 622 r.RawCount.init()
582 623
583 // Validate DependsOn 624 // Validate DependsOn
584 errs = append(errs, c.validateDependsOn(n, r.DependsOn, resources, modules)...) 625 for _, err := range c.validateDependsOn(n, r.DependsOn, resources, modules) {
626 diags = diags.Append(err)
627 }
585 628
586 // Verify provisioners 629 // Verify provisioners
587 for _, p := range r.Provisioners { 630 for _, p := range r.Provisioners {
@@ -595,9 +638,10 @@ func (c *Config) Validate() error {
595 } 638 }
596 639
597 if rv.Multi && rv.Index == -1 && rv.Type == r.Type && rv.Name == r.Name { 640 if rv.Multi && rv.Index == -1 && rv.Type == r.Type && rv.Name == r.Name {
598 errs = append(errs, fmt.Errorf( 641 diags = diags.Append(fmt.Errorf(
599 "%s: connection info cannot contain splat variable "+ 642 "%s: connection info cannot contain splat variable referencing itself",
600 "referencing itself", n)) 643 n,
644 ))
601 break 645 break
602 } 646 }
603 } 647 }
@@ -609,9 +653,10 @@ func (c *Config) Validate() error {
609 } 653 }
610 654
611 if rv.Multi && rv.Index == -1 && rv.Type == r.Type && rv.Name == r.Name { 655 if rv.Multi && rv.Index == -1 && rv.Type == r.Type && rv.Name == r.Name {
612 errs = append(errs, fmt.Errorf( 656 diags = diags.Append(fmt.Errorf(
613 "%s: connection info cannot contain splat variable "+ 657 "%s: connection info cannot contain splat variable referencing itself",
614 "referencing itself", n)) 658 n,
659 ))
615 break 660 break
616 } 661 }
617 } 662 }
@@ -619,21 +664,24 @@ func (c *Config) Validate() error {
619 // Check for invalid when/onFailure values, though this should be 664 // Check for invalid when/onFailure values, though this should be
620 // picked up by the loader we check here just in case. 665 // picked up by the loader we check here just in case.
621 if p.When == ProvisionerWhenInvalid { 666 if p.When == ProvisionerWhenInvalid {
622 errs = append(errs, fmt.Errorf( 667 diags = diags.Append(fmt.Errorf(
623 "%s: provisioner 'when' value is invalid", n)) 668 "%s: provisioner 'when' value is invalid", n,
669 ))
624 } 670 }
625 if p.OnFailure == ProvisionerOnFailureInvalid { 671 if p.OnFailure == ProvisionerOnFailureInvalid {
626 errs = append(errs, fmt.Errorf( 672 diags = diags.Append(fmt.Errorf(
627 "%s: provisioner 'on_failure' value is invalid", n)) 673 "%s: provisioner 'on_failure' value is invalid", n,
674 ))
628 } 675 }
629 } 676 }
630 677
631 // Verify ignore_changes contains valid entries 678 // Verify ignore_changes contains valid entries
632 for _, v := range r.Lifecycle.IgnoreChanges { 679 for _, v := range r.Lifecycle.IgnoreChanges {
633 if strings.Contains(v, "*") && v != "*" { 680 if strings.Contains(v, "*") && v != "*" {
634 errs = append(errs, fmt.Errorf( 681 diags = diags.Append(fmt.Errorf(
635 "%s: ignore_changes does not support using a partial string "+ 682 "%s: ignore_changes does not support using a partial string together with a wildcard: %s",
636 "together with a wildcard: %s", n, v)) 683 n, v,
684 ))
637 } 685 }
638 } 686 }
639 687
@@ -642,21 +690,24 @@ func (c *Config) Validate() error {
642 "root": r.Lifecycle.IgnoreChanges, 690 "root": r.Lifecycle.IgnoreChanges,
643 }) 691 })
644 if err != nil { 692 if err != nil {
645 errs = append(errs, fmt.Errorf( 693 diags = diags.Append(fmt.Errorf(
646 "%s: lifecycle ignore_changes error: %s", 694 "%s: lifecycle ignore_changes error: %s",
647 n, err)) 695 n, err,
696 ))
648 } else if len(rc.Interpolations) > 0 { 697 } else if len(rc.Interpolations) > 0 {
649 errs = append(errs, fmt.Errorf( 698 diags = diags.Append(fmt.Errorf(
650 "%s: lifecycle ignore_changes cannot contain interpolations", 699 "%s: lifecycle ignore_changes cannot contain interpolations",
651 n)) 700 n,
701 ))
652 } 702 }
653 703
654 // If it is a data source then it can't have provisioners 704 // If it is a data source then it can't have provisioners
655 if r.Mode == DataResourceMode { 705 if r.Mode == DataResourceMode {
656 if _, ok := r.RawConfig.Raw["provisioner"]; ok { 706 if _, ok := r.RawConfig.Raw["provisioner"]; ok {
657 errs = append(errs, fmt.Errorf( 707 diags = diags.Append(fmt.Errorf(
658 "%s: data sources cannot have provisioners", 708 "%s: data sources cannot have provisioners",
659 n)) 709 n,
710 ))
660 } 711 }
661 } 712 }
662 } 713 }
@@ -670,25 +721,50 @@ func (c *Config) Validate() error {
670 721
671 id := rv.ResourceId() 722 id := rv.ResourceId()
672 if _, ok := resources[id]; !ok { 723 if _, ok := resources[id]; !ok {
673 errs = append(errs, fmt.Errorf( 724 diags = diags.Append(fmt.Errorf(
674 "%s: unknown resource '%s' referenced in variable %s", 725 "%s: unknown resource '%s' referenced in variable %s",
675 source, 726 source,
676 id, 727 id,
677 rv.FullKey())) 728 rv.FullKey(),
729 ))
678 continue 730 continue
679 } 731 }
680 } 732 }
681 } 733 }
682 734
735 // Check that all locals are valid
736 {
737 found := make(map[string]struct{})
738 for _, l := range c.Locals {
739 if _, ok := found[l.Name]; ok {
740 diags = diags.Append(fmt.Errorf(
741 "%s: duplicate local. local value names must be unique",
742 l.Name,
743 ))
744 continue
745 }
746 found[l.Name] = struct{}{}
747
748 for _, v := range l.RawConfig.Variables {
749 if _, ok := v.(*CountVariable); ok {
750 diags = diags.Append(fmt.Errorf(
751 "local %s: count variables are only valid within resources", l.Name,
752 ))
753 }
754 }
755 }
756 }
757
683 // Check that all outputs are valid 758 // Check that all outputs are valid
684 { 759 {
685 found := make(map[string]struct{}) 760 found := make(map[string]struct{})
686 for _, o := range c.Outputs { 761 for _, o := range c.Outputs {
687 // Verify the output is new 762 // Verify the output is new
688 if _, ok := found[o.Name]; ok { 763 if _, ok := found[o.Name]; ok {
689 errs = append(errs, fmt.Errorf( 764 diags = diags.Append(fmt.Errorf(
690 "%s: duplicate output. output names must be unique.", 765 "output %q: an output of this name was already defined",
691 o.Name)) 766 o.Name,
767 ))
692 continue 768 continue
693 } 769 }
694 found[o.Name] = struct{}{} 770 found[o.Name] = struct{}{}
@@ -708,9 +784,10 @@ func (c *Config) Validate() error {
708 continue 784 continue
709 } 785 }
710 786
711 errs = append(errs, fmt.Errorf( 787 diags = diags.Append(fmt.Errorf(
712 "%s: value for 'sensitive' must be boolean", 788 "output %q: value for 'sensitive' must be boolean",
713 o.Name)) 789 o.Name,
790 ))
714 continue 791 continue
715 } 792 }
716 if k == "description" { 793 if k == "description" {
@@ -719,27 +796,78 @@ func (c *Config) Validate() error {
719 continue 796 continue
720 } 797 }
721 798
722 errs = append(errs, fmt.Errorf( 799 diags = diags.Append(fmt.Errorf(
723 "%s: value for 'description' must be string", 800 "output %q: value for 'description' must be string",
724 o.Name)) 801 o.Name,
802 ))
725 continue 803 continue
726 } 804 }
727 invalidKeys = append(invalidKeys, k) 805 invalidKeys = append(invalidKeys, k)
728 } 806 }
729 if len(invalidKeys) > 0 { 807 if len(invalidKeys) > 0 {
730 errs = append(errs, fmt.Errorf( 808 diags = diags.Append(fmt.Errorf(
731 "%s: output has invalid keys: %s", 809 "output %q: invalid keys: %s",
732 o.Name, strings.Join(invalidKeys, ", "))) 810 o.Name, strings.Join(invalidKeys, ", "),
811 ))
733 } 812 }
734 if !valueKeyFound { 813 if !valueKeyFound {
735 errs = append(errs, fmt.Errorf( 814 diags = diags.Append(fmt.Errorf(
736 "%s: output is missing required 'value' key", o.Name)) 815 "output %q: missing required 'value' argument", o.Name,
816 ))
737 } 817 }
738 818
739 for _, v := range o.RawConfig.Variables { 819 for _, v := range o.RawConfig.Variables {
740 if _, ok := v.(*CountVariable); ok { 820 if _, ok := v.(*CountVariable); ok {
741 errs = append(errs, fmt.Errorf( 821 diags = diags.Append(fmt.Errorf(
742 "%s: count variables are only valid within resources", o.Name)) 822 "output %q: count variables are only valid within resources",
823 o.Name,
824 ))
825 }
826 }
827
828 // Detect a common mistake of using a "count"ed resource in
829 // an output value without using the splat or index form.
830 // Prior to 0.11 this error was silently ignored, but outputs
831 // now have their errors checked like all other contexts.
832 //
833 // TODO: Remove this in 0.12.
834 for _, v := range o.RawConfig.Variables {
835 rv, ok := v.(*ResourceVariable)
836 if !ok {
837 continue
838 }
839
840 // If the variable seems to be treating the referenced
841 // resource as a singleton (no count specified) then
842 // we'll check to make sure it is indeed a singleton.
843 // It's a warning if not.
844
845 if rv.Multi || rv.Index != 0 {
846 // This reference is treating the resource as a
847 // multi-resource, so the warning doesn't apply.
848 continue
849 }
850
851 for _, r := range c.Resources {
852 if r.Id() != rv.ResourceId() {
853 continue
854 }
855
856 // We test specifically for the raw string "1" here
857 // because we _do_ want to generate this warning if
858 // the user has provided an expression that happens
859 // to return 1 right now, to catch situations where
860 // a count might dynamically be set to something
861 // other than 1 and thus splat syntax is still needed
862 // to be safe.
863 if r.RawCount != nil && r.RawCount.Raw != nil && r.RawCount.Raw["count"] != "1" && rv.Field != "count" {
864 diags = diags.Append(tfdiags.SimpleWarning(fmt.Sprintf(
865 "output %q: must use splat syntax to access %s attribute %q, because it has \"count\" set; use %s.*.%s to obtain a list of the attributes across all instances",
866 o.Name,
867 r.Id(), rv.Field,
868 r.Id(), rv.Field,
869 )))
870 }
743 } 871 }
744 } 872 }
745 } 873 }
@@ -755,17 +883,15 @@ func (c *Config) Validate() error {
755 883
756 for _, v := range rc.Variables { 884 for _, v := range rc.Variables {
757 if _, ok := v.(*SelfVariable); ok { 885 if _, ok := v.(*SelfVariable); ok {
758 errs = append(errs, fmt.Errorf( 886 diags = diags.Append(fmt.Errorf(
759 "%s: cannot contain self-reference %s", source, v.FullKey())) 887 "%s: cannot contain self-reference %s",
888 source, v.FullKey(),
889 ))
760 } 890 }
761 } 891 }
762 } 892 }
763 893
764 if len(errs) > 0 { 894 return diags
765 return &multierror.Error{Errors: errs}
766 }
767
768 return nil
769} 895}
770 896
771// InterpolatedVariables is a helper that returns a mapping of all the interpolated 897// InterpolatedVariables is a helper that returns a mapping of all the interpolated
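
The upshot of this refactor is that callers now receive tfdiags.Diagnostics rather than a single multierror value. A minimal calling sketch, assuming the HasErrors and Err helpers provided by the tfdiags package (cfg is a hypothetical *Config):

diags := cfg.Validate()
if diags.HasErrors() {
	// Err collapses the diagnostics into one error for callers that
	// still expect the old signature.
	return diags.Err()
}
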
diff --git a/vendor/github.com/hashicorp/terraform/config/config_string.go b/vendor/github.com/hashicorp/terraform/config/config_string.go
index 0b3abbc..a6933c2 100644
--- a/vendor/github.com/hashicorp/terraform/config/config_string.go
+++ b/vendor/github.com/hashicorp/terraform/config/config_string.go
@@ -143,6 +143,46 @@ func outputsStr(os []*Output) string {
143 result += fmt.Sprintf(" %s: %s\n", kind, str) 143 result += fmt.Sprintf(" %s: %s\n", kind, str)
144 } 144 }
145 } 145 }
146
147 if o.Description != "" {
148 result += fmt.Sprintf(" description\n %s\n", o.Description)
149 }
150 }
151
152 return strings.TrimSpace(result)
153}
154
155func localsStr(ls []*Local) string {
156 ns := make([]string, 0, len(ls))
157 m := make(map[string]*Local)
158 for _, l := range ls {
159 ns = append(ns, l.Name)
160 m[l.Name] = l
161 }
162 sort.Strings(ns)
163
164 result := ""
165 for _, n := range ns {
166 l := m[n]
167
168 result += fmt.Sprintf("%s\n", n)
169
170 if len(l.RawConfig.Variables) > 0 {
171 result += fmt.Sprintf(" vars\n")
172 for _, rawV := range l.RawConfig.Variables {
173 kind := "unknown"
174 str := rawV.FullKey()
175
176 switch rawV.(type) {
177 case *ResourceVariable:
178 kind = "resource"
179 case *UserVariable:
180 kind = "user"
181 }
182
183 result += fmt.Sprintf(" %s: %s\n", kind, str)
184 }
185 }
146 } 186 }
147 187
148 return strings.TrimSpace(result) 188 return strings.TrimSpace(result)
diff --git a/vendor/github.com/hashicorp/terraform/config/configschema/decoder_spec.go b/vendor/github.com/hashicorp/terraform/config/configschema/decoder_spec.go
new file mode 100644
index 0000000..2b1b0ca
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/configschema/decoder_spec.go
@@ -0,0 +1,97 @@
1package configschema
2
3import (
4 "github.com/hashicorp/hcl2/hcldec"
5 "github.com/zclconf/go-cty/cty"
6)
7
8var mapLabelNames = []string{"key"}
9
10// DecoderSpec returns a hcldec.Spec that can be used to decode an HCL Body
11// using the facilities in the hcldec package.
12//
13// The returned specification is guaranteed to return a value of the same type
14// returned by method ImpliedType, but it may contain null or unknown values if
15// any of the block attributes are defined as optional and/or computed
16// respectively.
17func (b *Block) DecoderSpec() hcldec.Spec {
18 ret := hcldec.ObjectSpec{}
19 if b == nil {
20 return ret
21 }
22
23 for name, attrS := range b.Attributes {
24 switch {
25 case attrS.Computed && attrS.Optional:
26 // In this special case we use an unknown value as a default
27 // to get the intended behavior that the result is computed
28 // unless it has been explicitly set in config.
29 ret[name] = &hcldec.DefaultSpec{
30 Primary: &hcldec.AttrSpec{
31 Name: name,
32 Type: attrS.Type,
33 },
34 Default: &hcldec.LiteralSpec{
35 Value: cty.UnknownVal(attrS.Type),
36 },
37 }
38 case attrS.Computed:
39 ret[name] = &hcldec.LiteralSpec{
40 Value: cty.UnknownVal(attrS.Type),
41 }
42 default:
43 ret[name] = &hcldec.AttrSpec{
44 Name: name,
45 Type: attrS.Type,
46 Required: attrS.Required,
47 }
48 }
49 }
50
51 for name, blockS := range b.BlockTypes {
52 if _, exists := ret[name]; exists {
53 // This indicates an invalid schema, since it's not valid to
54 // define both an attribute and a block type of the same name.
55 // However, we don't raise this here since it's checked by
56 // InternalValidate.
57 continue
58 }
59
60 childSpec := blockS.Block.DecoderSpec()
61
62 switch blockS.Nesting {
63 case NestingSingle:
64 ret[name] = &hcldec.BlockSpec{
65 TypeName: name,
66 Nested: childSpec,
67 Required: blockS.MinItems == 1 && blockS.MaxItems >= 1,
68 }
69 case NestingList:
70 ret[name] = &hcldec.BlockListSpec{
71 TypeName: name,
72 Nested: childSpec,
73 MinItems: blockS.MinItems,
74 MaxItems: blockS.MaxItems,
75 }
76 case NestingSet:
77 ret[name] = &hcldec.BlockSetSpec{
78 TypeName: name,
79 Nested: childSpec,
80 MinItems: blockS.MinItems,
81 MaxItems: blockS.MaxItems,
82 }
83 case NestingMap:
84 ret[name] = &hcldec.BlockMapSpec{
85 TypeName: name,
86 Nested: childSpec,
87 LabelNames: mapLabelNames,
88 }
89 default:
90 // Invalid nesting type is just ignored. It's checked by
91 // InternalValidate.
92 continue
93 }
94 }
95
96 return ret
97}
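
A short sketch of how the spec is consumed, assuming the hcl2 "hcl" package is also imported; the body would come from a parsed configuration file elsewhere:

func decodeExample(body hcl.Body) (cty.Value, hcl.Diagnostics) {
	schema := &Block{
		Attributes: map[string]*Attribute{
			"name": {Type: cty.String, Required: true},
		},
	}
	// hcldec.Decode evaluates the body against the generated spec.
	return hcldec.Decode(body, schema.DecoderSpec(), nil)
}
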
diff --git a/vendor/github.com/hashicorp/terraform/config/configschema/doc.go b/vendor/github.com/hashicorp/terraform/config/configschema/doc.go
new file mode 100644
index 0000000..caf8d73
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/configschema/doc.go
@@ -0,0 +1,14 @@
1// Package configschema contains types for describing the expected structure
2// of a configuration block whose shape is not known until runtime.
3//
4// For example, this is used to describe the expected contents of a resource
5// configuration block, which is defined by the corresponding provider plugin
6// and thus not compiled into Terraform core.
7//
8// A configschema primarily describes the shape of configuration, but it is
9// also suitable for use with other structures derived from the configuration,
10// such as the cached state of a resource or a resource diff.
11//
12// This package should not be confused with the package helper/schema, which
13// is the higher-level helper library used to implement providers themselves.
14package configschema
diff --git a/vendor/github.com/hashicorp/terraform/config/configschema/implied_type.go b/vendor/github.com/hashicorp/terraform/config/configschema/implied_type.go
new file mode 100644
index 0000000..67324eb
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/configschema/implied_type.go
@@ -0,0 +1,21 @@
1package configschema
2
3import (
4 "github.com/hashicorp/hcl2/hcldec"
5 "github.com/zclconf/go-cty/cty"
6)
7
8// ImpliedType returns the cty.Type that would result from decoding a
9// configuration block using the receiving block schema.
10//
11// ImpliedType always returns a result, even if the given schema is
12// inconsistent. Code that creates configschema.Block objects should be
13// tested using the InternalValidate method to detect any inconsistencies
14// that would cause this method to fall back on defaults and assumptions.
15func (b *Block) ImpliedType() cty.Type {
16 if b == nil {
17 return cty.EmptyObject
18 }
19
20 return hcldec.ImpliedType(b.DecoderSpec())
21}
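
A companion sketch for ImpliedType using the same in-package types (fmt would need importing): an optional string attribute implies a one-attribute object type.

b := &Block{
	Attributes: map[string]*Attribute{
		"name": {Type: cty.String, Optional: true},
	},
}
want := cty.Object(map[string]cty.Type{"name": cty.String})
fmt.Println(b.ImpliedType().Equals(want)) // true
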
diff --git a/vendor/github.com/hashicorp/terraform/config/configschema/internal_validate.go b/vendor/github.com/hashicorp/terraform/config/configschema/internal_validate.go
new file mode 100644
index 0000000..33cbe88
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/configschema/internal_validate.go
@@ -0,0 +1,92 @@
1package configschema
2
3import (
4 "fmt"
5 "regexp"
6
7 "github.com/zclconf/go-cty/cty"
8
9 multierror "github.com/hashicorp/go-multierror"
10)
11
12var validName = regexp.MustCompile(`^[a-z0-9_]+$`)
13
14// InternalValidate returns an error if the receiving block and its child
15// schema definitions have any inconsistencies with the documented rules for
16// valid schema.
17//
18// This is intended to be used within unit tests to detect when a given
19// schema is invalid.
20func (b *Block) InternalValidate() error {
21 if b == nil {
22 return fmt.Errorf("top-level block schema is nil")
23 }
24 return b.internalValidate("", nil)
25
26}
27
28func (b *Block) internalValidate(prefix string, err error) error {
29 for name, attrS := range b.Attributes {
30 if attrS == nil {
31 err = multierror.Append(err, fmt.Errorf("%s%s: attribute schema is nil", prefix, name))
32 continue
33 }
34 if !validName.MatchString(name) {
35 err = multierror.Append(err, fmt.Errorf("%s%s: name may contain only lowercase letters, digits and underscores", prefix, name))
36 }
37 if attrS.Optional == false && attrS.Required == false && attrS.Computed == false {
38 err = multierror.Append(err, fmt.Errorf("%s%s: must set Optional, Required or Computed", prefix, name))
39 }
40 if attrS.Optional && attrS.Required {
41 err = multierror.Append(err, fmt.Errorf("%s%s: cannot set both Optional and Required", prefix, name))
42 }
43 if attrS.Computed && attrS.Required {
44 err = multierror.Append(err, fmt.Errorf("%s%s: cannot set both Computed and Required", prefix, name))
45 }
46 if attrS.Type == cty.NilType {
47 err = multierror.Append(err, fmt.Errorf("%s%s: Type must be set to something other than cty.NilType", prefix, name))
48 }
49 }
50
51 for name, blockS := range b.BlockTypes {
52 if blockS == nil {
53 err = multierror.Append(err, fmt.Errorf("%s%s: block schema is nil", prefix, name))
54 continue
55 }
56
57 if _, isAttr := b.Attributes[name]; isAttr {
58 err = multierror.Append(err, fmt.Errorf("%s%s: name defined as both attribute and child block type", prefix, name))
59 } else if !validName.MatchString(name) {
60 err = multierror.Append(err, fmt.Errorf("%s%s: name may contain only lowercase letters, digits and underscores", prefix, name))
61 }
62
63 if blockS.MinItems < 0 || blockS.MaxItems < 0 {
64 err = multierror.Append(err, fmt.Errorf("%s%s: MinItems and MaxItems must both be greater than or equal to zero", prefix, name))
65 }
66
67 switch blockS.Nesting {
68 case NestingSingle:
69 switch {
70 case blockS.MinItems != blockS.MaxItems:
71 err = multierror.Append(err, fmt.Errorf("%s%s: MinItems and MaxItems must match in NestingSingle mode", prefix, name))
72 case blockS.MinItems < 0 || blockS.MinItems > 1:
73 err = multierror.Append(err, fmt.Errorf("%s%s: MinItems and MaxItems must be set to either 0 or 1 in NestingSingle mode", prefix, name))
74 }
75 case NestingList, NestingSet:
76 if blockS.MinItems > blockS.MaxItems && blockS.MaxItems != 0 {
77 err = multierror.Append(err, fmt.Errorf("%s%s: MinItems must be less than or equal to MaxItems in %s mode", prefix, name, blockS.Nesting))
78 }
79 case NestingMap:
80 if blockS.MinItems != 0 || blockS.MaxItems != 0 {
81 err = multierror.Append(err, fmt.Errorf("%s%s: MinItems and MaxItems must both be 0 in NestingMap mode", prefix, name))
82 }
83 default:
84 err = multierror.Append(err, fmt.Errorf("%s%s: invalid nesting mode %s", prefix, name, blockS.Nesting))
85 }
86
87 subPrefix := prefix + name + "."
88 err = blockS.Block.internalValidate(subPrefix, err)
89 }
90
91 return err
92}
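
As the doc comment above says, this is meant for unit tests; a hedged sketch (the test name and mySchema variable are hypothetical, and "testing" would need importing):

func TestMySchemaIsValid(t *testing.T) {
	if err := mySchema.InternalValidate(); err != nil {
		t.Fatalf("invalid schema: %s", err)
	}
}
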
diff --git a/vendor/github.com/hashicorp/terraform/config/configschema/nestingmode_string.go b/vendor/github.com/hashicorp/terraform/config/configschema/nestingmode_string.go
new file mode 100644
index 0000000..6cb9313
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/configschema/nestingmode_string.go
@@ -0,0 +1,16 @@
1// Code generated by "stringer -type=NestingMode"; DO NOT EDIT.
2
3package configschema
4
5import "strconv"
6
7const _NestingMode_name = "nestingModeInvalidNestingSingleNestingListNestingSetNestingMap"
8
9var _NestingMode_index = [...]uint8{0, 18, 31, 42, 52, 62}
10
11func (i NestingMode) String() string {
12 if i < 0 || i >= NestingMode(len(_NestingMode_index)-1) {
13 return "NestingMode(" + strconv.FormatInt(int64(i), 10) + ")"
14 }
15 return _NestingMode_name[_NestingMode_index[i]:_NestingMode_index[i+1]]
16}
diff --git a/vendor/github.com/hashicorp/terraform/config/configschema/schema.go b/vendor/github.com/hashicorp/terraform/config/configschema/schema.go
new file mode 100644
index 0000000..9a8ee55
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/configschema/schema.go
@@ -0,0 +1,107 @@
1package configschema
2
3import (
4 "github.com/zclconf/go-cty/cty"
5)
6
7// Block represents a configuration block.
8//
9// "Block" here is a logical grouping construct, though it happens to map
10// directly onto the physical block syntax of Terraform's native configuration
11// syntax. It may be more a matter of convention in other syntaxes, such as
12// JSON.
13//
14// When converted to a value, a Block always becomes an instance of an object
15// type derived from its defined attributes and nested blocks.
16type Block struct {
17 // Attributes describes any attributes that may appear directly inside
18 // the block.
19 Attributes map[string]*Attribute
20
21 // BlockTypes describes any nested block types that may appear directly
22 // inside the block.
23 BlockTypes map[string]*NestedBlock
24}
25
26// Attribute represents a configuration attribute, within a block.
27type Attribute struct {
28 // Type is a type specification that the attribute's value must conform to.
29 Type cty.Type
30
31 // Required, if set to true, specifies that an omitted or null value is
32 // not permitted.
33 Required bool
34
35 // Optional, if set to true, specifies that an omitted or null value is
36 // permitted. This field conflicts with Required.
37 Optional bool
38
39 // Computed, if set to true, specifies that the value comes from the
40 // provider rather than from configuration. If combined with Optional,
41 // then the config may optionally provide an overridden value.
42 Computed bool
43
44 // Sensitive, if set to true, indicates that an attribute may contain
45 // sensitive information.
46 //
47 // At present nothing is done with this information, but callers are
48 // encouraged to set it where appropriate so that it may be used in the
49 // future to help Terraform mask sensitive information. (Terraform
50 // currently achieves this in a limited sense via other mechanisms.)
51 Sensitive bool
52}
53
54// NestedBlock represents the embedding of one block within another.
55type NestedBlock struct {
56 // Block is the description of the block that's nested.
57 Block
58
59 // Nesting provides the nesting mode for the child block, which determines
60 // how many instances of the block are allowed, how many labels it expects,
61 // and how the resulting data will be converted into a data structure.
62 Nesting NestingMode
63
64 // MinItems and MaxItems set, for the NestingList and NestingSet nesting
65 // modes, lower and upper limits on the number of child blocks allowed
66 // of the given type. If both are left at zero, no limit is applied.
67 //
68 // As a special case, both values can be set to 1 for NestingSingle in
69 // order to indicate that a particular single block is required.
70 //
71 // These fields are ignored for other nesting modes and must both be left
72 // at zero.
73 MinItems, MaxItems int
74}
75
76// NestingMode is an enumeration of modes for nesting blocks inside other
77// blocks.
78type NestingMode int
79
80//go:generate stringer -type=NestingMode
81
82const (
83 nestingModeInvalid NestingMode = iota
84
85 // NestingSingle indicates that only a single instance of a given
86 // block type is permitted, with no labels, and its content should be
87 // provided directly as an object value.
88 NestingSingle
89
90 // NestingList indicates that multiple blocks of the given type are
91 // permitted, with no labels, and that their corresponding objects should
92 // be provided in a list.
93 NestingList
94
95 // NestingSet indicates that multiple blocks of the given type are
96 // permitted, with no labels, and that their corresponding objects should
97 // be provided in a set.
98 NestingSet
99
100 // NestingMap indicates that multiple blocks of the given type are
101 // permitted, each with a single label, and that their corresponding
102 // objects should be provided in a map whose keys are the labels.
103 //
104 // It's an error, therefore, to use the same label value on multiple
105 // blocks.
106 NestingMap
107)
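
An illustrative schema combining the types above (not taken from the diff): one optional attribute plus a repeatable nested "rule" block.

var exampleSchema = &Block{
	Attributes: map[string]*Attribute{
		"description": {Type: cty.String, Optional: true},
	},
	BlockTypes: map[string]*NestedBlock{
		"rule": {
			Nesting: NestingList,
			Block: Block{
				Attributes: map[string]*Attribute{
					"action": {Type: cty.String, Required: true},
				},
			},
		},
	},
}
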
diff --git a/vendor/github.com/hashicorp/terraform/config/hcl2_shim_util.go b/vendor/github.com/hashicorp/terraform/config/hcl2_shim_util.go
new file mode 100644
index 0000000..207d105
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/hcl2_shim_util.go
@@ -0,0 +1,134 @@
1package config
2
3import (
4 "fmt"
5
6 "github.com/zclconf/go-cty/cty/function/stdlib"
7
8 "github.com/hashicorp/hil/ast"
9 "github.com/hashicorp/terraform/config/hcl2shim"
10
11 hcl2 "github.com/hashicorp/hcl2/hcl"
12 "github.com/zclconf/go-cty/cty"
13 "github.com/zclconf/go-cty/cty/convert"
14 "github.com/zclconf/go-cty/cty/function"
15)
16
17// ---------------------------------------------------------------------------
18// This file contains some helper functions that are used to shim between
19// HCL2 concepts and HCL/HIL concepts, to help us mostly preserve the existing
20// public API that was built around HCL/HIL-oriented approaches.
21// ---------------------------------------------------------------------------
22
23func hcl2InterpolationFuncs() map[string]function.Function {
24 hcl2Funcs := map[string]function.Function{}
25
26 for name, hilFunc := range Funcs() {
27 hcl2Funcs[name] = hcl2InterpolationFuncShim(hilFunc)
28 }
29
30 // Some functions in the old world are dealt with inside langEvalConfig
31 // due to their legacy reliance on direct access to the symbol table.
32 // Since 0.7 they don't actually need it anymore and just ignore it,
33 // so we're cheating a bit here and exploiting that detail by passing nil.
34 hcl2Funcs["lookup"] = hcl2InterpolationFuncShim(interpolationFuncLookup(nil))
35 hcl2Funcs["keys"] = hcl2InterpolationFuncShim(interpolationFuncKeys(nil))
36 hcl2Funcs["values"] = hcl2InterpolationFuncShim(interpolationFuncValues(nil))
37
38 // As a bonus, we'll provide the JSON-handling functions from the cty
39 // function library since its "jsonencode" is more complete (doesn't force
40 // weird type conversions) and HIL's type system can't represent
41 // "jsondecode" at all. The result of jsondecode will eventually be forced
42 // to conform to the HIL type system on exit into the rest of Terraform due
43 // to our shimming right now, but it should be usable for decoding _within_
44 // an expression.
45 hcl2Funcs["jsonencode"] = stdlib.JSONEncodeFunc
46 hcl2Funcs["jsondecode"] = stdlib.JSONDecodeFunc
47
48 return hcl2Funcs
49}
50
51func hcl2InterpolationFuncShim(hilFunc ast.Function) function.Function {
52 spec := &function.Spec{}
53
54 for i, hilArgType := range hilFunc.ArgTypes {
55 spec.Params = append(spec.Params, function.Parameter{
56 Type: hcl2shim.HCL2TypeForHILType(hilArgType),
57 Name: fmt.Sprintf("arg%d", i+1), // HIL args don't have names, so we'll fudge it
58 })
59 }
60
61 if hilFunc.Variadic {
62 spec.VarParam = &function.Parameter{
63 Type: hcl2shim.HCL2TypeForHILType(hilFunc.VariadicType),
64 Name: "varargs", // HIL args don't have names, so we'll fudge it
65 }
66 }
67
68 spec.Type = func(args []cty.Value) (cty.Type, error) {
69 return hcl2shim.HCL2TypeForHILType(hilFunc.ReturnType), nil
70 }
71 spec.Impl = func(args []cty.Value, retType cty.Type) (cty.Value, error) {
72 hilArgs := make([]interface{}, len(args))
73 for i, arg := range args {
74 hilV := hcl2shim.HILVariableFromHCL2Value(arg)
75
76 // Although the cty function system does automatic type conversions
77 // to match the argument types, cty doesn't distinguish int and
78 // float and so we may need to adjust here to ensure that the
79 // wrapped function gets exactly the Go type it was expecting.
80 var wantType ast.Type
81 if i < len(hilFunc.ArgTypes) {
82 wantType = hilFunc.ArgTypes[i]
83 } else {
84 wantType = hilFunc.VariadicType
85 }
86 switch {
87 case hilV.Type == ast.TypeInt && wantType == ast.TypeFloat:
88 hilV.Type = wantType
89 hilV.Value = float64(hilV.Value.(int))
90 case hilV.Type == ast.TypeFloat && wantType == ast.TypeInt:
91 hilV.Type = wantType
92 hilV.Value = int(hilV.Value.(float64))
93 }
94
95 // HIL functions actually expect to have the outermost variable
96 // "peeled" but any nested values (in lists or maps) will
97 // still have their ast.Variable wrapping.
98 hilArgs[i] = hilV.Value
99 }
100
101 hilResult, err := hilFunc.Callback(hilArgs)
102 if err != nil {
103 return cty.DynamicVal, err
104 }
105
106 // Just as on the way in, we get back a partially-peeled ast.Variable
107 // which we need to re-wrap in order to convert it back into what
108 // we're calling a "config value".
109 rv := hcl2shim.HCL2ValueFromHILVariable(ast.Variable{
110 Type: hilFunc.ReturnType,
111 Value: hilResult,
112 })
113
114 return convert.Convert(rv, retType) // if result is unknown we'll force the correct type here
115 }
116 return function.New(spec)
117}
118
119func hcl2EvalWithUnknownVars(expr hcl2.Expression) (cty.Value, hcl2.Diagnostics) {
120 trs := expr.Variables()
121 vars := map[string]cty.Value{}
122 val := cty.DynamicVal
123
124 for _, tr := range trs {
125 name := tr.RootName()
126 vars[name] = val
127 }
128
129 ctx := &hcl2.EvalContext{
130 Variables: vars,
131 Functions: hcl2InterpolationFuncs(),
132 }
133 return expr.Value(ctx)
134}
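
An in-package sketch of the helper above, assuming hclsyntax (github.com/hashicorp/hcl2/hcl/hclsyntax) and fmt are imported: every root variable is treated as unknown, so the evaluated result is an unknown string.

expr, diags := hclsyntax.ParseExpression(
	[]byte(`"${var.name}-suffix"`), "example.tf", hcl2.Pos{Line: 1, Column: 1},
)
if !diags.HasErrors() {
	v, _ := hcl2EvalWithUnknownVars(expr)
	fmt.Println(v.IsKnown()) // false: var.name is unknown at validation time
}
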
diff --git a/vendor/github.com/hashicorp/terraform/config/hcl2shim/single_attr_body.go b/vendor/github.com/hashicorp/terraform/config/hcl2shim/single_attr_body.go
new file mode 100644
index 0000000..19651c8
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/hcl2shim/single_attr_body.go
@@ -0,0 +1,85 @@
1package hcl2shim
2
3import (
4 "fmt"
5
6 hcl2 "github.com/hashicorp/hcl2/hcl"
7)
8
9// SingleAttrBody is a weird implementation of hcl2.Body that acts as if
10// it has a single attribute whose value is the given expression.
11//
12// This is used to shim Resource.RawCount and Output.RawConfig to behave
13// more like they do in the old HCL loader.
14type SingleAttrBody struct {
15 Name string
16 Expr hcl2.Expression
17}
18
19var _ hcl2.Body = SingleAttrBody{}
20
21func (b SingleAttrBody) Content(schema *hcl2.BodySchema) (*hcl2.BodyContent, hcl2.Diagnostics) {
22 content, all, diags := b.content(schema)
23 if !all {
24 // This should never happen because this body implementation should only
25 // be used by code that is aware that it's using a single-attr body.
26 diags = append(diags, &hcl2.Diagnostic{
27 Severity: hcl2.DiagError,
28 Summary: "Invalid attribute",
29 Detail: fmt.Sprintf("The correct attribute name is %q.", b.Name),
30 Subject: b.Expr.Range().Ptr(),
31 })
32 }
33 return content, diags
34}
35
36func (b SingleAttrBody) PartialContent(schema *hcl2.BodySchema) (*hcl2.BodyContent, hcl2.Body, hcl2.Diagnostics) {
37 content, all, diags := b.content(schema)
38 var remain hcl2.Body
39 if all {
40 // If the request matched the one attribute we represent, then the
41 // remaining body is empty.
42 remain = hcl2.EmptyBody()
43 } else {
44 remain = b
45 }
46 return content, remain, diags
47}
48
49func (b SingleAttrBody) content(schema *hcl2.BodySchema) (*hcl2.BodyContent, bool, hcl2.Diagnostics) {
50 ret := &hcl2.BodyContent{}
51 all := false
52 var diags hcl2.Diagnostics
53
54 for _, attrS := range schema.Attributes {
55 if attrS.Name == b.Name {
56 attrs, _ := b.JustAttributes()
57 ret.Attributes = attrs
58 all = true
59 } else if attrS.Required {
60 diags = append(diags, &hcl2.Diagnostic{
61 Severity: hcl2.DiagError,
62 Summary: "Missing attribute",
63 Detail: fmt.Sprintf("The attribute %q is required.", attrS.Name),
64 Subject: b.Expr.Range().Ptr(),
65 })
66 }
67 }
68
69 return ret, all, diags
70}
71
72func (b SingleAttrBody) JustAttributes() (hcl2.Attributes, hcl2.Diagnostics) {
73 return hcl2.Attributes{
74 b.Name: {
75 Expr: b.Expr,
76 Name: b.Name,
77 NameRange: b.Expr.Range(),
78 Range: b.Expr.Range(),
79 },
80 }, nil
81}
82
83func (b SingleAttrBody) MissingItemRange() hcl2.Range {
84 return b.Expr.Range()
85}
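
A small sketch of the shim in use, mirroring how Resource.RawCount is wrapped; expr is assumed to be an hcl2.Expression obtained from a parsed configuration:

body := SingleAttrBody{Name: "count", Expr: expr}
attrs, _ := body.JustAttributes()
countExpr := attrs["count"].Expr // the same expression, now reachable through an hcl2.Body
_ = countExpr
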
diff --git a/vendor/github.com/hashicorp/terraform/config/hcl2shim/values.go b/vendor/github.com/hashicorp/terraform/config/hcl2shim/values.go
new file mode 100644
index 0000000..0b697a5
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/hcl2shim/values.go
@@ -0,0 +1,246 @@
1package hcl2shim
2
3import (
4 "fmt"
5 "math/big"
6
7 "github.com/hashicorp/hil/ast"
8 "github.com/zclconf/go-cty/cty"
9)
10
11// UnknownVariableValue is a sentinel value that can be used
12// to denote that the value of a variable is unknown at this time.
13// RawConfig uses this information to build up data about
14// unknown keys.
15const UnknownVariableValue = "74D93920-ED26-11E3-AC10-0800200C9A66"
16
17// ConfigValueFromHCL2 converts a value from HCL2 (really, from the cty dynamic
18// types library that HCL2 uses) to a value type that matches what would've
19// been produced from the HCL-based interpolator for an equivalent structure.
20//
21// This function will transform a cty null value into a Go nil value, which
22// isn't a possible outcome of the HCL/HIL-based decoder and so callers may
23// need to detect and reject any null values.
24func ConfigValueFromHCL2(v cty.Value) interface{} {
25 if !v.IsKnown() {
26 return UnknownVariableValue
27 }
28 if v.IsNull() {
29 return nil
30 }
31
32 switch v.Type() {
33 case cty.Bool:
34 return v.True() // like HCL.BOOL
35 case cty.String:
36 return v.AsString() // like HCL token.STRING or token.HEREDOC
37 case cty.Number:
38 // We can't match HCL _exactly_ here because it distinguishes between
39 // int and float values, but we'll get as close as we can by using
40 // an int if the number is exactly representable, and a float if not.
41 // The conversion to float will force precision to that of a float64,
42 // which is potentially losing information from the specific number
43 // given, but no worse than what HCL would've done in its own conversion
44 // to float.
45
46 f := v.AsBigFloat()
47 if i, acc := f.Int64(); acc == big.Exact {
48 // if we're on a 32-bit system and the number is too big for 32-bit
49 // int then we'll fall through here and use a float64.
50 const MaxInt = int(^uint(0) >> 1)
51 const MinInt = -MaxInt - 1
52 if i <= int64(MaxInt) && i >= int64(MinInt) {
53 return int(i) // Like HCL token.NUMBER
54 }
55 }
56
57 f64, _ := f.Float64()
58 return f64 // like HCL token.FLOAT
59 }
60
61 if v.Type().IsListType() || v.Type().IsSetType() || v.Type().IsTupleType() {
62 l := make([]interface{}, 0, v.LengthInt())
63 it := v.ElementIterator()
64 for it.Next() {
65 _, ev := it.Element()
66 l = append(l, ConfigValueFromHCL2(ev))
67 }
68 return l
69 }
70
71 if v.Type().IsMapType() || v.Type().IsObjectType() {
72 l := make(map[string]interface{})
73 it := v.ElementIterator()
74 for it.Next() {
75 ek, ev := it.Element()
76 l[ek.AsString()] = ConfigValueFromHCL2(ev)
77 }
78 return l
79 }
80
81 // If we fall out here then we have some weird type that we haven't
82 // accounted for. This should never happen unless the caller is using
83 // capsule types, and we don't currently have any such types defined.
84 panic(fmt.Errorf("can't convert %#v to config value", v))
85}
86
87// HCL2ValueFromConfigValue is the opposite of ConfigValueFromHCL2: it takes
88// a value as would be returned from the old interpolator and turns it into
89// a cty.Value so it can be used within, for example, an HCL2 EvalContext.
90func HCL2ValueFromConfigValue(v interface{}) cty.Value {
91 if v == nil {
92 return cty.NullVal(cty.DynamicPseudoType)
93 }
94 if v == UnknownVariableValue {
95 return cty.DynamicVal
96 }
97
98 switch tv := v.(type) {
99 case bool:
100 return cty.BoolVal(tv)
101 case string:
102 return cty.StringVal(tv)
103 case int:
104 return cty.NumberIntVal(int64(tv))
105 case float64:
106 return cty.NumberFloatVal(tv)
107 case []interface{}:
108 vals := make([]cty.Value, len(tv))
109 for i, ev := range tv {
110 vals[i] = HCL2ValueFromConfigValue(ev)
111 }
112 return cty.TupleVal(vals)
113 case map[string]interface{}:
114 vals := map[string]cty.Value{}
115 for k, ev := range tv {
116 vals[k] = HCL2ValueFromConfigValue(ev)
117 }
118 return cty.ObjectVal(vals)
119 default:
120 // HCL/HIL should never generate anything that isn't caught by
121 // the above, so if we get here something has gone very wrong.
122 panic(fmt.Errorf("can't convert %#v to cty.Value", v))
123 }
124}
125
126func HILVariableFromHCL2Value(v cty.Value) ast.Variable {
127 if v.IsNull() {
128 // Caller should guarantee/check this before calling
129 panic("Null values cannot be represented in HIL")
130 }
131 if !v.IsKnown() {
132 return ast.Variable{
133 Type: ast.TypeUnknown,
134 Value: UnknownVariableValue,
135 }
136 }
137
138 switch v.Type() {
139 case cty.Bool:
140 return ast.Variable{
141 Type: ast.TypeBool,
142 Value: v.True(),
143 }
144 case cty.Number:
145 v := ConfigValueFromHCL2(v)
146 switch tv := v.(type) {
147 case int:
148 return ast.Variable{
149 Type: ast.TypeInt,
150 Value: tv,
151 }
152 case float64:
153 return ast.Variable{
154 Type: ast.TypeFloat,
155 Value: tv,
156 }
157 default:
158 // should never happen
159 panic("invalid return value for configValueFromHCL2")
160 }
161 case cty.String:
162 return ast.Variable{
163 Type: ast.TypeString,
164 Value: v.AsString(),
165 }
166 }
167
168 if v.Type().IsListType() || v.Type().IsSetType() || v.Type().IsTupleType() {
169 l := make([]ast.Variable, 0, v.LengthInt())
170 it := v.ElementIterator()
171 for it.Next() {
172 _, ev := it.Element()
173 l = append(l, HILVariableFromHCL2Value(ev))
174 }
175 // If we were given a tuple then this could actually produce an invalid
176 // list with non-homogenous types, which we expect to be caught inside
177 // HIL just like a user-supplied non-homogenous list would be.
178 return ast.Variable{
179 Type: ast.TypeList,
180 Value: l,
181 }
182 }
183
184 if v.Type().IsMapType() || v.Type().IsObjectType() {
185 l := make(map[string]ast.Variable)
186 it := v.ElementIterator()
187 for it.Next() {
188 ek, ev := it.Element()
189 l[ek.AsString()] = HILVariableFromHCL2Value(ev)
190 }
191 // If we were given an object then this could actually produce an invalid
192 // map with non-homogenous types, which we expect to be caught inside
193 // HIL just like a user-supplied non-homogenous map would be.
194 return ast.Variable{
195 Type: ast.TypeMap,
196 Value: l,
197 }
198 }
199
200 // If we fall out here then we have some weird type that we haven't
201 // accounted for. This should never happen unless the caller is using
202 // capsule types, and we don't currently have any such types defined.
203 panic(fmt.Errorf("can't convert %#v to HIL variable", v))
204}
205
206func HCL2ValueFromHILVariable(v ast.Variable) cty.Value {
207 switch v.Type {
208 case ast.TypeList:
209 vals := make([]cty.Value, len(v.Value.([]ast.Variable)))
210 for i, ev := range v.Value.([]ast.Variable) {
211 vals[i] = HCL2ValueFromHILVariable(ev)
212 }
213 return cty.TupleVal(vals)
214 case ast.TypeMap:
215 vals := make(map[string]cty.Value, len(v.Value.(map[string]ast.Variable)))
216 for k, ev := range v.Value.(map[string]ast.Variable) {
217 vals[k] = HCL2ValueFromHILVariable(ev)
218 }
219 return cty.ObjectVal(vals)
220 default:
221 return HCL2ValueFromConfigValue(v.Value)
222 }
223}
224
225func HCL2TypeForHILType(hilType ast.Type) cty.Type {
226 switch hilType {
227 case ast.TypeAny:
228 return cty.DynamicPseudoType
229 case ast.TypeUnknown:
230 return cty.DynamicPseudoType
231 case ast.TypeBool:
232 return cty.Bool
233 case ast.TypeInt:
234 return cty.Number
235 case ast.TypeFloat:
236 return cty.Number
237 case ast.TypeString:
238 return cty.String
239 case ast.TypeList:
240 return cty.List(cty.DynamicPseudoType)
241 case ast.TypeMap:
242 return cty.Map(cty.DynamicPseudoType)
243 default:
244  return cty.NilType // equivalent to ast.TypeInvalid
245 }
246}
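
As a quick illustration of the conversion rules described in the comments above, here is a minimal sketch (assuming the vendored github.com/hashicorp/terraform/config/hcl2shim and github.com/zclconf/go-cty/cty packages shown elsewhere in this diff are importable) of the round trip between cty values and legacy config values:

package main

import (
	"fmt"

	"github.com/hashicorp/terraform/config/hcl2shim"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	// A whole number that fits in an int comes back as int, mirroring
	// HCL's token.NUMBER behaviour described above.
	fmt.Printf("%T\n", hcl2shim.ConfigValueFromHCL2(cty.NumberIntVal(42))) // int

	// A fractional number comes back as float64, like token.FLOAT.
	fmt.Printf("%T\n", hcl2shim.ConfigValueFromHCL2(cty.NumberFloatVal(1.5))) // float64

	// The reverse direction turns Go maps and slices into cty object and
	// tuple values respectively.
	v := hcl2shim.HCL2ValueFromConfigValue(map[string]interface{}{
		"enabled": true,
		"count":   3,
	})
	fmt.Println(v.Type().IsObjectType()) // true
}
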
diff --git a/vendor/github.com/hashicorp/terraform/config/import_tree.go b/vendor/github.com/hashicorp/terraform/config/import_tree.go
index 37ec11a..08cbc77 100644
--- a/vendor/github.com/hashicorp/terraform/config/import_tree.go
+++ b/vendor/github.com/hashicorp/terraform/config/import_tree.go
@@ -1,8 +1,12 @@
1package config 1package config
2 2
3import ( 3import (
4 "bufio"
4 "fmt" 5 "fmt"
5 "io" 6 "io"
7 "os"
8
9 "github.com/hashicorp/errwrap"
6) 10)
7 11
8// configurable is an interface that must be implemented by any configuration 12// configurable is an interface that must be implemented by any configuration
@@ -27,15 +31,52 @@ type importTree struct {
27// imports. 31// imports.
28type fileLoaderFunc func(path string) (configurable, []string, error) 32type fileLoaderFunc func(path string) (configurable, []string, error)
29 33
34// Set this to a non-empty value at link time to enable the HCL2 experiment.
35// This is not currently enabled for release builds.
36//
37// For example:
38// go install -ldflags="-X github.com/hashicorp/terraform/config.enableHCL2Experiment=true" github.com/hashicorp/terraform
39var enableHCL2Experiment = ""
40
30// loadTree takes a single file and loads the entire importTree for that 41// loadTree takes a single file and loads the entire importTree for that
31// file. This function detects what kind of configuration file it is and 42// file. This function detects what kind of configuration file it is and
32// executes the proper fileLoaderFunc. 43// executes the proper fileLoaderFunc.
33func loadTree(root string) (*importTree, error) { 44func loadTree(root string) (*importTree, error) {
34 var f fileLoaderFunc 45 var f fileLoaderFunc
35 switch ext(root) { 46
36 case ".tf", ".tf.json": 47 // HCL2 experiment is currently activated at build time via the linker.
37 f = loadFileHcl 48 // See the comment on this variable for more information.
38 default: 49 if enableHCL2Experiment == "" {
50 // Main-line behavior: always use the original HCL parser
51 switch ext(root) {
52 case ".tf", ".tf.json":
53 f = loadFileHcl
54 default:
55 }
56 } else {
57 // Experimental behavior: use the HCL2 parser if the opt-in comment
58 // is present.
59 switch ext(root) {
60 case ".tf":
61 // We need to sniff the file for the opt-in comment line to decide
62 // if the file is participating in the HCL2 experiment.
63 cf, err := os.Open(root)
64 if err != nil {
65 return nil, err
66 }
67 sc := bufio.NewScanner(cf)
68 for sc.Scan() {
69 if sc.Text() == "#terraform:hcl2" {
70 f = globalHCL2Loader.loadFile
71 }
72 }
73 if f == nil {
74 f = loadFileHcl
75 }
76 case ".tf.json":
77 f = loadFileHcl
78 default:
79 }
39 } 80 }
40 81
41 if f == nil { 82 if f == nil {
@@ -86,10 +127,7 @@ func (t *importTree) Close() error {
86func (t *importTree) ConfigTree() (*configTree, error) { 127func (t *importTree) ConfigTree() (*configTree, error) {
87 config, err := t.Raw.Config() 128 config, err := t.Raw.Config()
88 if err != nil { 129 if err != nil {
89 return nil, fmt.Errorf( 130 return nil, errwrap.Wrapf(fmt.Sprintf("Error loading %s: {{err}}", t.Path), err)
90 "Error loading %s: %s",
91 t.Path,
92 err)
93 } 131 }
94 132
95 // Build our result 133 // Build our result
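
The hunk above sniffs a .tf file for the literal "#terraform:hcl2" opt-in marker when the experiment is linked in. A self-contained sketch of the same idea, using a hypothetical helper that also closes the file and stops at the first match, might look like this:

package main

import (
	"bufio"
	"fmt"
	"os"
)

// hasHCL2OptIn is a hypothetical helper: it reports whether any line of the
// given file is exactly the "#terraform:hcl2" opt-in marker.
func hasHCL2OptIn(path string) (bool, error) {
	f, err := os.Open(path)
	if err != nil {
		return false, err
	}
	defer f.Close()

	sc := bufio.NewScanner(f)
	for sc.Scan() {
		if sc.Text() == "#terraform:hcl2" {
			return true, nil
		}
	}
	return false, sc.Err()
}

func main() {
	ok, err := hasHCL2OptIn("main.tf")
	fmt.Println(ok, err)
}
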
diff --git a/vendor/github.com/hashicorp/terraform/config/interpolate.go b/vendor/github.com/hashicorp/terraform/config/interpolate.go
index bbb3555..599e5ec 100644
--- a/vendor/github.com/hashicorp/terraform/config/interpolate.go
+++ b/vendor/github.com/hashicorp/terraform/config/interpolate.go
@@ -5,6 +5,8 @@ import (
5 "strconv" 5 "strconv"
6 "strings" 6 "strings"
7 7
8 "github.com/hashicorp/terraform/tfdiags"
9
8 "github.com/hashicorp/hil/ast" 10 "github.com/hashicorp/hil/ast"
9) 11)
10 12
@@ -14,6 +16,21 @@ import (
14// variables can come from: user variables, resources, etc. 16// variables can come from: user variables, resources, etc.
15type InterpolatedVariable interface { 17type InterpolatedVariable interface {
16 FullKey() string 18 FullKey() string
19 SourceRange() tfdiags.SourceRange
20}
21
22// varRange can be embedded into an InterpolatedVariable implementation to
23// implement the SourceRange method.
24type varRange struct {
25 rng tfdiags.SourceRange
26}
27
28func (r varRange) SourceRange() tfdiags.SourceRange {
29 return r.rng
30}
31
32func makeVarRange(rng tfdiags.SourceRange) varRange {
33 return varRange{rng}
17} 34}
18 35
19// CountVariable is a variable for referencing information about 36// CountVariable is a variable for referencing information about
@@ -21,6 +38,7 @@ type InterpolatedVariable interface {
21type CountVariable struct { 38type CountVariable struct {
22 Type CountValueType 39 Type CountValueType
23 key string 40 key string
41 varRange
24} 42}
25 43
26// CountValueType is the type of the count variable that is referenced. 44// CountValueType is the type of the count variable that is referenced.
@@ -37,6 +55,7 @@ type ModuleVariable struct {
37 Name string 55 Name string
38 Field string 56 Field string
39 key string 57 key string
58 varRange
40} 59}
41 60
42// A PathVariable is a variable that references path information about the 61// A PathVariable is a variable that references path information about the
@@ -44,6 +63,7 @@ type ModuleVariable struct {
44type PathVariable struct { 63type PathVariable struct {
45 Type PathValueType 64 Type PathValueType
46 key string 65 key string
66 varRange
47} 67}
48 68
49type PathValueType byte 69type PathValueType byte
@@ -67,6 +87,7 @@ type ResourceVariable struct {
67 Index int // Index for multi-variable: aws_instance.foo.1.id == 1 87 Index int // Index for multi-variable: aws_instance.foo.1.id == 1
68 88
69 key string 89 key string
90 varRange
70} 91}
71 92
72// SelfVariable is a variable that is referencing the same resource 93// SelfVariable is a variable that is referencing the same resource
@@ -75,6 +96,7 @@ type SelfVariable struct {
75 Field string 96 Field string
76 97
77 key string 98 key string
99 varRange
78} 100}
79 101
80// SimpleVariable is an unprefixed variable, which can show up when users have 102// SimpleVariable is an unprefixed variable, which can show up when users have
@@ -82,6 +104,7 @@ type SelfVariable struct {
82// internally. The template_file resource is an example of this. 104// internally. The template_file resource is an example of this.
83type SimpleVariable struct { 105type SimpleVariable struct {
84 Key string 106 Key string
107 varRange
85} 108}
86 109
87// TerraformVariable is a "terraform."-prefixed variable used to access 110// TerraformVariable is a "terraform."-prefixed variable used to access
@@ -89,6 +112,7 @@ type SimpleVariable struct {
89type TerraformVariable struct { 112type TerraformVariable struct {
90 Field string 113 Field string
91 key string 114 key string
115 varRange
92} 116}
93 117
94// A UserVariable is a variable that is referencing a user variable 118// A UserVariable is a variable that is referencing a user variable
@@ -99,6 +123,14 @@ type UserVariable struct {
99 Elem string 123 Elem string
100 124
101 key string 125 key string
126 varRange
127}
128
129// A LocalVariable is a variable that references a local value defined within
130// the current module, via a "locals" block. This looks like "${local.foo}".
131type LocalVariable struct {
132 Name string
133 varRange
102} 134}
103 135
104func NewInterpolatedVariable(v string) (InterpolatedVariable, error) { 136func NewInterpolatedVariable(v string) (InterpolatedVariable, error) {
@@ -112,6 +144,8 @@ func NewInterpolatedVariable(v string) (InterpolatedVariable, error) {
112 return NewTerraformVariable(v) 144 return NewTerraformVariable(v)
113 } else if strings.HasPrefix(v, "var.") { 145 } else if strings.HasPrefix(v, "var.") {
114 return NewUserVariable(v) 146 return NewUserVariable(v)
147 } else if strings.HasPrefix(v, "local.") {
148 return NewLocalVariable(v)
115 } else if strings.HasPrefix(v, "module.") { 149 } else if strings.HasPrefix(v, "module.") {
116 return NewModuleVariable(v) 150 return NewModuleVariable(v)
117 } else if !strings.ContainsRune(v, '.') { 151 } else if !strings.ContainsRune(v, '.') {
@@ -276,7 +310,7 @@ func (v *SelfVariable) GoString() string {
276} 310}
277 311
278func NewSimpleVariable(key string) (*SimpleVariable, error) { 312func NewSimpleVariable(key string) (*SimpleVariable, error) {
279 return &SimpleVariable{key}, nil 313 return &SimpleVariable{Key: key}, nil
280} 314}
281 315
282func (v *SimpleVariable) FullKey() string { 316func (v *SimpleVariable) FullKey() string {
@@ -331,6 +365,25 @@ func (v *UserVariable) GoString() string {
331 return fmt.Sprintf("*%#v", *v) 365 return fmt.Sprintf("*%#v", *v)
332} 366}
333 367
368func NewLocalVariable(key string) (*LocalVariable, error) {
369 name := key[len("local."):]
370 if idx := strings.Index(name, "."); idx > -1 {
371 return nil, fmt.Errorf("Can't use dot (.) attribute access in local.%s; use square bracket indexing", name)
372 }
373
374 return &LocalVariable{
375 Name: name,
376 }, nil
377}
378
379func (v *LocalVariable) FullKey() string {
380 return fmt.Sprintf("local.%s", v.Name)
381}
382
383func (v *LocalVariable) GoString() string {
384 return fmt.Sprintf("*%#v", *v)
385}
386
334// DetectVariables takes an AST root and returns all the interpolated 387// DetectVariables takes an AST root and returns all the interpolated
335// variables that are detected in the AST tree. 388// variables that are detected in the AST tree.
336func DetectVariables(root ast.Node) ([]InterpolatedVariable, error) { 389func DetectVariables(root ast.Node) ([]InterpolatedVariable, error) {
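
The interpolate.go changes above add "local."-prefixed variables to the parser. A minimal usage sketch (assuming the vendored github.com/hashicorp/terraform/config package) showing both the accepted form and the rejected dotted form:

package main

import (
	"fmt"

	"github.com/hashicorp/terraform/config"
)

func main() {
	// "local."-prefixed keys now resolve to a *config.LocalVariable.
	v, err := config.NewInterpolatedVariable("local.instance_count")
	if err != nil {
		panic(err)
	}
	fmt.Printf("%T %s\n", v, v.FullKey()) // *config.LocalVariable local.instance_count

	// Dot attribute access on a local value is rejected with an error.
	_, err = config.NewInterpolatedVariable("local.foo.bar")
	fmt.Println(err != nil) // true
}
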
diff --git a/vendor/github.com/hashicorp/terraform/config/interpolate_funcs.go b/vendor/github.com/hashicorp/terraform/config/interpolate_funcs.go
index a298cf2..421edb0 100644
--- a/vendor/github.com/hashicorp/terraform/config/interpolate_funcs.go
+++ b/vendor/github.com/hashicorp/terraform/config/interpolate_funcs.go
@@ -1,17 +1,23 @@
1package config 1package config
2 2
3import ( 3import (
4 "bytes"
5 "compress/gzip"
4 "crypto/md5" 6 "crypto/md5"
7 "crypto/rsa"
5 "crypto/sha1" 8 "crypto/sha1"
6 "crypto/sha256" 9 "crypto/sha256"
7 "crypto/sha512" 10 "crypto/sha512"
11 "crypto/x509"
8 "encoding/base64" 12 "encoding/base64"
9 "encoding/hex" 13 "encoding/hex"
10 "encoding/json" 14 "encoding/json"
15 "encoding/pem"
11 "fmt" 16 "fmt"
12 "io/ioutil" 17 "io/ioutil"
13 "math" 18 "math"
14 "net" 19 "net"
20 "net/url"
15 "path/filepath" 21 "path/filepath"
16 "regexp" 22 "regexp"
17 "sort" 23 "sort"
@@ -55,59 +61,74 @@ func listVariableValueToStringSlice(values []ast.Variable) ([]string, error) {
55// Funcs is the mapping of built-in functions for configuration. 61// Funcs is the mapping of built-in functions for configuration.
56func Funcs() map[string]ast.Function { 62func Funcs() map[string]ast.Function {
57 return map[string]ast.Function{ 63 return map[string]ast.Function{
58 "basename": interpolationFuncBasename(), 64 "abs": interpolationFuncAbs(),
59 "base64decode": interpolationFuncBase64Decode(), 65 "basename": interpolationFuncBasename(),
60 "base64encode": interpolationFuncBase64Encode(), 66 "base64decode": interpolationFuncBase64Decode(),
61 "base64sha256": interpolationFuncBase64Sha256(), 67 "base64encode": interpolationFuncBase64Encode(),
62 "base64sha512": interpolationFuncBase64Sha512(), 68 "base64gzip": interpolationFuncBase64Gzip(),
63 "bcrypt": interpolationFuncBcrypt(), 69 "base64sha256": interpolationFuncBase64Sha256(),
64 "ceil": interpolationFuncCeil(), 70 "base64sha512": interpolationFuncBase64Sha512(),
65 "chomp": interpolationFuncChomp(), 71 "bcrypt": interpolationFuncBcrypt(),
66 "cidrhost": interpolationFuncCidrHost(), 72 "ceil": interpolationFuncCeil(),
67 "cidrnetmask": interpolationFuncCidrNetmask(), 73 "chomp": interpolationFuncChomp(),
68 "cidrsubnet": interpolationFuncCidrSubnet(), 74 "cidrhost": interpolationFuncCidrHost(),
69 "coalesce": interpolationFuncCoalesce(), 75 "cidrnetmask": interpolationFuncCidrNetmask(),
70 "coalescelist": interpolationFuncCoalesceList(), 76 "cidrsubnet": interpolationFuncCidrSubnet(),
71 "compact": interpolationFuncCompact(), 77 "coalesce": interpolationFuncCoalesce(),
72 "concat": interpolationFuncConcat(), 78 "coalescelist": interpolationFuncCoalesceList(),
73 "contains": interpolationFuncContains(), 79 "compact": interpolationFuncCompact(),
74 "dirname": interpolationFuncDirname(), 80 "concat": interpolationFuncConcat(),
75 "distinct": interpolationFuncDistinct(), 81 "contains": interpolationFuncContains(),
76 "element": interpolationFuncElement(), 82 "dirname": interpolationFuncDirname(),
77 "file": interpolationFuncFile(), 83 "distinct": interpolationFuncDistinct(),
78 "matchkeys": interpolationFuncMatchKeys(), 84 "element": interpolationFuncElement(),
79 "floor": interpolationFuncFloor(), 85 "chunklist": interpolationFuncChunklist(),
80 "format": interpolationFuncFormat(), 86 "file": interpolationFuncFile(),
81 "formatlist": interpolationFuncFormatList(), 87 "filebase64sha256": interpolationFuncMakeFileHash(interpolationFuncBase64Sha256()),
82 "index": interpolationFuncIndex(), 88 "filebase64sha512": interpolationFuncMakeFileHash(interpolationFuncBase64Sha512()),
83 "join": interpolationFuncJoin(), 89 "filemd5": interpolationFuncMakeFileHash(interpolationFuncMd5()),
84 "jsonencode": interpolationFuncJSONEncode(), 90 "filesha1": interpolationFuncMakeFileHash(interpolationFuncSha1()),
85 "length": interpolationFuncLength(), 91 "filesha256": interpolationFuncMakeFileHash(interpolationFuncSha256()),
86 "list": interpolationFuncList(), 92 "filesha512": interpolationFuncMakeFileHash(interpolationFuncSha512()),
87 "log": interpolationFuncLog(), 93 "matchkeys": interpolationFuncMatchKeys(),
88 "lower": interpolationFuncLower(), 94 "flatten": interpolationFuncFlatten(),
89 "map": interpolationFuncMap(), 95 "floor": interpolationFuncFloor(),
90 "max": interpolationFuncMax(), 96 "format": interpolationFuncFormat(),
91 "md5": interpolationFuncMd5(), 97 "formatlist": interpolationFuncFormatList(),
92 "merge": interpolationFuncMerge(), 98 "indent": interpolationFuncIndent(),
93 "min": interpolationFuncMin(), 99 "index": interpolationFuncIndex(),
94 "pathexpand": interpolationFuncPathExpand(), 100 "join": interpolationFuncJoin(),
95 "pow": interpolationFuncPow(), 101 "jsonencode": interpolationFuncJSONEncode(),
96 "uuid": interpolationFuncUUID(), 102 "length": interpolationFuncLength(),
97 "replace": interpolationFuncReplace(), 103 "list": interpolationFuncList(),
98 "sha1": interpolationFuncSha1(), 104 "log": interpolationFuncLog(),
99 "sha256": interpolationFuncSha256(), 105 "lower": interpolationFuncLower(),
100 "sha512": interpolationFuncSha512(), 106 "map": interpolationFuncMap(),
101 "signum": interpolationFuncSignum(), 107 "max": interpolationFuncMax(),
102 "slice": interpolationFuncSlice(), 108 "md5": interpolationFuncMd5(),
103 "sort": interpolationFuncSort(), 109 "merge": interpolationFuncMerge(),
104 "split": interpolationFuncSplit(), 110 "min": interpolationFuncMin(),
105 "substr": interpolationFuncSubstr(), 111 "pathexpand": interpolationFuncPathExpand(),
106 "timestamp": interpolationFuncTimestamp(), 112 "pow": interpolationFuncPow(),
107 "title": interpolationFuncTitle(), 113 "uuid": interpolationFuncUUID(),
108 "trimspace": interpolationFuncTrimSpace(), 114 "replace": interpolationFuncReplace(),
109 "upper": interpolationFuncUpper(), 115 "rsadecrypt": interpolationFuncRsaDecrypt(),
110 "zipmap": interpolationFuncZipMap(), 116 "sha1": interpolationFuncSha1(),
117 "sha256": interpolationFuncSha256(),
118 "sha512": interpolationFuncSha512(),
119 "signum": interpolationFuncSignum(),
120 "slice": interpolationFuncSlice(),
121 "sort": interpolationFuncSort(),
122 "split": interpolationFuncSplit(),
123 "substr": interpolationFuncSubstr(),
124 "timestamp": interpolationFuncTimestamp(),
125 "timeadd": interpolationFuncTimeAdd(),
126 "title": interpolationFuncTitle(),
127 "transpose": interpolationFuncTranspose(),
128 "trimspace": interpolationFuncTrimSpace(),
129 "upper": interpolationFuncUpper(),
130 "urlencode": interpolationFuncURLEncode(),
131 "zipmap": interpolationFuncZipMap(),
111 } 132 }
112} 133}
113 134
@@ -669,6 +690,21 @@ func interpolationFuncFormatList() ast.Function {
669 } 690 }
670} 691}
671 692
693// interpolationFuncIndent indents a multi-line string with the
694// specified number of spaces
695func interpolationFuncIndent() ast.Function {
696 return ast.Function{
697 ArgTypes: []ast.Type{ast.TypeInt, ast.TypeString},
698 ReturnType: ast.TypeString,
699 Callback: func(args []interface{}) (interface{}, error) {
700 spaces := args[0].(int)
701 data := args[1].(string)
702 pad := strings.Repeat(" ", spaces)
703 return strings.Replace(data, "\n", "\n"+pad, -1), nil
704 },
705 }
706}
707
672// interpolationFuncIndex implements the "index" function that allows one to 708// interpolationFuncIndex implements the "index" function that allows one to
673// find the index of a specific element in a list 709// find the index of a specific element in a list
674func interpolationFuncIndex() ast.Function { 710func interpolationFuncIndex() ast.Function {
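
Note that the indent callback above only pads characters that follow a newline, so the first line of the input is left untouched (presumably so the result can be spliced after existing text). A standalone sketch of the same behaviour:

package main

import (
	"fmt"
	"strings"
)

// indent mirrors the callback above: every line after the first is prefixed
// with the requested number of spaces, because only text following "\n" is
// padded.
func indent(spaces int, s string) string {
	pad := strings.Repeat(" ", spaces)
	return strings.Replace(s, "\n", "\n"+pad, -1)
}

func main() {
	fmt.Printf("%q\n", indent(2, "a\nb\nc")) // "a\n  b\n  c"
}
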
@@ -823,8 +859,7 @@ func interpolationFuncJoin() ast.Function {
823} 859}
824 860
825// interpolationFuncJSONEncode implements the "jsonencode" function that encodes 861// interpolationFuncJSONEncode implements the "jsonencode" function that encodes
826// a string, list, or map as its JSON representation. For now, values in the 862// a string, list, or map as its JSON representation.
827// list or map may only be strings.
828func interpolationFuncJSONEncode() ast.Function { 863func interpolationFuncJSONEncode() ast.Function {
829 return ast.Function{ 864 return ast.Function{
830 ArgTypes: []ast.Type{ast.TypeAny}, 865 ArgTypes: []ast.Type{ast.TypeAny},
@@ -837,28 +872,36 @@ func interpolationFuncJSONEncode() ast.Function {
837 toEncode = typedArg 872 toEncode = typedArg
838 873
839 case []ast.Variable: 874 case []ast.Variable:
840 // We preallocate the list here. Note that it's important that in
841 // the length 0 case, we have an empty list rather than nil, as
842 // they encode differently.
843 // XXX It would be nice to support arbitrarily nested data here. Is
844 // there an inverse of hil.InterfaceToVariable?
845 strings := make([]string, len(typedArg)) 875 strings := make([]string, len(typedArg))
846 876
847 for i, v := range typedArg { 877 for i, v := range typedArg {
848 if v.Type != ast.TypeString { 878 if v.Type != ast.TypeString {
849 return "", fmt.Errorf("list elements must be strings") 879 variable, _ := hil.InterfaceToVariable(typedArg)
880 toEncode, _ = hil.VariableToInterface(variable)
881
882 jEnc, err := json.Marshal(toEncode)
883 if err != nil {
884 return "", fmt.Errorf("failed to encode JSON data '%s'", toEncode)
885 }
886 return string(jEnc), nil
887
850 } 888 }
851 strings[i] = v.Value.(string) 889 strings[i] = v.Value.(string)
852 } 890 }
853 toEncode = strings 891 toEncode = strings
854 892
855 case map[string]ast.Variable: 893 case map[string]ast.Variable:
856 // XXX It would be nice to support arbitrarily nested data here. Is
857 // there an inverse of hil.InterfaceToVariable?
858 stringMap := make(map[string]string) 894 stringMap := make(map[string]string)
859 for k, v := range typedArg { 895 for k, v := range typedArg {
860 if v.Type != ast.TypeString { 896 if v.Type != ast.TypeString {
861 return "", fmt.Errorf("map values must be strings") 897 variable, _ := hil.InterfaceToVariable(typedArg)
898 toEncode, _ = hil.VariableToInterface(variable)
899
900 jEnc, err := json.Marshal(toEncode)
901 if err != nil {
902 return "", fmt.Errorf("failed to encode JSON data '%s'", toEncode)
903 }
904 return string(jEnc), nil
862 } 905 }
863 stringMap[k] = v.Value.(string) 906 stringMap[k] = v.Value.(string)
864 } 907 }
@@ -1098,6 +1141,56 @@ func interpolationFuncElement() ast.Function {
1098 } 1141 }
1099} 1142}
1100 1143
1144// returns the `list` items chunked by `size`.
1145func interpolationFuncChunklist() ast.Function {
1146 return ast.Function{
1147 ArgTypes: []ast.Type{
1148 ast.TypeList, // inputList
1149 ast.TypeInt, // size
1150 },
1151 ReturnType: ast.TypeList,
1152 Callback: func(args []interface{}) (interface{}, error) {
1153 output := make([]ast.Variable, 0)
1154
1155 values, _ := args[0].([]ast.Variable)
1156 size, _ := args[1].(int)
1157
1158 // errors if size is negative
1159 if size < 0 {
1160 return nil, fmt.Errorf("The size argument must be positive")
1161 }
1162
1163 // if size is 0, returns a list made of the initial list
1164 if size == 0 {
1165 output = append(output, ast.Variable{
1166 Type: ast.TypeList,
1167 Value: values,
1168 })
1169 return output, nil
1170 }
1171
1172 variables := make([]ast.Variable, 0)
1173 chunk := ast.Variable{
1174 Type: ast.TypeList,
1175 Value: variables,
1176 }
1177 l := len(values)
1178 for i, v := range values {
1179 variables = append(variables, v)
1180
1181 // Close out a chunk once it reaches the requested size, or when we hit the end of the values
1182 if (i+1)%size == 0 || (i+1) == l {
1183 chunk.Value = variables
1184 output = append(output, chunk)
1185 variables = make([]ast.Variable, 0)
1186 }
1187 }
1188
1189 return output, nil
1190 },
1191 }
1192}
1193
1101// interpolationFuncKeys implements the "keys" function that yields a list of 1194// interpolationFuncKeys implements the "keys" function that yields a list of
1102// keys of map types within a Terraform configuration. 1195// keys of map types within a Terraform configuration.
1103func interpolationFuncKeys(vs map[string]ast.Variable) ast.Function { 1196func interpolationFuncKeys(vs map[string]ast.Variable) ast.Function {
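
A plain-Go sketch of the chunklist behaviour implemented above: the list is split into size-sized groups, with the final group keeping any remainder. (The real function additionally wraps the whole list when size is 0 and rejects negative sizes; this sketch collapses both cases.)

package main

import "fmt"

// chunk splits values into groups of at most size elements.
func chunk(values []string, size int) [][]string {
	if size <= 0 {
		return [][]string{values}
	}
	var out [][]string
	for i := 0; i < len(values); i += size {
		end := i + size
		if end > len(values) {
			end = len(values)
		}
		out = append(out, values[i:end])
	}
	return out
}

func main() {
	fmt.Println(chunk([]string{"a", "b", "c", "d", "e"}, 2)) // [[a b] [c d] [e]]
}
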
@@ -1197,6 +1290,32 @@ func interpolationFuncBase64Decode() ast.Function {
1197 } 1290 }
1198} 1291}
1199 1292
1293// interpolationFuncBase64Gzip implements the "base64gzip" function, which
1294// gzip-compresses a string and encodes the result using base64
1295func interpolationFuncBase64Gzip() ast.Function {
1296 return ast.Function{
1297 ArgTypes: []ast.Type{ast.TypeString},
1298 ReturnType: ast.TypeString,
1299 Callback: func(args []interface{}) (interface{}, error) {
1300 s := args[0].(string)
1301
1302 var b bytes.Buffer
1303 gz := gzip.NewWriter(&b)
1304 if _, err := gz.Write([]byte(s)); err != nil {
1305 return "", fmt.Errorf("failed to write gzip raw data: '%s'", s)
1306 }
1307 if err := gz.Flush(); err != nil {
1308 return "", fmt.Errorf("failed to flush gzip writer: '%s'", s)
1309 }
1310 if err := gz.Close(); err != nil {
1311 return "", fmt.Errorf("failed to close gzip writer: '%s'", s)
1312 }
1313
1314 return base64.StdEncoding.EncodeToString(b.Bytes()), nil
1315 },
1316 }
1317}
1318
1200// interpolationFuncLower implements the "lower" function that does 1319// interpolationFuncLower implements the "lower" function that does
1201// string lower casing. 1320// string lower casing.
1202func interpolationFuncLower() ast.Function { 1321func interpolationFuncLower() ast.Function {
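
The base64gzip callback above combines compress/gzip with base64 encoding. A minimal standalone equivalent (gzip.Writer.Close already flushes pending data, so a separate Flush call is not strictly required):

package main

import (
	"bytes"
	"compress/gzip"
	"encoding/base64"
	"fmt"
)

// base64gzip gzip-compresses s and base64-encodes the compressed bytes.
func base64gzip(s string) (string, error) {
	var b bytes.Buffer
	gz := gzip.NewWriter(&b)
	if _, err := gz.Write([]byte(s)); err != nil {
		return "", err
	}
	if err := gz.Close(); err != nil { // Close also flushes
		return "", err
	}
	return base64.StdEncoding.EncodeToString(b.Bytes()), nil
}

func main() {
	out, err := base64gzip("hello hello hello")
	fmt.Println(out, err)
}
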
@@ -1396,6 +1515,29 @@ func interpolationFuncTimestamp() ast.Function {
1396 } 1515 }
1397} 1516}
1398 1517
1518func interpolationFuncTimeAdd() ast.Function {
1519 return ast.Function{
1520 ArgTypes: []ast.Type{
1521 ast.TypeString, // input timestamp string in RFC3339 format
1522 ast.TypeString, // duration to add to input timestamp that should be parsable by time.ParseDuration
1523 },
1524 ReturnType: ast.TypeString,
1525 Callback: func(args []interface{}) (interface{}, error) {
1526
1527 ts, err := time.Parse(time.RFC3339, args[0].(string))
1528 if err != nil {
1529 return nil, err
1530 }
1531 duration, err := time.ParseDuration(args[1].(string))
1532 if err != nil {
1533 return nil, err
1534 }
1535
1536 return ts.Add(duration).Format(time.RFC3339), nil
1537 },
1538 }
1539}
1540
1399// interpolationFuncTitle implements the "title" function that returns a copy of the 1541// interpolationFuncTitle implements the "title" function that returns a copy of the
1400// string in which first characters of all the words are capitalized. 1542// string in which first characters of all the words are capitalized.
1401func interpolationFuncTitle() ast.Function { 1543func interpolationFuncTitle() ast.Function {
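
The timeadd callback above parses an RFC3339 timestamp and a Go-style duration string; the same two standard-library calls can be exercised directly:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Same parsing steps as the timeadd callback: an RFC3339 timestamp plus
	// a duration string accepted by time.ParseDuration.
	ts, err := time.Parse(time.RFC3339, "2017-11-22T00:00:00Z")
	if err != nil {
		panic(err)
	}
	d, err := time.ParseDuration("10m")
	if err != nil {
		panic(err)
	}
	fmt.Println(ts.Add(d).Format(time.RFC3339)) // 2017-11-22T00:10:00Z
}
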
@@ -1441,7 +1583,7 @@ func interpolationFuncSubstr() ast.Function {
1441 return nil, fmt.Errorf("length should be a non-negative integer") 1583 return nil, fmt.Errorf("length should be a non-negative integer")
1442 } 1584 }
1443 1585
1444 if offset > len(str) { 1586 if offset > len(str) || offset < 0 {
1445 return nil, fmt.Errorf("offset cannot be larger than the length of the string") 1587 return nil, fmt.Errorf("offset cannot be larger than the length of the string")
1446 } 1588 }
1447 1589
@@ -1453,3 +1595,160 @@ func interpolationFuncSubstr() ast.Function {
1453 }, 1595 },
1454 } 1596 }
1455} 1597}
1598
1599// Flatten until it's not ast.TypeList
1600func flattener(finalList []ast.Variable, flattenList []ast.Variable) []ast.Variable {
1601 for _, val := range flattenList {
1602 if val.Type == ast.TypeList {
1603 finalList = flattener(finalList, val.Value.([]ast.Variable))
1604 } else {
1605 finalList = append(finalList, val)
1606 }
1607 }
1608 return finalList
1609}
1610
1611// Flatten to single list
1612func interpolationFuncFlatten() ast.Function {
1613 return ast.Function{
1614 ArgTypes: []ast.Type{ast.TypeList},
1615 ReturnType: ast.TypeList,
1616 Variadic: false,
1617 Callback: func(args []interface{}) (interface{}, error) {
1618 inputList := args[0].([]ast.Variable)
1619
1620 var outputList []ast.Variable
1621 return flattener(outputList, inputList), nil
1622 },
1623 }
1624}
1625
1626func interpolationFuncURLEncode() ast.Function {
1627 return ast.Function{
1628 ArgTypes: []ast.Type{ast.TypeString},
1629 ReturnType: ast.TypeString,
1630 Callback: func(args []interface{}) (interface{}, error) {
1631 s := args[0].(string)
1632 return url.QueryEscape(s), nil
1633 },
1634 }
1635}
1636
1637// interpolationFuncTranspose implements the "transpose" function
1638// that converts a map (string,list) to a map (string,list) where
1639// the unique values of the original lists become the keys of the
1640// new map and the keys of the original map become values for the
1641// corresponding new keys.
1642func interpolationFuncTranspose() ast.Function {
1643 return ast.Function{
1644 ArgTypes: []ast.Type{ast.TypeMap},
1645 ReturnType: ast.TypeMap,
1646 Callback: func(args []interface{}) (interface{}, error) {
1647
1648 inputMap := args[0].(map[string]ast.Variable)
1649 outputMap := make(map[string]ast.Variable)
1650 tmpMap := make(map[string][]string)
1651
1652 for inKey, inVal := range inputMap {
1653 if inVal.Type != ast.TypeList {
1654 return nil, fmt.Errorf("transpose requires a map of lists of strings")
1655 }
1656 values := inVal.Value.([]ast.Variable)
1657 for _, listVal := range values {
1658 if listVal.Type != ast.TypeString {
1659 return nil, fmt.Errorf("transpose requires the given map values to be lists of strings")
1660 }
1661 outKey := listVal.Value.(string)
1662 if _, ok := tmpMap[outKey]; !ok {
1663 tmpMap[outKey] = make([]string, 0)
1664 }
1665 outVal := tmpMap[outKey]
1666 outVal = append(outVal, inKey)
1667 sort.Strings(outVal)
1668 tmpMap[outKey] = outVal
1669 }
1670 }
1671
1672 for outKey, outVal := range tmpMap {
1673 values := make([]ast.Variable, 0)
1674 for _, v := range outVal {
1675 values = append(values, ast.Variable{Type: ast.TypeString, Value: v})
1676 }
1677 outputMap[outKey] = ast.Variable{Type: ast.TypeList, Value: values}
1678 }
1679 return outputMap, nil
1680 },
1681 }
1682}
1683
1684// interpolationFuncAbs returns the absolute value of a given float.
1685func interpolationFuncAbs() ast.Function {
1686 return ast.Function{
1687 ArgTypes: []ast.Type{ast.TypeFloat},
1688 ReturnType: ast.TypeFloat,
1689 Callback: func(args []interface{}) (interface{}, error) {
1690 return math.Abs(args[0].(float64)), nil
1691 },
1692 }
1693}
1694
1695// interpolationFuncRsaDecrypt implements the "rsadecrypt" function that does
1696// RSA decryption.
1697func interpolationFuncRsaDecrypt() ast.Function {
1698 return ast.Function{
1699 ArgTypes: []ast.Type{ast.TypeString, ast.TypeString},
1700 ReturnType: ast.TypeString,
1701 Callback: func(args []interface{}) (interface{}, error) {
1702 s := args[0].(string)
1703 key := args[1].(string)
1704
1705 b, err := base64.StdEncoding.DecodeString(s)
1706 if err != nil {
1707 return "", fmt.Errorf("Failed to decode input %q: cipher text must be base64-encoded", s)
1708 }
1709
1710 block, _ := pem.Decode([]byte(key))
1711 if block == nil {
1712 return "", fmt.Errorf("Failed to read key %q: no key found", key)
1713 }
1714 if block.Headers["Proc-Type"] == "4,ENCRYPTED" {
1715 return "", fmt.Errorf(
1716 "Failed to read key %q: password protected keys are\n"+
1717 "not supported. Please decrypt the key prior to use.", key)
1718 }
1719
1720 x509Key, err := x509.ParsePKCS1PrivateKey(block.Bytes)
1721 if err != nil {
1722 return "", err
1723 }
1724
1725 out, err := rsa.DecryptPKCS1v15(nil, x509Key, b)
1726 if err != nil {
1727 return "", err
1728 }
1729
1730 return string(out), nil
1731 },
1732 }
1733}
1734
1735// interpolationFuncMakeFileHash constructs a function that hashes the contents
1736// of a file by combining the implementations of the file(...) function and
1737// a given other function that is assumed to take a single string argument and
1738// return a hash value.
1739func interpolationFuncMakeFileHash(hashFunc ast.Function) ast.Function {
1740 fileFunc := interpolationFuncFile()
1741
1742 return ast.Function{
1743 ArgTypes: []ast.Type{ast.TypeString},
1744 ReturnType: ast.TypeString,
1745 Callback: func(args []interface{}) (interface{}, error) {
1746 filename := args[0].(string)
1747 contents, err := fileFunc.Callback([]interface{}{filename})
1748 if err != nil {
1749 return nil, err
1750 }
1751 return hashFunc.Callback([]interface{}{contents})
1752 },
1753 }
1754}
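
The transpose function above inverts a map of string lists: every string in the input lists becomes an output key, and the original keys become its (sorted) values. A plain-Go sketch of the same inversion:

package main

import (
	"fmt"
	"sort"
)

// transpose inverts a map of string lists, sorting each result list.
func transpose(in map[string][]string) map[string][]string {
	out := make(map[string][]string)
	for inKey, values := range in {
		for _, v := range values {
			out[v] = append(out[v], inKey)
		}
	}
	for _, keys := range out {
		sort.Strings(keys)
	}
	return out
}

func main() {
	fmt.Println(transpose(map[string][]string{
		"dev":  {"alice", "bob"},
		"prod": {"alice"},
	}))
	// map[alice:[dev prod] bob:[dev]]
}
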
diff --git a/vendor/github.com/hashicorp/terraform/config/interpolate_walk.go b/vendor/github.com/hashicorp/terraform/config/interpolate_walk.go
index ead3d10..66a677d 100644
--- a/vendor/github.com/hashicorp/terraform/config/interpolate_walk.go
+++ b/vendor/github.com/hashicorp/terraform/config/interpolate_walk.go
@@ -271,9 +271,7 @@ func (w *interpolationWalker) splitSlice() {
271 result = append(result, val.Value) 271 result = append(result, val.Value)
272 } 272 }
273 case []interface{}: 273 case []interface{}:
274 for _, element := range val { 274 result = append(result, val...)
275 result = append(result, element)
276 }
277 default: 275 default:
278 result = append(result, v) 276 result = append(result, v)
279 } 277 }
diff --git a/vendor/github.com/hashicorp/terraform/config/loader.go b/vendor/github.com/hashicorp/terraform/config/loader.go
index 5dd7d46..6e34781 100644
--- a/vendor/github.com/hashicorp/terraform/config/loader.go
+++ b/vendor/github.com/hashicorp/terraform/config/loader.go
@@ -80,7 +80,7 @@ func LoadDir(root string) (*Config, error) {
80 if err != nil { 80 if err != nil {
81 return nil, err 81 return nil, err
82 } 82 }
83 if len(files) == 0 { 83 if len(files) == 0 && len(overrides) == 0 {
84 return nil, &ErrNoConfigsFound{Dir: root} 84 return nil, &ErrNoConfigsFound{Dir: root}
85 } 85 }
86 86
@@ -112,6 +112,9 @@ func LoadDir(root string) (*Config, error) {
112 result = c 112 result = c
113 } 113 }
114 } 114 }
115 if len(files) == 0 {
116 result = &Config{}
117 }
115 118
116 // Load all the overrides, and merge them into the config 119 // Load all the overrides, and merge them into the config
117 for _, f := range overrides { 120 for _, f := range overrides {
diff --git a/vendor/github.com/hashicorp/terraform/config/loader_hcl.go b/vendor/github.com/hashicorp/terraform/config/loader_hcl.go
index e85e493..68cffe2 100644
--- a/vendor/github.com/hashicorp/terraform/config/loader_hcl.go
+++ b/vendor/github.com/hashicorp/terraform/config/loader_hcl.go
@@ -17,10 +17,20 @@ type hclConfigurable struct {
17 Root *ast.File 17 Root *ast.File
18} 18}
19 19
20var ReservedDataSourceFields = []string{
21 "connection",
22 "count",
23 "depends_on",
24 "lifecycle",
25 "provider",
26 "provisioner",
27}
28
20var ReservedResourceFields = []string{ 29var ReservedResourceFields = []string{
21 "connection", 30 "connection",
22 "count", 31 "count",
23 "depends_on", 32 "depends_on",
33 "id",
24 "lifecycle", 34 "lifecycle",
25 "provider", 35 "provider",
26 "provisioner", 36 "provisioner",
@@ -35,6 +45,7 @@ func (t *hclConfigurable) Config() (*Config, error) {
35 validKeys := map[string]struct{}{ 45 validKeys := map[string]struct{}{
36 "atlas": struct{}{}, 46 "atlas": struct{}{},
37 "data": struct{}{}, 47 "data": struct{}{},
48 "locals": struct{}{},
38 "module": struct{}{}, 49 "module": struct{}{},
39 "output": struct{}{}, 50 "output": struct{}{},
40 "provider": struct{}{}, 51 "provider": struct{}{},
@@ -70,6 +81,15 @@ func (t *hclConfigurable) Config() (*Config, error) {
70 } 81 }
71 } 82 }
72 83
84 // Build local values
85 if locals := list.Filter("locals"); len(locals.Items) > 0 {
86 var err error
87 config.Locals, err = loadLocalsHcl(locals)
88 if err != nil {
89 return nil, err
90 }
91 }
92
73 // Get Atlas configuration 93 // Get Atlas configuration
74 if atlas := list.Filter("atlas"); len(atlas.Items) > 0 { 94 if atlas := list.Filter("atlas"); len(atlas.Items) > 0 {
75 var err error 95 var err error
@@ -373,9 +393,6 @@ func loadModulesHcl(list *ast.ObjectList) ([]*Module, error) {
373 err) 393 err)
374 } 394 }
375 395
376 // Remove the fields we handle specially
377 delete(config, "source")
378
379 rawConfig, err := NewRawConfig(config) 396 rawConfig, err := NewRawConfig(config)
380 if err != nil { 397 if err != nil {
381 return nil, fmt.Errorf( 398 return nil, fmt.Errorf(
@@ -384,7 +401,11 @@ func loadModulesHcl(list *ast.ObjectList) ([]*Module, error) {
384 err) 401 err)
385 } 402 }
386 403
387 // If we have a count, then figure it out 404 // Remove the fields we handle specially
405 delete(config, "source")
406 delete(config, "version")
407 delete(config, "providers")
408
388 var source string 409 var source string
389 if o := listVal.Filter("source"); len(o.Items) > 0 { 410 if o := listVal.Filter("source"); len(o.Items) > 0 {
390 err = hcl.DecodeObject(&source, o.Items[0].Val) 411 err = hcl.DecodeObject(&source, o.Items[0].Val)
@@ -396,9 +417,33 @@ func loadModulesHcl(list *ast.ObjectList) ([]*Module, error) {
396 } 417 }
397 } 418 }
398 419
420 var version string
421 if o := listVal.Filter("version"); len(o.Items) > 0 {
422 err = hcl.DecodeObject(&version, o.Items[0].Val)
423 if err != nil {
424 return nil, fmt.Errorf(
425 "Error parsing version for %s: %s",
426 k,
427 err)
428 }
429 }
430
431 var providers map[string]string
432 if o := listVal.Filter("providers"); len(o.Items) > 0 {
433 err = hcl.DecodeObject(&providers, o.Items[0].Val)
434 if err != nil {
435 return nil, fmt.Errorf(
436 "Error parsing providers for %s: %s",
437 k,
438 err)
439 }
440 }
441
399 result = append(result, &Module{ 442 result = append(result, &Module{
400 Name: k, 443 Name: k,
401 Source: source, 444 Source: source,
445 Version: version,
446 Providers: providers,
402 RawConfig: rawConfig, 447 RawConfig: rawConfig,
403 }) 448 })
404 } 449 }
@@ -406,6 +451,59 @@ func loadModulesHcl(list *ast.ObjectList) ([]*Module, error) {
406 return result, nil 451 return result, nil
407} 452}
408 453
454// loadLocalsHcl recurses into the given HCL object and turns it into
455// a list of locals.
456func loadLocalsHcl(list *ast.ObjectList) ([]*Local, error) {
457
458 result := make([]*Local, 0, len(list.Items))
459
460 for _, block := range list.Items {
461 if len(block.Keys) > 0 {
462 return nil, fmt.Errorf(
463 "locals block at %s should not have label %q",
464 block.Pos(), block.Keys[0].Token.Value(),
465 )
466 }
467
468 blockObj, ok := block.Val.(*ast.ObjectType)
469 if !ok {
470 return nil, fmt.Errorf("locals value at %s should be a block", block.Val.Pos())
471 }
472
473 // blockObj now contains directly our local decls
474 for _, item := range blockObj.List.Items {
475 if len(item.Keys) != 1 {
476 return nil, fmt.Errorf("local declaration at %s may not be a block", item.Val.Pos())
477 }
478
479 // By the time we get here there can only be one item left, but
480 // we'll decode into a map anyway because it's a convenient way
481 // to extract both the key and the value robustly.
482 kv := map[string]interface{}{}
483 hcl.DecodeObject(&kv, item)
484 for k, v := range kv {
485 rawConfig, err := NewRawConfig(map[string]interface{}{
486 "value": v,
487 })
488
489 if err != nil {
490 return nil, fmt.Errorf(
491 "error parsing local value %q at %s: %s",
492 k, item.Val.Pos(), err,
493 )
494 }
495
496 result = append(result, &Local{
497 Name: k,
498 RawConfig: rawConfig,
499 })
500 }
501 }
502 }
503
504 return result, nil
505}
506
409// LoadOutputsHcl recurses into the given HCL object and turns 507// LoadOutputsHcl recurses into the given HCL object and turns
410// it into a mapping of outputs. 508// it into a mapping of outputs.
411func loadOutputsHcl(list *ast.ObjectList) ([]*Output, error) { 509func loadOutputsHcl(list *ast.ObjectList) ([]*Output, error) {
@@ -434,6 +532,7 @@ func loadOutputsHcl(list *ast.ObjectList) ([]*Output, error) {
434 532
435 // Delete special keys 533 // Delete special keys
436 delete(config, "depends_on") 534 delete(config, "depends_on")
535 delete(config, "description")
437 536
438 rawConfig, err := NewRawConfig(config) 537 rawConfig, err := NewRawConfig(config)
439 if err != nil { 538 if err != nil {
@@ -455,10 +554,23 @@ func loadOutputsHcl(list *ast.ObjectList) ([]*Output, error) {
455 } 554 }
456 } 555 }
457 556
557 // If we have a description field, then filter that
558 var description string
559 if o := listVal.Filter("description"); len(o.Items) > 0 {
560 err := hcl.DecodeObject(&description, o.Items[0].Val)
561 if err != nil {
562 return nil, fmt.Errorf(
563 "Error reading description for output %q: %s",
564 n,
565 err)
566 }
567 }
568
458 result = append(result, &Output{ 569 result = append(result, &Output{
459 Name: n, 570 Name: n,
460 RawConfig: rawConfig, 571 RawConfig: rawConfig,
461 DependsOn: dependsOn, 572 DependsOn: dependsOn,
573 Description: description,
462 }) 574 })
463 } 575 }
464 576
diff --git a/vendor/github.com/hashicorp/terraform/config/loader_hcl2.go b/vendor/github.com/hashicorp/terraform/config/loader_hcl2.go
new file mode 100644
index 0000000..4f9f129
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/loader_hcl2.go
@@ -0,0 +1,473 @@
1package config
2
3import (
4 "fmt"
5 "sort"
6 "strings"
7
8 gohcl2 "github.com/hashicorp/hcl2/gohcl"
9 hcl2 "github.com/hashicorp/hcl2/hcl"
10 hcl2parse "github.com/hashicorp/hcl2/hclparse"
11 "github.com/hashicorp/terraform/config/hcl2shim"
12 "github.com/zclconf/go-cty/cty"
13)
14
15// hcl2Configurable is an implementation of configurable that knows
16// how to turn a HCL Body into a *Config object.
17type hcl2Configurable struct {
18 SourceFilename string
19 Body hcl2.Body
20}
21
22// hcl2Loader is a wrapper around a HCL parser that provides a fileLoaderFunc.
23type hcl2Loader struct {
24 Parser *hcl2parse.Parser
25}
26
27// For the moment we'll just have a global loader since we don't have anywhere
28// better to stash this.
29// TODO: refactor the loader API so that it uses some sort of object we can
30// stash the parser inside.
31var globalHCL2Loader = newHCL2Loader()
32
33// newHCL2Loader creates a new hcl2Loader containing a new HCL Parser.
34//
35// HCL parsers retain information about files that are loaded to aid in
36// producing diagnostic messages, so all files within a single configuration
37// should be loaded with the same parser to ensure the availability of
38// full diagnostic information.
39func newHCL2Loader() hcl2Loader {
40 return hcl2Loader{
41 Parser: hcl2parse.NewParser(),
42 }
43}
44
45// loadFile is a fileLoaderFunc that knows how to read a HCL2 file and turn it
46// into a hcl2Configurable.
47func (l hcl2Loader) loadFile(filename string) (configurable, []string, error) {
48 var f *hcl2.File
49 var diags hcl2.Diagnostics
50 if strings.HasSuffix(filename, ".json") {
51 f, diags = l.Parser.ParseJSONFile(filename)
52 } else {
53 f, diags = l.Parser.ParseHCLFile(filename)
54 }
55 if diags.HasErrors() {
56 // Return diagnostics as an error; callers may type-assert this to
57 // recover the original diagnostics, if it doesn't end up wrapped
58 // in another error.
59 return nil, nil, diags
60 }
61
62 return &hcl2Configurable{
63 SourceFilename: filename,
64 Body: f.Body,
65 }, nil, nil
66}
67
68func (t *hcl2Configurable) Config() (*Config, error) {
69 config := &Config{}
70
71 // these structs are used only for the initial shallow decoding; we'll
72 // expand this into the main, public-facing config structs afterwards.
73 type atlas struct {
74 Name string `hcl:"name"`
75 Include *[]string `hcl:"include"`
76 Exclude *[]string `hcl:"exclude"`
77 }
78 type provider struct {
79 Name string `hcl:"name,label"`
80 Alias *string `hcl:"alias,attr"`
81 Version *string `hcl:"version,attr"`
82 Config hcl2.Body `hcl:",remain"`
83 }
84 type module struct {
85 Name string `hcl:"name,label"`
86 Source string `hcl:"source,attr"`
87 Version *string `hcl:"version,attr"`
88 Providers *map[string]string `hcl:"providers,attr"`
89 Config hcl2.Body `hcl:",remain"`
90 }
91 type resourceLifecycle struct {
92 CreateBeforeDestroy *bool `hcl:"create_before_destroy,attr"`
93 PreventDestroy *bool `hcl:"prevent_destroy,attr"`
94 IgnoreChanges *[]string `hcl:"ignore_changes,attr"`
95 }
96 type connection struct {
97 Config hcl2.Body `hcl:",remain"`
98 }
99 type provisioner struct {
100 Type string `hcl:"type,label"`
101
102 When *string `hcl:"when,attr"`
103 OnFailure *string `hcl:"on_failure,attr"`
104
105 Connection *connection `hcl:"connection,block"`
106 Config hcl2.Body `hcl:",remain"`
107 }
108 type managedResource struct {
109 Type string `hcl:"type,label"`
110 Name string `hcl:"name,label"`
111
112 CountExpr hcl2.Expression `hcl:"count,attr"`
113 Provider *string `hcl:"provider,attr"`
114 DependsOn *[]string `hcl:"depends_on,attr"`
115
116 Lifecycle *resourceLifecycle `hcl:"lifecycle,block"`
117 Provisioners []provisioner `hcl:"provisioner,block"`
118 Connection *connection `hcl:"connection,block"`
119
120 Config hcl2.Body `hcl:",remain"`
121 }
122 type dataResource struct {
123 Type string `hcl:"type,label"`
124 Name string `hcl:"name,label"`
125
126 CountExpr hcl2.Expression `hcl:"count,attr"`
127 Provider *string `hcl:"provider,attr"`
128 DependsOn *[]string `hcl:"depends_on,attr"`
129
130 Config hcl2.Body `hcl:",remain"`
131 }
132 type variable struct {
133 Name string `hcl:"name,label"`
134
135 DeclaredType *string `hcl:"type,attr"`
136 Default *cty.Value `hcl:"default,attr"`
137 Description *string `hcl:"description,attr"`
138 Sensitive *bool `hcl:"sensitive,attr"`
139 }
140 type output struct {
141 Name string `hcl:"name,label"`
142
143 ValueExpr hcl2.Expression `hcl:"value,attr"`
144 DependsOn *[]string `hcl:"depends_on,attr"`
145 Description *string `hcl:"description,attr"`
146 Sensitive *bool `hcl:"sensitive,attr"`
147 }
148 type locals struct {
149 Definitions hcl2.Attributes `hcl:",remain"`
150 }
151 type backend struct {
152 Type string `hcl:"type,label"`
153 Config hcl2.Body `hcl:",remain"`
154 }
155 type terraform struct {
156 RequiredVersion *string `hcl:"required_version,attr"`
157 Backend *backend `hcl:"backend,block"`
158 }
159 type topLevel struct {
160 Atlas *atlas `hcl:"atlas,block"`
161 Datas []dataResource `hcl:"data,block"`
162 Modules []module `hcl:"module,block"`
163 Outputs []output `hcl:"output,block"`
164 Providers []provider `hcl:"provider,block"`
165 Resources []managedResource `hcl:"resource,block"`
166 Terraform *terraform `hcl:"terraform,block"`
167 Variables []variable `hcl:"variable,block"`
168 Locals []*locals `hcl:"locals,block"`
169 }
170
171 var raw topLevel
172 diags := gohcl2.DecodeBody(t.Body, nil, &raw)
173 if diags.HasErrors() {
174 // Do some minimal decoding to see if we can at least get the
175 // required Terraform version, which might help explain why we
176 // couldn't parse the rest.
177 if raw.Terraform != nil && raw.Terraform.RequiredVersion != nil {
178 config.Terraform = &Terraform{
179 RequiredVersion: *raw.Terraform.RequiredVersion,
180 }
181 }
182
183 // We return the diags as an implementation of error, which the
184 // caller can then type-assert if desired to recover the individual
185 // diagnostics.
186 // FIXME: The current API gives us no way to return warnings in the
187 // absence of any errors.
188 return config, diags
189 }
190
191 if raw.Terraform != nil {
192 var reqdVersion string
193 var backend *Backend
194
195 if raw.Terraform.RequiredVersion != nil {
196 reqdVersion = *raw.Terraform.RequiredVersion
197 }
198 if raw.Terraform.Backend != nil {
199 backend = new(Backend)
200 backend.Type = raw.Terraform.Backend.Type
201
202 // We don't permit interpolations or nested blocks inside the
203 // backend config, so we can decode the config early here and
204 // get direct access to the values, which is important for the
205 // config hashing to work as expected.
206 var config map[string]string
207 configDiags := gohcl2.DecodeBody(raw.Terraform.Backend.Config, nil, &config)
208 diags = append(diags, configDiags...)
209
210 raw := make(map[string]interface{}, len(config))
211 for k, v := range config {
212 raw[k] = v
213 }
214
215 var err error
216 backend.RawConfig, err = NewRawConfig(raw)
217 if err != nil {
218 diags = append(diags, &hcl2.Diagnostic{
219 Severity: hcl2.DiagError,
220 Summary: "Invalid backend configuration",
221 Detail: fmt.Sprintf("Error in backend configuration: %s", err),
222 })
223 }
224 }
225
226 config.Terraform = &Terraform{
227 RequiredVersion: reqdVersion,
228 Backend: backend,
229 }
230 }
231
232 if raw.Atlas != nil {
233 var include, exclude []string
234 if raw.Atlas.Include != nil {
235 include = *raw.Atlas.Include
236 }
237 if raw.Atlas.Exclude != nil {
238 exclude = *raw.Atlas.Exclude
239 }
240 config.Atlas = &AtlasConfig{
241 Name: raw.Atlas.Name,
242 Include: include,
243 Exclude: exclude,
244 }
245 }
246
247 for _, rawM := range raw.Modules {
248 m := &Module{
249 Name: rawM.Name,
250 Source: rawM.Source,
251 RawConfig: NewRawConfigHCL2(rawM.Config),
252 }
253
254 if rawM.Version != nil {
255 m.Version = *rawM.Version
256 }
257
258 if rawM.Providers != nil {
259 m.Providers = *rawM.Providers
260 }
261
262 config.Modules = append(config.Modules, m)
263 }
264
265 for _, rawV := range raw.Variables {
266 v := &Variable{
267 Name: rawV.Name,
268 }
269 if rawV.DeclaredType != nil {
270 v.DeclaredType = *rawV.DeclaredType
271 }
272 if rawV.Default != nil {
273 v.Default = hcl2shim.ConfigValueFromHCL2(*rawV.Default)
274 }
275 if rawV.Description != nil {
276 v.Description = *rawV.Description
277 }
278
279 config.Variables = append(config.Variables, v)
280 }
281
282 for _, rawO := range raw.Outputs {
283 o := &Output{
284 Name: rawO.Name,
285 }
286
287 if rawO.Description != nil {
288 o.Description = *rawO.Description
289 }
290 if rawO.DependsOn != nil {
291 o.DependsOn = *rawO.DependsOn
292 }
293 if rawO.Sensitive != nil {
294 o.Sensitive = *rawO.Sensitive
295 }
296
297 // The result is expected to be a map like map[string]interface{}{"value": something},
298 // so we'll fake that with our hcl2shim.SingleAttrBody shim.
299 o.RawConfig = NewRawConfigHCL2(hcl2shim.SingleAttrBody{
300 Name: "value",
301 Expr: rawO.ValueExpr,
302 })
303
304 config.Outputs = append(config.Outputs, o)
305 }
306
307 for _, rawR := range raw.Resources {
308 r := &Resource{
309 Mode: ManagedResourceMode,
310 Type: rawR.Type,
311 Name: rawR.Name,
312 }
313 if rawR.Lifecycle != nil {
314 var l ResourceLifecycle
315 if rawR.Lifecycle.CreateBeforeDestroy != nil {
316 l.CreateBeforeDestroy = *rawR.Lifecycle.CreateBeforeDestroy
317 }
318 if rawR.Lifecycle.PreventDestroy != nil {
319 l.PreventDestroy = *rawR.Lifecycle.PreventDestroy
320 }
321 if rawR.Lifecycle.IgnoreChanges != nil {
322 l.IgnoreChanges = *rawR.Lifecycle.IgnoreChanges
323 }
324 r.Lifecycle = l
325 }
326 if rawR.Provider != nil {
327 r.Provider = *rawR.Provider
328 }
329 if rawR.DependsOn != nil {
330 r.DependsOn = *rawR.DependsOn
331 }
332
333 var defaultConnInfo *RawConfig
334 if rawR.Connection != nil {
335 defaultConnInfo = NewRawConfigHCL2(rawR.Connection.Config)
336 }
337
338 for _, rawP := range rawR.Provisioners {
339 p := &Provisioner{
340 Type: rawP.Type,
341 }
342
343 switch {
344 case rawP.When == nil:
345 p.When = ProvisionerWhenCreate
346 case *rawP.When == "create":
347 p.When = ProvisionerWhenCreate
348 case *rawP.When == "destroy":
349 p.When = ProvisionerWhenDestroy
350 default:
351 p.When = ProvisionerWhenInvalid
352 }
353
354 switch {
355 case rawP.OnFailure == nil:
356 p.OnFailure = ProvisionerOnFailureFail
357 case *rawP.When == "fail":
358 p.OnFailure = ProvisionerOnFailureFail
359 case *rawP.When == "continue":
360 p.OnFailure = ProvisionerOnFailureContinue
361 default:
362 p.OnFailure = ProvisionerOnFailureInvalid
363 }
364
365 if rawP.Connection != nil {
366 p.ConnInfo = NewRawConfigHCL2(rawP.Connection.Config)
367 } else {
368 p.ConnInfo = defaultConnInfo
369 }
370
371 p.RawConfig = NewRawConfigHCL2(rawP.Config)
372
373 r.Provisioners = append(r.Provisioners, p)
374 }
375
376 // The old loader records the count expression as a weird RawConfig with
377 // a single-element map inside. Since the rest of the world is assuming
378 // that, we'll mimic it here.
379 {
380 countBody := hcl2shim.SingleAttrBody{
381 Name: "count",
382 Expr: rawR.CountExpr,
383 }
384
385 r.RawCount = NewRawConfigHCL2(countBody)
386 r.RawCount.Key = "count"
387 }
388
389 r.RawConfig = NewRawConfigHCL2(rawR.Config)
390
391 config.Resources = append(config.Resources, r)
392
393 }
394
395 for _, rawR := range raw.Datas {
396 r := &Resource{
397 Mode: DataResourceMode,
398 Type: rawR.Type,
399 Name: rawR.Name,
400 }
401
402 if rawR.Provider != nil {
403 r.Provider = *rawR.Provider
404 }
405 if rawR.DependsOn != nil {
406 r.DependsOn = *rawR.DependsOn
407 }
408
409 // The old loader records the count expression as a weird RawConfig with
410 // a single-element map inside. Since the rest of the world is assuming
411 // that, we'll mimic it here.
412 {
413 countBody := hcl2shim.SingleAttrBody{
414 Name: "count",
415 Expr: rawR.CountExpr,
416 }
417
418 r.RawCount = NewRawConfigHCL2(countBody)
419 r.RawCount.Key = "count"
420 }
421
422 r.RawConfig = NewRawConfigHCL2(rawR.Config)
423
424 config.Resources = append(config.Resources, r)
425 }
426
427 for _, rawP := range raw.Providers {
428 p := &ProviderConfig{
429 Name: rawP.Name,
430 }
431
432 if rawP.Alias != nil {
433 p.Alias = *rawP.Alias
434 }
435 if rawP.Version != nil {
436 p.Version = *rawP.Version
437 }
438
439 // The result is expected to be a map like map[string]interface{}{"value": something},
440 // so we'll fake that with our hcl2shim.SingleAttrBody shim.
441 p.RawConfig = NewRawConfigHCL2(rawP.Config)
442
443 config.ProviderConfigs = append(config.ProviderConfigs, p)
444 }
445
446 for _, rawL := range raw.Locals {
447 names := make([]string, 0, len(rawL.Definitions))
448 for n := range rawL.Definitions {
449 names = append(names, n)
450 }
451 sort.Strings(names)
452 for _, n := range names {
453 attr := rawL.Definitions[n]
454 l := &Local{
455 Name: n,
456 RawConfig: NewRawConfigHCL2(hcl2shim.SingleAttrBody{
457 Name: "value",
458 Expr: attr.Expr,
459 }),
460 }
461 config.Locals = append(config.Locals, l)
462 }
463 }
464
465 // FIXME: The current API gives us no way to return warnings in the
466// absence of any errors.
467 var err error
468 if diags.HasErrors() {
469 err = diags
470 }
471
472 return config, err
473}
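
The HCL2 loader above relies on gohcl2.DecodeBody and struct tags (label, attr, block, remain) to do a shallow decode of the top-level blocks. A minimal, self-contained sketch of that decoding pattern, using the same pre-merge hcl2 import paths this vendored tree uses; the block and field names here are illustrative only:

package main

import (
	"fmt"

	gohcl2 "github.com/hashicorp/hcl2/gohcl"
	hcl2parse "github.com/hashicorp/hcl2/hclparse"
)

type moduleBlock struct {
	Name   string `hcl:"name,label"`
	Source string `hcl:"source,attr"`
}

type topLevel struct {
	Modules []moduleBlock `hcl:"module,block"`
}

func main() {
	src := `
module "vpc" {
  source = "./modules/vpc"
}
`
	p := hcl2parse.NewParser()
	f, diags := p.ParseHCL([]byte(src), "example.tf")
	if diags.HasErrors() {
		panic(diags)
	}

	var raw topLevel
	if diags := gohcl2.DecodeBody(f.Body, nil, &raw); diags.HasErrors() {
		panic(diags)
	}
	fmt.Println(raw.Modules[0].Name, raw.Modules[0].Source) // vpc ./modules/vpc
}
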
diff --git a/vendor/github.com/hashicorp/terraform/config/merge.go b/vendor/github.com/hashicorp/terraform/config/merge.go
index db214be..55fc864 100644
--- a/vendor/github.com/hashicorp/terraform/config/merge.go
+++ b/vendor/github.com/hashicorp/terraform/config/merge.go
@@ -137,6 +137,17 @@ func Merge(c1, c2 *Config) (*Config, error) {
137 } 137 }
138 } 138 }
139 139
140 // Local Values
141 // These are simpler than the other config elements because they are just
142 // flat values and so no deep merging is required.
143 if localsCount := len(c1.Locals) + len(c2.Locals); localsCount != 0 {
144 // Explicit length check above because we want c.Locals to remain
145 // nil if the result would be empty.
146 c.Locals = make([]*Local, 0, len(c1.Locals)+len(c2.Locals))
147 c.Locals = append(c.Locals, c1.Locals...)
148 c.Locals = append(c.Locals, c2.Locals...)
149 }
150
140 return c, nil 151 return c, nil
141} 152}
142 153
diff --git a/vendor/github.com/hashicorp/terraform/config/module/get.go b/vendor/github.com/hashicorp/terraform/config/module/get.go
index 96b4a63..5073d0d 100644
--- a/vendor/github.com/hashicorp/terraform/config/module/get.go
+++ b/vendor/github.com/hashicorp/terraform/config/module/get.go
@@ -3,6 +3,7 @@ package module
3import ( 3import (
4 "io/ioutil" 4 "io/ioutil"
5 "os" 5 "os"
6 "path/filepath"
6 7
7 "github.com/hashicorp/go-getter" 8 "github.com/hashicorp/go-getter"
8) 9)
@@ -37,13 +38,10 @@ func GetCopy(dst, src string) error {
37 if err != nil { 38 if err != nil {
38 return err 39 return err
39 } 40 }
40 // FIXME: This isn't completely safe. Creating and removing our temp path
41 // exposes where to race to inject files.
42 if err := os.RemoveAll(tmpDir); err != nil {
43 return err
44 }
45 defer os.RemoveAll(tmpDir) 41 defer os.RemoveAll(tmpDir)
46 42
43 tmpDir = filepath.Join(tmpDir, "module")
44
47 // Get to that temporary dir 45 // Get to that temporary dir
48 if err := getter.Get(tmpDir, src); err != nil { 46 if err := getter.Get(tmpDir, src); err != nil {
49 return err 47 return err
@@ -57,15 +55,3 @@ func GetCopy(dst, src string) error {
57 // Copy to the final location 55 // Copy to the final location
58 return copyDir(dst, tmpDir) 56 return copyDir(dst, tmpDir)
59} 57}
60
61func getStorage(s getter.Storage, key string, src string, mode GetMode) (string, bool, error) {
62 // Get the module with the level specified if we were told to.
63 if mode > GetModeNone {
64 if err := s.Get(key, src, mode == GetModeUpdate); err != nil {
65 return "", false, err
66 }
67 }
68
69 // Get the directory where the module is.
70 return s.Dir(key)
71}
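
For context, the revised GetCopy now fetches into a dedicated "module" subdirectory of the temp dir instead of pre-removing the temp dir. The following is a rough sketch of that flow using go-getter directly; it is not the upstream implementation, and the internal copyDir helper is replaced with a plain rename only to keep the example short.

package main

import (
	"io/ioutil"
	"log"
	"os"
	"path/filepath"

	getter "github.com/hashicorp/go-getter"
)

// fetchModule mirrors the shape of GetCopy above: download into a throwaway
// "module" subdirectory of a temp dir, then move the result into dst.
// (Upstream copies file-by-file; a rename is used here only for brevity.)
func fetchModule(dst, src string) error {
	tmpDir, err := ioutil.TempDir("", "tf")
	if err != nil {
		return err
	}
	defer os.RemoveAll(tmpDir)

	tmpDir = filepath.Join(tmpDir, "module")

	if err := getter.Get(tmpDir, src); err != nil {
		return err
	}

	return os.Rename(tmpDir, dst)
}

func main() {
	if err := fetchModule("./local-copy", "github.com/hashicorp/example"); err != nil {
		log.Fatal(err)
	}
}
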
diff --git a/vendor/github.com/hashicorp/terraform/config/module/inode.go b/vendor/github.com/hashicorp/terraform/config/module/inode.go
index 8603ee2..da520ab 100644
--- a/vendor/github.com/hashicorp/terraform/config/module/inode.go
+++ b/vendor/github.com/hashicorp/terraform/config/module/inode.go
@@ -1,4 +1,4 @@
1// +build linux darwin openbsd netbsd solaris 1// +build linux darwin openbsd netbsd solaris dragonfly
2 2
3package module 3package module
4 4
diff --git a/vendor/github.com/hashicorp/terraform/config/module/module.go b/vendor/github.com/hashicorp/terraform/config/module/module.go
index f8649f6..7dc8fcc 100644
--- a/vendor/github.com/hashicorp/terraform/config/module/module.go
+++ b/vendor/github.com/hashicorp/terraform/config/module/module.go
@@ -2,6 +2,8 @@ package module
2 2
3// Module represents the metadata for a single module. 3// Module represents the metadata for a single module.
4type Module struct { 4type Module struct {
5 Name string 5 Name string
6 Source string 6 Source string
7 Version string
8 Providers map[string]string
7} 9}
diff --git a/vendor/github.com/hashicorp/terraform/config/module/storage.go b/vendor/github.com/hashicorp/terraform/config/module/storage.go
new file mode 100644
index 0000000..58e3a10
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/module/storage.go
@@ -0,0 +1,365 @@
1package module
2
3import (
4 "encoding/json"
5 "fmt"
6 "io/ioutil"
7 "log"
8 "os"
9 "path/filepath"
10 "strings"
11
12 getter "github.com/hashicorp/go-getter"
13 "github.com/hashicorp/terraform/registry"
14 "github.com/hashicorp/terraform/registry/regsrc"
15 "github.com/hashicorp/terraform/svchost/disco"
16 "github.com/mitchellh/cli"
17)
18
19const manifestName = "modules.json"
20
21// moduleManifest is the serialization structure used to record the stored
22// module's metadata.
23type moduleManifest struct {
24 Modules []moduleRecord
25}
26
27// moduleRecord represents the stored module's metadata.
28// This is compared for equality using '==', so all fields need to remain
29// comparable.
30type moduleRecord struct {
31 // Source is the module source string from the config, minus any
32 // subdirectory.
33 Source string
34
35 // Key is the locally unique identifier for this module.
36 Key string
37
38 // Version is the exact version string for the stored module.
39 Version string
40
41 // Dir is the directory name returned by the FileStorage. This is what
42 // allows us to correlate a particular module version with the location on
43 // disk.
44 Dir string
45
46 // Root is the root directory containing the module. If the module is
47 // unpacked from an archive, and not located in the root directory, this is
48 // used to direct the loader to the correct subdirectory. This is
49 // independent from any subdirectory in the original source string, which
50 // may traverse further into the module tree.
51 Root string
52
53 // url is the location of the module source
54 url string
55
56 // Registry is true if this module is sourced from a registry
57 registry bool
58}
59
60// Storage implements methods to manage the storage of modules.
61// This is used by Tree.Load to query registries, authenticate requests, and
62// store modules locally.
63type Storage struct {
64 // StorageDir is the full path to the directory where all modules will be
65 // stored.
66 StorageDir string
67
68 // Ui is an optional cli.Ui for user output
69 Ui cli.Ui
70
71 // Mode is the GetMode that will be used for various operations.
72 Mode GetMode
73
74 registry *registry.Client
75}
76
77// NewStorage returns a new initialized Storage object.
78func NewStorage(dir string, services *disco.Disco) *Storage {
79 regClient := registry.NewClient(services, nil)
80
81 return &Storage{
82 StorageDir: dir,
83 registry: regClient,
84 }
85}
86
87// loadManifest returns the moduleManifest file from the parent directory.
88func (s Storage) loadManifest() (moduleManifest, error) {
89 manifest := moduleManifest{}
90
91 manifestPath := filepath.Join(s.StorageDir, manifestName)
92 data, err := ioutil.ReadFile(manifestPath)
93 if err != nil && !os.IsNotExist(err) {
94 return manifest, err
95 }
96
97 if len(data) == 0 {
98 return manifest, nil
99 }
100
101 if err := json.Unmarshal(data, &manifest); err != nil {
102 return manifest, err
103 }
104
105 for i, rec := range manifest.Modules {
106 // If the path was recorded before we changed to always using a
107 // slash as separator, we delete the record from the manifest so
108 // it can be discovered again and will be recorded using a slash.
109 if strings.Contains(rec.Dir, "\\") {
110 manifest.Modules[i] = manifest.Modules[len(manifest.Modules)-1]
111 manifest.Modules = manifest.Modules[:len(manifest.Modules)-1]
112 continue
113 }
114
115 // Make sure we use the correct path separator.
116 rec.Dir = filepath.FromSlash(rec.Dir)
117 }
118
119 return manifest, nil
120}
121
122// Store the location of the module, along with the version used and the module
123// root directory. The storage method loads the entire file and rewrites it
124// each time. This is only done a few times during init, so efficiency is
125// not a concern.
126func (s Storage) recordModule(rec moduleRecord) error {
127 manifest, err := s.loadManifest()
128 if err != nil {
129 // if there was a problem with the file, we will attempt to write a new
130 // one. Any non-data related error should surface there.
131 log.Printf("[WARN] error reading module manifest: %s", err)
132 }
133
134 // do nothing if we already have the exact module
135 for i, stored := range manifest.Modules {
136 if rec == stored {
137 return nil
138 }
139
140 // they are not equal, but if the storage path is the same we need to
141 // remove this record so it can be replaced.
142 if rec.Dir == stored.Dir {
143 manifest.Modules[i] = manifest.Modules[len(manifest.Modules)-1]
144 manifest.Modules = manifest.Modules[:len(manifest.Modules)-1]
145 break
146 }
147 }
148
149 // Make sure we always use a slash separator.
150 rec.Dir = filepath.ToSlash(rec.Dir)
151
152 manifest.Modules = append(manifest.Modules, rec)
153
154 js, err := json.Marshal(manifest)
155 if err != nil {
156 panic(err)
157 }
158
159 manifestPath := filepath.Join(s.StorageDir, manifestName)
160 return ioutil.WriteFile(manifestPath, js, 0644)
161}
162
163// load the manifest from dir, and return all module versions matching the
164// provided source. Records with no version info will be skipped, as they need
165// to be uniquely identified by other means.
166func (s Storage) moduleVersions(source string) ([]moduleRecord, error) {
167 manifest, err := s.loadManifest()
168 if err != nil {
169 return manifest.Modules, err
170 }
171
172 var matching []moduleRecord
173
174 for _, m := range manifest.Modules {
175 if m.Source == source && m.Version != "" {
176 log.Printf("[DEBUG] found local version %q for module %s", m.Version, m.Source)
177 matching = append(matching, m)
178 }
179 }
180
181 return matching, nil
182}
183
184func (s Storage) moduleDir(key string) (string, error) {
185 manifest, err := s.loadManifest()
186 if err != nil {
187 return "", err
188 }
189
190 for _, m := range manifest.Modules {
191 if m.Key == key {
192 return m.Dir, nil
193 }
194 }
195
196 return "", nil
197}
198
199// return only the root directory of the module stored in dir.
200func (s Storage) getModuleRoot(dir string) (string, error) {
201 manifest, err := s.loadManifest()
202 if err != nil {
203 return "", err
204 }
205
206 for _, mod := range manifest.Modules {
207 if mod.Dir == dir {
208 return mod.Root, nil
209 }
210 }
211 return "", nil
212}
213
214// record only the Root directory for the module stored at dir.
215func (s Storage) recordModuleRoot(dir, root string) error {
216 rec := moduleRecord{
217 Dir: dir,
218 Root: root,
219 }
220
221 return s.recordModule(rec)
222}
223
224func (s Storage) output(msg string) {
225 if s.Ui == nil || s.Mode == GetModeNone {
226 return
227 }
228 s.Ui.Output(msg)
229}
230
231func (s Storage) getStorage(key string, src string) (string, bool, error) {
232 storage := &getter.FolderStorage{
233 StorageDir: s.StorageDir,
234 }
235
236 log.Printf("[DEBUG] fetching module from %s", src)
237
238 // Get the module with the level specified if we were told to.
239 if s.Mode > GetModeNone {
240 log.Printf("[DEBUG] fetching %q with key %q", src, key)
241 if err := storage.Get(key, src, s.Mode == GetModeUpdate); err != nil {
242 return "", false, err
243 }
244 }
245
246 // Get the directory where the module is.
247 dir, found, err := storage.Dir(key)
248 log.Printf("[DEBUG] found %q in %q: %t", src, dir, found)
249 return dir, found, err
250}
251
252// find a stored module that's not from a registry
253func (s Storage) findModule(key string) (string, error) {
254 if s.Mode == GetModeUpdate {
255 return "", nil
256 }
257
258 return s.moduleDir(key)
259}
260
261// GetModule fetches a module source into the specified directory. This is used
262// as a convenience function by the CLI to initialize a configuration.
263func (s Storage) GetModule(dst, src string) error {
264 // reset this in case the caller was going to re-use it
265 mode := s.Mode
266 s.Mode = GetModeUpdate
267 defer func() {
268 s.Mode = mode
269 }()
270
271 rec, err := s.findRegistryModule(src, anyVersion)
272 if err != nil {
273 return err
274 }
275
276 pwd, err := os.Getwd()
277 if err != nil {
278 return err
279 }
280
281 source := rec.url
282 if source == "" {
283 source, err = getter.Detect(src, pwd, getter.Detectors)
284 if err != nil {
285 return fmt.Errorf("module %s: %s", src, err)
286 }
287 }
288
289 if source == "" {
290 return fmt.Errorf("module %q not found", src)
291 }
292
293 return GetCopy(dst, source)
294}
295
296// find a registry module
297func (s Storage) findRegistryModule(mSource, constraint string) (moduleRecord, error) {
298 rec := moduleRecord{
299 Source: mSource,
300 }
301 // detect if we have a registry source
302 mod, err := regsrc.ParseModuleSource(mSource)
303 switch err {
304 case nil:
305 //ok
306 case regsrc.ErrInvalidModuleSource:
307 return rec, nil
308 default:
309 return rec, err
310 }
311 rec.registry = true
312
313 log.Printf("[TRACE] %q is a registry module", mod.Display())
314
315 versions, err := s.moduleVersions(mod.String())
316 if err != nil {
317 log.Printf("[ERROR] error looking up versions for %q: %s", mod.Display(), err)
318 return rec, err
319 }
320
321 match, err := newestRecord(versions, constraint)
322 if err != nil {
323 log.Printf("[INFO] no matching version for %q<%s>, %s", mod.Display(), constraint, err)
324 }
325 log.Printf("[DEBUG] matched %q version %s for %s", mod, match.Version, constraint)
326
327 rec.Dir = match.Dir
328 rec.Version = match.Version
329 found := rec.Dir != ""
330
331 // we need to look up available versions:
332 // only on Get if it's not found, and unconditionally on Update
333 if (s.Mode == GetModeGet && !found) || (s.Mode == GetModeUpdate) {
334 resp, err := s.registry.Versions(mod)
335 if err != nil {
336 return rec, err
337 }
338
339 if len(resp.Modules) == 0 {
340 return rec, fmt.Errorf("module %q not found in registry", mod.Display())
341 }
342
343 match, err := newestVersion(resp.Modules[0].Versions, constraint)
344 if err != nil {
345 return rec, err
346 }
347
348 if match == nil {
349 return rec, fmt.Errorf("no versions for %q found matching %q", mod.Display(), constraint)
350 }
351
352 rec.Version = match.Version
353
354 rec.url, err = s.registry.Location(mod, rec.Version)
355 if err != nil {
356 return rec, err
357 }
358
359 // we've already validated this by now
360 host, _ := mod.SvcHost()
361 s.output(fmt.Sprintf(" Found version %s of %s on %s", rec.Version, mod.Module(), host.ForDisplay()))
362
363 }
364 return rec, nil
365}
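
A brief usage sketch of the new Storage type, grounded in the exported fields above and in the testing.go change below; the storage directory and module source here are placeholders, and no registry client is configured since the source is local.

package main

import (
	"log"

	"github.com/hashicorp/terraform/config/module"
)

func main() {
	// Construct storage the same way the updated test helper does: a
	// storage directory plus a GetMode. The registry client is optional
	// for purely local sources.
	s := &module.Storage{
		StorageDir: ".terraform/modules", // placeholder path
		Mode:       module.GetModeGet,
	}

	// Fetch a single module source into a destination directory.
	if err := s.GetModule("./example-dst", "./modules/example"); err != nil {
		log.Fatal(err)
	}
}
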
diff --git a/vendor/github.com/hashicorp/terraform/config/module/testing.go b/vendor/github.com/hashicorp/terraform/config/module/testing.go
index fc9e733..6f1ff05 100644
--- a/vendor/github.com/hashicorp/terraform/config/module/testing.go
+++ b/vendor/github.com/hashicorp/terraform/config/module/testing.go
@@ -4,8 +4,6 @@ import (
4 "io/ioutil" 4 "io/ioutil"
5 "os" 5 "os"
6 "testing" 6 "testing"
7
8 "github.com/hashicorp/go-getter"
9) 7)
10 8
11// TestTree loads a module at the given path and returns the tree as well 9// TestTree loads a module at the given path and returns the tree as well
@@ -26,8 +24,8 @@ func TestTree(t *testing.T, path string) (*Tree, func()) {
26 } 24 }
27 25
28 // Get the child modules 26 // Get the child modules
29 s := &getter.FolderStorage{StorageDir: dir} 27 s := &Storage{StorageDir: dir, Mode: GetModeGet}
30 if err := mod.Load(s, GetModeGet); err != nil { 28 if err := mod.Load(s); err != nil {
31 t.Fatalf("err: %s", err) 29 t.Fatalf("err: %s", err)
32 return nil, nil 30 return nil, nil
33 } 31 }
diff --git a/vendor/github.com/hashicorp/terraform/config/module/tree.go b/vendor/github.com/hashicorp/terraform/config/module/tree.go
index 4b0b153..f56d69b 100644
--- a/vendor/github.com/hashicorp/terraform/config/module/tree.go
+++ b/vendor/github.com/hashicorp/terraform/config/module/tree.go
@@ -4,11 +4,14 @@ import (
4 "bufio" 4 "bufio"
5 "bytes" 5 "bytes"
6 "fmt" 6 "fmt"
7 "log"
7 "path/filepath" 8 "path/filepath"
8 "strings" 9 "strings"
9 "sync" 10 "sync"
10 11
11 "github.com/hashicorp/go-getter" 12 "github.com/hashicorp/terraform/tfdiags"
13
14 getter "github.com/hashicorp/go-getter"
12 "github.com/hashicorp/terraform/config" 15 "github.com/hashicorp/terraform/config"
13) 16)
14 17
@@ -26,6 +29,17 @@ type Tree struct {
26 children map[string]*Tree 29 children map[string]*Tree
27 path []string 30 path []string
28 lock sync.RWMutex 31 lock sync.RWMutex
32
33 // version is the final version of the config loaded for the Tree's module
34 version string
35 // source is the "source" string used to load this module. It's possible
36 // for a module source to change, but the path remains the same, preventing
37 // it from being reloaded.
38 source string
39 // parent allows us to walk back up the tree and determine if there are any
40 // versioned ancestor modules which may affect the stored location of
41 // submodules
42 parent *Tree
29} 43}
30 44
31// NewTree returns a new Tree for the given config structure. 45// NewTree returns a new Tree for the given config structure.
@@ -40,7 +54,7 @@ func NewEmptyTree() *Tree {
40 // We do this dummy load so that the tree is marked as "loaded". It 54 // We do this dummy load so that the tree is marked as "loaded". It
41 // should never fail because this is just about a no-op. If it does fail 55 // should never fail because this is just about a no-op. If it does fail
42 // we panic so we can know its a bug. 56 // we panic so we can know its a bug.
43 if err := t.Load(nil, GetModeGet); err != nil { 57 if err := t.Load(&Storage{Mode: GetModeGet}); err != nil {
44 panic(err) 58 panic(err)
45 } 59 }
46 60
@@ -126,8 +140,10 @@ func (t *Tree) Modules() []*Module {
126 result := make([]*Module, len(t.config.Modules)) 140 result := make([]*Module, len(t.config.Modules))
127 for i, m := range t.config.Modules { 141 for i, m := range t.config.Modules {
128 result[i] = &Module{ 142 result[i] = &Module{
129 Name: m.Name, 143 Name: m.Name,
130 Source: m.Source, 144 Version: m.Version,
145 Source: m.Source,
146 Providers: m.Providers,
131 } 147 }
132 } 148 }
133 149
@@ -155,81 +171,178 @@ func (t *Tree) Name() string {
155// module trees inherently require the configuration to be in a reasonably 171// module trees inherently require the configuration to be in a reasonably
156// sane state: no circular dependencies, proper module sources, etc. A full 172// sane state: no circular dependencies, proper module sources, etc. A full
157// suite of validations can be done by running Validate (after loading). 173// suite of validations can be done by running Validate (after loading).
158func (t *Tree) Load(s getter.Storage, mode GetMode) error { 174func (t *Tree) Load(s *Storage) error {
159 t.lock.Lock() 175 t.lock.Lock()
160 defer t.lock.Unlock() 176 defer t.lock.Unlock()
161 177
162 // Reset the children if we have any 178 children, err := t.getChildren(s)
163 t.children = nil 179 if err != nil {
180 return err
181 }
182
183 // Go through all the children and load them.
184 for _, c := range children {
185 if err := c.Load(s); err != nil {
186 return err
187 }
188 }
189
190 // Set our tree up
191 t.children = children
164 192
165 modules := t.Modules() 193 return nil
194}
195
196func (t *Tree) getChildren(s *Storage) (map[string]*Tree, error) {
166 children := make(map[string]*Tree) 197 children := make(map[string]*Tree)
167 198
168 // Go through all the modules and get the directory for them. 199 // Go through all the modules and get the directory for them.
169 for _, m := range modules { 200 for _, m := range t.Modules() {
170 if _, ok := children[m.Name]; ok { 201 if _, ok := children[m.Name]; ok {
171 return fmt.Errorf( 202 return nil, fmt.Errorf(
172 "module %s: duplicated. module names must be unique", m.Name) 203 "module %s: duplicated. module names must be unique", m.Name)
173 } 204 }
174 205
175 // Determine the path to this child 206 // Determine the path to this child
176 path := make([]string, len(t.path), len(t.path)+1) 207 modPath := make([]string, len(t.path), len(t.path)+1)
177 copy(path, t.path) 208 copy(modPath, t.path)
178 path = append(path, m.Name) 209 modPath = append(modPath, m.Name)
179 210
180 // Split out the subdir if we have one 211 log.Printf("[TRACE] module source: %q", m.Source)
181 source, subDir := getter.SourceDirSubdir(m.Source)
182 212
183 source, err := getter.Detect(source, t.config.Dir, getter.Detectors) 213 // add the module path to help indicate where modules with relative
214 // paths are being loaded from
215 s.output(fmt.Sprintf("- module.%s", strings.Join(modPath, ".")))
216
217 // Lookup the local location of the module.
218 // dir is the local directory where the module is stored
219 mod, err := s.findRegistryModule(m.Source, m.Version)
184 if err != nil { 220 if err != nil {
185 return fmt.Errorf("module %s: %s", m.Name, err) 221 return nil, err
186 } 222 }
187 223
224 // The key is the string that will be used to uniquely identify the Source in
225 // the local storage. The prefix digit can be incremented to
226 // invalidate the local module storage.
227 key := "1." + t.versionedPathKey(m)
228 if mod.Version != "" {
229 key += "." + mod.Version
230 }
231
232 // Check for the exact key if it's not a registry module
233 if !mod.registry {
234 mod.Dir, err = s.findModule(key)
235 if err != nil {
236 return nil, err
237 }
238 }
239
240 if mod.Dir != "" && s.Mode != GetModeUpdate {
241 // We found it locally, but in order to load the Tree we need to
242 // find out if there was another subDir stored from detection.
243 subDir, err := s.getModuleRoot(mod.Dir)
244 if err != nil {
245 // If there's a problem with the subdir record, we'll let the
246 // recordSubdir method fix it up. Any other filesystem errors
247 // will turn up again below.
248 log.Println("[WARN] error reading subdir record:", err)
249 }
250
251 fullDir := filepath.Join(mod.Dir, subDir)
252
253 child, err := NewTreeModule(m.Name, fullDir)
254 if err != nil {
255 return nil, fmt.Errorf("module %s: %s", m.Name, err)
256 }
257 child.path = modPath
258 child.parent = t
259 child.version = mod.Version
260 child.source = m.Source
261 children[m.Name] = child
262 continue
263 }
264
265 // Split out the subdir if we have one.
266 // Terraform keeps the entire requested tree, so that modules can
267 // reference sibling modules from the same archive or repo.
268 rawSource, subDir := getter.SourceDirSubdir(m.Source)
269
270 // we haven't found a source, so fall back to the go-getter detectors
271 source := mod.url
272 if source == "" {
273 source, err = getter.Detect(rawSource, t.config.Dir, getter.Detectors)
274 if err != nil {
275 return nil, fmt.Errorf("module %s: %s", m.Name, err)
276 }
277 }
278
279 log.Printf("[TRACE] detected module source %q", source)
280
188 // Check if the detector introduced something new. 281 // Check if the detector introduced something new.
189 source, subDir2 := getter.SourceDirSubdir(source) 282 // For example, the registry always adds a subdir of `//*`,
190 if subDir2 != "" { 283 // indicating that we need to strip off the first component from the
191 subDir = filepath.Join(subDir2, subDir) 284 // tar archive, though we may not yet know what it is called.
285 source, detectedSubDir := getter.SourceDirSubdir(source)
286 if detectedSubDir != "" {
287 subDir = filepath.Join(detectedSubDir, subDir)
288 }
289
290 output := ""
291 switch s.Mode {
292 case GetModeUpdate:
293 output = fmt.Sprintf(" Updating source %q", m.Source)
294 default:
295 output = fmt.Sprintf(" Getting source %q", m.Source)
192 } 296 }
297 s.output(output)
193 298
194 // Get the directory where this module is so we can load it 299 dir, ok, err := s.getStorage(key, source)
195 key := strings.Join(path, ".")
196 key = fmt.Sprintf("root.%s-%s", key, m.Source)
197 dir, ok, err := getStorage(s, key, source, mode)
198 if err != nil { 300 if err != nil {
199 return err 301 return nil, err
200 } 302 }
201 if !ok { 303 if !ok {
202 return fmt.Errorf( 304 return nil, fmt.Errorf("module %s: not found, may need to run 'terraform init'", m.Name)
203 "module %s: not found, may need to be downloaded using 'terraform get'", m.Name)
204 } 305 }
205 306
206 // If we have a subdirectory, then merge that in 307 log.Printf("[TRACE] %q stored in %q", source, dir)
308
309 // expand and record the subDir for later
310 fullDir := dir
207 if subDir != "" { 311 if subDir != "" {
208 dir = filepath.Join(dir, subDir) 312 fullDir, err = getter.SubdirGlob(dir, subDir)
209 } 313 if err != nil {
314 return nil, err
315 }
210 316
211 // Load the configurations.Dir(source) 317 // +1 to account for the pathsep
212 children[m.Name], err = NewTreeModule(m.Name, dir) 318 if len(dir)+1 > len(fullDir) {
213 if err != nil { 319 return nil, fmt.Errorf("invalid module storage path %q", fullDir)
214 return fmt.Errorf( 320 }
215 "module %s: %s", m.Name, err) 321 subDir = fullDir[len(dir)+1:]
216 } 322 }
217 323
218 // Set the path of this child 324 // add new info to the module record
219 children[m.Name].path = path 325 mod.Key = key
220 } 326 mod.Dir = dir
327 mod.Root = subDir
221 328
222 // Go through all the children and load them. 329 // record the module in our manifest
223 for _, c := range children { 330 if err := s.recordModule(mod); err != nil {
224 if err := c.Load(s, mode); err != nil { 331 return nil, err
225 return err
226 } 332 }
227 }
228 333
229 // Set our tree up 334 child, err := NewTreeModule(m.Name, fullDir)
230 t.children = children 335 if err != nil {
336 return nil, fmt.Errorf("module %s: %s", m.Name, err)
337 }
338 child.path = modPath
339 child.parent = t
340 child.version = mod.Version
341 child.source = m.Source
342 children[m.Name] = child
343 }
231 344
232 return nil 345 return children, nil
233} 346}
234 347
235// Path is the full path to this tree. 348// Path is the full path to this tree.
@@ -272,32 +385,35 @@ func (t *Tree) String() string {
272// as verifying things such as parameters/outputs between the various modules. 385// as verifying things such as parameters/outputs between the various modules.
273// 386//
274// Load must be called prior to calling Validate or an error will be returned. 387// Load must be called prior to calling Validate or an error will be returned.
275func (t *Tree) Validate() error { 388func (t *Tree) Validate() tfdiags.Diagnostics {
389 var diags tfdiags.Diagnostics
390
276 if !t.Loaded() { 391 if !t.Loaded() {
277 return fmt.Errorf("tree must be loaded before calling Validate") 392 diags = diags.Append(fmt.Errorf(
393 "tree must be loaded before calling Validate",
394 ))
395 return diags
278 } 396 }
279 397
280 // If something goes wrong, here is our error template
281 newErr := &treeError{Name: []string{t.Name()}}
282
283 // Terraform core does not handle root module children named "root". 398 // Terraform core does not handle root module children named "root".
284 // We plan to fix this in the future but this bug was brought up in 399 // We plan to fix this in the future but this bug was brought up in
285 // the middle of a release and we don't want to introduce wide-sweeping 400 // the middle of a release and we don't want to introduce wide-sweeping
286 // changes at that time. 401 // changes at that time.
287 if len(t.path) == 1 && t.name == "root" { 402 if len(t.path) == 1 && t.name == "root" {
288 return fmt.Errorf("root module cannot contain module named 'root'") 403 diags = diags.Append(fmt.Errorf(
404 "root module cannot contain module named 'root'",
405 ))
406 return diags
289 } 407 }
290 408
291 // Validate our configuration first. 409 // Validate our configuration first.
292 if err := t.config.Validate(); err != nil { 410 diags = diags.Append(t.config.Validate())
293 newErr.Add(err)
294 }
295 411
296 // If we're the root, we do extra validation. This validation usually 412 // If we're the root, we do extra validation. This validation usually
297 // requires the entire tree (since children don't have parent pointers). 413 // requires the entire tree (since children don't have parent pointers).
298 if len(t.path) == 0 { 414 if len(t.path) == 0 {
299 if err := t.validateProviderAlias(); err != nil { 415 if err := t.validateProviderAlias(); err != nil {
300 newErr.Add(err) 416 diags = diags.Append(err)
301 } 417 }
302 } 418 }
303 419
@@ -306,20 +422,11 @@ func (t *Tree) Validate() error {
306 422
307 // Validate all our children 423 // Validate all our children
308 for _, c := range children { 424 for _, c := range children {
309 err := c.Validate() 425 childDiags := c.Validate()
310 if err == nil { 426 diags = diags.Append(childDiags)
427 if diags.HasErrors() {
311 continue 428 continue
312 } 429 }
313
314 verr, ok := err.(*treeError)
315 if !ok {
316 // Unknown error, just return...
317 return err
318 }
319
320 // Append ourselves to the error and then return
321 verr.Name = append(verr.Name, t.Name())
322 newErr.AddChild(verr)
323 } 430 }
324 431
325 // Go over all the modules and verify that any parameters are valid 432 // Go over all the modules and verify that any parameters are valid
@@ -345,9 +452,10 @@ func (t *Tree) Validate() error {
345 // Compare to the keys in our raw config for the module 452 // Compare to the keys in our raw config for the module
346 for k, _ := range m.RawConfig.Raw { 453 for k, _ := range m.RawConfig.Raw {
347 if _, ok := varMap[k]; !ok { 454 if _, ok := varMap[k]; !ok {
348 newErr.Add(fmt.Errorf( 455 diags = diags.Append(fmt.Errorf(
349 "module %s: %s is not a valid parameter", 456 "module %q: %q is not a valid argument",
350 m.Name, k)) 457 m.Name, k,
458 ))
351 } 459 }
352 460
353 // Remove the required 461 // Remove the required
@@ -356,9 +464,10 @@ func (t *Tree) Validate() error {
356 464
357 // If we have any required left over, they aren't set. 465 // If we have any required left over, they aren't set.
358 for k, _ := range requiredMap { 466 for k, _ := range requiredMap {
359 newErr.Add(fmt.Errorf( 467 diags = diags.Append(fmt.Errorf(
360 "module %s: required variable %q not set", 468 "module %q: missing required argument %q",
361 m.Name, k)) 469 m.Name, k,
470 ))
362 } 471 }
363 } 472 }
364 473
@@ -373,9 +482,10 @@ func (t *Tree) Validate() error {
373 482
374 tree, ok := children[mv.Name] 483 tree, ok := children[mv.Name]
375 if !ok { 484 if !ok {
376 newErr.Add(fmt.Errorf( 485 diags = diags.Append(fmt.Errorf(
377 "%s: undefined module referenced %s", 486 "%s: reference to undefined module %q",
378 source, mv.Name)) 487 source, mv.Name,
488 ))
379 continue 489 continue
380 } 490 }
381 491
@@ -387,14 +497,56 @@ func (t *Tree) Validate() error {
387 } 497 }
388 } 498 }
389 if !found { 499 if !found {
390 newErr.Add(fmt.Errorf( 500 diags = diags.Append(fmt.Errorf(
391 "%s: %s is not a valid output for module %s", 501 "%s: %q is not a valid output for module %q",
392 source, mv.Field, mv.Name)) 502 source, mv.Field, mv.Name,
503 ))
393 } 504 }
394 } 505 }
395 } 506 }
396 507
397 return newErr.ErrOrNil() 508 return diags
509}
510
511// versionedPathKey returns a path string with every level's full name, version
512// and source encoded. This is to provide a unique key for our module storage,
513// since submodules need to know which versions of their ancestor modules they
514// are loaded from.
515// For example, if module A has a subdirectory B and module A's source or
516// version is updated, B's storage key must reflect this change in order for the
517// correct version of B's source to be loaded.
518func (t *Tree) versionedPathKey(m *Module) string {
519 path := make([]string, len(t.path)+1)
520 path[len(path)-1] = m.Name + ";" + m.Source
521 // We're going to load these in order for easier reading and debugging, but
522 // in practice they only need to be unique and consistent.
523
524 p := t
525 i := len(path) - 2
526 for ; i >= 0; i-- {
527 if p == nil {
528 break
529 }
530 // we may have been loaded under a blank Tree, so always check for a name
531 // too.
532 if p.name == "" {
533 break
534 }
535 seg := p.name
536 if p.version != "" {
537 seg += "#" + p.version
538 }
539
540 if p.source != "" {
541 seg += ";" + p.source
542 }
543
544 path[i] = seg
545 p = p.parent
546 }
547
548 key := strings.Join(path, "|")
549 return key
398} 550}
399 551
400// treeError is an error used by Tree.Validate to accumulate all
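
To make the storage-key scheme above concrete, here is an illustrative (not authoritative) walk-through of the strings versionedPathKey and getChildren would build, based only on the concatenation shown in this diff; the module names, sources, and version are invented.

package main

import "fmt"

func main() {
	// A module "vpc" referenced directly from the root configuration with
	// source "./modules/vpc": the path has a single element, so the key is
	// just name;source, and getChildren prefixes the "1." cache-busting digit.
	fmt.Println("1." + "vpc;./modules/vpc")
	// -> 1.vpc;./modules/vpc

	// A module "db" declared inside "vpc": the ancestor segment carries the
	// parent's name, an optional "#version", and ";source", and segments are
	// joined with "|". When a registry version was resolved, getChildren also
	// appends ".<version>" to the key.
	key := "1." + "vpc;./modules/vpc" + "|" + "db;./db"
	fmt.Println(key + "." + "0.1.0")
	// -> 1.vpc;./modules/vpc|db;./db.0.1.0
}
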
diff --git a/vendor/github.com/hashicorp/terraform/config/module/validate_provider_alias.go b/vendor/github.com/hashicorp/terraform/config/module/validate_provider_alias.go
index 090d4f7..f203556 100644
--- a/vendor/github.com/hashicorp/terraform/config/module/validate_provider_alias.go
+++ b/vendor/github.com/hashicorp/terraform/config/module/validate_provider_alias.go
@@ -67,7 +67,7 @@ func (t *Tree) validateProviderAlias() error {
67 67
68 // We didn't find the alias, error! 68 // We didn't find the alias, error!
69 err = multierror.Append(err, fmt.Errorf( 69 err = multierror.Append(err, fmt.Errorf(
70 "module %s: provider alias must be defined by the module or a parent: %s", 70 "module %s: provider alias must be defined by the module: %s",
71 strings.Join(pv.Path, "."), k)) 71 strings.Join(pv.Path, "."), k))
72 } 72 }
73 } 73 }
diff --git a/vendor/github.com/hashicorp/terraform/config/module/versions.go b/vendor/github.com/hashicorp/terraform/config/module/versions.go
new file mode 100644
index 0000000..8348d4b
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/module/versions.go
@@ -0,0 +1,95 @@
1package module
2
3import (
4 "errors"
5 "fmt"
6 "sort"
7
8 version "github.com/hashicorp/go-version"
9 "github.com/hashicorp/terraform/registry/response"
10)
11
12const anyVersion = ">=0.0.0"
13
14// return the newest version that satisfies the provided constraint
15func newest(versions []string, constraint string) (string, error) {
16 if constraint == "" {
17 constraint = anyVersion
18 }
19 cs, err := version.NewConstraint(constraint)
20 if err != nil {
21 return "", err
22 }
23
24 switch len(versions) {
25 case 0:
26 return "", errors.New("no versions found")
27 case 1:
28 v, err := version.NewVersion(versions[0])
29 if err != nil {
30 return "", err
31 }
32
33 if !cs.Check(v) {
34 return "", fmt.Errorf("no version found matching constraint %q", constraint)
35 }
36 return versions[0], nil
37 }
38
39 sort.Slice(versions, func(i, j int) bool {
40 // versions should have already been validated
41 // sort invalid version strings to the end
42 iv, err := version.NewVersion(versions[i])
43 if err != nil {
44 return true
45 }
46 jv, err := version.NewVersion(versions[j])
47 if err != nil {
48 return true
49 }
50 return iv.GreaterThan(jv)
51 })
52
53 // versions are now in order, so just find the first which satisfies the
54 // constraint
55 for i := range versions {
56 v, err := version.NewVersion(versions[i])
57 if err != nil {
58 continue
59 }
60 if cs.Check(v) {
61 return versions[i], nil
62 }
63 }
64
65 return "", nil
66}
67
68// return the newest *moduleVersion that matches the given constraint
69// TODO: reconcile these two types and newest* functions
70func newestVersion(moduleVersions []*response.ModuleVersion, constraint string) (*response.ModuleVersion, error) {
71 var versions []string
72 modules := make(map[string]*response.ModuleVersion)
73
74 for _, m := range moduleVersions {
75 versions = append(versions, m.Version)
76 modules[m.Version] = m
77 }
78
79 match, err := newest(versions, constraint)
80 return modules[match], err
81}
82
83// return the newest moduleRecord that matches the given constraint
84func newestRecord(moduleVersions []moduleRecord, constraint string) (moduleRecord, error) {
85 var versions []string
86 modules := make(map[string]moduleRecord)
87
88 for _, m := range moduleVersions {
89 versions = append(versions, m.Version)
90 modules[m.Version] = m
91 }
92
93 match, err := newest(versions, constraint)
94 return modules[match], err
95}
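
The newest helper above leans on hashicorp/go-version for constraint checks and ordering. The following stand-alone sketch shows those primitives with an arbitrarily chosen constraint and version list; it mirrors the shape of newest() without being the same function.

package main

import (
	"fmt"
	"log"
	"sort"

	version "github.com/hashicorp/go-version"
)

func main() {
	cs, err := version.NewConstraint(">= 1.2.0, < 2.0.0")
	if err != nil {
		log.Fatal(err)
	}

	raw := []string{"1.4.5", "2.1.0", "1.2.0"}
	vs := make([]*version.Version, 0, len(raw))
	for _, s := range raw {
		v, err := version.NewVersion(s)
		if err != nil {
			log.Fatal(err)
		}
		vs = append(vs, v)
	}

	// Sort newest-first, then take the first version that satisfies the
	// constraint -- the same idea as newest() above.
	sort.Slice(vs, func(i, j int) bool { return vs[i].GreaterThan(vs[j]) })
	for _, v := range vs {
		if cs.Check(v) {
			fmt.Println("newest matching:", v) // 1.4.5
			break
		}
	}
}
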
diff --git a/vendor/github.com/hashicorp/terraform/config/raw_config.go b/vendor/github.com/hashicorp/terraform/config/raw_config.go
index f8498d8..1854a8b 100644
--- a/vendor/github.com/hashicorp/terraform/config/raw_config.go
+++ b/vendor/github.com/hashicorp/terraform/config/raw_config.go
@@ -3,8 +3,14 @@ package config
3import ( 3import (
4 "bytes" 4 "bytes"
5 "encoding/gob" 5 "encoding/gob"
6 "errors"
7 "strconv"
6 "sync" 8 "sync"
7 9
10 "github.com/zclconf/go-cty/cty"
11 "github.com/zclconf/go-cty/cty/convert"
12
13 hcl2 "github.com/hashicorp/hcl2/hcl"
8 "github.com/hashicorp/hil" 14 "github.com/hashicorp/hil"
9 "github.com/hashicorp/hil/ast" 15 "github.com/hashicorp/hil/ast"
10 "github.com/mitchellh/copystructure" 16 "github.com/mitchellh/copystructure"
@@ -27,8 +33,24 @@ const UnknownVariableValue = "74D93920-ED26-11E3-AC10-0800200C9A66"
27// RawConfig supports a query-like interface to request 33// RawConfig supports a query-like interface to request
28// information from deep within the structure. 34// information from deep within the structure.
29type RawConfig struct { 35type RawConfig struct {
30 Key string 36 Key string
31 Raw map[string]interface{} 37
38 // Only _one_ of Raw and Body may be populated at a time.
39 //
40 // In the normal case, Raw is populated and Body is nil.
41 //
42 // When the experimental HCL2 parsing mode is enabled, "Body"
43 // is populated and RawConfig serves only to transport the hcl2.Body
44 // through the rest of Terraform core so we can ultimately decode it
45 // once its schema is known.
46 //
47 // Once we transition to HCL2 as the primary representation, RawConfig
48 // should be removed altogether and the hcl2.Body should be passed
49 // around directly.
50
51 Raw map[string]interface{}
52 Body hcl2.Body
53
32 Interpolations []ast.Node 54 Interpolations []ast.Node
33 Variables map[string]InterpolatedVariable 55 Variables map[string]InterpolatedVariable
34 56
@@ -48,6 +70,26 @@ func NewRawConfig(raw map[string]interface{}) (*RawConfig, error) {
48 return result, nil 70 return result, nil
49} 71}
50 72
73// NewRawConfigHCL2 creates a new RawConfig that is serving as a capsule
74// to transport a hcl2.Body. In this mode, the publicly-readable struct
75// fields are not populated since all operations should instead be diverted
76// to the HCL2 body.
77//
78// For a RawConfig object constructed with this function, the only valid use
79// is to later retrieve the Body value and call its own methods. Callers
80// may choose to set and then later handle the Key field, in a manner
81// consistent with how it is handled by the Value method, but the Value
82// method itself must not be used.
83//
84// This is an experimental codepath to be used only by the HCL2 config loader.
85// Non-experimental parsing should _always_ use NewRawConfig to produce a
86// fully-functional RawConfig object.
87func NewRawConfigHCL2(body hcl2.Body) *RawConfig {
88 return &RawConfig{
89 Body: body,
90 }
91}
92
51// RawMap returns a copy of the RawConfig.Raw map. 93// RawMap returns a copy of the RawConfig.Raw map.
52func (r *RawConfig) RawMap() map[string]interface{} { 94func (r *RawConfig) RawMap() map[string]interface{} {
53 r.lock.Lock() 95 r.lock.Lock()
@@ -69,6 +111,10 @@ func (r *RawConfig) Copy() *RawConfig {
69 r.lock.Lock() 111 r.lock.Lock()
70 defer r.lock.Unlock() 112 defer r.lock.Unlock()
71 113
114 if r.Body != nil {
115 return NewRawConfigHCL2(r.Body)
116 }
117
72 newRaw := make(map[string]interface{}) 118 newRaw := make(map[string]interface{})
73 for k, v := range r.Raw { 119 for k, v := range r.Raw {
74 newRaw[k] = v 120 newRaw[k] = v
@@ -223,6 +269,13 @@ func (r *RawConfig) init() error {
223} 269}
224 270
225func (r *RawConfig) interpolate(fn interpolationWalkerFunc) error { 271func (r *RawConfig) interpolate(fn interpolationWalkerFunc) error {
272 if r.Body != nil {
273 // For RawConfigs created for the HCL2 experiment, callers must
274 // use the HCL2 Body API directly rather than interpolating via
275 // the RawConfig.
276 return errors.New("this feature is not yet supported under the HCL2 experiment")
277 }
278
226 config, err := copystructure.Copy(r.Raw) 279 config, err := copystructure.Copy(r.Raw)
227 if err != nil { 280 if err != nil {
228 return err 281 return err
@@ -268,6 +321,74 @@ func (r *RawConfig) merge(r2 *RawConfig) *RawConfig {
268 return result 321 return result
269} 322}
270 323
324// couldBeInteger is a helper that determines if the represented value could
325// result in an integer.
326//
327// This function only works for RawConfigs that have "Key" set, meaning that
328// a single result can be produced. Calling this function will overwrite
329// the Config and Value results to be a test value.
330//
331// This function is conservative. If there is some doubt about whether the
332// result could be an integer -- for example, if it depends on a variable
333// whose type we don't know yet -- it will still return true.
334func (r *RawConfig) couldBeInteger() bool {
335 if r.Key == "" {
336 // un-keyed RawConfigs can never produce numbers
337 return false
338 }
339 if r.Body == nil {
340 // Normal path: using the interpolator in this package
341 // Interpolate with a fixed number to verify that it's a number.
342 r.interpolate(func(root ast.Node) (interface{}, error) {
343 // Execute the node but transform the AST so that it returns
344 // a fixed value of "5" for all interpolations.
345 result, err := hil.Eval(
346 hil.FixedValueTransform(
347 root, &ast.LiteralNode{Value: "5", Typex: ast.TypeString}),
348 nil)
349 if err != nil {
350 return "", err
351 }
352
353 return result.Value, nil
354 })
355 _, err := strconv.ParseInt(r.Value().(string), 0, 0)
356 return err == nil
357 } else {
358 // HCL2 experiment path: using the HCL2 API via shims
359 //
360 // This path catches fewer situations because we have to assume all
361 // variables are entirely unknown in HCL2, rather than the assumption
362 // above that all variables can be numbers because names like "var.foo"
363 // are considered a single variable rather than an attribute access.
364 // This is fine in practice, because we get a definitive answer
365 // during the graph walk when we have real values to work with.
366 attrs, diags := r.Body.JustAttributes()
367 if diags.HasErrors() {
368 // This body is not just a single attribute with a value, so
369 // this can't be a number.
370 return false
371 }
372 attr, hasAttr := attrs[r.Key]
373 if !hasAttr {
374 return false
375 }
376 result, diags := hcl2EvalWithUnknownVars(attr.Expr)
377 if diags.HasErrors() {
378 // We'll conservatively assume that this error is a result of
379 // us not being ready to fully-populate the scope, and catch
380 // any further problems during the main graph walk.
381 return true
382 }
383
384 // If the result is convertible to number then we'll allow it.
385 // We do this because an unknown string is optimistically convertible
386 // to number (might be "5") but a _known_ string "hello" is not.
387 _, err := convert.Convert(result, cty.Number)
388 return err == nil
389 }
390}
391
271// UnknownKeys returns the keys of the configuration that are unknown 392// UnknownKeys returns the keys of the configuration that are unknown
272// because they had interpolated variables that must be computed. 393// because they had interpolated variables that must be computed.
273func (r *RawConfig) UnknownKeys() []string { 394func (r *RawConfig) UnknownKeys() []string {
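
The HCL2 branch of couldBeInteger relies on go-cty conversion semantics: a known non-numeric string fails to convert to a number, while an unknown string converts optimistically. A small illustration of that behavior, independent of RawConfig itself:

package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/convert"
)

func main() {
	// A known numeric-looking string converts to a number.
	if _, err := convert.Convert(cty.StringVal("5"), cty.Number); err == nil {
		fmt.Println(`"5" could be an integer`)
	}

	// A known non-numeric string does not.
	if _, err := convert.Convert(cty.StringVal("hello"), cty.Number); err != nil {
		fmt.Println(`"hello" could not be an integer`)
	}

	// An unknown string converts optimistically, so the check stays
	// conservative until real values are available during the graph walk.
	if _, err := convert.Convert(cty.UnknownVal(cty.String), cty.Number); err == nil {
		fmt.Println("unknown string is treated as possibly numeric")
	}
}
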
diff --git a/vendor/github.com/hashicorp/terraform/config/resource_mode_string.go b/vendor/github.com/hashicorp/terraform/config/resource_mode_string.go
index ea68b4f..8a55e06 100644
--- a/vendor/github.com/hashicorp/terraform/config/resource_mode_string.go
+++ b/vendor/github.com/hashicorp/terraform/config/resource_mode_string.go
@@ -2,7 +2,7 @@
2 2
3package config 3package config
4 4
5import "fmt" 5import "strconv"
6 6
7const _ResourceMode_name = "ManagedResourceModeDataResourceMode" 7const _ResourceMode_name = "ManagedResourceModeDataResourceMode"
8 8
@@ -10,7 +10,7 @@ var _ResourceMode_index = [...]uint8{0, 19, 35}
10 10
11func (i ResourceMode) String() string { 11func (i ResourceMode) String() string {
12 if i < 0 || i >= ResourceMode(len(_ResourceMode_index)-1) { 12 if i < 0 || i >= ResourceMode(len(_ResourceMode_index)-1) {
13 return fmt.Sprintf("ResourceMode(%d)", i) 13 return "ResourceMode(" + strconv.FormatInt(int64(i), 10) + ")"
14 } 14 }
15 return _ResourceMode_name[_ResourceMode_index[i]:_ResourceMode_index[i+1]] 15 return _ResourceMode_name[_ResourceMode_index[i]:_ResourceMode_index[i+1]]
16} 16}
diff --git a/vendor/github.com/hashicorp/terraform/config/testing.go b/vendor/github.com/hashicorp/terraform/config/testing.go
index f7bfadd..831fc77 100644
--- a/vendor/github.com/hashicorp/terraform/config/testing.go
+++ b/vendor/github.com/hashicorp/terraform/config/testing.go
@@ -6,6 +6,8 @@ import (
6 6
7// TestRawConfig is used to create a RawConfig for testing. 7// TestRawConfig is used to create a RawConfig for testing.
8func TestRawConfig(t *testing.T, c map[string]interface{}) *RawConfig { 8func TestRawConfig(t *testing.T, c map[string]interface{}) *RawConfig {
9 t.Helper()
10
9 cfg, err := NewRawConfig(c) 11 cfg, err := NewRawConfig(c)
10 if err != nil { 12 if err != nil {
11 t.Fatalf("err: %s", err) 13 t.Fatalf("err: %s", err)
diff --git a/vendor/github.com/hashicorp/terraform/dag/dag.go b/vendor/github.com/hashicorp/terraform/dag/dag.go
index f8776bc..b7eb10c 100644
--- a/vendor/github.com/hashicorp/terraform/dag/dag.go
+++ b/vendor/github.com/hashicorp/terraform/dag/dag.go
@@ -106,7 +106,7 @@ func (g *AcyclicGraph) TransitiveReduction() {
106 uTargets := g.DownEdges(u) 106 uTargets := g.DownEdges(u)
107 vs := AsVertexList(g.DownEdges(u)) 107 vs := AsVertexList(g.DownEdges(u))
108 108
109 g.DepthFirstWalk(vs, func(v Vertex, d int) error { 109 g.depthFirstWalk(vs, false, func(v Vertex, d int) error {
110 shared := uTargets.Intersection(g.DownEdges(v)) 110 shared := uTargets.Intersection(g.DownEdges(v))
111 for _, vPrime := range AsVertexList(shared) { 111 for _, vPrime := range AsVertexList(shared) {
112 g.RemoveEdge(BasicEdge(u, vPrime)) 112 g.RemoveEdge(BasicEdge(u, vPrime))
@@ -187,9 +187,18 @@ type vertexAtDepth struct {
187} 187}
188 188
189// depthFirstWalk does a depth-first walk of the graph starting from 189// depthFirstWalk does a depth-first walk of the graph starting from
190// the vertices in start. This is not exported now but it would make sense 190// the vertices in start.
191// to export this publicly at some point.
192func (g *AcyclicGraph) DepthFirstWalk(start []Vertex, f DepthWalkFunc) error { 191func (g *AcyclicGraph) DepthFirstWalk(start []Vertex, f DepthWalkFunc) error {
192 return g.depthFirstWalk(start, true, f)
193}
194
195// This internal method provides the option of not sorting the vertices during
196 // the walk, which we use for the transitive reduction.
197// Some configurations can lead to fully-connected subgraphs, which makes our
198// transitive reduction algorithm O(n^3). This is still passable for the size
199// of our graphs, but the additional n^2 sort operations would make this
200// uncomputable in a reasonable amount of time.
201func (g *AcyclicGraph) depthFirstWalk(start []Vertex, sorted bool, f DepthWalkFunc) error {
193 defer g.debug.BeginOperation(typeDepthFirstWalk, "").End("") 202 defer g.debug.BeginOperation(typeDepthFirstWalk, "").End("")
194 203
195 seen := make(map[Vertex]struct{}) 204 seen := make(map[Vertex]struct{})
@@ -219,7 +228,11 @@ func (g *AcyclicGraph) DepthFirstWalk(start []Vertex, f DepthWalkFunc) error {
219 228
220 // Visit targets of this in a consistent order. 229 // Visit targets of this in a consistent order.
221 targets := AsVertexList(g.DownEdges(current.Vertex)) 230 targets := AsVertexList(g.DownEdges(current.Vertex))
222 sort.Sort(byVertexName(targets)) 231
232 if sorted {
233 sort.Sort(byVertexName(targets))
234 }
235
223 for _, t := range targets { 236 for _, t := range targets {
224 frontier = append(frontier, &vertexAtDepth{ 237 frontier = append(frontier, &vertexAtDepth{
225 Vertex: t, 238 Vertex: t,
diff --git a/vendor/github.com/hashicorp/terraform/dag/marshal.go b/vendor/github.com/hashicorp/terraform/dag/marshal.go
index 16d5dd6..c567d27 100644
--- a/vendor/github.com/hashicorp/terraform/dag/marshal.go
+++ b/vendor/github.com/hashicorp/terraform/dag/marshal.go
@@ -273,6 +273,9 @@ func (e *encoder) Encode(i interface{}) {
273} 273}
274 274
275func (e *encoder) Add(v Vertex) { 275func (e *encoder) Add(v Vertex) {
276 if e == nil {
277 return
278 }
276 e.Encode(marshalTransform{ 279 e.Encode(marshalTransform{
277 Type: typeTransform, 280 Type: typeTransform,
278 AddVertex: newMarshalVertex(v), 281 AddVertex: newMarshalVertex(v),
@@ -281,6 +284,9 @@ func (e *encoder) Add(v Vertex) {
281 284
282// Remove records the removal of Vertex v. 285// Remove records the removal of Vertex v.
283func (e *encoder) Remove(v Vertex) { 286func (e *encoder) Remove(v Vertex) {
287 if e == nil {
288 return
289 }
284 e.Encode(marshalTransform{ 290 e.Encode(marshalTransform{
285 Type: typeTransform, 291 Type: typeTransform,
286 RemoveVertex: newMarshalVertex(v), 292 RemoveVertex: newMarshalVertex(v),
@@ -288,6 +294,9 @@ func (e *encoder) Remove(v Vertex) {
288} 294}
289 295
290func (e *encoder) Connect(edge Edge) { 296func (e *encoder) Connect(edge Edge) {
297 if e == nil {
298 return
299 }
291 e.Encode(marshalTransform{ 300 e.Encode(marshalTransform{
292 Type: typeTransform, 301 Type: typeTransform,
293 AddEdge: newMarshalEdge(edge), 302 AddEdge: newMarshalEdge(edge),
@@ -295,6 +304,9 @@ func (e *encoder) Connect(edge Edge) {
295} 304}
296 305
297func (e *encoder) RemoveEdge(edge Edge) { 306func (e *encoder) RemoveEdge(edge Edge) {
307 if e == nil {
308 return
309 }
298 e.Encode(marshalTransform{ 310 e.Encode(marshalTransform{
299 Type: typeTransform, 311 Type: typeTransform,
300 RemoveEdge: newMarshalEdge(edge), 312 RemoveEdge: newMarshalEdge(edge),
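
The added "if e == nil" guards work because calling a method through a nil pointer receiver is legal in Go as long as the method never dereferences the receiver. A self-contained illustration of the same pattern, using an invented type:

package main

import "fmt"

type recorder struct {
	events []string
}

// add is safe to call on a nil *recorder: it simply becomes a no-op, which
// lets callers skip "is recording enabled?" checks, just as the graph debug
// encoder does above.
func (r *recorder) add(event string) {
	if r == nil {
		return
	}
	r.events = append(r.events, event)
}

func main() {
	var off *recorder  // nil: recording disabled
	off.add("ignored") // no-op, no panic

	on := &recorder{}
	on.add("vertex added")
	fmt.Println(len(on.events)) // 1
}
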
diff --git a/vendor/github.com/hashicorp/terraform/dag/walk.go b/vendor/github.com/hashicorp/terraform/dag/walk.go
index 23c87ad..f03b100 100644
--- a/vendor/github.com/hashicorp/terraform/dag/walk.go
+++ b/vendor/github.com/hashicorp/terraform/dag/walk.go
@@ -166,7 +166,7 @@ func (w *Walker) Update(g *AcyclicGraph) {
166 w.wait.Add(1) 166 w.wait.Add(1)
167 167
168 // Add to our own set so we know about it already 168 // Add to our own set so we know about it already
169 log.Printf("[DEBUG] dag/walk: added new vertex: %q", VertexName(v)) 169 log.Printf("[TRACE] dag/walk: added new vertex: %q", VertexName(v))
170 w.vertices.Add(raw) 170 w.vertices.Add(raw)
171 171
172 // Initialize the vertex info 172 // Initialize the vertex info
@@ -198,7 +198,7 @@ func (w *Walker) Update(g *AcyclicGraph) {
198 // Delete it out of the map 198 // Delete it out of the map
199 delete(w.vertexMap, v) 199 delete(w.vertexMap, v)
200 200
201 log.Printf("[DEBUG] dag/walk: removed vertex: %q", VertexName(v)) 201 log.Printf("[TRACE] dag/walk: removed vertex: %q", VertexName(v))
202 w.vertices.Delete(raw) 202 w.vertices.Delete(raw)
203 } 203 }
204 204
@@ -229,7 +229,7 @@ func (w *Walker) Update(g *AcyclicGraph) {
229 changedDeps.Add(waiter) 229 changedDeps.Add(waiter)
230 230
231 log.Printf( 231 log.Printf(
232 "[DEBUG] dag/walk: added edge: %q waiting on %q", 232 "[TRACE] dag/walk: added edge: %q waiting on %q",
233 VertexName(waiter), VertexName(dep)) 233 VertexName(waiter), VertexName(dep))
234 w.edges.Add(raw) 234 w.edges.Add(raw)
235 } 235 }
@@ -253,7 +253,7 @@ func (w *Walker) Update(g *AcyclicGraph) {
253 changedDeps.Add(waiter) 253 changedDeps.Add(waiter)
254 254
255 log.Printf( 255 log.Printf(
256 "[DEBUG] dag/walk: removed edge: %q waiting on %q", 256 "[TRACE] dag/walk: removed edge: %q waiting on %q",
257 VertexName(waiter), VertexName(dep)) 257 VertexName(waiter), VertexName(dep))
258 w.edges.Delete(raw) 258 w.edges.Delete(raw)
259 } 259 }
@@ -296,7 +296,7 @@ func (w *Walker) Update(g *AcyclicGraph) {
296 info.depsCancelCh = cancelCh 296 info.depsCancelCh = cancelCh
297 297
298 log.Printf( 298 log.Printf(
299 "[DEBUG] dag/walk: dependencies changed for %q, sending new deps", 299 "[TRACE] dag/walk: dependencies changed for %q, sending new deps",
300 VertexName(v)) 300 VertexName(v))
301 301
302 // Start the waiter 302 // Start the waiter
@@ -383,10 +383,10 @@ func (w *Walker) walkVertex(v Vertex, info *walkerVertex) {
383 // Run our callback or note that our upstream failed 383 // Run our callback or note that our upstream failed
384 var err error 384 var err error
385 if depsSuccess { 385 if depsSuccess {
386 log.Printf("[DEBUG] dag/walk: walking %q", VertexName(v)) 386 log.Printf("[TRACE] dag/walk: walking %q", VertexName(v))
387 err = w.Callback(v) 387 err = w.Callback(v)
388 } else { 388 } else {
389 log.Printf("[DEBUG] dag/walk: upstream errored, not walking %q", VertexName(v)) 389 log.Printf("[TRACE] dag/walk: upstream errored, not walking %q", VertexName(v))
390 err = errWalkUpstream 390 err = errWalkUpstream
391 } 391 }
392 392
@@ -423,7 +423,7 @@ func (w *Walker) waitDeps(
423 return 423 return
424 424
425 case <-time.After(time.Second * 5): 425 case <-time.After(time.Second * 5):
426 log.Printf("[DEBUG] dag/walk: vertex %q, waiting for: %q", 426 log.Printf("[TRACE] dag/walk: vertex %q, waiting for: %q",
427 VertexName(v), VertexName(dep)) 427 VertexName(v), VertexName(dep))
428 } 428 }
429 } 429 }
diff --git a/vendor/github.com/hashicorp/terraform/helper/experiment/experiment.go b/vendor/github.com/hashicorp/terraform/helper/experiment/experiment.go
deleted file mode 100644
index 18b8837..0000000
--- a/vendor/github.com/hashicorp/terraform/helper/experiment/experiment.go
+++ /dev/null
@@ -1,154 +0,0 @@
1// experiment package contains helper functions for tracking experimental
2// features throughout Terraform.
3//
4// This package should be used for creating, enabling, querying, and deleting
5// experimental features. By unifying all of that onto a single interface,
6// we can have the Go compiler help us by enforcing every place we touch
7// an experimental feature.
8//
9// To create a new experiment:
10//
11// 1. Add the experiment to the global vars list below, prefixed with X_
12//
13// 2. Add the experiment variable to the All listin the init() function
14//
15// 3. Use it!
16//
17// To remove an experiment:
18//
19// 1. Delete the experiment global var.
20//
21// 2. Try to compile and fix all the places where the var was referenced.
22//
23// To use an experiment:
24//
25// 1. Use Flag() if you want the experiment to be available from the CLI.
26//
27// 2. Use Enabled() to check whether it is enabled.
28//
29// As a general user:
30//
31// 1. The `-Xexperiment-name` flag
32// 2. The `TF_X_<experiment-name>` env var.
33// 3. The `TF_X_FORCE` env var can be set to force an experimental feature
34// without human verifications.
35//
36package experiment
37
38import (
39 "flag"
40 "fmt"
41 "os"
42 "strconv"
43 "strings"
44 "sync"
45)
46
47// The experiments that are available are listed below. Any package in
48// Terraform defining an experiment should define the experiments below.
49// By keeping them all within the experiment package we force a single point
50// of definition and use. This allows the compiler to enforce references
51// so it becomes easy to remove the features.
52var (
53 // Shadow graph. This is already on by default. Disabling it will be
54 // allowed for awhile in order for it to not block operations.
55 X_shadow = newBasicID("shadow", "SHADOW", false)
56)
57
58// Global variables this package uses because we are a package
59// with global state.
60var (
61 // all is the list of all experiements. Do not modify this.
62 All []ID
63
64 // enabled keeps track of what flags have been enabled
65 enabled map[string]bool
66 enabledLock sync.Mutex
67
68 // Hidden "experiment" that forces all others to be on without verification
69 x_force = newBasicID("force", "FORCE", false)
70)
71
72func init() {
73 // The list of all experiments, update this when an experiment is added.
74 All = []ID{
75 X_shadow,
76 x_force,
77 }
78
79 // Load
80 reload()
81}
82
83// reload is used by tests to reload the global state. This is called by
84// init publicly.
85func reload() {
86 // Initialize
87 enabledLock.Lock()
88 enabled = make(map[string]bool)
89 enabledLock.Unlock()
90
91 // Set defaults and check env vars
92 for _, id := range All {
93 // Get the default value
94 def := id.Default()
95
96 // If we set it in the env var, default it to true
97 key := fmt.Sprintf("TF_X_%s", strings.ToUpper(id.Env()))
98 if v := os.Getenv(key); v != "" {
99 def = v != "0"
100 }
101
102 // Set the default
103 SetEnabled(id, def)
104 }
105}
106
107// Enabled returns whether an experiment has been enabled or not.
108func Enabled(id ID) bool {
109 enabledLock.Lock()
110 defer enabledLock.Unlock()
111 return enabled[id.Flag()]
112}
113
114// SetEnabled sets an experiment to enabled/disabled. Please check with
115// the experiment docs for when calling this actually affects the experiment.
116func SetEnabled(id ID, v bool) {
117 enabledLock.Lock()
118 defer enabledLock.Unlock()
119 enabled[id.Flag()] = v
120}
121
122// Force returns true if the -Xforce of TF_X_FORCE flag is present, which
123// advises users of this package to not verify with the user that they want
124// experimental behavior and to just continue with it.
125func Force() bool {
126 return Enabled(x_force)
127}
128
129// Flag configures the given FlagSet with the flags to configure
130// all active experiments.
131func Flag(fs *flag.FlagSet) {
132 for _, id := range All {
133 desc := id.Flag()
134 key := fmt.Sprintf("X%s", id.Flag())
135 fs.Var(&idValue{X: id}, key, desc)
136 }
137}
138
139// idValue implements flag.Value for setting the enabled/disabled state
140// of an experiment from the CLI.
141type idValue struct {
142 X ID
143}
144
145func (v *idValue) IsBoolFlag() bool { return true }
146func (v *idValue) String() string { return strconv.FormatBool(Enabled(v.X)) }
147func (v *idValue) Set(raw string) error {
148 b, err := strconv.ParseBool(raw)
149 if err == nil {
150 SetEnabled(v.X, b)
151 }
152
153 return err
154}
diff --git a/vendor/github.com/hashicorp/terraform/helper/experiment/id.go b/vendor/github.com/hashicorp/terraform/helper/experiment/id.go
deleted file mode 100644
index 8e2f707..0000000
--- a/vendor/github.com/hashicorp/terraform/helper/experiment/id.go
+++ /dev/null
@@ -1,34 +0,0 @@
1package experiment
2
3// ID represents an experimental feature.
4//
5// The global vars defined on this package should be used as ID values.
6// This interface is purposely not implement-able outside of this package
7// so that we can rely on the Go compiler to enforce all experiment references.
8type ID interface {
9 Env() string
10 Flag() string
11 Default() bool
12
13 unexported() // So the ID can't be implemented externally.
14}
15
16// basicID implements ID.
17type basicID struct {
18 EnvValue string
19 FlagValue string
20 DefaultValue bool
21}
22
23func newBasicID(flag, env string, def bool) ID {
24 return &basicID{
25 EnvValue: env,
26 FlagValue: flag,
27 DefaultValue: def,
28 }
29}
30
31func (id *basicID) Env() string { return id.EnvValue }
32func (id *basicID) Flag() string { return id.FlagValue }
33func (id *basicID) Default() bool { return id.DefaultValue }
34func (id *basicID) unexported() {}
diff --git a/vendor/github.com/hashicorp/terraform/helper/hashcode/hashcode.go b/vendor/github.com/hashicorp/terraform/helper/hashcode/hashcode.go
index 64d8263..6ccc523 100644
--- a/vendor/github.com/hashicorp/terraform/helper/hashcode/hashcode.go
+++ b/vendor/github.com/hashicorp/terraform/helper/hashcode/hashcode.go
@@ -1,6 +1,8 @@
1package hashcode 1package hashcode
2 2
3import ( 3import (
4 "bytes"
5 "fmt"
4 "hash/crc32" 6 "hash/crc32"
5) 7)
6 8
@@ -20,3 +22,14 @@ func String(s string) int {
20 // v == MinInt 22 // v == MinInt
21 return 0 23 return 0
22} 24}
25
26// Strings hashes a list of strings to a unique hashcode.
27func Strings(strings []string) string {
28 var buf bytes.Buffer
29
30 for _, s := range strings {
31 buf.WriteString(fmt.Sprintf("%s-", s))
32 }
33
34 return fmt.Sprintf("%d", String(buf.String()))
35}
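A minimal sketch of the new Strings helper, which joins its inputs and runs the existing String CRC32 hash over the result; the attribute values below are illustrative:

package main

import (
	"fmt"

	"github.com/hashicorp/terraform/helper/hashcode"
)

func main() {
	// Hash several attribute values into one stable, non-negative decimal string.
	id := hashcode.Strings([]string{"us-east-1", "web", "production"})
	fmt.Println(id)
}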
diff --git a/vendor/github.com/hashicorp/terraform/helper/logging/logging.go b/vendor/github.com/hashicorp/terraform/helper/logging/logging.go
index 433cd77..6bd92f7 100644
--- a/vendor/github.com/hashicorp/terraform/helper/logging/logging.go
+++ b/vendor/github.com/hashicorp/terraform/helper/logging/logging.go
@@ -18,7 +18,7 @@ const (
18 EnvLogFile = "TF_LOG_PATH" // Set to a file 18 EnvLogFile = "TF_LOG_PATH" // Set to a file
19) 19)
20 20
21var validLevels = []logutils.LogLevel{"TRACE", "DEBUG", "INFO", "WARN", "ERROR"} 21var ValidLevels = []logutils.LogLevel{"TRACE", "DEBUG", "INFO", "WARN", "ERROR"}
22 22
23// LogOutput determines where we should send logs (if anywhere) and the log level. 23// LogOutput determines where we should send logs (if anywhere) and the log level.
24func LogOutput() (logOutput io.Writer, err error) { 24func LogOutput() (logOutput io.Writer, err error) {
@@ -40,7 +40,7 @@ func LogOutput() (logOutput io.Writer, err error) {
40 40
41 // This was the default since the beginning 41 // This was the default since the beginning
42 logOutput = &logutils.LevelFilter{ 42 logOutput = &logutils.LevelFilter{
43 Levels: validLevels, 43 Levels: ValidLevels,
44 MinLevel: logutils.LogLevel(logLevel), 44 MinLevel: logutils.LogLevel(logLevel),
45 Writer: logOutput, 45 Writer: logOutput,
46 } 46 }
@@ -77,7 +77,7 @@ func LogLevel() string {
77 logLevel = strings.ToUpper(envLevel) 77 logLevel = strings.ToUpper(envLevel)
78 } else { 78 } else {
79 log.Printf("[WARN] Invalid log level: %q. Defaulting to level: TRACE. Valid levels are: %+v", 79 log.Printf("[WARN] Invalid log level: %q. Defaulting to level: TRACE. Valid levels are: %+v",
80 envLevel, validLevels) 80 envLevel, ValidLevels)
81 } 81 }
82 82
83 return logLevel 83 return logLevel
@@ -90,7 +90,7 @@ func IsDebugOrHigher() bool {
90} 90}
91 91
92func isValidLogLevel(level string) bool { 92func isValidLogLevel(level string) bool {
93 for _, l := range validLevels { 93 for _, l := range ValidLevels {
94 if strings.ToUpper(level) == string(l) { 94 if strings.ToUpper(level) == string(l) {
95 return true 95 return true
96 } 96 }
diff --git a/vendor/github.com/hashicorp/terraform/helper/logging/transport.go b/vendor/github.com/hashicorp/terraform/helper/logging/transport.go
index 4477924..bddabe6 100644
--- a/vendor/github.com/hashicorp/terraform/helper/logging/transport.go
+++ b/vendor/github.com/hashicorp/terraform/helper/logging/transport.go
@@ -1,9 +1,12 @@
1package logging 1package logging
2 2
3import ( 3import (
4 "bytes"
5 "encoding/json"
4 "log" 6 "log"
5 "net/http" 7 "net/http"
6 "net/http/httputil" 8 "net/http/httputil"
9 "strings"
7) 10)
8 11
9type transport struct { 12type transport struct {
@@ -15,7 +18,7 @@ func (t *transport) RoundTrip(req *http.Request) (*http.Response, error) {
15 if IsDebugOrHigher() { 18 if IsDebugOrHigher() {
16 reqData, err := httputil.DumpRequestOut(req, true) 19 reqData, err := httputil.DumpRequestOut(req, true)
17 if err == nil { 20 if err == nil {
18 log.Printf("[DEBUG] "+logReqMsg, t.name, string(reqData)) 21 log.Printf("[DEBUG] "+logReqMsg, t.name, prettyPrintJsonLines(reqData))
19 } else { 22 } else {
20 log.Printf("[ERROR] %s API Request error: %#v", t.name, err) 23 log.Printf("[ERROR] %s API Request error: %#v", t.name, err)
21 } 24 }
@@ -29,7 +32,7 @@ func (t *transport) RoundTrip(req *http.Request) (*http.Response, error) {
29 if IsDebugOrHigher() { 32 if IsDebugOrHigher() {
30 respData, err := httputil.DumpResponse(resp, true) 33 respData, err := httputil.DumpResponse(resp, true)
31 if err == nil { 34 if err == nil {
32 log.Printf("[DEBUG] "+logRespMsg, t.name, string(respData)) 35 log.Printf("[DEBUG] "+logRespMsg, t.name, prettyPrintJsonLines(respData))
33 } else { 36 } else {
34 log.Printf("[ERROR] %s API Response error: %#v", t.name, err) 37 log.Printf("[ERROR] %s API Response error: %#v", t.name, err)
35 } 38 }
@@ -42,6 +45,20 @@ func NewTransport(name string, t http.RoundTripper) *transport {
42 return &transport{name, t} 45 return &transport{name, t}
43} 46}
44 47
48// prettyPrintJsonLines iterates through a []byte line-by-line,
49// transforming any lines that are valid JSON into pretty-printed JSON.
50func prettyPrintJsonLines(b []byte) string {
51 parts := strings.Split(string(b), "\n")
52 for i, p := range parts {
53 if b := []byte(p); json.Valid(b) {
54 var out bytes.Buffer
55 json.Indent(&out, b, "", " ")
56 parts[i] = out.String()
57 }
58 }
59 return strings.Join(parts, "\n")
60}
61
45const logReqMsg = `%s API Request Details: 62const logReqMsg = `%s API Request Details:
46---[ REQUEST ]--------------------------------------- 63---[ REQUEST ]---------------------------------------
47%s 64%s
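As a sketch of how this transport is used, a provider can wrap its HTTP client so request and response dumps (now pretty-printed whenever a line is valid JSON) reach the log stream at DEBUG or higher; the client name is illustrative:

package main

import (
	"net/http"

	"github.com/hashicorp/terraform/helper/logging"
)

func main() {
	client := &http.Client{
		// Dump API traffic to the Terraform log when TF_LOG is DEBUG or TRACE.
		Transport: logging.NewTransport("Example", http.DefaultTransport),
	}
	_, _ = client.Get("https://example.com")
}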
diff --git a/vendor/github.com/hashicorp/terraform/helper/resource/id.go b/vendor/github.com/hashicorp/terraform/helper/resource/id.go
index 1cde67c..4494955 100644
--- a/vendor/github.com/hashicorp/terraform/helper/resource/id.go
+++ b/vendor/github.com/hashicorp/terraform/helper/resource/id.go
@@ -18,6 +18,11 @@ func UniqueId() string {
18 return PrefixedUniqueId(UniqueIdPrefix) 18 return PrefixedUniqueId(UniqueIdPrefix)
19} 19}
20 20
21// UniqueIDSuffixLength is the string length of the suffix generated by
22// PrefixedUniqueId. This can be used by length validation functions to
23// ensure prefixes are the correct length for the target field.
24const UniqueIDSuffixLength = 26
25
21// Helper for a resource to generate a unique identifier w/ given prefix 26// Helper for a resource to generate a unique identifier w/ given prefix
22// 27//
23// After the prefix, the ID consists of an incrementing 26 digit value (to match 28// After the prefix, the ID consists of an incrementing 26 digit value (to match
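A hedged sketch of the kind of length validation the new constant enables; the 64-character ceiling and the prefix value are assumptions, not anything defined here:

package main

import (
	"fmt"

	"github.com/hashicorp/terraform/helper/resource"
)

// validateNamePrefix checks that a user-supplied prefix leaves room for the
// 26-character suffix appended by PrefixedUniqueId, against a hypothetical
// 64-character field limit.
func validateNamePrefix(prefix string) error {
	max := 64 - resource.UniqueIDSuffixLength
	if len(prefix) > max {
		return fmt.Errorf("prefix %q is longer than %d characters", prefix, max)
	}
	return nil
}

func main() {
	fmt.Println(validateNamePrefix("tf-example-"))
}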
diff --git a/vendor/github.com/hashicorp/terraform/helper/resource/state.go b/vendor/github.com/hashicorp/terraform/helper/resource/state.go
index 37c586a..c34e21b 100644
--- a/vendor/github.com/hashicorp/terraform/helper/resource/state.go
+++ b/vendor/github.com/hashicorp/terraform/helper/resource/state.go
@@ -46,7 +46,7 @@ type StateChangeConf struct {
46// If the Timeout is exceeded before reaching the Target state, return an 46// If the Timeout is exceeded before reaching the Target state, return an
47// error. 47// error.
48// 48//
49// Otherwise, result the result of the first call to the Refresh function to 49// Otherwise, the result is the result of the first call to the Refresh function to
50// reach the target state. 50// reach the target state.
51func (conf *StateChangeConf) WaitForState() (interface{}, error) { 51func (conf *StateChangeConf) WaitForState() (interface{}, error) {
52 log.Printf("[DEBUG] Waiting for state to become: %s", conf.Target) 52 log.Printf("[DEBUG] Waiting for state to become: %s", conf.Target)
diff --git a/vendor/github.com/hashicorp/terraform/helper/resource/testing.go b/vendor/github.com/hashicorp/terraform/helper/resource/testing.go
index d7de1a0..b97673f 100644
--- a/vendor/github.com/hashicorp/terraform/helper/resource/testing.go
+++ b/vendor/github.com/hashicorp/terraform/helper/resource/testing.go
@@ -11,11 +11,13 @@ import (
11 "reflect" 11 "reflect"
12 "regexp" 12 "regexp"
13 "strings" 13 "strings"
14 "syscall"
14 "testing" 15 "testing"
15 16
16 "github.com/davecgh/go-spew/spew" 17 "github.com/davecgh/go-spew/spew"
17 "github.com/hashicorp/go-getter" 18 "github.com/hashicorp/errwrap"
18 "github.com/hashicorp/go-multierror" 19 "github.com/hashicorp/go-multierror"
20 "github.com/hashicorp/logutils"
19 "github.com/hashicorp/terraform/config/module" 21 "github.com/hashicorp/terraform/config/module"
20 "github.com/hashicorp/terraform/helper/logging" 22 "github.com/hashicorp/terraform/helper/logging"
21 "github.com/hashicorp/terraform/terraform" 23 "github.com/hashicorp/terraform/terraform"
@@ -186,6 +188,10 @@ type TestCheckFunc func(*terraform.State) error
186// ImportStateCheckFunc is the check function for ImportState tests 188// ImportStateCheckFunc is the check function for ImportState tests
187type ImportStateCheckFunc func([]*terraform.InstanceState) error 189type ImportStateCheckFunc func([]*terraform.InstanceState) error
188 190
191// ImportStateIdFunc is an ID generation function to help with complex ID
192// generation for ImportState tests.
193type ImportStateIdFunc func(*terraform.State) (string, error)
194
189// TestCase is a single acceptance test case used to test the apply/destroy 195// TestCase is a single acceptance test case used to test the apply/destroy
190// lifecycle of a resource in a specific configuration. 196// lifecycle of a resource in a specific configuration.
191// 197//
@@ -260,6 +266,15 @@ type TestStep struct {
260 // below. 266 // below.
261 PreConfig func() 267 PreConfig func()
262 268
269 // Taint is a list of resource addresses to taint prior to the execution of
270 // the step. Be sure to only include this at a step where the referenced
271 // address will be present in state, as it will fail the test if the resource
272 // is missing.
273 //
274 // This option is ignored on ImportState tests, and currently only works for
275 // resources in the root module path.
276 Taint []string
277
263 //--------------------------------------------------------------- 278 //---------------------------------------------------------------
264 // Test modes. One of the following groups of settings must be 279 // Test modes. One of the following groups of settings must be
265 // set to determine what the test step will do. Ideally we would've 280 // set to determine what the test step will do. Ideally we would've
@@ -304,10 +319,19 @@ type TestStep struct {
304 // no-op plans 319 // no-op plans
305 PlanOnly bool 320 PlanOnly bool
306 321
322	// PreventDiskCleanup can be set to true for testing Terraform modules which
323	// require access to disk at runtime. Note that this will leave files in the
324	// temp folder.
325 PreventDiskCleanup bool
326
307 // PreventPostDestroyRefresh can be set to true for cases where data sources 327 // PreventPostDestroyRefresh can be set to true for cases where data sources
308 // are tested alongside real resources 328 // are tested alongside real resources
309 PreventPostDestroyRefresh bool 329 PreventPostDestroyRefresh bool
310 330
331	// SkipFunc is called before applying config, but after PreConfig.
332	// This is useful for defining test steps with platform-dependent checks.
333 SkipFunc func() (bool, error)
334
311 //--------------------------------------------------------------- 335 //---------------------------------------------------------------
312 // ImportState testing 336 // ImportState testing
313 //--------------------------------------------------------------- 337 //---------------------------------------------------------------
@@ -329,6 +353,12 @@ type TestStep struct {
329 // the unset ImportStateId field. 353 // the unset ImportStateId field.
330 ImportStateIdPrefix string 354 ImportStateIdPrefix string
331 355
356 // ImportStateIdFunc is a function that can be used to dynamically generate
357 // the ID for the ImportState tests. It is sent the state, which can be
358 // checked to derive the attributes necessary and generate the string in the
359 // desired format.
360 ImportStateIdFunc ImportStateIdFunc
361
332 // ImportStateCheck checks the results of ImportState. It should be 362 // ImportStateCheck checks the results of ImportState. It should be
333 // used to verify that the resulting value of ImportState has the 363 // used to verify that the resulting value of ImportState has the
334 // proper resources, IDs, and attributes. 364 // proper resources, IDs, and attributes.
@@ -345,6 +375,60 @@ type TestStep struct {
345 ImportStateVerifyIgnore []string 375 ImportStateVerifyIgnore []string
346} 376}
347 377
378// Set to a file mask in sprintf format, where %s is the test name.
379const EnvLogPathMask = "TF_LOG_PATH_MASK"
380
381func LogOutput(t TestT) (logOutput io.Writer, err error) {
382 logOutput = ioutil.Discard
383
384 logLevel := logging.LogLevel()
385 if logLevel == "" {
386 return
387 }
388
389 logOutput = os.Stderr
390
391 if logPath := os.Getenv(logging.EnvLogFile); logPath != "" {
392 var err error
393 logOutput, err = os.OpenFile(logPath, syscall.O_CREAT|syscall.O_RDWR|syscall.O_APPEND, 0666)
394 if err != nil {
395 return nil, err
396 }
397 }
398
399 if logPathMask := os.Getenv(EnvLogPathMask); logPathMask != "" {
400 // Escape special characters which may appear if we have subtests
401 testName := strings.Replace(t.Name(), "/", "__", -1)
402
403 logPath := fmt.Sprintf(logPathMask, testName)
404 var err error
405 logOutput, err = os.OpenFile(logPath, syscall.O_CREAT|syscall.O_RDWR|syscall.O_APPEND, 0666)
406 if err != nil {
407 return nil, err
408 }
409 }
410
411 // This was the default since the beginning
412 logOutput = &logutils.LevelFilter{
413 Levels: logging.ValidLevels,
414 MinLevel: logutils.LogLevel(logLevel),
415 Writer: logOutput,
416 }
417
418 return
419}
420
421// ParallelTest performs an acceptance test on a resource, allowing concurrency
422// with other tests that use ParallelTest.
423//
424// Tests will fail if they do not properly handle conditions to allow multiple
425// tests to occur against the same resource or service (e.g. random naming).
426// All other requirements of the Test function also apply to this function.
427func ParallelTest(t TestT, c TestCase) {
428 t.Parallel()
429 Test(t, c)
430}
431
348// Test performs an acceptance test on a resource. 432// Test performs an acceptance test on a resource.
349// 433//
350// Tests are not run unless an environmental variable "TF_ACC" is 434// Tests are not run unless an environmental variable "TF_ACC" is
@@ -366,7 +450,7 @@ func Test(t TestT, c TestCase) {
366 return 450 return
367 } 451 }
368 452
369 logWriter, err := logging.LogOutput() 453 logWriter, err := LogOutput(t)
370 if err != nil { 454 if err != nil {
371 t.Error(fmt.Errorf("error setting up logging: %s", err)) 455 t.Error(fmt.Errorf("error setting up logging: %s", err))
372 } 456 }
@@ -398,7 +482,18 @@ func Test(t TestT, c TestCase) {
398 errored := false 482 errored := false
399 for i, step := range c.Steps { 483 for i, step := range c.Steps {
400 var err error 484 var err error
401 log.Printf("[WARN] Test: Executing step %d", i) 485 log.Printf("[DEBUG] Test: Executing step %d", i)
486
487 if step.SkipFunc != nil {
488 skip, err := step.SkipFunc()
489 if err != nil {
490 t.Fatal(err)
491 }
492 if skip {
493 log.Printf("[WARN] Skipping step %d", i)
494 continue
495 }
496 }
402 497
403 if step.Config == "" && !step.ImportState { 498 if step.Config == "" && !step.ImportState {
404 err = fmt.Errorf( 499 err = fmt.Errorf(
@@ -418,6 +513,15 @@ func Test(t TestT, c TestCase) {
418 } 513 }
419 } 514 }
420 515
516 // If we expected an error, but did not get one, fail
517 if err == nil && step.ExpectError != nil {
518 errored = true
519 t.Error(fmt.Sprintf(
520 "Step %d, no error received, but expected a match to:\n\n%s\n\n",
521 i, step.ExpectError))
522 break
523 }
524
421 // If there was an error, exit 525 // If there was an error, exit
422 if err != nil { 526 if err != nil {
423 // Perhaps we expected an error? Check if it matches 527 // Perhaps we expected an error? Check if it matches
@@ -485,6 +589,7 @@ func Test(t TestT, c TestCase) {
485 Config: lastStep.Config, 589 Config: lastStep.Config,
486 Check: c.CheckDestroy, 590 Check: c.CheckDestroy,
487 Destroy: true, 591 Destroy: true,
592 PreventDiskCleanup: lastStep.PreventDiskCleanup,
488 PreventPostDestroyRefresh: c.PreventPostDestroyRefresh, 593 PreventPostDestroyRefresh: c.PreventPostDestroyRefresh,
489 } 594 }
490 595
@@ -593,18 +698,12 @@ func testIDOnlyRefresh(c TestCase, opts terraform.ContextOpts, step TestStep, r
593 if err != nil { 698 if err != nil {
594 return err 699 return err
595 } 700 }
596 if ws, es := ctx.Validate(); len(ws) > 0 || len(es) > 0 { 701 if diags := ctx.Validate(); len(diags) > 0 {
597 if len(es) > 0 { 702 if diags.HasErrors() {
598 estrs := make([]string, len(es)) 703 return errwrap.Wrapf("config is invalid: {{err}}", diags.Err())
599 for i, e := range es {
600 estrs[i] = e.Error()
601 }
602 return fmt.Errorf(
603 "Configuration is invalid.\n\nWarnings: %#v\n\nErrors: %#v",
604 ws, estrs)
605 } 704 }
606 705
607 log.Printf("[WARN] Config warnings: %#v", ws) 706 log.Printf("[WARN] Config warnings:\n%s", diags.Err().Error())
608 } 707 }
609 708
610 // Refresh! 709 // Refresh!
@@ -657,9 +756,7 @@ func testIDOnlyRefresh(c TestCase, opts terraform.ContextOpts, step TestStep, r
657 return nil 756 return nil
658} 757}
659 758
660func testModule( 759func testModule(opts terraform.ContextOpts, step TestStep) (*module.Tree, error) {
661 opts terraform.ContextOpts,
662 step TestStep) (*module.Tree, error) {
663 if step.PreConfig != nil { 760 if step.PreConfig != nil {
664 step.PreConfig() 761 step.PreConfig()
665 } 762 }
@@ -669,7 +766,12 @@ func testModule(
669 return nil, fmt.Errorf( 766 return nil, fmt.Errorf(
670 "Error creating temporary directory for config: %s", err) 767 "Error creating temporary directory for config: %s", err)
671 } 768 }
672 defer os.RemoveAll(cfgPath) 769
770 if step.PreventDiskCleanup {
771 log.Printf("[INFO] Skipping defer os.RemoveAll call")
772 } else {
773 defer os.RemoveAll(cfgPath)
774 }
673 775
674 // Write the configuration 776 // Write the configuration
675 cfgF, err := os.Create(filepath.Join(cfgPath, "main.tf")) 777 cfgF, err := os.Create(filepath.Join(cfgPath, "main.tf"))
@@ -693,10 +795,11 @@ func testModule(
693 } 795 }
694 796
695 // Load the modules 797 // Load the modules
696 modStorage := &getter.FolderStorage{ 798 modStorage := &module.Storage{
697 StorageDir: filepath.Join(cfgPath, ".tfmodules"), 799 StorageDir: filepath.Join(cfgPath, ".tfmodules"),
800 Mode: module.GetModeGet,
698 } 801 }
699 err = mod.Load(modStorage, module.GetModeGet) 802 err = mod.Load(modStorage)
700 if err != nil { 803 if err != nil {
701 return nil, fmt.Errorf("Error downloading modules: %s", err) 804 return nil, fmt.Errorf("Error downloading modules: %s", err)
702 } 805 }
@@ -771,12 +874,29 @@ func TestCheckResourceAttrSet(name, key string) TestCheckFunc {
771 return err 874 return err
772 } 875 }
773 876
774 if val, ok := is.Attributes[key]; ok && val != "" { 877 return testCheckResourceAttrSet(is, name, key)
775 return nil 878 }
879}
880
881// TestCheckModuleResourceAttrSet - as per TestCheckResourceAttrSet but with
882// support for non-root modules
883func TestCheckModuleResourceAttrSet(mp []string, name string, key string) TestCheckFunc {
884 return func(s *terraform.State) error {
885 is, err := modulePathPrimaryInstanceState(s, mp, name)
886 if err != nil {
887 return err
776 } 888 }
777 889
890 return testCheckResourceAttrSet(is, name, key)
891 }
892}
893
894func testCheckResourceAttrSet(is *terraform.InstanceState, name string, key string) error {
895 if val, ok := is.Attributes[key]; !ok || val == "" {
778 return fmt.Errorf("%s: Attribute '%s' expected to be set", name, key) 896 return fmt.Errorf("%s: Attribute '%s' expected to be set", name, key)
779 } 897 }
898
899 return nil
780} 900}
781 901
782// TestCheckResourceAttr is a TestCheckFunc which validates 902// TestCheckResourceAttr is a TestCheckFunc which validates
@@ -788,21 +908,37 @@ func TestCheckResourceAttr(name, key, value string) TestCheckFunc {
788 return err 908 return err
789 } 909 }
790 910
791 if v, ok := is.Attributes[key]; !ok || v != value { 911 return testCheckResourceAttr(is, name, key, value)
792 if !ok { 912 }
793 return fmt.Errorf("%s: Attribute '%s' not found", name, key) 913}
794 }
795 914
796 return fmt.Errorf( 915// TestCheckModuleResourceAttr - as per TestCheckResourceAttr but with
797 "%s: Attribute '%s' expected %#v, got %#v", 916// support for non-root modules
798 name, 917func TestCheckModuleResourceAttr(mp []string, name string, key string, value string) TestCheckFunc {
799 key, 918 return func(s *terraform.State) error {
800 value, 919 is, err := modulePathPrimaryInstanceState(s, mp, name)
801 v) 920 if err != nil {
921 return err
802 } 922 }
803 923
804 return nil 924 return testCheckResourceAttr(is, name, key, value)
925 }
926}
927
928func testCheckResourceAttr(is *terraform.InstanceState, name string, key string, value string) error {
929 if v, ok := is.Attributes[key]; !ok || v != value {
930 if !ok {
931 return fmt.Errorf("%s: Attribute '%s' not found", name, key)
932 }
933
934 return fmt.Errorf(
935 "%s: Attribute '%s' expected %#v, got %#v",
936 name,
937 key,
938 value,
939 v)
805 } 940 }
941 return nil
806} 942}
807 943
808// TestCheckNoResourceAttr is a TestCheckFunc which ensures that 944// TestCheckNoResourceAttr is a TestCheckFunc which ensures that
@@ -814,14 +950,31 @@ func TestCheckNoResourceAttr(name, key string) TestCheckFunc {
814 return err 950 return err
815 } 951 }
816 952
817 if _, ok := is.Attributes[key]; ok { 953 return testCheckNoResourceAttr(is, name, key)
818 return fmt.Errorf("%s: Attribute '%s' found when not expected", name, key) 954 }
955}
956
957// TestCheckModuleNoResourceAttr - as per TestCheckNoResourceAttr but with
958// support for non-root modules
959func TestCheckModuleNoResourceAttr(mp []string, name string, key string) TestCheckFunc {
960 return func(s *terraform.State) error {
961 is, err := modulePathPrimaryInstanceState(s, mp, name)
962 if err != nil {
963 return err
819 } 964 }
820 965
821 return nil 966 return testCheckNoResourceAttr(is, name, key)
822 } 967 }
823} 968}
824 969
970func testCheckNoResourceAttr(is *terraform.InstanceState, name string, key string) error {
971 if _, ok := is.Attributes[key]; ok {
972 return fmt.Errorf("%s: Attribute '%s' found when not expected", name, key)
973 }
974
975 return nil
976}
977
825// TestMatchResourceAttr is a TestCheckFunc which checks that the value 978// TestMatchResourceAttr is a TestCheckFunc which checks that the value
826// in state for the given name/key combination matches the given regex. 979// in state for the given name/key combination matches the given regex.
827func TestMatchResourceAttr(name, key string, r *regexp.Regexp) TestCheckFunc { 980func TestMatchResourceAttr(name, key string, r *regexp.Regexp) TestCheckFunc {
@@ -831,17 +984,34 @@ func TestMatchResourceAttr(name, key string, r *regexp.Regexp) TestCheckFunc {
831 return err 984 return err
832 } 985 }
833 986
834 if !r.MatchString(is.Attributes[key]) { 987 return testMatchResourceAttr(is, name, key, r)
835 return fmt.Errorf( 988 }
836 "%s: Attribute '%s' didn't match %q, got %#v", 989}
837 name, 990
838 key, 991// TestModuleMatchResourceAttr - as per TestMatchResourceAttr but with
839 r.String(), 992// support for non-root modules
840 is.Attributes[key]) 993func TestModuleMatchResourceAttr(mp []string, name string, key string, r *regexp.Regexp) TestCheckFunc {
994 return func(s *terraform.State) error {
995 is, err := modulePathPrimaryInstanceState(s, mp, name)
996 if err != nil {
997 return err
841 } 998 }
842 999
843 return nil 1000 return testMatchResourceAttr(is, name, key, r)
1001 }
1002}
1003
1004func testMatchResourceAttr(is *terraform.InstanceState, name string, key string, r *regexp.Regexp) error {
1005 if !r.MatchString(is.Attributes[key]) {
1006 return fmt.Errorf(
1007 "%s: Attribute '%s' didn't match %q, got %#v",
1008 name,
1009 key,
1010 r.String(),
1011 is.Attributes[key])
844 } 1012 }
1013
1014 return nil
845} 1015}
846 1016
847// TestCheckResourceAttrPtr is like TestCheckResourceAttr except the 1017// TestCheckResourceAttrPtr is like TestCheckResourceAttr except the
@@ -853,6 +1023,14 @@ func TestCheckResourceAttrPtr(name string, key string, value *string) TestCheckF
853 } 1023 }
854} 1024}
855 1025
1026// TestCheckModuleResourceAttrPtr - as per TestCheckResourceAttrPtr but with
1027// support for non-root modules
1028func TestCheckModuleResourceAttrPtr(mp []string, name string, key string, value *string) TestCheckFunc {
1029 return func(s *terraform.State) error {
1030 return TestCheckModuleResourceAttr(mp, name, key, *value)(s)
1031 }
1032}
1033
856// TestCheckResourceAttrPair is a TestCheckFunc which validates that the values 1034// TestCheckResourceAttrPair is a TestCheckFunc which validates that the values
857// in state for a pair of name/key combinations are equal. 1035// in state for a pair of name/key combinations are equal.
858func TestCheckResourceAttrPair(nameFirst, keyFirst, nameSecond, keySecond string) TestCheckFunc { 1036func TestCheckResourceAttrPair(nameFirst, keyFirst, nameSecond, keySecond string) TestCheckFunc {
@@ -861,33 +1039,57 @@ func TestCheckResourceAttrPair(nameFirst, keyFirst, nameSecond, keySecond string
861 if err != nil { 1039 if err != nil {
862 return err 1040 return err
863 } 1041 }
864 vFirst, ok := isFirst.Attributes[keyFirst]
865 if !ok {
866 return fmt.Errorf("%s: Attribute '%s' not found", nameFirst, keyFirst)
867 }
868 1042
869 isSecond, err := primaryInstanceState(s, nameSecond) 1043 isSecond, err := primaryInstanceState(s, nameSecond)
870 if err != nil { 1044 if err != nil {
871 return err 1045 return err
872 } 1046 }
873 vSecond, ok := isSecond.Attributes[keySecond] 1047
874 if !ok { 1048 return testCheckResourceAttrPair(isFirst, nameFirst, keyFirst, isSecond, nameSecond, keySecond)
875 return fmt.Errorf("%s: Attribute '%s' not found", nameSecond, keySecond) 1049 }
1050}
1051
1052// TestCheckModuleResourceAttrPair - as per TestCheckResourceAttrPair but with
1053// support for non-root modules
1054func TestCheckModuleResourceAttrPair(mpFirst []string, nameFirst string, keyFirst string, mpSecond []string, nameSecond string, keySecond string) TestCheckFunc {
1055 return func(s *terraform.State) error {
1056 isFirst, err := modulePathPrimaryInstanceState(s, mpFirst, nameFirst)
1057 if err != nil {
1058 return err
876 } 1059 }
877 1060
878 if vFirst != vSecond { 1061 isSecond, err := modulePathPrimaryInstanceState(s, mpSecond, nameSecond)
879 return fmt.Errorf( 1062 if err != nil {
880 "%s: Attribute '%s' expected %#v, got %#v", 1063 return err
881 nameFirst,
882 keyFirst,
883 vSecond,
884 vFirst)
885 } 1064 }
886 1065
887 return nil 1066 return testCheckResourceAttrPair(isFirst, nameFirst, keyFirst, isSecond, nameSecond, keySecond)
888 } 1067 }
889} 1068}
890 1069
1070func testCheckResourceAttrPair(isFirst *terraform.InstanceState, nameFirst string, keyFirst string, isSecond *terraform.InstanceState, nameSecond string, keySecond string) error {
1071 vFirst, ok := isFirst.Attributes[keyFirst]
1072 if !ok {
1073 return fmt.Errorf("%s: Attribute '%s' not found", nameFirst, keyFirst)
1074 }
1075
1076 vSecond, ok := isSecond.Attributes[keySecond]
1077 if !ok {
1078 return fmt.Errorf("%s: Attribute '%s' not found", nameSecond, keySecond)
1079 }
1080
1081 if vFirst != vSecond {
1082 return fmt.Errorf(
1083 "%s: Attribute '%s' expected %#v, got %#v",
1084 nameFirst,
1085 keyFirst,
1086 vSecond,
1087 vFirst)
1088 }
1089
1090 return nil
1091}
1092
891// TestCheckOutput checks an output in the Terraform configuration 1093// TestCheckOutput checks an output in the Terraform configuration
892func TestCheckOutput(name, value string) TestCheckFunc { 1094func TestCheckOutput(name, value string) TestCheckFunc {
893 return func(s *terraform.State) error { 1095 return func(s *terraform.State) error {
@@ -936,23 +1138,43 @@ type TestT interface {
936 Error(args ...interface{}) 1138 Error(args ...interface{})
937 Fatal(args ...interface{}) 1139 Fatal(args ...interface{})
938 Skip(args ...interface{}) 1140 Skip(args ...interface{})
1141 Name() string
1142 Parallel()
939} 1143}
940 1144
941// This is set to true by unit tests to alter some behavior 1145// This is set to true by unit tests to alter some behavior
942var testTesting = false 1146var testTesting = false
943 1147
944// primaryInstanceState returns the primary instance state for the given resource name. 1148// modulePrimaryInstanceState returns the instance state for the given resource
945func primaryInstanceState(s *terraform.State, name string) (*terraform.InstanceState, error) { 1149// name in a ModuleState
946 ms := s.RootModule() 1150func modulePrimaryInstanceState(s *terraform.State, ms *terraform.ModuleState, name string) (*terraform.InstanceState, error) {
947 rs, ok := ms.Resources[name] 1151 rs, ok := ms.Resources[name]
948 if !ok { 1152 if !ok {
949 return nil, fmt.Errorf("Not found: %s", name) 1153 return nil, fmt.Errorf("Not found: %s in %s", name, ms.Path)
950 } 1154 }
951 1155
952 is := rs.Primary 1156 is := rs.Primary
953 if is == nil { 1157 if is == nil {
954 return nil, fmt.Errorf("No primary instance: %s", name) 1158 return nil, fmt.Errorf("No primary instance: %s in %s", name, ms.Path)
955 } 1159 }
956 1160
957 return is, nil 1161 return is, nil
958} 1162}
1163
1164// modulePathPrimaryInstanceState returns the primary instance state for the
1165// given resource name in a given module path.
1166func modulePathPrimaryInstanceState(s *terraform.State, mp []string, name string) (*terraform.InstanceState, error) {
1167 ms := s.ModuleByPath(mp)
1168 if ms == nil {
1169 return nil, fmt.Errorf("No module found at: %s", mp)
1170 }
1171
1172 return modulePrimaryInstanceState(s, ms, name)
1173}
1174
1175// primaryInstanceState returns the primary instance state for the given
1176// resource name in the root module.
1177func primaryInstanceState(s *terraform.State, name string) (*terraform.InstanceState, error) {
1178 ms := s.RootModule()
1179 return modulePrimaryInstanceState(s, ms, name)
1180}
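Putting the new TestStep fields together, an acceptance test might now look like the following hedged sketch; the provider map, resource type, and configuration are placeholders rather than anything defined in this diff:

package example

import (
	"runtime"
	"testing"

	"github.com/hashicorp/terraform/helper/resource"
	"github.com/hashicorp/terraform/terraform"
)

// testAccProviders would normally register the provider under test.
var testAccProviders = map[string]terraform.ResourceProvider{}

const testAccExampleThingConfig = `
resource "example_thing" "test" {
  name = "demo"
}
`

func TestAccExampleThing_taint(t *testing.T) {
	resource.ParallelTest(t, resource.TestCase{
		Providers: testAccProviders,
		Steps: []resource.TestStep{
			{
				Config: testAccExampleThingConfig,
				Check:  resource.TestCheckResourceAttrSet("example_thing.test", "id"),
			},
			{
				// Taint the resource created above and re-apply, forcing a
				// destroy/create cycle; skip the step where it is meaningless.
				Taint:  []string{"example_thing.test"},
				Config: testAccExampleThingConfig,
				SkipFunc: func() (bool, error) {
					return runtime.GOOS == "windows", nil
				},
				Check: resource.TestCheckResourceAttrSet("example_thing.test", "id"),
			},
		},
	})
}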
diff --git a/vendor/github.com/hashicorp/terraform/helper/resource/testing_config.go b/vendor/github.com/hashicorp/terraform/helper/resource/testing_config.go
index 537a11c..033f126 100644
--- a/vendor/github.com/hashicorp/terraform/helper/resource/testing_config.go
+++ b/vendor/github.com/hashicorp/terraform/helper/resource/testing_config.go
@@ -1,10 +1,12 @@
1package resource 1package resource
2 2
3import ( 3import (
4 "errors"
4 "fmt" 5 "fmt"
5 "log" 6 "log"
6 "strings" 7 "strings"
7 8
9 "github.com/hashicorp/errwrap"
8 "github.com/hashicorp/terraform/terraform" 10 "github.com/hashicorp/terraform/terraform"
9) 11)
10 12
@@ -20,6 +22,14 @@ func testStep(
20 opts terraform.ContextOpts, 22 opts terraform.ContextOpts,
21 state *terraform.State, 23 state *terraform.State,
22 step TestStep) (*terraform.State, error) { 24 step TestStep) (*terraform.State, error) {
25 // Pre-taint any resources that have been defined in Taint, as long as this
26 // is not a destroy step.
27 if !step.Destroy {
28 if err := testStepTaint(state, step); err != nil {
29 return state, err
30 }
31 }
32
23 mod, err := testModule(opts, step) 33 mod, err := testModule(opts, step)
24 if err != nil { 34 if err != nil {
25 return state, err 35 return state, err
@@ -33,17 +43,12 @@ func testStep(
33 if err != nil { 43 if err != nil {
34 return state, fmt.Errorf("Error initializing context: %s", err) 44 return state, fmt.Errorf("Error initializing context: %s", err)
35 } 45 }
36 if ws, es := ctx.Validate(); len(ws) > 0 || len(es) > 0 { 46 if diags := ctx.Validate(); len(diags) > 0 {
37 if len(es) > 0 { 47 if diags.HasErrors() {
38 estrs := make([]string, len(es)) 48 return nil, errwrap.Wrapf("config is invalid: {{err}}", diags.Err())
39 for i, e := range es {
40 estrs[i] = e.Error()
41 }
42 return state, fmt.Errorf(
43 "Configuration is invalid.\n\nWarnings: %#v\n\nErrors: %#v",
44 ws, estrs)
45 } 49 }
46 log.Printf("[WARN] Config warnings: %#v", ws) 50
51 log.Printf("[WARN] Config warnings:\n%s", diags)
47 } 52 }
48 53
49 // Refresh! 54 // Refresh!
@@ -158,3 +163,19 @@ func testStep(
158 // Made it here? Good job test step! 163 // Made it here? Good job test step!
159 return state, nil 164 return state, nil
160} 165}
166
167func testStepTaint(state *terraform.State, step TestStep) error {
168 for _, p := range step.Taint {
169 m := state.RootModule()
170 if m == nil {
171 return errors.New("no state")
172 }
173 rs, ok := m.Resources[p]
174 if !ok {
175 return fmt.Errorf("resource %q not found in state", p)
176 }
177 log.Printf("[WARN] Test: Explicitly tainting resource %q", p)
178 rs.Taint()
179 }
180 return nil
181}
diff --git a/vendor/github.com/hashicorp/terraform/helper/resource/testing_import_state.go b/vendor/github.com/hashicorp/terraform/helper/resource/testing_import_state.go
index 28ad105..94fef3c 100644
--- a/vendor/github.com/hashicorp/terraform/helper/resource/testing_import_state.go
+++ b/vendor/github.com/hashicorp/terraform/helper/resource/testing_import_state.go
@@ -16,15 +16,24 @@ func testStepImportState(
16 state *terraform.State, 16 state *terraform.State,
17 step TestStep) (*terraform.State, error) { 17 step TestStep) (*terraform.State, error) {
18 // Determine the ID to import 18 // Determine the ID to import
19 importId := step.ImportStateId 19 var importId string
20 if importId == "" { 20 switch {
21 case step.ImportStateIdFunc != nil:
22 var err error
23 importId, err = step.ImportStateIdFunc(state)
24 if err != nil {
25 return state, err
26 }
27 case step.ImportStateId != "":
28 importId = step.ImportStateId
29 default:
21 resource, err := testResource(step, state) 30 resource, err := testResource(step, state)
22 if err != nil { 31 if err != nil {
23 return state, err 32 return state, err
24 } 33 }
25
26 importId = resource.Primary.ID 34 importId = resource.Primary.ID
27 } 35 }
36
28 importPrefix := step.ImportStateIdPrefix 37 importPrefix := step.ImportStateIdPrefix
29 if importPrefix != "" { 38 if importPrefix != "" {
30 importId = fmt.Sprintf("%s%s", importPrefix, importId) 39 importId = fmt.Sprintf("%s%s", importPrefix, importId)
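A hedged sketch of the new ImportStateIdFunc in use, composing an import ID from two attributes already in state; the resource address and attribute names are hypothetical:

package example

import (
	"fmt"

	"github.com/hashicorp/terraform/helper/resource"
	"github.com/hashicorp/terraform/terraform"
)

// exampleImportStateIdFunc derives a composite "<cluster>/<name>" import ID
// from attributes of a resource created in an earlier step.
func exampleImportStateIdFunc(s *terraform.State) (string, error) {
	rs, ok := s.RootModule().Resources["example_thing.test"]
	if !ok {
		return "", fmt.Errorf("example_thing.test not found in state")
	}
	return fmt.Sprintf("%s/%s",
		rs.Primary.Attributes["cluster"],
		rs.Primary.Attributes["name"]), nil
}

var exampleImportStep = resource.TestStep{
	ResourceName:      "example_thing.test",
	ImportState:       true,
	ImportStateVerify: true,
	ImportStateIdFunc: exampleImportStateIdFunc,
}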
diff --git a/vendor/github.com/hashicorp/terraform/helper/resource/wait.go b/vendor/github.com/hashicorp/terraform/helper/resource/wait.go
index ca50e29..e56a515 100644
--- a/vendor/github.com/hashicorp/terraform/helper/resource/wait.go
+++ b/vendor/github.com/hashicorp/terraform/helper/resource/wait.go
@@ -74,7 +74,7 @@ func RetryableError(err error) *RetryError {
74 return &RetryError{Err: err, Retryable: true} 74 return &RetryError{Err: err, Retryable: true}
75} 75}
76 76
77// NonRetryableError is a helper to create a RetryError that's _not)_ retryable 77// NonRetryableError is a helper to create a RetryError that's _not_ retryable
78// from a given error. 78// from a given error.
79func NonRetryableError(err error) *RetryError { 79func NonRetryableError(err error) *RetryError {
80 if err == nil { 80 if err == nil {
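These constructors are normally returned from a resource.Retry callback; a minimal sketch under assumed error semantics (the timeout and the sentinel error are illustrative):

package example

import (
	"errors"
	"time"

	"github.com/hashicorp/terraform/helper/resource"
)

var errTemporarilyUnavailable = errors.New("temporarily unavailable")

// waitForThing retries transient failures until the timeout elapses and
// gives up immediately on anything else.
func waitForThing(check func() error) error {
	return resource.Retry(2*time.Minute, func() *resource.RetryError {
		err := check()
		switch {
		case err == nil:
			return nil
		case err == errTemporarilyUnavailable:
			return resource.RetryableError(err) // keep polling
		default:
			return resource.NonRetryableError(err) // fail fast
		}
	})
}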
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/backend.go b/vendor/github.com/hashicorp/terraform/helper/schema/backend.go
index a0729c0..57fbba7 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/backend.go
+++ b/vendor/github.com/hashicorp/terraform/helper/schema/backend.go
@@ -65,7 +65,7 @@ func (b *Backend) Configure(c *terraform.ResourceConfig) error {
65 65
66 // Get a ResourceData for this configuration. To do this, we actually 66 // Get a ResourceData for this configuration. To do this, we actually
67 // generate an intermediary "diff" although that is never exposed. 67 // generate an intermediary "diff" although that is never exposed.
68 diff, err := sm.Diff(nil, c) 68 diff, err := sm.Diff(nil, c, nil, nil)
69 if err != nil { 69 if err != nil {
70 return err 70 return err
71 } 71 }
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/core_schema.go b/vendor/github.com/hashicorp/terraform/helper/schema/core_schema.go
new file mode 100644
index 0000000..bf952f6
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/helper/schema/core_schema.go
@@ -0,0 +1,155 @@
1package schema
2
3import (
4 "fmt"
5
6 "github.com/hashicorp/terraform/config/configschema"
7 "github.com/zclconf/go-cty/cty"
8)
9
10// The functions and methods in this file are concerned with the conversion
11// of this package's schema model into the slightly-lower-level schema model
12// used by Terraform core for configuration parsing.
13
14// CoreConfigSchema lowers the receiver to the schema model expected by
15// Terraform core.
16//
17// This lower-level model has fewer features than the schema in this package,
18// describing only the basic structure of configuration and state values we
19// expect. The full schemaMap from this package is still required for full
20// validation, handling of default values, etc.
21//
22// This method presumes a schema that passes InternalValidate, and so may
23// panic or produce an invalid result if given an invalid schemaMap.
24func (m schemaMap) CoreConfigSchema() *configschema.Block {
25 if len(m) == 0 {
26 // We return an actual (empty) object here, rather than a nil,
27 // because a nil result would mean that we don't have a schema at
28 // all, rather than that we have an empty one.
29 return &configschema.Block{}
30 }
31
32 ret := &configschema.Block{
33 Attributes: map[string]*configschema.Attribute{},
34 BlockTypes: map[string]*configschema.NestedBlock{},
35 }
36
37 for name, schema := range m {
38 if schema.Elem == nil {
39 ret.Attributes[name] = schema.coreConfigSchemaAttribute()
40 continue
41 }
42 switch schema.Elem.(type) {
43 case *Schema:
44 ret.Attributes[name] = schema.coreConfigSchemaAttribute()
45 case *Resource:
46 ret.BlockTypes[name] = schema.coreConfigSchemaBlock()
47 default:
48 // Should never happen for a valid schema
49 panic(fmt.Errorf("invalid Schema.Elem %#v; need *Schema or *Resource", schema.Elem))
50 }
51 }
52
53 return ret
54}
55
56// coreConfigSchemaAttribute prepares a configschema.Attribute representation
57// of a schema. This is appropriate only for primitives or collections whose
58// Elem is an instance of Schema. Use coreConfigSchemaBlock for collections
59// whose Elem is a whole resource.
60func (s *Schema) coreConfigSchemaAttribute() *configschema.Attribute {
61 return &configschema.Attribute{
62 Type: s.coreConfigSchemaType(),
63 Optional: s.Optional,
64 Required: s.Required,
65 Computed: s.Computed,
66 Sensitive: s.Sensitive,
67 }
68}
69
70// coreConfigSchemaBlock prepares a configschema.NestedBlock representation of
71// a schema. This is appropriate only for collections whose Elem is an instance
72// of Resource, and will panic otherwise.
73func (s *Schema) coreConfigSchemaBlock() *configschema.NestedBlock {
74 ret := &configschema.NestedBlock{}
75 if nested := s.Elem.(*Resource).CoreConfigSchema(); nested != nil {
76 ret.Block = *nested
77 }
78 switch s.Type {
79 case TypeList:
80 ret.Nesting = configschema.NestingList
81 case TypeSet:
82 ret.Nesting = configschema.NestingSet
83 case TypeMap:
84 ret.Nesting = configschema.NestingMap
85 default:
86 // Should never happen for a valid schema
87 panic(fmt.Errorf("invalid s.Type %s for s.Elem being resource", s.Type))
88 }
89
90 ret.MinItems = s.MinItems
91 ret.MaxItems = s.MaxItems
92
93 if s.Required && s.MinItems == 0 {
94 // configschema doesn't have a "required" representation for nested
95 // blocks, but we can fake it by requiring at least one item.
96 ret.MinItems = 1
97 }
98
99 return ret
100}
101
102// coreConfigSchemaType determines the core config schema type that corresponds
103// to a particular schema's type.
104func (s *Schema) coreConfigSchemaType() cty.Type {
105 switch s.Type {
106 case TypeString:
107 return cty.String
108 case TypeBool:
109 return cty.Bool
110 case TypeInt, TypeFloat:
111 // configschema doesn't distinguish int and float, so helper/schema
112 // will deal with this as an additional validation step after
113 // configuration has been parsed and decoded.
114 return cty.Number
115 case TypeList, TypeSet, TypeMap:
116 var elemType cty.Type
117 switch set := s.Elem.(type) {
118 case *Schema:
119 elemType = set.coreConfigSchemaType()
120 case *Resource:
121 // In practice we don't actually use this for normal schema
122 // construction because we construct a NestedBlock in that
123 // case instead. See schemaMap.CoreConfigSchema.
124 elemType = set.CoreConfigSchema().ImpliedType()
125 default:
126 if set != nil {
127 // Should never happen for a valid schema
128 panic(fmt.Errorf("invalid Schema.Elem %#v; need *Schema or *Resource", s.Elem))
129 }
130 // Some pre-existing schemas assume string as default, so we need
131 // to be compatible with them.
132 elemType = cty.String
133 }
134 switch s.Type {
135 case TypeList:
136 return cty.List(elemType)
137 case TypeSet:
138 return cty.Set(elemType)
139 case TypeMap:
140 return cty.Map(elemType)
141 default:
142 // can never get here in practice, due to the case we're inside
143 panic("invalid collection type")
144 }
145 default:
146 // should never happen for a valid schema
147 panic(fmt.Errorf("invalid Schema.Type %s", s.Type))
148 }
149}
150
151// CoreConfigSchema is a convenient shortcut for calling CoreConfigSchema
152// on the resource's schema.
153func (r *Resource) CoreConfigSchema() *configschema.Block {
154 return schemaMap(r.Schema).CoreConfigSchema()
155}
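A hedged sketch of lowering a helper/schema resource into the core schema model added here; the resource schema itself is illustrative:

package example

import (
	"fmt"

	"github.com/hashicorp/terraform/helper/schema"
)

func exampleCoreSchema() {
	r := &schema.Resource{
		Schema: map[string]*schema.Schema{
			"name": {Type: schema.TypeString, Required: true},
			"tags": {Type: schema.TypeMap, Optional: true},
		},
	}

	// Lower to the configschema.Block used by Terraform core; fields with a
	// nil or *Schema Elem become Attributes, *Resource Elems become blocks.
	block := r.CoreConfigSchema()
	fmt.Printf("%d attributes, %d nested block types\n",
		len(block.Attributes), len(block.BlockTypes))
}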
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/data_source_resource_shim.go b/vendor/github.com/hashicorp/terraform/helper/schema/data_source_resource_shim.go
index 5a03d2d..8d93750 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/data_source_resource_shim.go
+++ b/vendor/github.com/hashicorp/terraform/helper/schema/data_source_resource_shim.go
@@ -32,7 +32,7 @@ func DataSourceResourceShim(name string, dataSource *Resource) *Resource {
32 32
33 // FIXME: Link to some further docs either on the website or in the 33 // FIXME: Link to some further docs either on the website or in the
34 // changelog, once such a thing exists. 34 // changelog, once such a thing exists.
35 dataSource.deprecationMessage = fmt.Sprintf( 35 dataSource.DeprecationMessage = fmt.Sprintf(
36 "using %s as a resource is deprecated; consider using the data source instead", 36 "using %s as a resource is deprecated; consider using the data source instead",
37 name, 37 name,
38 ) 38 )
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/field_reader.go b/vendor/github.com/hashicorp/terraform/helper/schema/field_reader.go
index 1660a67..b80b223 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/field_reader.go
+++ b/vendor/github.com/hashicorp/terraform/helper/schema/field_reader.go
@@ -126,6 +126,8 @@ func addrToSchema(addr []string, schemaMap map[string]*Schema) []*Schema {
126 switch v := current.Elem.(type) { 126 switch v := current.Elem.(type) {
127 case ValueType: 127 case ValueType:
128 current = &Schema{Type: v} 128 current = &Schema{Type: v}
129 case *Schema:
130 current, _ = current.Elem.(*Schema)
129 default: 131 default:
130 // maps default to string values. This is all we can have 132 // maps default to string values. This is all we can have
131 // if this is nested in another list or map. 133 // if this is nested in another list or map.
@@ -249,11 +251,10 @@ func readObjectField(
249} 251}
250 252
251// convert map values to the proper primitive type based on schema.Elem 253// convert map values to the proper primitive type based on schema.Elem
252func mapValuesToPrimitive(m map[string]interface{}, schema *Schema) error { 254func mapValuesToPrimitive(k string, m map[string]interface{}, schema *Schema) error {
253 255 elemType, err := getValueType(k, schema)
254 elemType := TypeString 256 if err != nil {
255 if et, ok := schema.Elem.(ValueType); ok { 257 return err
256 elemType = et
257 } 258 }
258 259
259 switch elemType { 260 switch elemType {
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/field_reader_config.go b/vendor/github.com/hashicorp/terraform/helper/schema/field_reader_config.go
index f958bbc..55a301d 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/field_reader_config.go
+++ b/vendor/github.com/hashicorp/terraform/helper/schema/field_reader_config.go
@@ -206,7 +206,7 @@ func (r *ConfigFieldReader) readMap(k string, schema *Schema) (FieldReadResult,
206 panic(fmt.Sprintf("unknown type: %#v", mraw)) 206 panic(fmt.Sprintf("unknown type: %#v", mraw))
207 } 207 }
208 208
209 err := mapValuesToPrimitive(result, schema) 209 err := mapValuesToPrimitive(k, result, schema)
210 if err != nil { 210 if err != nil {
211 return FieldReadResult{}, nil 211 return FieldReadResult{}, nil
212 } 212 }
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/field_reader_diff.go b/vendor/github.com/hashicorp/terraform/helper/schema/field_reader_diff.go
index 16bbae2..d558a5b 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/field_reader_diff.go
+++ b/vendor/github.com/hashicorp/terraform/helper/schema/field_reader_diff.go
@@ -29,29 +29,59 @@ type DiffFieldReader struct {
29 Diff *terraform.InstanceDiff 29 Diff *terraform.InstanceDiff
30 Source FieldReader 30 Source FieldReader
31 Schema map[string]*Schema 31 Schema map[string]*Schema
32
33 // cache for memoizing ReadField calls.
34 cache map[string]cachedFieldReadResult
35}
36
37type cachedFieldReadResult struct {
38 val FieldReadResult
39 err error
32} 40}
33 41
34func (r *DiffFieldReader) ReadField(address []string) (FieldReadResult, error) { 42func (r *DiffFieldReader) ReadField(address []string) (FieldReadResult, error) {
43 if r.cache == nil {
44 r.cache = make(map[string]cachedFieldReadResult)
45 }
46
47 // Create the cache key by joining around a value that isn't a valid part
48 // of an address. This assumes that the Source and Schema are not changed
49 // for the life of this DiffFieldReader.
50 cacheKey := strings.Join(address, "|")
51 if cached, ok := r.cache[cacheKey]; ok {
52 return cached.val, cached.err
53 }
54
35 schemaList := addrToSchema(address, r.Schema) 55 schemaList := addrToSchema(address, r.Schema)
36 if len(schemaList) == 0 { 56 if len(schemaList) == 0 {
57 r.cache[cacheKey] = cachedFieldReadResult{}
37 return FieldReadResult{}, nil 58 return FieldReadResult{}, nil
38 } 59 }
39 60
61 var res FieldReadResult
62 var err error
63
40 schema := schemaList[len(schemaList)-1] 64 schema := schemaList[len(schemaList)-1]
41 switch schema.Type { 65 switch schema.Type {
42 case TypeBool, TypeInt, TypeFloat, TypeString: 66 case TypeBool, TypeInt, TypeFloat, TypeString:
43 return r.readPrimitive(address, schema) 67 res, err = r.readPrimitive(address, schema)
44 case TypeList: 68 case TypeList:
45 return readListField(r, address, schema) 69 res, err = readListField(r, address, schema)
46 case TypeMap: 70 case TypeMap:
47 return r.readMap(address, schema) 71 res, err = r.readMap(address, schema)
48 case TypeSet: 72 case TypeSet:
49 return r.readSet(address, schema) 73 res, err = r.readSet(address, schema)
50 case typeObject: 74 case typeObject:
51 return readObjectField(r, address, schema.Elem.(map[string]*Schema)) 75 res, err = readObjectField(r, address, schema.Elem.(map[string]*Schema))
52 default: 76 default:
53 panic(fmt.Sprintf("Unknown type: %#v", schema.Type)) 77 panic(fmt.Sprintf("Unknown type: %#v", schema.Type))
54 } 78 }
79
80 r.cache[cacheKey] = cachedFieldReadResult{
81 val: res,
82 err: err,
83 }
84 return res, err
55} 85}
56 86
57func (r *DiffFieldReader) readMap( 87func (r *DiffFieldReader) readMap(
@@ -92,7 +122,8 @@ func (r *DiffFieldReader) readMap(
92 result[k] = v.New 122 result[k] = v.New
93 } 123 }
94 124
95 err = mapValuesToPrimitive(result, schema) 125 key := address[len(address)-1]
126 err = mapValuesToPrimitive(key, result, schema)
96 if err != nil { 127 if err != nil {
97 return FieldReadResult{}, nil 128 return FieldReadResult{}, nil
98 } 129 }
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/field_reader_map.go b/vendor/github.com/hashicorp/terraform/helper/schema/field_reader_map.go
index 9533981..054efe0 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/field_reader_map.go
+++ b/vendor/github.com/hashicorp/terraform/helper/schema/field_reader_map.go
@@ -61,7 +61,7 @@ func (r *MapFieldReader) readMap(k string, schema *Schema) (FieldReadResult, err
61 return true 61 return true
62 }) 62 })
63 63
64 err := mapValuesToPrimitive(result, schema) 64 err := mapValuesToPrimitive(k, result, schema)
65 if err != nil { 65 if err != nil {
66 return FieldReadResult{}, nil 66 return FieldReadResult{}, nil
67 } 67 }
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/field_writer_map.go b/vendor/github.com/hashicorp/terraform/helper/schema/field_writer_map.go
index 689ed8d..814c7ba 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/field_writer_map.go
+++ b/vendor/github.com/hashicorp/terraform/helper/schema/field_writer_map.go
@@ -39,6 +39,19 @@ func (w *MapFieldWriter) unsafeWriteField(addr string, value string) {
39 w.result[addr] = value 39 w.result[addr] = value
40} 40}
41 41
42// clearTree clears a field and any sub-fields of the given address out of the
43// map. This should be used to reset complex structures (namely sets)
44// before writing, to make sure that any conflicting data is removed (for
45// example, if the set was previously written to the writer's layer).
46func (w *MapFieldWriter) clearTree(addr []string) {
47 prefix := strings.Join(addr, ".") + "."
48 for k := range w.result {
49 if strings.HasPrefix(k, prefix) {
50 delete(w.result, k)
51 }
52 }
53}
54
42func (w *MapFieldWriter) WriteField(addr []string, value interface{}) error { 55func (w *MapFieldWriter) WriteField(addr []string, value interface{}) error {
43 w.lock.Lock() 56 w.lock.Lock()
44 defer w.lock.Unlock() 57 defer w.lock.Unlock()
@@ -115,6 +128,14 @@ func (w *MapFieldWriter) setList(
115 return fmt.Errorf("%s: %s", k, err) 128 return fmt.Errorf("%s: %s", k, err)
116 } 129 }
117 130
131 // Wipe the set from the current writer prior to writing if it exists.
132	// Multiple writes to the same layer are much safer for lists than for sets,
133	// because indexes are always deterministic and the length is updated with
134	// the current length on the last write. Still, starting from a clean
135	// namespace removes any chance for edge cases to pop up and ensures that
136	// the last write is the correct value.
137 w.clearTree(addr)
138
118 // Set the entire list. 139 // Set the entire list.
119 var err error 140 var err error
120 for i, elem := range vs { 141 for i, elem := range vs {
@@ -162,6 +183,10 @@ func (w *MapFieldWriter) setMap(
162 vs[mk.String()] = mv.Interface() 183 vs[mk.String()] = mv.Interface()
163 } 184 }
164 185
186 // Wipe this address tree. The contents of the map should always reflect the
187 // last write made to it.
188 w.clearTree(addr)
189
165 // Remove the pure key since we're setting the full map value 190 // Remove the pure key since we're setting the full map value
166 delete(w.result, k) 191 delete(w.result, k)
167 192
@@ -308,6 +333,13 @@ func (w *MapFieldWriter) setSet(
308 value = s 333 value = s
309 } 334 }
310 335
336	// Clear any keys that match the set address first. This is necessary because
337	// it's always possible, and sometimes necessary, to write to a given writer
338	// layer more than once with different set data each time; that inserts
339	// different keys, which can lead to determinism problems when the old data
340	// isn't wiped first.
341 w.clearTree(addr)
342
311 for code, elem := range value.(*Set).m { 343 for code, elem := range value.(*Set).m {
312 if err := w.set(append(addrCopy, code), elem); err != nil { 344 if err := w.set(append(addrCopy, code), elem); err != nil {
313 return err 345 return err
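The practical effect is that writing a set (or list/map) attribute more than once within the same writer layer no longer leaves stale hash-keyed entries behind; a hedged sketch of the pattern this protects, with a hypothetical attribute name:

package example

import (
	"github.com/hashicorp/terraform/helper/schema"
)

// readExample is a hypothetical Read function. Both d.Set calls land in the
// same MapFieldWriter layer; clearTree now wipes the first write's hash-keyed
// entries so only the second, corrected value remains.
func readExample(d *schema.ResourceData, meta interface{}) error {
	if err := d.Set("security_groups", []interface{}{"sg-aaa", "sg-bbb"}); err != nil {
		return err
	}
	return d.Set("security_groups", []interface{}{"sg-ccc"})
}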
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/getsource_string.go b/vendor/github.com/hashicorp/terraform/helper/schema/getsource_string.go
index 3a97629..38cd8c7 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/getsource_string.go
+++ b/vendor/github.com/hashicorp/terraform/helper/schema/getsource_string.go
@@ -2,7 +2,7 @@
2 2
3package schema 3package schema
4 4
5import "fmt" 5import "strconv"
6 6
7const ( 7const (
8 _getSource_name_0 = "getSourceStategetSourceConfig" 8 _getSource_name_0 = "getSourceStategetSourceConfig"
@@ -13,8 +13,6 @@ const (
13 13
14var ( 14var (
15 _getSource_index_0 = [...]uint8{0, 14, 29} 15 _getSource_index_0 = [...]uint8{0, 14, 29}
16 _getSource_index_1 = [...]uint8{0, 13}
17 _getSource_index_2 = [...]uint8{0, 12}
18 _getSource_index_3 = [...]uint8{0, 18, 32} 16 _getSource_index_3 = [...]uint8{0, 18, 32}
19) 17)
20 18
@@ -31,6 +29,6 @@ func (i getSource) String() string {
31 i -= 15 29 i -= 15
32 return _getSource_name_3[_getSource_index_3[i]:_getSource_index_3[i+1]] 30 return _getSource_name_3[_getSource_index_3[i]:_getSource_index_3[i+1]]
33 default: 31 default:
34 return fmt.Sprintf("getSource(%d)", i) 32 return "getSource(" + strconv.FormatInt(int64(i), 10) + ")"
35 } 33 }
36} 34}
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/provider.go b/vendor/github.com/hashicorp/terraform/helper/schema/provider.go
index fb28b41..6cd325d 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/provider.go
+++ b/vendor/github.com/hashicorp/terraform/helper/schema/provider.go
@@ -9,6 +9,7 @@ import (
9 9
10 "github.com/hashicorp/go-multierror" 10 "github.com/hashicorp/go-multierror"
11 "github.com/hashicorp/terraform/config" 11 "github.com/hashicorp/terraform/config"
12 "github.com/hashicorp/terraform/config/configschema"
12 "github.com/hashicorp/terraform/terraform" 13 "github.com/hashicorp/terraform/terraform"
13) 14)
14 15
@@ -58,7 +59,7 @@ type Provider struct {
58 59
59 meta interface{} 60 meta interface{}
60 61
61 // a mutex is required because TestReset can directly repalce the stopCtx 62 // a mutex is required because TestReset can directly replace the stopCtx
62 stopMu sync.Mutex 63 stopMu sync.Mutex
63 stopCtx context.Context 64 stopCtx context.Context
64 stopCtxCancel context.CancelFunc 65 stopCtxCancel context.CancelFunc
@@ -185,6 +186,29 @@ func (p *Provider) TestReset() error {
185 return nil 186 return nil
186} 187}
187 188
189// GetSchema implementation of terraform.ResourceProvider interface
190func (p *Provider) GetSchema(req *terraform.ProviderSchemaRequest) (*terraform.ProviderSchema, error) {
191 resourceTypes := map[string]*configschema.Block{}
192 dataSources := map[string]*configschema.Block{}
193
194 for _, name := range req.ResourceTypes {
195 if r, exists := p.ResourcesMap[name]; exists {
196 resourceTypes[name] = r.CoreConfigSchema()
197 }
198 }
199 for _, name := range req.DataSources {
200 if r, exists := p.DataSourcesMap[name]; exists {
201 dataSources[name] = r.CoreConfigSchema()
202 }
203 }
204
205 return &terraform.ProviderSchema{
206 Provider: schemaMap(p.Schema).CoreConfigSchema(),
207 ResourceTypes: resourceTypes,
208 DataSources: dataSources,
209 }, nil
210}
211
188// Input implementation of terraform.ResourceProvider interface. 212// Input implementation of terraform.ResourceProvider interface.
189func (p *Provider) Input( 213func (p *Provider) Input(
190 input terraform.UIInput, 214 input terraform.UIInput,
@@ -227,7 +251,7 @@ func (p *Provider) Configure(c *terraform.ResourceConfig) error {
227 251
228 // Get a ResourceData for this configuration. To do this, we actually 252 // Get a ResourceData for this configuration. To do this, we actually
229 // generate an intermediary "diff" although that is never exposed. 253 // generate an intermediary "diff" although that is never exposed.
230 diff, err := sm.Diff(nil, c) 254 diff, err := sm.Diff(nil, c, nil, p.meta)
231 if err != nil { 255 if err != nil {
232 return err 256 return err
233 } 257 }
@@ -269,7 +293,7 @@ func (p *Provider) Diff(
269 return nil, fmt.Errorf("unknown resource type: %s", info.Type) 293 return nil, fmt.Errorf("unknown resource type: %s", info.Type)
270 } 294 }
271 295
272 return r.Diff(s, c) 296 return r.Diff(s, c, p.meta)
273} 297}
274 298
275// Refresh implementation of terraform.ResourceProvider interface. 299// Refresh implementation of terraform.ResourceProvider interface.
@@ -305,6 +329,10 @@ func (p *Provider) Resources() []terraform.ResourceType {
305 result = append(result, terraform.ResourceType{ 329 result = append(result, terraform.ResourceType{
306 Name: k, 330 Name: k,
307 Importable: resource.Importer != nil, 331 Importable: resource.Importer != nil,
332
333 // Indicates that a provider is compiled against a new enough
334 // version of core to support the GetSchema method.
335 SchemaAvailable: true,
308 }) 336 })
309 } 337 }
310 338
@@ -382,7 +410,7 @@ func (p *Provider) ReadDataDiff(
382 return nil, fmt.Errorf("unknown data source: %s", info.Type) 410 return nil, fmt.Errorf("unknown data source: %s", info.Type)
383 } 411 }
384 412
385 return r.Diff(nil, c) 413 return r.Diff(nil, c, p.meta)
386} 414}
387 415
388// RefreshData implementation of terraform.ResourceProvider interface. 416// RefreshData implementation of terraform.ResourceProvider interface.
@@ -410,6 +438,10 @@ func (p *Provider) DataSources() []terraform.DataSource {
410 for _, k := range keys { 438 for _, k := range keys {
411 result = append(result, terraform.DataSource{ 439 result = append(result, terraform.DataSource{
412 Name: k, 440 Name: k,
441
442 // Indicates that a provider is compiled against a new enough
443 // version of core to support the GetSchema method.
444 SchemaAvailable: true,
413 }) 445 })
414 } 446 }
415 447
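
For orientation, the new GetSchema entry point added above can be exercised directly against a helper/schema provider. A minimal sketch follows; the provider layout and the "example_thing" resource name are hypothetical and not part of this change.

package main

import (
	"fmt"

	"github.com/hashicorp/terraform/helper/schema"
	"github.com/hashicorp/terraform/terraform"
)

func main() {
	// Hypothetical provider with a single resource type registered.
	p := &schema.Provider{
		ResourcesMap: map[string]*schema.Resource{
			"example_thing": {
				Schema: map[string]*schema.Schema{
					"name": {Type: schema.TypeString, Required: true},
				},
			},
		},
	}

	// Ask only for the schemas core needs; names not in ResourcesMap are
	// simply skipped by the implementation above.
	ps, err := p.GetSchema(&terraform.ProviderSchemaRequest{
		ResourceTypes: []string{"example_thing"},
	})
	if err != nil {
		panic(err)
	}

	// ps.ResourceTypes["example_thing"] is a *configschema.Block describing
	// the resource as Terraform core sees it.
	fmt.Printf("%#v\n", ps.ResourceTypes["example_thing"])
}
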
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/provisioner.go b/vendor/github.com/hashicorp/terraform/helper/schema/provisioner.go
index 476192e..a8d42db 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/provisioner.go
+++ b/vendor/github.com/hashicorp/terraform/helper/schema/provisioner.go
@@ -146,7 +146,7 @@ func (p *Provisioner) Apply(
146 } 146 }
147 147
148 sm := schemaMap(p.ConnSchema) 148 sm := schemaMap(p.ConnSchema)
149 diff, err := sm.Diff(nil, terraform.NewResourceConfig(c)) 149 diff, err := sm.Diff(nil, terraform.NewResourceConfig(c), nil, nil)
150 if err != nil { 150 if err != nil {
151 return err 151 return err
152 } 152 }
@@ -160,7 +160,7 @@ func (p *Provisioner) Apply(
160 // Build the configuration data. Doing this requires making a "diff" 160 // Build the configuration data. Doing this requires making a "diff"
161 // even though that's never used. We use that just to get the correct types. 161 // even though that's never used. We use that just to get the correct types.
162 configMap := schemaMap(p.Schema) 162 configMap := schemaMap(p.Schema)
163 diff, err := configMap.Diff(nil, c) 163 diff, err := configMap.Diff(nil, c, nil, nil)
164 if err != nil { 164 if err != nil {
165 return err 165 return err
166 } 166 }
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/resource.go b/vendor/github.com/hashicorp/terraform/helper/schema/resource.go
index ddba109..d3be2d6 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/resource.go
+++ b/vendor/github.com/hashicorp/terraform/helper/schema/resource.go
@@ -85,6 +85,37 @@ type Resource struct {
85 Delete DeleteFunc 85 Delete DeleteFunc
86 Exists ExistsFunc 86 Exists ExistsFunc
87 87
88 // CustomizeDiff is a custom function for working with the diff that
89 // Terraform has created for this resource - it can be used to customize the
90 // diff that has been created, diff values not controlled by configuration,
91 // or even veto the diff altogether and abort the plan. It is passed a
92 // *ResourceDiff, a structure similar to ResourceData but lacking most write
93 // functions like Set, while introducing new functions that work with the
94 // diff such as SetNew, SetNewComputed, and ForceNew.
95 //
96 // The phases Terraform runs this in, and the state available via functions
97 // like Get and GetChange, are as follows:
98 //
99 // * New resource: One run with no state
100 // * Existing resource: One run with state
101 // * Existing resource, forced new: One run with state (before ForceNew),
102 // then one run without state (as if new resource)
103 // * Tainted resource: No runs (custom diff logic is skipped)
104 // * Destroy: No runs (standard diff logic is skipped on destroy diffs)
105 //
106 // This function needs to be resilient to support all scenarios.
107 //
108 // If this function needs to access external API resources, remember to flag
109 // the RequiresRefresh attribute mentioned below to ensure that
110 // -refresh=false is blocked when running plan or apply, as this means that
111 // this resource requires refresh-like behaviour to work effectively.
112 //
113 // For the most part, only computed fields can be customized by this
114 // function.
115 //
116 // This function is only allowed on regular resources (not data sources).
117 CustomizeDiff CustomizeDiffFunc
118
88 // Importer is the ResourceImporter implementation for this resource. 119 // Importer is the ResourceImporter implementation for this resource.
89 // If this is nil, then this resource does not support importing. If 120 // If this is nil, then this resource does not support importing. If
90 // this is non-nil, then it supports importing and ResourceImporter 121 // this is non-nil, then it supports importing and ResourceImporter
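
A minimal sketch of the CustomizeDiff hook documented above, under assumed resource and attribute names ("content", "zone", "fingerprint") that are not part of this change.

package example

import "github.com/hashicorp/terraform/helper/schema"

func resourceExampleThing() *schema.Resource {
	return &schema.Resource{
		Schema: map[string]*schema.Schema{
			"content":     {Type: schema.TypeString, Required: true},
			"zone":        {Type: schema.TypeString, Required: true},
			"fingerprint": {Type: schema.TypeString, Computed: true},
		},

		CustomizeDiff: func(d *schema.ResourceDiff, meta interface{}) error {
			// "fingerprint" is Computed, so diff customization is allowed on
			// it. Whenever "content" changes, expect it to be recomputed.
			if d.HasChange("content") {
				if err := d.SetNewComputed("fingerprint"); err != nil {
					return err
				}
			}
			// ForceNew is the one ResourceDiff helper permitted on
			// non-computed keys; flag "zone" changes as requiring replacement.
			if d.HasChange("zone") {
				if err := d.ForceNew("zone"); err != nil {
					return err
				}
			}
			return nil
		},

		// Create, Read, Update and Delete omitted for brevity.
	}
}

Because this resource is writable, InternalValidate allows CustomizeDiff; on a data source the same field would be rejected, per the check added further down in this file.
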
@@ -93,9 +124,7 @@ type Resource struct {
93 Importer *ResourceImporter 124 Importer *ResourceImporter
94 125
95 // If non-empty, this string is emitted as a warning during Validate. 126 // If non-empty, this string is emitted as a warning during Validate.
96 // This is a private interface for now, for use by DataSourceResourceShim, 127 DeprecationMessage string
97 // and not for general use. (But maybe later...)
98 deprecationMessage string
99 128
100 // Timeouts allow users to specify specific time durations in which an 129 // Timeouts allow users to specify specific time durations in which an
101 // operation should time out, to allow them to extend an action to suit their 130 // operation should time out, to allow them to extend an action to suit their
@@ -126,6 +155,9 @@ type ExistsFunc func(*ResourceData, interface{}) (bool, error)
126type StateMigrateFunc func( 155type StateMigrateFunc func(
127 int, *terraform.InstanceState, interface{}) (*terraform.InstanceState, error) 156 int, *terraform.InstanceState, interface{}) (*terraform.InstanceState, error)
128 157
158// See Resource documentation.
159type CustomizeDiffFunc func(*ResourceDiff, interface{}) error
160
129// Apply creates, updates, and/or deletes a resource. 161// Apply creates, updates, and/or deletes a resource.
130func (r *Resource) Apply( 162func (r *Resource) Apply(
131 s *terraform.InstanceState, 163 s *terraform.InstanceState,
@@ -202,11 +234,11 @@ func (r *Resource) Apply(
202 return r.recordCurrentSchemaVersion(data.State()), err 234 return r.recordCurrentSchemaVersion(data.State()), err
203} 235}
204 236
205// Diff returns a diff of this resource and is API compatible with the 237// Diff returns a diff of this resource.
206// ResourceProvider interface.
207func (r *Resource) Diff( 238func (r *Resource) Diff(
208 s *terraform.InstanceState, 239 s *terraform.InstanceState,
209 c *terraform.ResourceConfig) (*terraform.InstanceDiff, error) { 240 c *terraform.ResourceConfig,
241 meta interface{}) (*terraform.InstanceDiff, error) {
210 242
211 t := &ResourceTimeout{} 243 t := &ResourceTimeout{}
212 err := t.ConfigDecode(r, c) 244 err := t.ConfigDecode(r, c)
@@ -215,7 +247,7 @@ func (r *Resource) Diff(
215 return nil, fmt.Errorf("[ERR] Error decoding timeout: %s", err) 247 return nil, fmt.Errorf("[ERR] Error decoding timeout: %s", err)
216 } 248 }
217 249
218 instanceDiff, err := schemaMap(r.Schema).Diff(s, c) 250 instanceDiff, err := schemaMap(r.Schema).Diff(s, c, r.CustomizeDiff, meta)
219 if err != nil { 251 if err != nil {
220 return instanceDiff, err 252 return instanceDiff, err
221 } 253 }
@@ -235,8 +267,8 @@ func (r *Resource) Diff(
235func (r *Resource) Validate(c *terraform.ResourceConfig) ([]string, []error) { 267func (r *Resource) Validate(c *terraform.ResourceConfig) ([]string, []error) {
236 warns, errs := schemaMap(r.Schema).Validate(c) 268 warns, errs := schemaMap(r.Schema).Validate(c)
237 269
238 if r.deprecationMessage != "" { 270 if r.DeprecationMessage != "" {
239 warns = append(warns, r.deprecationMessage) 271 warns = append(warns, r.DeprecationMessage)
240 } 272 }
241 273
242 return warns, errs 274 return warns, errs
@@ -248,7 +280,6 @@ func (r *Resource) ReadDataApply(
248 d *terraform.InstanceDiff, 280 d *terraform.InstanceDiff,
249 meta interface{}, 281 meta interface{},
250) (*terraform.InstanceState, error) { 282) (*terraform.InstanceState, error) {
251
252 // Data sources are always built completely from scratch 283 // Data sources are always built completely from scratch
253 // on each read, so the source state is always nil. 284 // on each read, so the source state is always nil.
254 data, err := schemaMap(r.Schema).Data(nil, d) 285 data, err := schemaMap(r.Schema).Data(nil, d)
@@ -346,6 +377,11 @@ func (r *Resource) InternalValidate(topSchemaMap schemaMap, writable bool) error
346 if r.Create != nil || r.Update != nil || r.Delete != nil { 377 if r.Create != nil || r.Update != nil || r.Delete != nil {
347 return fmt.Errorf("must not implement Create, Update or Delete") 378 return fmt.Errorf("must not implement Create, Update or Delete")
348 } 379 }
380
381 // CustomizeDiff cannot be defined for read-only resources
382 if r.CustomizeDiff != nil {
383 return fmt.Errorf("cannot implement CustomizeDiff")
384 }
349 } 385 }
350 386
351 tsm := topSchemaMap 387 tsm := topSchemaMap
@@ -393,19 +429,43 @@ func (r *Resource) InternalValidate(topSchemaMap schemaMap, writable bool) error
393 return err 429 return err
394 } 430 }
395 } 431 }
432
433 for k, f := range tsm {
434 if isReservedResourceFieldName(k, f) {
435 return fmt.Errorf("%s is a reserved field name", k)
436 }
437 }
396 } 438 }
397 439
398 // Resource-specific checks 440 // Data source
399 for k, _ := range tsm { 441 if r.isTopLevel() && !writable {
400 if isReservedResourceFieldName(k) { 442 tsm = schemaMap(r.Schema)
401 return fmt.Errorf("%s is a reserved field name for a resource", k) 443 for k, _ := range tsm {
444 if isReservedDataSourceFieldName(k) {
445 return fmt.Errorf("%s is a reserved field name", k)
446 }
402 } 447 }
403 } 448 }
404 449
405 return schemaMap(r.Schema).InternalValidate(tsm) 450 return schemaMap(r.Schema).InternalValidate(tsm)
406} 451}
407 452
408func isReservedResourceFieldName(name string) bool { 453func isReservedDataSourceFieldName(name string) bool {
454 for _, reservedName := range config.ReservedDataSourceFields {
455 if name == reservedName {
456 return true
457 }
458 }
459 return false
460}
461
462func isReservedResourceFieldName(name string, s *Schema) bool {
463 // Allow phasing out "id"
464 // See https://github.com/terraform-providers/terraform-provider-aws/pull/1626#issuecomment-328881415
465 if name == "id" && (s.Deprecated != "" || s.Removed != "") {
466 return false
467 }
468
409 for _, reservedName := range config.ReservedResourceFields { 469 for _, reservedName := range config.ReservedResourceFields {
410 if name == reservedName { 470 if name == reservedName {
411 return true 471 return true
@@ -430,6 +490,12 @@ func (r *Resource) Data(s *terraform.InstanceState) *ResourceData {
430 panic(err) 490 panic(err)
431 } 491 }
432 492
493 // load the Resource timeouts
494 result.timeouts = r.Timeouts
495 if result.timeouts == nil {
496 result.timeouts = &ResourceTimeout{}
497 }
498
433 // Set the schema version to latest by default 499 // Set the schema version to latest by default
434 result.meta = map[string]interface{}{ 500 result.meta = map[string]interface{}{
435 "schema_version": strconv.Itoa(r.SchemaVersion), 501 "schema_version": strconv.Itoa(r.SchemaVersion),
@@ -450,7 +516,7 @@ func (r *Resource) TestResourceData() *ResourceData {
450// Returns true if the resource is "top level" i.e. not a sub-resource. 516// Returns true if the resource is "top level" i.e. not a sub-resource.
451func (r *Resource) isTopLevel() bool { 517func (r *Resource) isTopLevel() bool {
452 // TODO: This is a heuristic; replace with a definitive attribute? 518 // TODO: This is a heuristic; replace with a definitive attribute?
453 return r.Create != nil 519 return (r.Create != nil || r.Read != nil)
454} 520}
455 521
456// Determines if a given InstanceState needs to be migrated by checking the 522// Determines if a given InstanceState needs to be migrated by checking the
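
Since the deprecation message field is now exported, a provider can mark an entire resource as deprecated directly, and Validate emits the message as a warning. A short sketch with a hypothetical resource name:

package example

import "github.com/hashicorp/terraform/helper/schema"

func resourceExampleOldThing() *schema.Resource {
	return &schema.Resource{
		// Emitted as a warning whenever this resource is validated.
		DeprecationMessage: "example_old_thing is deprecated; use example_thing instead",

		Schema: map[string]*schema.Schema{
			"name": {Type: schema.TypeString, Required: true},
		},
		// CRUD functions omitted for brevity.
	}
}
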
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/resource_data.go b/vendor/github.com/hashicorp/terraform/helper/schema/resource_data.go
index b2bc8f6..6cc01ee 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/resource_data.go
+++ b/vendor/github.com/hashicorp/terraform/helper/schema/resource_data.go
@@ -35,6 +35,8 @@ type ResourceData struct {
35 partialMap map[string]struct{} 35 partialMap map[string]struct{}
36 once sync.Once 36 once sync.Once
37 isNew bool 37 isNew bool
38
39 panicOnError bool
38} 40}
39 41
40// getResult is the internal structure that is generated when a Get 42// getResult is the internal structure that is generated when a Get
@@ -104,6 +106,22 @@ func (d *ResourceData) GetOk(key string) (interface{}, bool) {
104 return r.Value, exists 106 return r.Value, exists
105} 107}
106 108
109// GetOkExists returns the data for a given key and whether or not the key
110// has been set at all, even to its type's zero value. This is only useful for determining
111// if boolean attributes have been set, if they are Optional but do not
112// have a Default value.
113//
114// This is nearly the same function as GetOk, yet it does not check
115// for the zero value of the attribute's type. This allows for attributes
116// without a default, to fully check for a literal assignment, regardless
117// of the zero-value for that type.
118// This should only be used if absolutely required/needed.
119func (d *ResourceData) GetOkExists(key string) (interface{}, bool) {
120 r := d.getRaw(key, getSourceSet)
121 exists := r.Exists && !r.Computed
122 return r.Value, exists
123}
124
107func (d *ResourceData) getRaw(key string, level getSource) getResult { 125func (d *ResourceData) getRaw(key string, level getSource) getResult {
108 var parts []string 126 var parts []string
109 if key != "" { 127 if key != "" {
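
To make the GetOk/GetOkExists distinction above concrete, a small sketch for an Optional bool with no Default; the "enabled" attribute name is hypothetical.

package example

import "github.com/hashicorp/terraform/helper/schema"

func readEnabled(d *schema.ResourceData) (value bool, wasSet bool) {
	// GetOk treats the type's zero value (false) as "not set". GetOkExists
	// does not, so an explicit `enabled = false` in configuration still
	// reports exists == true here.
	raw, exists := d.GetOkExists("enabled")
	if !exists {
		return false, false
	}
	return raw.(bool), true
}
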
@@ -168,7 +186,11 @@ func (d *ResourceData) Set(key string, value interface{}) error {
168 } 186 }
169 } 187 }
170 188
171 return d.setWriter.WriteField(strings.Split(key, "."), value) 189 err := d.setWriter.WriteField(strings.Split(key, "."), value)
190 if err != nil && d.panicOnError {
191 panic(err)
192 }
193 return err
172} 194}
173 195
174// SetPartial adds the key to the final state output while 196// SetPartial adds the key to the final state output while
@@ -293,6 +315,7 @@ func (d *ResourceData) State() *terraform.InstanceState {
293 315
294 mapW := &MapFieldWriter{Schema: d.schema} 316 mapW := &MapFieldWriter{Schema: d.schema}
295 if err := mapW.WriteField(nil, rawMap); err != nil { 317 if err := mapW.WriteField(nil, rawMap); err != nil {
318 log.Printf("[ERR] Error writing fields: %s", err)
296 return nil 319 return nil
297 } 320 }
298 321
@@ -344,6 +367,13 @@ func (d *ResourceData) State() *terraform.InstanceState {
344func (d *ResourceData) Timeout(key string) time.Duration { 367func (d *ResourceData) Timeout(key string) time.Duration {
345 key = strings.ToLower(key) 368 key = strings.ToLower(key)
346 369
370 // System default of 20 minutes
371 defaultTimeout := 20 * time.Minute
372
373 if d.timeouts == nil {
374 return defaultTimeout
375 }
376
347 var timeout *time.Duration 377 var timeout *time.Duration
348 switch key { 378 switch key {
349 case TimeoutCreate: 379 case TimeoutCreate:
@@ -364,8 +394,7 @@ func (d *ResourceData) Timeout(key string) time.Duration {
364 return *d.timeouts.Default 394 return *d.timeouts.Default
365 } 395 }
366 396
367 // Return system default of 20 minutes 397 return defaultTimeout
368 return 20 * time.Minute
369} 398}
370 399
371func (d *ResourceData) init() { 400func (d *ResourceData) init() {
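
The Timeout change above means callers always get a sane duration back, even when a resource declares no Timeouts block. A sketch of typical use inside a hypothetical create function:

package example

import (
	"context"

	"github.com/hashicorp/terraform/helper/schema"
)

func resourceExampleThingCreate(d *schema.ResourceData, meta interface{}) error {
	// Falls back to the 20 minute system default unless the resource or the
	// user's timeouts block overrides it.
	ctx, cancel := context.WithTimeout(context.Background(), d.Timeout(schema.TimeoutCreate))
	defer cancel()

	// ... call the upstream API with ctx ...
	_ = ctx
	return nil
}
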
@@ -423,7 +452,7 @@ func (d *ResourceData) init() {
423} 452}
424 453
425func (d *ResourceData) diffChange( 454func (d *ResourceData) diffChange(
426 k string) (interface{}, interface{}, bool, bool) { 455 k string) (interface{}, interface{}, bool, bool, bool) {
427 // Get the change between the state and the config. 456 // Get the change between the state and the config.
428 o, n := d.getChange(k, getSourceState, getSourceConfig|getSourceExact) 457 o, n := d.getChange(k, getSourceState, getSourceConfig|getSourceExact)
429 if !o.Exists { 458 if !o.Exists {
@@ -434,7 +463,7 @@ func (d *ResourceData) diffChange(
434 } 463 }
435 464
436 // Return the old, new, and whether there is a change 465 // Return the old, new, and whether there is a change
437 return o.Value, n.Value, !reflect.DeepEqual(o.Value, n.Value), n.Computed 466 return o.Value, n.Value, !reflect.DeepEqual(o.Value, n.Value), n.Computed, false
438} 467}
439 468
440func (d *ResourceData) getChange( 469func (d *ResourceData) getChange(
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/resource_diff.go b/vendor/github.com/hashicorp/terraform/helper/schema/resource_diff.go
new file mode 100644
index 0000000..7db3dec
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/helper/schema/resource_diff.go
@@ -0,0 +1,559 @@
1package schema
2
3import (
4 "errors"
5 "fmt"
6 "reflect"
7 "strings"
8 "sync"
9
10 "github.com/hashicorp/terraform/terraform"
11)
12
13// newValueWriter is a minor re-implementation of MapFieldWriter to include
14// keys that should be marked as computed, to represent the new part of a
15// pseudo-diff.
16type newValueWriter struct {
17 *MapFieldWriter
18
19 // A list of keys that should be marked as computed.
20 computedKeys map[string]bool
21
22 // A lock to prevent races on writes. The underlying writer will have one as
23 // well - this is for computed keys.
24 lock sync.Mutex
25
26 // To be used with init.
27 once sync.Once
28}
29
30// init performs any initialization tasks for the newValueWriter.
31func (w *newValueWriter) init() {
32 if w.computedKeys == nil {
33 w.computedKeys = make(map[string]bool)
34 }
35}
36
37// WriteField overrides MapValueWriter's WriteField, adding the ability to flag
38// the address as computed.
39func (w *newValueWriter) WriteField(address []string, value interface{}, computed bool) error {
40 // Fail the write if we have a non-nil value and computed is true.
41 // NewComputed values should not have a value when written.
42 if value != nil && computed {
43 return errors.New("Non-nil value with computed set")
44 }
45
46 if err := w.MapFieldWriter.WriteField(address, value); err != nil {
47 return err
48 }
49
50 w.once.Do(w.init)
51
52 w.lock.Lock()
53 defer w.lock.Unlock()
54 if computed {
55 w.computedKeys[strings.Join(address, ".")] = true
56 }
57 return nil
58}
59
60// ComputedKeysMap returns the underlying computed keys map.
61func (w *newValueWriter) ComputedKeysMap() map[string]bool {
62 w.once.Do(w.init)
63 return w.computedKeys
64}
65
66// newValueReader is a minor re-implementation of MapFieldReader and is the
67// read counterpart to MapValueWriter, allowing the read of keys flagged as
68// computed to accommodate the diff override logic in ResourceDiff.
69type newValueReader struct {
70 *MapFieldReader
71
72 // The list of computed keys from a newValueWriter.
73 computedKeys map[string]bool
74}
75
76// ReadField reads the values from the underlying writer, returning the
77// computed value if it is found as well.
78func (r *newValueReader) ReadField(address []string) (FieldReadResult, error) {
79 addrKey := strings.Join(address, ".")
80 v, err := r.MapFieldReader.ReadField(address)
81 if err != nil {
82 return FieldReadResult{}, err
83 }
84 for computedKey := range r.computedKeys {
85 if childAddrOf(addrKey, computedKey) {
86 if strings.HasSuffix(addrKey, ".#") {
87 // This is a count value for a list or set that has been marked as
88 // computed, or a sub-list/sub-set of a complex resource that has
89 // been marked as computed. We need to pass through to other readers
90 // so that an accurate previous count can be fetched for the diff.
91 v.Exists = false
92 }
93 v.Computed = true
94 }
95 }
96
97 return v, nil
98}
99
100// ResourceDiff is used to query and make custom changes to an in-flight diff.
101// It can be used to veto particular changes in the diff, customize the diff
102// that has been created, or diff values not controlled by config.
103//
 104// The object functions similarly to ResourceData, but most notably lacks
 105// Set, SetPartial, and Partial, as it should be used to change diff values
 106// only. Most other first-class ResourceData functions exist, namely Get,
 107// GetOk, HasChange, and GetChange.
108//
109// All functions in ResourceDiff, save for ForceNew, can only be used on
110// computed fields.
111type ResourceDiff struct {
112 // The schema for the resource being worked on.
113 schema map[string]*Schema
114
115 // The current config for this resource.
116 config *terraform.ResourceConfig
117
118 // The state for this resource as it exists post-refresh, after the initial
119 // diff.
120 state *terraform.InstanceState
121
122 // The diff created by Terraform. This diff is used, along with state,
123 // config, and custom-set diff data, to provide a multi-level reader
124 // experience similar to ResourceData.
125 diff *terraform.InstanceDiff
126
127 // The internal reader structure that contains the state, config, the default
128 // diff, and the new diff.
129 multiReader *MultiLevelFieldReader
130
131 // A writer that writes overridden new fields.
132 newWriter *newValueWriter
133
134 // Tracks which keys have been updated by ResourceDiff to ensure that the
135 // diff does not get re-run on keys that were not touched, or diffs that were
136 // just removed (re-running on the latter would just roll back the removal).
137 updatedKeys map[string]bool
138
139 // Tracks which keys were flagged as forceNew. These keys are not saved in
140 // newWriter, but we need to track them so that they can be re-diffed later.
141 forcedNewKeys map[string]bool
142}
143
144// newResourceDiff creates a new ResourceDiff instance.
145func newResourceDiff(schema map[string]*Schema, config *terraform.ResourceConfig, state *terraform.InstanceState, diff *terraform.InstanceDiff) *ResourceDiff {
146 d := &ResourceDiff{
147 config: config,
148 state: state,
149 diff: diff,
150 schema: schema,
151 }
152
153 d.newWriter = &newValueWriter{
154 MapFieldWriter: &MapFieldWriter{Schema: d.schema},
155 }
156 readers := make(map[string]FieldReader)
157 var stateAttributes map[string]string
158 if d.state != nil {
159 stateAttributes = d.state.Attributes
160 readers["state"] = &MapFieldReader{
161 Schema: d.schema,
162 Map: BasicMapReader(stateAttributes),
163 }
164 }
165 if d.config != nil {
166 readers["config"] = &ConfigFieldReader{
167 Schema: d.schema,
168 Config: d.config,
169 }
170 }
171 if d.diff != nil {
172 readers["diff"] = &DiffFieldReader{
173 Schema: d.schema,
174 Diff: d.diff,
175 Source: &MultiLevelFieldReader{
176 Levels: []string{"state", "config"},
177 Readers: readers,
178 },
179 }
180 }
181 readers["newDiff"] = &newValueReader{
182 MapFieldReader: &MapFieldReader{
183 Schema: d.schema,
184 Map: BasicMapReader(d.newWriter.Map()),
185 },
186 computedKeys: d.newWriter.ComputedKeysMap(),
187 }
188 d.multiReader = &MultiLevelFieldReader{
189 Levels: []string{
190 "state",
191 "config",
192 "diff",
193 "newDiff",
194 },
195
196 Readers: readers,
197 }
198
199 d.updatedKeys = make(map[string]bool)
200 d.forcedNewKeys = make(map[string]bool)
201
202 return d
203}
204
205// UpdatedKeys returns the keys that were updated by this ResourceDiff run.
206// These are the only keys that a diff should be re-calculated for.
207//
208// This is the combined result of both keys for which diff values were updated
209// for or cleared, and also keys that were flagged to be re-diffed as a result
210// of ForceNew.
211func (d *ResourceDiff) UpdatedKeys() []string {
212 var s []string
213 for k := range d.updatedKeys {
214 s = append(s, k)
215 }
216 for k := range d.forcedNewKeys {
217 for _, l := range s {
218 if k == l {
219 break
220 }
221 }
222 s = append(s, k)
223 }
224 return s
225}
226
227// Clear wipes the diff for a particular key. It is called by ResourceDiff's
228// functionality to remove any possibility of conflicts, but can be called on
229// its own to just remove a specific key from the diff completely.
230//
231// Note that this does not wipe an override. This function is only allowed on
232// computed keys.
233func (d *ResourceDiff) Clear(key string) error {
234 if err := d.checkKey(key, "Clear", true); err != nil {
235 return err
236 }
237
238 return d.clear(key)
239}
240
241func (d *ResourceDiff) clear(key string) error {
242 // Check the schema to make sure that this key exists first.
243 schemaL := addrToSchema(strings.Split(key, "."), d.schema)
244 if len(schemaL) == 0 {
245 return fmt.Errorf("%s is not a valid key", key)
246 }
247
248 for k := range d.diff.Attributes {
249 if strings.HasPrefix(k, key) {
250 delete(d.diff.Attributes, k)
251 }
252 }
253 return nil
254}
255
256// GetChangedKeysPrefix helps to implement Resource.CustomizeDiff
257// where we need to act on all nested fields
258// without calling out each one separately
259func (d *ResourceDiff) GetChangedKeysPrefix(prefix string) []string {
260 keys := make([]string, 0)
261 for k := range d.diff.Attributes {
262 if strings.HasPrefix(k, prefix) {
263 keys = append(keys, k)
264 }
265 }
266 return keys
267}
268
269// diffChange helps to implement resourceDiffer and derives its change values
270// from ResourceDiff's own change data, in addition to existing diff, config, and state.
271func (d *ResourceDiff) diffChange(key string) (interface{}, interface{}, bool, bool, bool) {
272 old, new, customized := d.getChange(key)
273
274 if !old.Exists {
275 old.Value = nil
276 }
277 if !new.Exists || d.removed(key) {
278 new.Value = nil
279 }
280
281 return old.Value, new.Value, !reflect.DeepEqual(old.Value, new.Value), new.Computed, customized
282}
283
284// SetNew is used to set a new diff value for the mentioned key. The value must
285// be correct for the attribute's schema (mostly relevant for maps, lists, and
286// sets). The original value from the state is used as the old value.
287//
288// This function is only allowed on computed attributes.
289func (d *ResourceDiff) SetNew(key string, value interface{}) error {
290 if err := d.checkKey(key, "SetNew", false); err != nil {
291 return err
292 }
293
294 return d.setDiff(key, value, false)
295}
296
297// SetNewComputed functions like SetNew, except that it blanks out a new value
298// and marks it as computed.
299//
300// This function is only allowed on computed attributes.
301func (d *ResourceDiff) SetNewComputed(key string) error {
302 if err := d.checkKey(key, "SetNewComputed", false); err != nil {
303 return err
304 }
305
306 return d.setDiff(key, nil, true)
307}
308
309// setDiff performs common diff setting behaviour.
310func (d *ResourceDiff) setDiff(key string, new interface{}, computed bool) error {
311 if err := d.clear(key); err != nil {
312 return err
313 }
314
315 if err := d.newWriter.WriteField(strings.Split(key, "."), new, computed); err != nil {
316 return fmt.Errorf("Cannot set new diff value for key %s: %s", key, err)
317 }
318
319 d.updatedKeys[key] = true
320
321 return nil
322}
323
324// ForceNew force-flags ForceNew in the schema for a specific key, and
325// re-calculates its diff, effectively causing this attribute to force a new
326// resource.
327//
328// Keep in mind that forcing a new resource will force a second run of the
329// resource's CustomizeDiff function (with a new ResourceDiff) once the current
330// one has completed. This second run is performed without state. This behavior
331// will be the same as if a new resource is being created and is performed to
332// ensure that the diff looks like the diff for a new resource as much as
333// possible. CustomizeDiff should expect such a scenario and act correctly.
334//
335// This function is a no-op/error if there is no diff.
336//
337// Note that the change to schema is permanent for the lifecycle of this
338// specific ResourceDiff instance.
339func (d *ResourceDiff) ForceNew(key string) error {
340 if !d.HasChange(key) {
341 return fmt.Errorf("ForceNew: No changes for %s", key)
342 }
343
344 keyParts := strings.Split(key, ".")
345 var schema *Schema
346 schemaL := addrToSchema(keyParts, d.schema)
347 if len(schemaL) > 0 {
348 schema = schemaL[len(schemaL)-1]
349 } else {
350 return fmt.Errorf("ForceNew: %s is not a valid key", key)
351 }
352
353 schema.ForceNew = true
354
355 // Flag this for a re-diff. Don't save any values to guarantee that existing
356 // diffs aren't messed with, as this gets messy when dealing with complex
357 // structures, zero values, etc.
358 d.forcedNewKeys[keyParts[0]] = true
359
360 return nil
361}
362
363// Get hands off to ResourceData.Get.
364func (d *ResourceDiff) Get(key string) interface{} {
365 r, _ := d.GetOk(key)
366 return r
367}
368
369// GetChange gets the change between the state and diff, checking first to see
 370// if an overridden diff exists.
371//
372// This implementation differs from ResourceData's in the way that we first get
373// results from the exact levels for the new diff, then from state and diff as
374// per normal.
375func (d *ResourceDiff) GetChange(key string) (interface{}, interface{}) {
376 old, new, _ := d.getChange(key)
377 return old.Value, new.Value
378}
379
380// GetOk functions the same way as ResourceData.GetOk, but it also checks the
381// new diff levels to provide data consistent with the current state of the
382// customized diff.
383func (d *ResourceDiff) GetOk(key string) (interface{}, bool) {
384 r := d.get(strings.Split(key, "."), "newDiff")
385 exists := r.Exists && !r.Computed
386 if exists {
387 // If it exists, we also want to verify it is not the zero-value.
388 value := r.Value
389 zero := r.Schema.Type.Zero()
390
391 if eq, ok := value.(Equal); ok {
392 exists = !eq.Equal(zero)
393 } else {
394 exists = !reflect.DeepEqual(value, zero)
395 }
396 }
397
398 return r.Value, exists
399}
400
401// GetOkExists functions the same way as GetOkExists within ResourceData, but
402// it also checks the new diff levels to provide data consistent with the
403// current state of the customized diff.
404//
405// This is nearly the same function as GetOk, yet it does not check
406// for the zero value of the attribute's type. This allows for attributes
407// without a default, to fully check for a literal assignment, regardless
408// of the zero-value for that type.
409func (d *ResourceDiff) GetOkExists(key string) (interface{}, bool) {
410 r := d.get(strings.Split(key, "."), "newDiff")
411 exists := r.Exists && !r.Computed
412 return r.Value, exists
413}
414
415// NewValueKnown returns true if the new value for the given key is available
416// as its final value at diff time. If the return value is false, this means
 417// either the value is based on interpolation that was unavailable at diff
418// time, or that the value was explicitly marked as computed by SetNewComputed.
419func (d *ResourceDiff) NewValueKnown(key string) bool {
420 r := d.get(strings.Split(key, "."), "newDiff")
421 return !r.Computed
422}
423
424// HasChange checks to see if there is a change between state and the diff, or
425// in the overridden diff.
426func (d *ResourceDiff) HasChange(key string) bool {
427 old, new := d.GetChange(key)
428
429 // If the type implements the Equal interface, then call that
430 // instead of just doing a reflect.DeepEqual. An example where this is
431 // needed is *Set
432 if eq, ok := old.(Equal); ok {
433 return !eq.Equal(new)
434 }
435
436 return !reflect.DeepEqual(old, new)
437}
438
439// Id returns the ID of this resource.
440//
441// Note that technically, ID does not change during diffs (it either has
442// already changed in the refresh, or will change on update), hence we do not
443// support updating the ID or fetching it from anything else other than state.
444func (d *ResourceDiff) Id() string {
445 var result string
446
447 if d.state != nil {
448 result = d.state.ID
449 }
450 return result
451}
452
453// getChange gets values from two different levels, designed for use in
454// diffChange, HasChange, and GetChange.
455//
456// This implementation differs from ResourceData's in the way that we first get
457// results from the exact levels for the new diff, then from state and diff as
458// per normal.
459func (d *ResourceDiff) getChange(key string) (getResult, getResult, bool) {
460 old := d.get(strings.Split(key, "."), "state")
461 var new getResult
462 for p := range d.updatedKeys {
463 if childAddrOf(key, p) {
464 new = d.getExact(strings.Split(key, "."), "newDiff")
465 return old, new, true
466 }
467 }
468 new = d.get(strings.Split(key, "."), "newDiff")
469 return old, new, false
470}
471
472// removed checks to see if the key is present in the existing, pre-customized
473// diff and if it was marked as NewRemoved.
474func (d *ResourceDiff) removed(k string) bool {
475 diff, ok := d.diff.Attributes[k]
476 if !ok {
477 return false
478 }
479 return diff.NewRemoved
480}
481
482// get performs the appropriate multi-level reader logic for ResourceDiff,
483// starting at source. Refer to newResourceDiff for the level order.
484func (d *ResourceDiff) get(addr []string, source string) getResult {
485 result, err := d.multiReader.ReadFieldMerge(addr, source)
486 if err != nil {
487 panic(err)
488 }
489
490 return d.finalizeResult(addr, result)
491}
492
493// getExact gets an attribute from the exact level referenced by source.
494func (d *ResourceDiff) getExact(addr []string, source string) getResult {
495 result, err := d.multiReader.ReadFieldExact(addr, source)
496 if err != nil {
497 panic(err)
498 }
499
500 return d.finalizeResult(addr, result)
501}
502
503// finalizeResult does some post-processing of the result produced by get and getExact.
504func (d *ResourceDiff) finalizeResult(addr []string, result FieldReadResult) getResult {
505 // If the result doesn't exist, then we set the value to the zero value
506 var schema *Schema
507 if schemaL := addrToSchema(addr, d.schema); len(schemaL) > 0 {
508 schema = schemaL[len(schemaL)-1]
509 }
510
511 if result.Value == nil && schema != nil {
512 result.Value = result.ValueOrZero(schema)
513 }
514
515 // Transform the FieldReadResult into a getResult. It might be worth
516 // merging these two structures one day.
517 return getResult{
518 Value: result.Value,
519 ValueProcessed: result.ValueProcessed,
520 Computed: result.Computed,
521 Exists: result.Exists,
522 Schema: schema,
523 }
524}
525
526// childAddrOf does a comparison of two addresses to see if one is the child of
527// the other.
528func childAddrOf(child, parent string) bool {
529 cs := strings.Split(child, ".")
530 ps := strings.Split(parent, ".")
531 if len(ps) > len(cs) {
532 return false
533 }
534 return reflect.DeepEqual(ps, cs[:len(ps)])
535}
536
537// checkKey checks the key to make sure it exists and is computed.
538func (d *ResourceDiff) checkKey(key, caller string, nested bool) error {
539 var schema *Schema
540 if nested {
541 keyParts := strings.Split(key, ".")
542 schemaL := addrToSchema(keyParts, d.schema)
543 if len(schemaL) > 0 {
544 schema = schemaL[len(schemaL)-1]
545 }
546 } else {
547 s, ok := d.schema[key]
548 if ok {
549 schema = s
550 }
551 }
552 if schema == nil {
553 return fmt.Errorf("%s: invalid key: %s", caller, key)
554 }
555 if !schema.Computed {
556 return fmt.Errorf("%s only operates on computed keys - %s is not one", caller, key)
557 }
558 return nil
559}
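
Beyond SetNew, SetNewComputed and ForceNew, the two query helpers in this new file are handy inside CustomizeDiff. A sketch under hypothetical attribute names ("rule" as a nested block list, "version" as a plain string):

package example

import (
	"fmt"
	"log"

	"github.com/hashicorp/terraform/helper/schema"
)

func customizeExampleDiff(d *schema.ResourceDiff, meta interface{}) error {
	// Act on every nested change under "rule" without naming each index.
	for _, key := range d.GetChangedKeysPrefix("rule") {
		log.Printf("[DEBUG] nested change detected at %s", key)
	}

	// Only validate "version" when its planned value is already known, i.e.
	// not interpolated from an unknown value and not marked computed via
	// SetNewComputed.
	if d.NewValueKnown("version") {
		if v, ok := d.GetOk("version"); ok && v.(string) == "dev" {
			return fmt.Errorf(`"dev" is not an allowed version`)
		}
	}
	return nil
}
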
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/schema.go b/vendor/github.com/hashicorp/terraform/helper/schema/schema.go
index acb5618..0ea5aad 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/schema.go
+++ b/vendor/github.com/hashicorp/terraform/helper/schema/schema.go
@@ -21,9 +21,13 @@ import (
21 "strings" 21 "strings"
22 22
23 "github.com/hashicorp/terraform/terraform" 23 "github.com/hashicorp/terraform/terraform"
24 "github.com/mitchellh/copystructure"
24 "github.com/mitchellh/mapstructure" 25 "github.com/mitchellh/mapstructure"
25) 26)
26 27
28// Name of ENV variable which (if not empty) prefers panic over error
29const PanicOnErr = "TF_SCHEMA_PANIC_ON_ERROR"
30
27// type used for schema package context keys 31// type used for schema package context keys
28type contextKey string 32type contextKey string
29 33
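
The new PanicOnErr constant names an opt-in switch, mainly useful in provider test runs, that turns errors provider code commonly ignores into panics. A sketch of enabling it from test code; nothing here is required in normal provider code:

package example

import (
	"os"

	"github.com/hashicorp/terraform/helper/schema"
)

func init() {
	// Equivalent to running the tests with TF_SCHEMA_PANIC_ON_ERROR=1;
	// ResourceData.Set will then panic instead of returning an error.
	os.Setenv(schema.PanicOnErr, "1")
}
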
@@ -116,12 +120,16 @@ type Schema struct {
116 ForceNew bool 120 ForceNew bool
117 StateFunc SchemaStateFunc 121 StateFunc SchemaStateFunc
118 122
119 // The following fields are only set for a TypeList or TypeSet Type. 123 // The following fields are only set for a TypeList, TypeSet, or TypeMap.
120 // 124 //
121 // Elem must be either a *Schema or a *Resource only if the Type is 125 // Elem represents the element type. For a TypeMap, it must be a *Schema
122 // TypeList, and represents what the element type is. If it is *Schema, 126 // with a Type of TypeString, otherwise it may be either a *Schema or a
123 // the element type is just a simple value. If it is *Resource, the 127 // *Resource. If it is *Schema, the element type is just a simple value.
124 // element type is a complex structure, potentially with its own lifecycle. 128 // If it is *Resource, the element type is a complex structure,
129 // potentially with its own lifecycle.
130 Elem interface{}
131
132 // The following fields are only set for a TypeList or TypeSet.
125 // 133 //
126 // MaxItems defines a maximum amount of items that can exist within a 134 // MaxItems defines a maximum amount of items that can exist within a
127 // TypeSet or TypeList. Specific use cases would be if a TypeSet is being 135 // TypeSet or TypeList. Specific use cases would be if a TypeSet is being
@@ -138,7 +146,6 @@ type Schema struct {
138 // ["foo"] automatically. This is primarily for legacy reasons and the 146 // ["foo"] automatically. This is primarily for legacy reasons and the
139 // ambiguity is not recommended for new usage. Promotion is only allowed 147 // ambiguity is not recommended for new usage. Promotion is only allowed
140 // for primitive element types. 148 // for primitive element types.
141 Elem interface{}
142 MaxItems int 149 MaxItems int
143 MinItems int 150 MinItems int
144 PromoteSingle bool 151 PromoteSingle bool
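
Restating the Elem convention above as a tiny sketch (the "labels" attribute name is hypothetical): for TypeMap the element type is declared with a *Schema, and per the comment it should be TypeString.

package example

import "github.com/hashicorp/terraform/helper/schema"

var exampleLabelsSchema = map[string]*schema.Schema{
	"labels": {
		Type:     schema.TypeMap,
		Optional: true,
		// Element type for every value in the map.
		Elem: &schema.Schema{Type: schema.TypeString},
	},
}
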
@@ -192,7 +199,7 @@ type Schema struct {
192 Sensitive bool 199 Sensitive bool
193} 200}
194 201
195// SchemaDiffSuppresFunc is a function which can be used to determine 202// SchemaDiffSuppressFunc is a function which can be used to determine
196// whether a detected diff on a schema element is "valid" or not, and 203// whether a detected diff on a schema element is "valid" or not, and
197// suppress it from the plan if necessary. 204// suppress it from the plan if necessary.
198// 205//
@@ -289,8 +296,7 @@ func (s *Schema) ZeroValue() interface{} {
289 } 296 }
290} 297}
291 298
292func (s *Schema) finalizeDiff( 299func (s *Schema) finalizeDiff(d *terraform.ResourceAttrDiff, customized bool) *terraform.ResourceAttrDiff {
293 d *terraform.ResourceAttrDiff) *terraform.ResourceAttrDiff {
294 if d == nil { 300 if d == nil {
295 return d 301 return d
296 } 302 }
@@ -331,13 +337,20 @@ func (s *Schema) finalizeDiff(
331 } 337 }
332 338
333 if s.Computed { 339 if s.Computed {
334 if d.Old != "" && d.New == "" { 340 // FIXME: This is where the customized bool from getChange finally
335 // This is a computed value with an old value set already, 341 // comes into play. It allows the previously incorrect behavior
336 // just let it go. 342 // of an empty string being used as "unset" when the value is
337 return nil 343 // computed. This should be removed once we can properly
344 // represent an unset/nil value from the configuration.
345 if !customized {
346 if d.Old != "" && d.New == "" {
347 // This is a computed value with an old value set already,
348 // just let it go.
349 return nil
350 }
338 } 351 }
339 352
340 if d.New == "" { 353 if d.New == "" && !d.NewComputed {
341 // Computed attribute without a new value set 354 // Computed attribute without a new value set
342 d.NewComputed = true 355 d.NewComputed = true
343 } 356 }
@@ -354,6 +367,13 @@ func (s *Schema) finalizeDiff(
354// schemaMap is a wrapper that adds nice functions on top of schemas. 367// schemaMap is a wrapper that adds nice functions on top of schemas.
355type schemaMap map[string]*Schema 368type schemaMap map[string]*Schema
356 369
370func (m schemaMap) panicOnError() bool {
371 if os.Getenv(PanicOnErr) != "" {
372 return true
373 }
374 return false
375}
376
357// Data returns a ResourceData for the given schema, state, and diff. 377// Data returns a ResourceData for the given schema, state, and diff.
358// 378//
359// The diff is optional. 379// The diff is optional.
@@ -361,17 +381,30 @@ func (m schemaMap) Data(
361 s *terraform.InstanceState, 381 s *terraform.InstanceState,
362 d *terraform.InstanceDiff) (*ResourceData, error) { 382 d *terraform.InstanceDiff) (*ResourceData, error) {
363 return &ResourceData{ 383 return &ResourceData{
364 schema: m, 384 schema: m,
365 state: s, 385 state: s,
366 diff: d, 386 diff: d,
387 panicOnError: m.panicOnError(),
367 }, nil 388 }, nil
368} 389}
369 390
391// DeepCopy returns a copy of this schemaMap. The copy can be safely modified
392// without affecting the original.
393func (m *schemaMap) DeepCopy() schemaMap {
394 copy, err := copystructure.Config{Lock: true}.Copy(m)
395 if err != nil {
396 panic(err)
397 }
398 return *copy.(*schemaMap)
399}
400
370// Diff returns the diff for a resource given the schema map, 401// Diff returns the diff for a resource given the schema map,
371// state, and configuration. 402// state, and configuration.
372func (m schemaMap) Diff( 403func (m schemaMap) Diff(
373 s *terraform.InstanceState, 404 s *terraform.InstanceState,
374 c *terraform.ResourceConfig) (*terraform.InstanceDiff, error) { 405 c *terraform.ResourceConfig,
406 customizeDiff CustomizeDiffFunc,
407 meta interface{}) (*terraform.InstanceDiff, error) {
375 result := new(terraform.InstanceDiff) 408 result := new(terraform.InstanceDiff)
376 result.Attributes = make(map[string]*terraform.ResourceAttrDiff) 409 result.Attributes = make(map[string]*terraform.ResourceAttrDiff)
377 410
@@ -381,9 +414,10 @@ func (m schemaMap) Diff(
381 } 414 }
382 415
383 d := &ResourceData{ 416 d := &ResourceData{
384 schema: m, 417 schema: m,
385 state: s, 418 state: s,
386 config: c, 419 config: c,
420 panicOnError: m.panicOnError(),
387 } 421 }
388 422
389 for k, schema := range m { 423 for k, schema := range m {
@@ -393,6 +427,29 @@ func (m schemaMap) Diff(
393 } 427 }
394 } 428 }
395 429
430 // Remove any nil diffs just to keep things clean
431 for k, v := range result.Attributes {
432 if v == nil {
433 delete(result.Attributes, k)
434 }
435 }
436
437 // If this is a non-destroy diff, call any custom diff logic that has been
438 // defined.
439 if !result.DestroyTainted && customizeDiff != nil {
440 mc := m.DeepCopy()
441 rd := newResourceDiff(mc, c, s, result)
442 if err := customizeDiff(rd, meta); err != nil {
443 return nil, err
444 }
445 for _, k := range rd.UpdatedKeys() {
446 err := m.diff(k, mc[k], result, rd, false)
447 if err != nil {
448 return nil, err
449 }
450 }
451 }
452
396 // If the diff requires a new resource, then we recompute the diff 453 // If the diff requires a new resource, then we recompute the diff
397 // so we have the complete new resource diff, and preserve the 454 // so we have the complete new resource diff, and preserve the
398 // RequiresNew fields where necessary so the user knows exactly what 455 // RequiresNew fields where necessary so the user knows exactly what
@@ -418,6 +475,21 @@ func (m schemaMap) Diff(
418 } 475 }
419 } 476 }
420 477
478 // Re-run customization
479 if !result2.DestroyTainted && customizeDiff != nil {
480 mc := m.DeepCopy()
481 rd := newResourceDiff(mc, c, d.state, result2)
482 if err := customizeDiff(rd, meta); err != nil {
483 return nil, err
484 }
485 for _, k := range rd.UpdatedKeys() {
486 err := m.diff(k, mc[k], result2, rd, false)
487 if err != nil {
488 return nil, err
489 }
490 }
491 }
492
421 // Force all the fields to not force a new since we know what we 493 // Force all the fields to not force a new since we know what we
422 // want to force new. 494 // want to force new.
423 for k, attr := range result2.Attributes { 495 for k, attr := range result2.Attributes {
@@ -456,13 +528,6 @@ func (m schemaMap) Diff(
456 result = result2 528 result = result2
457 } 529 }
458 530
459 // Remove any nil diffs just to keep things clean
460 for k, v := range result.Attributes {
461 if v == nil {
462 delete(result.Attributes, k)
463 }
464 }
465
466 // Go through and detect all of the ComputedWhens now that we've 531 // Go through and detect all of the ComputedWhens now that we've
467 // finished the diff. 532 // finished the diff.
468 // TODO 533 // TODO
@@ -681,11 +746,23 @@ func isValidFieldName(name string) bool {
681 return re.MatchString(name) 746 return re.MatchString(name)
682} 747}
683 748
749// resourceDiffer is an interface that is used by the private diff functions.
750// This helps facilitate diff logic for both ResourceData and ResoureDiff with
751// minimal divergence in code.
752type resourceDiffer interface {
753 diffChange(string) (interface{}, interface{}, bool, bool, bool)
754 Get(string) interface{}
755 GetChange(string) (interface{}, interface{})
756 GetOk(string) (interface{}, bool)
757 HasChange(string) bool
758 Id() string
759}
760
684func (m schemaMap) diff( 761func (m schemaMap) diff(
685 k string, 762 k string,
686 schema *Schema, 763 schema *Schema,
687 diff *terraform.InstanceDiff, 764 diff *terraform.InstanceDiff,
688 d *ResourceData, 765 d resourceDiffer,
689 all bool) error { 766 all bool) error {
690 767
691 unsupressedDiff := new(terraform.InstanceDiff) 768 unsupressedDiff := new(terraform.InstanceDiff)
@@ -706,12 +783,14 @@ func (m schemaMap) diff(
706 } 783 }
707 784
708 for attrK, attrV := range unsupressedDiff.Attributes { 785 for attrK, attrV := range unsupressedDiff.Attributes {
709 if schema.DiffSuppressFunc != nil && 786 switch rd := d.(type) {
710 attrV != nil && 787 case *ResourceData:
711 schema.DiffSuppressFunc(attrK, attrV.Old, attrV.New, d) { 788 if schema.DiffSuppressFunc != nil &&
712 continue 789 attrV != nil &&
790 schema.DiffSuppressFunc(attrK, attrV.Old, attrV.New, rd) {
791 continue
792 }
713 } 793 }
714
715 diff.Attributes[attrK] = attrV 794 diff.Attributes[attrK] = attrV
716 } 795 }
717 796
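
The reworked loop above only applies DiffSuppressFunc when the differ is a *ResourceData, i.e. a normal plan rather than a CustomizeDiff re-diff. For reference, a typical suppress function looks like this sketch (the attribute semantics are hypothetical):

package example

import (
	"strings"

	"github.com/hashicorp/terraform/helper/schema"
)

// suppressCaseDiff treats values that differ only by letter case as equal.
func suppressCaseDiff(k, old, new string, d *schema.ResourceData) bool {
	return strings.EqualFold(old, new)
}
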
@@ -722,9 +801,9 @@ func (m schemaMap) diffList(
722 k string, 801 k string,
723 schema *Schema, 802 schema *Schema,
724 diff *terraform.InstanceDiff, 803 diff *terraform.InstanceDiff,
725 d *ResourceData, 804 d resourceDiffer,
726 all bool) error { 805 all bool) error {
727 o, n, _, computedList := d.diffChange(k) 806 o, n, _, computedList, customized := d.diffChange(k)
728 if computedList { 807 if computedList {
729 n = nil 808 n = nil
730 } 809 }
@@ -791,10 +870,13 @@ func (m schemaMap) diffList(
791 oldStr = "" 870 oldStr = ""
792 } 871 }
793 872
794 diff.Attributes[k+".#"] = countSchema.finalizeDiff(&terraform.ResourceAttrDiff{ 873 diff.Attributes[k+".#"] = countSchema.finalizeDiff(
795 Old: oldStr, 874 &terraform.ResourceAttrDiff{
796 New: newStr, 875 Old: oldStr,
797 }) 876 New: newStr,
877 },
878 customized,
879 )
798 } 880 }
799 881
800 // Figure out the maximum 882 // Figure out the maximum
@@ -841,13 +923,13 @@ func (m schemaMap) diffMap(
841 k string, 923 k string,
842 schema *Schema, 924 schema *Schema,
843 diff *terraform.InstanceDiff, 925 diff *terraform.InstanceDiff,
844 d *ResourceData, 926 d resourceDiffer,
845 all bool) error { 927 all bool) error {
846 prefix := k + "." 928 prefix := k + "."
847 929
848 // First get all the values from the state 930 // First get all the values from the state
849 var stateMap, configMap map[string]string 931 var stateMap, configMap map[string]string
850 o, n, _, nComputed := d.diffChange(k) 932 o, n, _, nComputed, customized := d.diffChange(k)
851 if err := mapstructure.WeakDecode(o, &stateMap); err != nil { 933 if err := mapstructure.WeakDecode(o, &stateMap); err != nil {
852 return fmt.Errorf("%s: %s", k, err) 934 return fmt.Errorf("%s: %s", k, err)
853 } 935 }
@@ -899,6 +981,7 @@ func (m schemaMap) diffMap(
899 Old: oldStr, 981 Old: oldStr,
900 New: newStr, 982 New: newStr,
901 }, 983 },
984 customized,
902 ) 985 )
903 } 986 }
904 987
@@ -916,16 +999,22 @@ func (m schemaMap) diffMap(
916 continue 999 continue
917 } 1000 }
918 1001
919 diff.Attributes[prefix+k] = schema.finalizeDiff(&terraform.ResourceAttrDiff{ 1002 diff.Attributes[prefix+k] = schema.finalizeDiff(
920 Old: old, 1003 &terraform.ResourceAttrDiff{
921 New: v, 1004 Old: old,
922 }) 1005 New: v,
1006 },
1007 customized,
1008 )
923 } 1009 }
924 for k, v := range stateMap { 1010 for k, v := range stateMap {
925 diff.Attributes[prefix+k] = schema.finalizeDiff(&terraform.ResourceAttrDiff{ 1011 diff.Attributes[prefix+k] = schema.finalizeDiff(
926 Old: v, 1012 &terraform.ResourceAttrDiff{
927 NewRemoved: true, 1013 Old: v,
928 }) 1014 NewRemoved: true,
1015 },
1016 customized,
1017 )
929 } 1018 }
930 1019
931 return nil 1020 return nil
@@ -935,10 +1024,10 @@ func (m schemaMap) diffSet(
935 k string, 1024 k string,
936 schema *Schema, 1025 schema *Schema,
937 diff *terraform.InstanceDiff, 1026 diff *terraform.InstanceDiff,
938 d *ResourceData, 1027 d resourceDiffer,
939 all bool) error { 1028 all bool) error {
940 1029
941 o, n, _, computedSet := d.diffChange(k) 1030 o, n, _, computedSet, customized := d.diffChange(k)
942 if computedSet { 1031 if computedSet {
943 n = nil 1032 n = nil
944 } 1033 }
@@ -997,20 +1086,26 @@ func (m schemaMap) diffSet(
997 countStr = "" 1086 countStr = ""
998 } 1087 }
999 1088
1000 diff.Attributes[k+".#"] = countSchema.finalizeDiff(&terraform.ResourceAttrDiff{ 1089 diff.Attributes[k+".#"] = countSchema.finalizeDiff(
1001 Old: countStr, 1090 &terraform.ResourceAttrDiff{
1002 NewComputed: true, 1091 Old: countStr,
1003 }) 1092 NewComputed: true,
1093 },
1094 customized,
1095 )
1004 return nil 1096 return nil
1005 } 1097 }
1006 1098
1007 // If the counts are not the same, then record that diff 1099 // If the counts are not the same, then record that diff
1008 changed := oldLen != newLen 1100 changed := oldLen != newLen
1009 if changed || all { 1101 if changed || all {
1010 diff.Attributes[k+".#"] = countSchema.finalizeDiff(&terraform.ResourceAttrDiff{ 1102 diff.Attributes[k+".#"] = countSchema.finalizeDiff(
1011 Old: oldStr, 1103 &terraform.ResourceAttrDiff{
1012 New: newStr, 1104 Old: oldStr,
1013 }) 1105 New: newStr,
1106 },
1107 customized,
1108 )
1014 } 1109 }
1015 1110
1016 // Build the list of codes that will make up our set. This is the 1111 // Build the list of codes that will make up our set. This is the
@@ -1056,11 +1151,11 @@ func (m schemaMap) diffString(
1056 k string, 1151 k string,
1057 schema *Schema, 1152 schema *Schema,
1058 diff *terraform.InstanceDiff, 1153 diff *terraform.InstanceDiff,
1059 d *ResourceData, 1154 d resourceDiffer,
1060 all bool) error { 1155 all bool) error {
1061 var originalN interface{} 1156 var originalN interface{}
1062 var os, ns string 1157 var os, ns string
1063 o, n, _, computed := d.diffChange(k) 1158 o, n, _, computed, customized := d.diffChange(k)
1064 if schema.StateFunc != nil && n != nil { 1159 if schema.StateFunc != nil && n != nil {
1065 originalN = n 1160 originalN = n
1066 n = schema.StateFunc(n) 1161 n = schema.StateFunc(n)
@@ -1090,20 +1185,23 @@ func (m schemaMap) diffString(
1090 } 1185 }
1091 1186
1092 removed := false 1187 removed := false
1093 if o != nil && n == nil { 1188 if o != nil && n == nil && !computed {
1094 removed = true 1189 removed = true
1095 } 1190 }
1096 if removed && schema.Computed { 1191 if removed && schema.Computed {
1097 return nil 1192 return nil
1098 } 1193 }
1099 1194
1100 diff.Attributes[k] = schema.finalizeDiff(&terraform.ResourceAttrDiff{ 1195 diff.Attributes[k] = schema.finalizeDiff(
1101 Old: os, 1196 &terraform.ResourceAttrDiff{
1102 New: ns, 1197 Old: os,
1103 NewExtra: originalN, 1198 New: ns,
1104 NewRemoved: removed, 1199 NewExtra: originalN,
1105 NewComputed: computed, 1200 NewRemoved: removed,
1106 }) 1201 NewComputed: computed,
1202 },
1203 customized,
1204 )
1107 1205
1108 return nil 1206 return nil
1109} 1207}
@@ -1172,9 +1270,9 @@ func (m schemaMap) validateConflictingAttributes(
1172 } 1270 }
1173 1271
1174 for _, conflicting_key := range schema.ConflictsWith { 1272 for _, conflicting_key := range schema.ConflictsWith {
1175 if value, ok := c.Get(conflicting_key); ok { 1273 if _, ok := c.Get(conflicting_key); ok {
1176 return fmt.Errorf( 1274 return fmt.Errorf(
1177 "%q: conflicts with %s (%#v)", k, conflicting_key, value) 1275 "%q: conflicts with %s", k, conflicting_key)
1178 } 1276 }
1179 } 1277 }
1180 1278
@@ -1363,13 +1461,10 @@ func getValueType(k string, schema *Schema) (ValueType, error) {
1363 return vt, nil 1461 return vt, nil
1364 } 1462 }
1365 1463
1464 // If a Schema is provided to a Map, we use the Type of that schema
1465 // as the type for each element in the Map.
1366 if s, ok := schema.Elem.(*Schema); ok { 1466 if s, ok := schema.Elem.(*Schema); ok {
1367 if s.Elem == nil { 1467 return s.Type, nil
1368 return TypeString, nil
1369 }
1370 if vt, ok := s.Elem.(ValueType); ok {
1371 return vt, nil
1372 }
1373 } 1468 }
1374 1469
1375 if _, ok := schema.Elem.(*Resource); ok { 1470 if _, ok := schema.Elem.(*Resource); ok {
@@ -1430,7 +1525,6 @@ func (m schemaMap) validatePrimitive(
1430 raw interface{}, 1525 raw interface{},
1431 schema *Schema, 1526 schema *Schema,
1432 c *terraform.ResourceConfig) ([]string, []error) { 1527 c *terraform.ResourceConfig) ([]string, []error) {
1433
1434 // Catch if the user gave a complex type where a primitive was 1528 // Catch if the user gave a complex type where a primitive was
1435 // expected, so we can return a friendly error message that 1529 // expected, so we can return a friendly error message that
1436 // doesn't contain Go type system terminology. 1530 // doesn't contain Go type system terminology.
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/set.go b/vendor/github.com/hashicorp/terraform/helper/schema/set.go
index de05f40..cba2890 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/set.go
+++ b/vendor/github.com/hashicorp/terraform/helper/schema/set.go
@@ -17,6 +17,12 @@ func HashString(v interface{}) int {
17 return hashcode.String(v.(string)) 17 return hashcode.String(v.(string))
18} 18}
19 19
20// HashInt hashes integers. If you want a Set of integers, this is the
21// SchemaSetFunc you want.
22func HashInt(v interface{}) int {
23 return hashcode.String(strconv.Itoa(v.(int)))
24}
25
20// HashResource hashes complex structures that are described using 26// HashResource hashes complex structures that are described using
21// a *Resource. This is the default set implementation used when a set's 27// a *Resource. This is the default set implementation used when a set's
22// element type is a full resource. 28// element type is a full resource.
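
A sketch of the new HashInt in use as a set hash function, under a hypothetical "allowed_ports" attribute:

package example

import "github.com/hashicorp/terraform/helper/schema"

var examplePortsSchema = map[string]*schema.Schema{
	"allowed_ports": {
		Type:     schema.TypeSet,
		Optional: true,
		Elem:     &schema.Schema{Type: schema.TypeInt},
		// Hash each element by its integer value.
		Set: schema.HashInt,
	},
}
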
@@ -153,6 +159,31 @@ func (s *Set) Equal(raw interface{}) bool {
153 return reflect.DeepEqual(s.m, other.m) 159 return reflect.DeepEqual(s.m, other.m)
154} 160}
155 161
 162// HashEqual simply compares the keys of the top-level map to the keys in the
163// other set's top-level map to see if they are equal. This obviously assumes
164// you have a properly working hash function - use HashResource if in doubt.
165func (s *Set) HashEqual(raw interface{}) bool {
166 other, ok := raw.(*Set)
167 if !ok {
168 return false
169 }
170
171 ks1 := make([]string, 0)
172 ks2 := make([]string, 0)
173
174 for k := range s.m {
175 ks1 = append(ks1, k)
176 }
177 for k := range other.m {
178 ks2 = append(ks2, k)
179 }
180
181 sort.Strings(ks1)
182 sort.Strings(ks2)
183
184 return reflect.DeepEqual(ks1, ks2)
185}
186
156func (s *Set) GoString() string { 187func (s *Set) GoString() string {
157 return fmt.Sprintf("*Set(%#v)", s.m) 188 return fmt.Sprintf("*Set(%#v)", s.m)
158} 189}
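
HashEqual, added above, only compares the sets' hash keys, which makes it a cheap membership check. A small sketch:

package main

import (
	"fmt"

	"github.com/hashicorp/terraform/helper/schema"
)

func main() {
	a := schema.NewSet(schema.HashString, []interface{}{"x", "y"})
	b := schema.NewSet(schema.HashString, []interface{}{"y", "x"})

	// Same elements, therefore the same hash keys: prints true.
	fmt.Println(a.HashEqual(b))
}
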
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/testing.go b/vendor/github.com/hashicorp/terraform/helper/schema/testing.go
index 9765bdb..da754ac 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/testing.go
+++ b/vendor/github.com/hashicorp/terraform/helper/schema/testing.go
@@ -10,13 +10,15 @@ import (
10// TestResourceDataRaw creates a ResourceData from a raw configuration map. 10// TestResourceDataRaw creates a ResourceData from a raw configuration map.
11func TestResourceDataRaw( 11func TestResourceDataRaw(
12 t *testing.T, schema map[string]*Schema, raw map[string]interface{}) *ResourceData { 12 t *testing.T, schema map[string]*Schema, raw map[string]interface{}) *ResourceData {
13 t.Helper()
14
13 c, err := config.NewRawConfig(raw) 15 c, err := config.NewRawConfig(raw)
14 if err != nil { 16 if err != nil {
15 t.Fatalf("err: %s", err) 17 t.Fatalf("err: %s", err)
16 } 18 }
17 19
18 sm := schemaMap(schema) 20 sm := schemaMap(schema)
19 diff, err := sm.Diff(nil, terraform.NewResourceConfig(c)) 21 diff, err := sm.Diff(nil, terraform.NewResourceConfig(c), nil, nil)
20 if err != nil { 22 if err != nil {
21 t.Fatalf("err: %s", err) 23 t.Fatalf("err: %s", err)
22 } 24 }
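
As a hedged usage sketch, the updated TestResourceDataRaw helper (now calling t.Helper and the four-argument Diff) is typically driven like this in a provider unit test; the schema and values are illustrative:

package example

import (
	"testing"

	"github.com/hashicorp/terraform/helper/schema"
)

func TestNameAttribute(t *testing.T) {
	d := schema.TestResourceDataRaw(t, map[string]*schema.Schema{
		"name": {Type: schema.TypeString, Optional: true},
	}, map[string]interface{}{
		"name": "example",
	})

	if got := d.Get("name").(string); got != "example" {
		t.Fatalf("unexpected name: %q", got)
	}
}
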
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/valuetype_string.go b/vendor/github.com/hashicorp/terraform/helper/schema/valuetype_string.go
index 1610cec..3bc3ac4 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/valuetype_string.go
+++ b/vendor/github.com/hashicorp/terraform/helper/schema/valuetype_string.go
@@ -2,7 +2,7 @@
2 2
3package schema 3package schema
4 4
5import "fmt" 5import "strconv"
6 6
7const _ValueType_name = "TypeInvalidTypeBoolTypeIntTypeFloatTypeStringTypeListTypeMapTypeSettypeObject" 7const _ValueType_name = "TypeInvalidTypeBoolTypeIntTypeFloatTypeStringTypeListTypeMapTypeSettypeObject"
8 8
@@ -10,7 +10,7 @@ var _ValueType_index = [...]uint8{0, 11, 19, 26, 35, 45, 53, 60, 67, 77}
10 10
11func (i ValueType) String() string { 11func (i ValueType) String() string {
12 if i < 0 || i >= ValueType(len(_ValueType_index)-1) { 12 if i < 0 || i >= ValueType(len(_ValueType_index)-1) {
13 return fmt.Sprintf("ValueType(%d)", i) 13 return "ValueType(" + strconv.FormatInt(int64(i), 10) + ")"
14 } 14 }
15 return _ValueType_name[_ValueType_index[i]:_ValueType_index[i+1]] 15 return _ValueType_name[_ValueType_index[i]:_ValueType_index[i+1]]
16} 16}
diff --git a/vendor/github.com/hashicorp/terraform/helper/shadow/closer.go b/vendor/github.com/hashicorp/terraform/helper/shadow/closer.go
deleted file mode 100644
index edc1e2a..0000000
--- a/vendor/github.com/hashicorp/terraform/helper/shadow/closer.go
+++ /dev/null
@@ -1,83 +0,0 @@
1package shadow
2
3import (
4 "fmt"
5 "io"
6 "reflect"
7
8 "github.com/hashicorp/go-multierror"
9 "github.com/mitchellh/reflectwalk"
10)
11
12// Close will close all shadow values within the given structure.
13//
14// This uses reflection to walk the structure, find all shadow elements,
15// and close them. Currently this will only find struct fields that are
16// shadow values, and not slice elements, etc.
17func Close(v interface{}) error {
18 // We require a pointer so we can address the internal fields
19 val := reflect.ValueOf(v)
20 if val.Kind() != reflect.Ptr {
21 return fmt.Errorf("value must be a pointer")
22 }
23
24 // Walk and close
25 var w closeWalker
26 if err := reflectwalk.Walk(v, &w); err != nil {
27 return err
28 }
29
30 return w.Err
31}
32
33type closeWalker struct {
34 Err error
35}
36
37func (w *closeWalker) Struct(reflect.Value) error {
38 // Do nothing. We implement this for reflectwalk.StructWalker
39 return nil
40}
41
42var closerType = reflect.TypeOf((*io.Closer)(nil)).Elem()
43
44func (w *closeWalker) StructField(f reflect.StructField, v reflect.Value) error {
45 	// Not sure why this would be, but let's avoid some panics
46 if !v.IsValid() {
47 return nil
48 }
49
50 	// PkgPath is empty for exported fields, so skip unexported fields
51 if f.PkgPath != "" {
52 return nil
53 }
54
55 // Verify the io.Closer is in this package
56 typ := v.Type()
57 if typ.PkgPath() != "github.com/hashicorp/terraform/helper/shadow" {
58 return nil
59 }
60
61 var closer io.Closer
62 if v.Type().Implements(closerType) {
63 closer = v.Interface().(io.Closer)
64 } else if v.CanAddr() {
65 // The Close method may require a pointer receiver, but we only have a value.
66 v := v.Addr()
67 if v.Type().Implements(closerType) {
68 closer = v.Interface().(io.Closer)
69 }
70 }
71
72 if closer == nil {
73 return reflectwalk.SkipEntry
74 }
75
76 // Close it
77 if err := closer.Close(); err != nil {
78 w.Err = multierror.Append(w.Err, err)
79 }
80
81 // Don't go into the struct field
82 return reflectwalk.SkipEntry
83}
diff --git a/vendor/github.com/hashicorp/terraform/helper/shadow/compared_value.go b/vendor/github.com/hashicorp/terraform/helper/shadow/compared_value.go
deleted file mode 100644
index 4223e92..0000000
--- a/vendor/github.com/hashicorp/terraform/helper/shadow/compared_value.go
+++ /dev/null
@@ -1,128 +0,0 @@
1package shadow
2
3import (
4 "sync"
5)
6
7// ComparedValue is a struct that finds a value by comparing some key
8// to the list of stored values. This is useful when there is no easy
9// uniquely identifying key that works in a map (for that, use KeyedValue).
10//
11// ComparedValue is very expensive, relative to other Value types. Try to
12// limit the number of values stored in a ComparedValue by potentially
13// nesting it within a KeyedValue (a keyed value points to a compared value,
14// for example).
15type ComparedValue struct {
16 // Func is a function that is given the lookup key and a single
17 // stored value. If it matches, it returns true.
18 Func func(k, v interface{}) bool
19
20 lock sync.Mutex
21 once sync.Once
22 closed bool
23 values []interface{}
24 waiters map[interface{}]*Value
25}
26
27// Close closes the value. This can never fail. For a definition of
28// "close" see the ErrClosed docs.
29func (w *ComparedValue) Close() error {
30 w.lock.Lock()
31 defer w.lock.Unlock()
32
33 // Set closed to true always
34 w.closed = true
35
36 // For all waiters, complete with ErrClosed
37 for k, val := range w.waiters {
38 val.SetValue(ErrClosed)
39 delete(w.waiters, k)
40 }
41
42 return nil
43}
44
45// Value returns the value that was set for the given key, or blocks
46// until one is available.
47func (w *ComparedValue) Value(k interface{}) interface{} {
48 v, val := w.valueWaiter(k)
49 if val == nil {
50 return v
51 }
52
53 return val.Value()
54}
55
56// ValueOk gets the value for the given key, returning immediately if the
57// value doesn't exist. The second return argument is true if the value exists.
58func (w *ComparedValue) ValueOk(k interface{}) (interface{}, bool) {
59 v, val := w.valueWaiter(k)
60 return v, val == nil
61}
62
63func (w *ComparedValue) SetValue(v interface{}) {
64 w.lock.Lock()
65 defer w.lock.Unlock()
66 w.once.Do(w.init)
67
68 // Check if we already have this exact value (by simply comparing
69 // with == directly). If we do, then we don't insert it again.
70 found := false
71 for _, v2 := range w.values {
72 if v == v2 {
73 found = true
74 break
75 }
76 }
77
78 if !found {
79 // Set the value, always
80 w.values = append(w.values, v)
81 }
82
83 // Go through the waiters
84 for k, val := range w.waiters {
85 if w.Func(k, v) {
86 val.SetValue(v)
87 delete(w.waiters, k)
88 }
89 }
90}
91
92func (w *ComparedValue) valueWaiter(k interface{}) (interface{}, *Value) {
93 w.lock.Lock()
94 w.once.Do(w.init)
95
96 // Look for a pre-existing value
97 for _, v := range w.values {
98 if w.Func(k, v) {
99 w.lock.Unlock()
100 return v, nil
101 }
102 }
103
104 // If we're closed, return that
105 if w.closed {
106 w.lock.Unlock()
107 return ErrClosed, nil
108 }
109
110 // Pre-existing value doesn't exist, create a waiter
111 val := w.waiters[k]
112 if val == nil {
113 val = new(Value)
114 w.waiters[k] = val
115 }
116 w.lock.Unlock()
117
118 // Return the waiter
119 return nil, val
120}
121
122// Must be called with w.lock held.
123func (w *ComparedValue) init() {
124 w.waiters = make(map[interface{}]*Value)
125 if w.Func == nil {
126 w.Func = func(k, v interface{}) bool { return k == v }
127 }
128}
diff --git a/vendor/github.com/hashicorp/terraform/helper/shadow/keyed_value.go b/vendor/github.com/hashicorp/terraform/helper/shadow/keyed_value.go
deleted file mode 100644
index 432b036..0000000
--- a/vendor/github.com/hashicorp/terraform/helper/shadow/keyed_value.go
+++ /dev/null
@@ -1,151 +0,0 @@
1package shadow
2
3import (
4 "sync"
5)
6
7// KeyedValue is a struct that coordinates a value by key. If a value is
8// not available for a given key, it'll block until it is available.
9type KeyedValue struct {
10 lock sync.Mutex
11 once sync.Once
12 values map[string]interface{}
13 waiters map[string]*Value
14 closed bool
15}
16
17// Close closes the value. This can never fail. For a definition of
18// "close" see the ErrClosed docs.
19func (w *KeyedValue) Close() error {
20 w.lock.Lock()
21 defer w.lock.Unlock()
22
23 // Set closed to true always
24 w.closed = true
25
26 // For all waiters, complete with ErrClosed
27 for k, val := range w.waiters {
28 val.SetValue(ErrClosed)
29 delete(w.waiters, k)
30 }
31
32 return nil
33}
34
35// Value returns the value that was set for the given key, or blocks
36// until one is available.
37func (w *KeyedValue) Value(k string) interface{} {
38 w.lock.Lock()
39 v, val := w.valueWaiter(k)
40 w.lock.Unlock()
41
42 // If we have no waiter, then return the value
43 if val == nil {
44 return v
45 }
46
47 // We have a waiter, so wait
48 return val.Value()
49}
50
51// WaitForChange waits for the value with the given key to be set again.
52// If the key isn't set, it'll wait for an initial value. Note that while
53// it is called "WaitForChange", the value isn't guaranteed to _change_;
54// this will return when a SetValue is called for the given k.
55func (w *KeyedValue) WaitForChange(k string) interface{} {
56 w.lock.Lock()
57 w.once.Do(w.init)
58
59 // If we're closed, we're closed
60 if w.closed {
61 w.lock.Unlock()
62 return ErrClosed
63 }
64
65 // Check for an active waiter. If there isn't one, make it
66 val := w.waiters[k]
67 if val == nil {
68 val = new(Value)
69 w.waiters[k] = val
70 }
71 w.lock.Unlock()
72
73 // And wait
74 return val.Value()
75}
76
77// ValueOk gets the value for the given key, returning immediately if the
78// value doesn't exist. The second return argument is true if the value exists.
79func (w *KeyedValue) ValueOk(k string) (interface{}, bool) {
80 w.lock.Lock()
81 defer w.lock.Unlock()
82
83 v, val := w.valueWaiter(k)
84 return v, val == nil
85}
86
87func (w *KeyedValue) SetValue(k string, v interface{}) {
88 w.lock.Lock()
89 defer w.lock.Unlock()
90 w.setValue(k, v)
91}
92
93// Init will initialize the key to a given value only if the key has
94// not been set before. This is safe to call multiple times and in parallel.
95func (w *KeyedValue) Init(k string, v interface{}) {
96 w.lock.Lock()
97 defer w.lock.Unlock()
98
99 // If we have a waiter, set the value.
100 _, val := w.valueWaiter(k)
101 if val != nil {
102 w.setValue(k, v)
103 }
104}
105
106// Must be called with w.lock held.
107func (w *KeyedValue) init() {
108 w.values = make(map[string]interface{})
109 w.waiters = make(map[string]*Value)
110}
111
112// setValue is like SetValue but assumes the lock is held.
113func (w *KeyedValue) setValue(k string, v interface{}) {
114 w.once.Do(w.init)
115
116 // Set the value, always
117 w.values[k] = v
118
119 // If we have a waiter, set it
120 if val, ok := w.waiters[k]; ok {
121 val.SetValue(v)
122 delete(w.waiters, k)
123 }
124}
125
126// valueWaiter gets the value or the Value waiter for a given key.
127//
128// This must be called with lock held.
129func (w *KeyedValue) valueWaiter(k string) (interface{}, *Value) {
130 w.once.Do(w.init)
131
132 // If we have this value already, return it
133 if v, ok := w.values[k]; ok {
134 return v, nil
135 }
136
137 // If we're closed, return that
138 if w.closed {
139 return ErrClosed, nil
140 }
141
142 // No pending value, check for a waiter
143 val := w.waiters[k]
144 if val == nil {
145 val = new(Value)
146 w.waiters[k] = val
147 }
148
149 // Return the waiter
150 return nil, val
151}
diff --git a/vendor/github.com/hashicorp/terraform/helper/shadow/ordered_value.go b/vendor/github.com/hashicorp/terraform/helper/shadow/ordered_value.go
deleted file mode 100644
index 0a43d4d..0000000
--- a/vendor/github.com/hashicorp/terraform/helper/shadow/ordered_value.go
+++ /dev/null
@@ -1,66 +0,0 @@
1package shadow
2
3import (
4 "container/list"
5 "sync"
6)
7
8// OrderedValue is a struct that keeps track of a value in the order
9// it is set. Each time Value() is called, it will return the next queued
10// value and then discard it.
11//
12// This is unlike Value that returns the same value once it is set.
13type OrderedValue struct {
14 lock sync.Mutex
15 values *list.List
16 waiters *list.List
17}
18
19// Value returns the last value that was set, or blocks until one
20// is received.
21func (w *OrderedValue) Value() interface{} {
22 w.lock.Lock()
23
24 // If we have a pending value already, use it
25 if w.values != nil && w.values.Len() > 0 {
26 front := w.values.Front()
27 w.values.Remove(front)
28 w.lock.Unlock()
29 return front.Value
30 }
31
32 // No pending value, create a waiter
33 if w.waiters == nil {
34 w.waiters = list.New()
35 }
36
37 var val Value
38 w.waiters.PushBack(&val)
39 w.lock.Unlock()
40
41 // Return the value once we have it
42 return val.Value()
43}
44
45// SetValue sets the latest value.
46func (w *OrderedValue) SetValue(v interface{}) {
47 w.lock.Lock()
48 defer w.lock.Unlock()
49
50 // If we have a waiter, notify it
51 if w.waiters != nil && w.waiters.Len() > 0 {
52 front := w.waiters.Front()
53 w.waiters.Remove(front)
54
55 val := front.Value.(*Value)
56 val.SetValue(v)
57 return
58 }
59
60 // Add it to the list of values
61 if w.values == nil {
62 w.values = list.New()
63 }
64
65 w.values.PushBack(v)
66}
diff --git a/vendor/github.com/hashicorp/terraform/helper/shadow/value.go b/vendor/github.com/hashicorp/terraform/helper/shadow/value.go
deleted file mode 100644
index 178b7e7..0000000
--- a/vendor/github.com/hashicorp/terraform/helper/shadow/value.go
+++ /dev/null
@@ -1,87 +0,0 @@
1package shadow
2
3import (
4 "errors"
5 "sync"
6)
7
8// ErrClosed is returned by any closed values.
9//
10// A "closed value" is when the shadow has been notified that the real
11// side is complete and any blocking values will _never_ be satisfied
12// in the future. In this case, this error is returned. If a value is already
13// available, that is still returned.
14var ErrClosed = errors.New("shadow closed")
15
16// Value is a struct that coordinates a value between two
17// parallel routines. It is similar to atomic.Value except that when
18// Value is called if it isn't set it will wait for it.
19//
20// The Value can be closed with Close, which will cause any future
21// blocking operations to return immediately with ErrClosed.
22type Value struct {
23 lock sync.Mutex
24 cond *sync.Cond
25 value interface{}
26 valueSet bool
27}
28
29func (v *Value) Lock() {
30 v.lock.Lock()
31}
32
33func (v *Value) Unlock() {
34 v.lock.Unlock()
35}
36
37// Close closes the value. This can never fail. For a definition of
38// "close" see the struct docs.
39func (w *Value) Close() error {
40 w.lock.Lock()
41 set := w.valueSet
42 w.lock.Unlock()
43
44 // If we haven't set the value, set it
45 if !set {
46 w.SetValue(ErrClosed)
47 }
48
49 // Done
50 return nil
51}
52
53// Value returns the value that was set.
54func (w *Value) Value() interface{} {
55 w.lock.Lock()
56 defer w.lock.Unlock()
57
58 // If we already have a value just return
59 for !w.valueSet {
60 // No value, setup the condition variable if we have to
61 if w.cond == nil {
62 w.cond = sync.NewCond(&w.lock)
63 }
64
65 // Wait on it
66 w.cond.Wait()
67 }
68
69 // Return the value
70 return w.value
71}
72
73// SetValue sets the value.
74func (w *Value) SetValue(v interface{}) {
75 w.lock.Lock()
76 defer w.lock.Unlock()
77
78 // Set the value
79 w.valueSet = true
80 w.value = v
81
82 // If we have a condition, clear it
83 if w.cond != nil {
84 w.cond.Broadcast()
85 w.cond = nil
86 }
87}
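
For context on what is being removed: prior to this change the shadow.Value helper coordinated a value between two goroutines, blocking in Value() until SetValue (or Close) was called. A rough pre-removal usage sketch, assuming the old import path:

package main

import (
	"fmt"

	"github.com/hashicorp/terraform/helper/shadow" // deleted by this change
)

func main() {
	var v shadow.Value

	done := make(chan struct{})
	go func() {
		// Blocks until SetValue is called; Close would deliver shadow.ErrClosed.
		fmt.Println("got:", v.Value())
		close(done)
	}()

	v.SetValue(42)
	<-done
}
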
diff --git a/vendor/github.com/hashicorp/terraform/httpclient/client.go b/vendor/github.com/hashicorp/terraform/httpclient/client.go
new file mode 100644
index 0000000..bb06beb
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/httpclient/client.go
@@ -0,0 +1,18 @@
1package httpclient
2
3import (
4 "net/http"
5
6 cleanhttp "github.com/hashicorp/go-cleanhttp"
7)
8
9// New returns the DefaultPooledClient from the cleanhttp
10// package that will also send a Terraform User-Agent string.
11func New() *http.Client {
12 cli := cleanhttp.DefaultPooledClient()
13 cli.Transport = &userAgentRoundTripper{
14 userAgent: UserAgentString(),
15 inner: cli.Transport,
16 }
17 return cli
18}
diff --git a/vendor/github.com/hashicorp/terraform/httpclient/useragent.go b/vendor/github.com/hashicorp/terraform/httpclient/useragent.go
new file mode 100644
index 0000000..5e28017
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/httpclient/useragent.go
@@ -0,0 +1,40 @@
1package httpclient
2
3import (
4 "fmt"
5 "log"
6 "net/http"
7 "os"
8 "strings"
9
10 "github.com/hashicorp/terraform/version"
11)
12
13const userAgentFormat = "Terraform/%s"
14const uaEnvVar = "TF_APPEND_USER_AGENT"
15
16func UserAgentString() string {
17 ua := fmt.Sprintf(userAgentFormat, version.Version)
18
19 if add := os.Getenv(uaEnvVar); add != "" {
20 add = strings.TrimSpace(add)
21 if len(add) > 0 {
22 ua += " " + add
23 log.Printf("[DEBUG] Using modified User-Agent: %s", ua)
24 }
25 }
26
27 return ua
28}
29
30type userAgentRoundTripper struct {
31 inner http.RoundTripper
32 userAgent string
33}
34
35func (rt *userAgentRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) {
36 if _, ok := req.Header["User-Agent"]; !ok {
37 req.Header.Set("User-Agent", rt.userAgent)
38 }
39 return rt.inner.RoundTrip(req)
40}
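
A minimal sketch of the new httpclient package in use; the extra User-Agent suffix and URL are illustrative:

package main

import (
	"fmt"
	"os"

	"github.com/hashicorp/terraform/httpclient"
)

func main() {
	// Anything in TF_APPEND_USER_AGENT is appended to "Terraform/<version>".
	os.Setenv("TF_APPEND_USER_AGENT", "statuscake-ci")

	client := httpclient.New()
	resp, err := client.Get("https://releases.hashicorp.com/")
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
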
diff --git a/vendor/github.com/hashicorp/terraform/plugin/client.go b/vendor/github.com/hashicorp/terraform/plugin/client.go
index 3a5cb7a..7e2f4fe 100644
--- a/vendor/github.com/hashicorp/terraform/plugin/client.go
+++ b/vendor/github.com/hashicorp/terraform/plugin/client.go
@@ -1,8 +1,10 @@
1package plugin 1package plugin
2 2
3import ( 3import (
4 "os"
4 "os/exec" 5 "os/exec"
5 6
7 hclog "github.com/hashicorp/go-hclog"
6 plugin "github.com/hashicorp/go-plugin" 8 plugin "github.com/hashicorp/go-plugin"
7 "github.com/hashicorp/terraform/plugin/discovery" 9 "github.com/hashicorp/terraform/plugin/discovery"
8) 10)
@@ -10,11 +12,18 @@ import (
10// ClientConfig returns a configuration object that can be used to instantiate 12// ClientConfig returns a configuration object that can be used to instantiate
11// a client for the plugin described by the given metadata. 13// a client for the plugin described by the given metadata.
12func ClientConfig(m discovery.PluginMeta) *plugin.ClientConfig { 14func ClientConfig(m discovery.PluginMeta) *plugin.ClientConfig {
15 logger := hclog.New(&hclog.LoggerOptions{
16 Name: "plugin",
17 Level: hclog.Trace,
18 Output: os.Stderr,
19 })
20
13 return &plugin.ClientConfig{ 21 return &plugin.ClientConfig{
14 Cmd: exec.Command(m.Path), 22 Cmd: exec.Command(m.Path),
15 HandshakeConfig: Handshake, 23 HandshakeConfig: Handshake,
16 Managed: true, 24 Managed: true,
17 Plugins: PluginMap, 25 Plugins: PluginMap,
26 Logger: logger,
18 } 27 }
19} 28}
20 29
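
A hedged sketch of launching a provider plugin from this configuration, now that it carries an hclog logger (the plugin name and path are illustrative):

package main

import (
	"log"

	goplugin "github.com/hashicorp/go-plugin"
	tfplugin "github.com/hashicorp/terraform/plugin"
	"github.com/hashicorp/terraform/plugin/discovery"
)

func main() {
	meta := discovery.PluginMeta{
		Name: "statuscake",
		Path: "/usr/local/bin/terraform-provider-statuscake",
	}

	// Plugin output is forwarded to stderr at trace level via hclog.
	client := goplugin.NewClient(tfplugin.ClientConfig(meta))
	defer client.Kill()

	if _, err := client.Client(); err != nil {
		log.Fatal(err)
	}
}
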
diff --git a/vendor/github.com/hashicorp/terraform/plugin/discovery/find.go b/vendor/github.com/hashicorp/terraform/plugin/discovery/find.go
index f5bc4c1..f053312 100644
--- a/vendor/github.com/hashicorp/terraform/plugin/discovery/find.go
+++ b/vendor/github.com/hashicorp/terraform/plugin/discovery/find.go
@@ -3,6 +3,7 @@ package discovery
3import ( 3import (
4 "io/ioutil" 4 "io/ioutil"
5 "log" 5 "log"
6 "os"
6 "path/filepath" 7 "path/filepath"
7 "strings" 8 "strings"
8) 9)
@@ -59,7 +60,6 @@ func findPluginPaths(kind string, dirs []string) []string {
59 fullName := item.Name() 60 fullName := item.Name()
60 61
61 if !strings.HasPrefix(fullName, prefix) { 62 if !strings.HasPrefix(fullName, prefix) {
62 log.Printf("[DEBUG] skipping %q, not a %s", fullName, kind)
63 continue 63 continue
64 } 64 }
65 65
@@ -71,6 +71,12 @@ func findPluginPaths(kind string, dirs []string) []string {
71 continue 71 continue
72 } 72 }
73 73
74 // Check that the file we found is usable
75 if !pathIsFile(absPath) {
76 log.Printf("[ERROR] ignoring non-file %s", absPath)
77 continue
78 }
79
74 log.Printf("[DEBUG] found %s %q", kind, fullName) 80 log.Printf("[DEBUG] found %s %q", kind, fullName)
75 ret = append(ret, filepath.Clean(absPath)) 81 ret = append(ret, filepath.Clean(absPath))
76 continue 82 continue
@@ -83,7 +89,13 @@ func findPluginPaths(kind string, dirs []string) []string {
83 continue 89 continue
84 } 90 }
85 91
86 log.Printf("[WARNING] found legacy %s %q", kind, fullName) 92 // Check that the file we found is usable
93 if !pathIsFile(absPath) {
94 log.Printf("[ERROR] ignoring non-file %s", absPath)
95 continue
96 }
97
98 log.Printf("[WARN] found legacy %s %q", kind, fullName)
87 99
88 ret = append(ret, filepath.Clean(absPath)) 100 ret = append(ret, filepath.Clean(absPath))
89 } 101 }
@@ -92,6 +104,17 @@ func findPluginPaths(kind string, dirs []string) []string {
92 return ret 104 return ret
93} 105}
94 106
107// Returns true if and only if the given path refers to a file or a symlink
108// to a file.
109func pathIsFile(path string) bool {
110 info, err := os.Stat(path)
111 if err != nil {
112 return false
113 }
114
115 return !info.IsDir()
116}
117
95// ResolvePluginPaths takes a list of paths to plugin executables (as returned 118// ResolvePluginPaths takes a list of paths to plugin executables (as returned
96// by e.g. FindPluginPaths) and produces a PluginMetaSet describing the 119// by e.g. FindPluginPaths) and produces a PluginMetaSet describing the
97// referenced plugins. 120// referenced plugins.
diff --git a/vendor/github.com/hashicorp/terraform/plugin/discovery/get.go b/vendor/github.com/hashicorp/terraform/plugin/discovery/get.go
index 241b5cb..815640f 100644
--- a/vendor/github.com/hashicorp/terraform/plugin/discovery/get.go
+++ b/vendor/github.com/hashicorp/terraform/plugin/discovery/get.go
@@ -3,19 +3,22 @@ package discovery
3import ( 3import (
4 "errors" 4 "errors"
5 "fmt" 5 "fmt"
6 "io"
6 "io/ioutil" 7 "io/ioutil"
7 "log" 8 "log"
8 "net/http" 9 "net/http"
9 "os" 10 "os"
11 "path/filepath"
10 "runtime" 12 "runtime"
11 "strconv" 13 "strconv"
12 "strings" 14 "strings"
13 15
14 "golang.org/x/net/html" 16 "golang.org/x/net/html"
15 17
16 cleanhttp "github.com/hashicorp/go-cleanhttp"
17 getter "github.com/hashicorp/go-getter" 18 getter "github.com/hashicorp/go-getter"
18 multierror "github.com/hashicorp/go-multierror" 19 multierror "github.com/hashicorp/go-multierror"
20 "github.com/hashicorp/terraform/httpclient"
21 "github.com/mitchellh/cli"
19) 22)
20 23
21// Releases are located by parsing the html listing from releases.hashicorp.com. 24// Releases are located by parsing the html listing from releases.hashicorp.com.
@@ -30,7 +33,19 @@ const protocolVersionHeader = "x-terraform-protocol-version"
30 33
31var releaseHost = "https://releases.hashicorp.com" 34var releaseHost = "https://releases.hashicorp.com"
32 35
33var httpClient = cleanhttp.DefaultClient() 36var httpClient *http.Client
37
38func init() {
39 httpClient = httpclient.New()
40
41 httpGetter := &getter.HttpGetter{
42 Client: httpClient,
43 Netrc: true,
44 }
45
46 getter.Getters["http"] = httpGetter
47 getter.Getters["https"] = httpGetter
48}
34 49
35// An Installer maintains a local cache of plugins by downloading plugins 50// An Installer maintains a local cache of plugins by downloading plugins
36// from an online repository. 51// from an online repository.
@@ -47,6 +62,10 @@ type Installer interface {
47type ProviderInstaller struct { 62type ProviderInstaller struct {
48 Dir string 63 Dir string
49 64
65 // Cache is used to access and update a local cache of plugins if non-nil.
66 // Can be nil to disable caching.
67 Cache PluginCache
68
50 PluginProtocolVersion uint 69 PluginProtocolVersion uint
51 70
52 // OS and Arch specify the OS and architecture that should be used when 71 // OS and Arch specify the OS and architecture that should be used when
@@ -58,6 +77,8 @@ type ProviderInstaller struct {
58 77
59 // Skip checksum and signature verification 78 // Skip checksum and signature verification
60 SkipVerify bool 79 SkipVerify bool
80
81 Ui cli.Ui // Ui for output
61} 82}
62 83
63// Get is part of an implementation of type Installer, and attempts to download 84// Get is part of an implementation of type Installer, and attempts to download
@@ -98,6 +119,12 @@ func (i *ProviderInstaller) Get(provider string, req Constraints) (PluginMeta, e
98 // sort them newest to oldest 119 // sort them newest to oldest
99 Versions(versions).Sort() 120 Versions(versions).Sort()
100 121
122 // Ensure that our installation directory exists
123 err = os.MkdirAll(i.Dir, os.ModePerm)
124 if err != nil {
125 return PluginMeta{}, fmt.Errorf("failed to create plugin dir %s: %s", i.Dir, err)
126 }
127
101 // take the first matching plugin we find 128 // take the first matching plugin we find
102 for _, v := range versions { 129 for _, v := range versions {
103 url := i.providerURL(provider, v.String()) 130 url := i.providerURL(provider, v.String())
@@ -116,8 +143,9 @@ func (i *ProviderInstaller) Get(provider string, req Constraints) (PluginMeta, e
116 143
117 log.Printf("[DEBUG] fetching provider info for %s version %s", provider, v) 144 log.Printf("[DEBUG] fetching provider info for %s version %s", provider, v)
118 if checkPlugin(url, i.PluginProtocolVersion) { 145 if checkPlugin(url, i.PluginProtocolVersion) {
119 log.Printf("[DEBUG] getting provider %q version %q at %s", provider, v, url) 146 i.Ui.Info(fmt.Sprintf("- Downloading plugin for provider %q (%s)...", provider, v.String()))
120 err := getter.Get(i.Dir, url) 147 log.Printf("[DEBUG] getting provider %q version %q", provider, v)
148 err := i.install(provider, v, url)
121 if err != nil { 149 if err != nil {
122 return PluginMeta{}, err 150 return PluginMeta{}, err
123 } 151 }
@@ -164,6 +192,98 @@ func (i *ProviderInstaller) Get(provider string, req Constraints) (PluginMeta, e
164 return PluginMeta{}, ErrorNoVersionCompatible 192 return PluginMeta{}, ErrorNoVersionCompatible
165} 193}
166 194
195func (i *ProviderInstaller) install(provider string, version Version, url string) error {
196 if i.Cache != nil {
197 log.Printf("[DEBUG] looking for provider %s %s in plugin cache", provider, version)
198 cached := i.Cache.CachedPluginPath("provider", provider, version)
199 if cached == "" {
200 log.Printf("[DEBUG] %s %s not yet in cache, so downloading %s", provider, version, url)
201 err := getter.Get(i.Cache.InstallDir(), url)
202 if err != nil {
203 return err
204 }
205 // should now be in cache
206 cached = i.Cache.CachedPluginPath("provider", provider, version)
207 if cached == "" {
208 // should never happen if the getter is behaving properly
209 // and the plugins are packaged properly.
210 return fmt.Errorf("failed to find downloaded plugin in cache %s", i.Cache.InstallDir())
211 }
212 }
213
214 // Link or copy the cached binary into our install dir so the
215 // normal resolution machinery can find it.
216 filename := filepath.Base(cached)
217 targetPath := filepath.Join(i.Dir, filename)
218
219 log.Printf("[DEBUG] installing %s %s to %s from local cache %s", provider, version, targetPath, cached)
220
221 // Delete if we can. If there's nothing there already then no harm done.
222 // This is important because we can't create a link if there's
223 // already a file of the same name present.
224 // (any other error here we'll catch below when we try to write here)
225 os.Remove(targetPath)
226
227 // We don't attempt linking on Windows because links are not
228 // comprehensively supported by all tools/apps in Windows and
229 // so we choose to be conservative to avoid creating any
230 // weird issues for Windows users.
231 linkErr := errors.New("link not supported for Windows") // placeholder error, never actually returned
232 if runtime.GOOS != "windows" {
233 // Try hard linking first. Hard links are preferable because this
234 // creates a self-contained directory that doesn't depend on the
235 // cache after install.
236 linkErr = os.Link(cached, targetPath)
237
238 // If that failed, try a symlink. This _does_ depend on the cache
239 // after install, so the user must manage the cache more carefully
240 // in this case, but avoids creating redundant copies of the
241 // plugins on disk.
242 if linkErr != nil {
243 linkErr = os.Symlink(cached, targetPath)
244 }
245 }
246
247 // If we still have an error then we'll try a copy as a fallback.
248 // In this case either the OS is Windows or the target filesystem
249 // can't support symlinks.
250 if linkErr != nil {
251 srcFile, err := os.Open(cached)
252 if err != nil {
253 return fmt.Errorf("failed to open cached plugin %s: %s", cached, err)
254 }
255 defer srcFile.Close()
256
257 destFile, err := os.OpenFile(targetPath, os.O_TRUNC|os.O_CREATE|os.O_WRONLY, os.ModePerm)
258 if err != nil {
259 return fmt.Errorf("failed to create %s: %s", targetPath, err)
260 }
261
262 _, err = io.Copy(destFile, srcFile)
263 if err != nil {
264 destFile.Close()
265 return fmt.Errorf("failed to copy cached plugin from %s to %s: %s", cached, targetPath, err)
266 }
267
268 err = destFile.Close()
269 if err != nil {
270 return fmt.Errorf("error creating %s: %s", targetPath, err)
271 }
272 }
273
274 // One way or another, by the time we get here we should have either
275 // a link or a copy of the cached plugin within i.Dir, as expected.
276 } else {
277 log.Printf("[DEBUG] plugin cache is disabled, so downloading %s %s from %s", provider, version, url)
278 err := getter.Get(i.Dir, url)
279 if err != nil {
280 return err
281 }
282 }
283
284 return nil
285}
286
167func (i *ProviderInstaller) PurgeUnused(used map[string]PluginMeta) (PluginMetaSet, error) { 287func (i *ProviderInstaller) PurgeUnused(used map[string]PluginMeta) (PluginMetaSet, error) {
168 purge := make(PluginMetaSet) 288 purge := make(PluginMetaSet)
169 289
@@ -261,7 +381,7 @@ func checkPlugin(url string, pluginProtocolVersion uint) bool {
261 if proto == "" { 381 if proto == "" {
262 // The header isn't present, but we don't make this error fatal since 382 // The header isn't present, but we don't make this error fatal since
263 // the latest version will probably work. 383 // the latest version will probably work.
264 log.Printf("[WARNING] missing %s from: %s", protocolVersionHeader, url) 384 log.Printf("[WARN] missing %s from: %s", protocolVersionHeader, url)
265 return true 385 return true
266 } 386 }
267 387
@@ -422,3 +542,7 @@ func getFile(url string) ([]byte, error) {
422 } 542 }
423 return data, nil 543 return data, nil
424} 544}
545
546func GetReleaseHost() string {
547 return releaseHost
548}
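
The cache-aware install path above prefers a hard link, then a symlink, then a byte copy. A standalone sketch of that fallback order (not the vendored function itself):

package example

import (
	"io"
	"os"
	"runtime"
)

// placeFromCache links or copies a cached plugin binary into the install dir.
func placeFromCache(cached, target string) error {
	// A stale file at the target would block link creation.
	os.Remove(target)

	if runtime.GOOS != "windows" {
		if err := os.Link(cached, target); err == nil {
			return nil // hard link: install dir stays self-contained
		}
		if err := os.Symlink(cached, target); err == nil {
			return nil // symlink: install dir now depends on the cache
		}
	}

	// Fall back to copying the binary.
	src, err := os.Open(cached)
	if err != nil {
		return err
	}
	defer src.Close()

	dst, err := os.OpenFile(target, os.O_TRUNC|os.O_CREATE|os.O_WRONLY, os.ModePerm)
	if err != nil {
		return err
	}
	defer dst.Close()

	_, err = io.Copy(dst, src)
	return err
}
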
diff --git a/vendor/github.com/hashicorp/terraform/plugin/discovery/get_cache.go b/vendor/github.com/hashicorp/terraform/plugin/discovery/get_cache.go
new file mode 100644
index 0000000..1a10042
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/plugin/discovery/get_cache.go
@@ -0,0 +1,48 @@
1package discovery
2
3// PluginCache is an interface implemented by objects that are able to maintain
4// a cache of plugins.
5type PluginCache interface {
6 // CachedPluginPath returns a path where the requested plugin is already
7 // cached, or an empty string if the requested plugin is not yet cached.
8 CachedPluginPath(kind string, name string, version Version) string
9
10 // InstallDir returns the directory that new plugins should be installed into
11 // in order to populate the cache. This directory should be used as the
12 // first argument to getter.Get when downloading plugins with go-getter.
13 //
14 // After installing into this directory, use CachedPluginPath to obtain the
15 // path where the plugin was installed.
16 InstallDir() string
17}
18
19// NewLocalPluginCache returns a PluginCache that caches plugins in a
20// given local directory.
21func NewLocalPluginCache(dir string) PluginCache {
22 return &pluginCache{
23 Dir: dir,
24 }
25}
26
27type pluginCache struct {
28 Dir string
29}
30
31func (c *pluginCache) CachedPluginPath(kind string, name string, version Version) string {
32 allPlugins := FindPlugins(kind, []string{c.Dir})
33 plugins := allPlugins.WithName(name).WithVersion(version)
34
35 if plugins.Count() == 0 {
36 // nothing cached
37 return ""
38 }
39
40 // There should generally be only one plugin here; if there's more than
41 // one match for some reason then we'll just choose one arbitrarily.
42 plugin := plugins.Newest()
43 return plugin.Path
44}
45
46func (c *pluginCache) InstallDir() string {
47 return c.Dir
48}
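
A usage sketch wiring the new local plugin cache into a ProviderInstaller; directory paths are illustrative:

package main

import (
	"os"
	"runtime"

	"github.com/hashicorp/terraform/plugin/discovery"
	"github.com/mitchellh/cli"
)

func main() {
	installer := &discovery.ProviderInstaller{
		Dir:   ".terraform/plugins",
		Cache: discovery.NewLocalPluginCache(os.ExpandEnv("$HOME/.terraform.d/plugin-cache")),
		OS:    runtime.GOOS,
		Arch:  runtime.GOARCH,
		Ui:    &cli.BasicUi{Writer: os.Stdout},
	}

	// installer.Get(provider, constraints) will now reuse the cache when the
	// requested plugin version is already present there.
	_ = installer
}
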
diff --git a/vendor/github.com/hashicorp/terraform/plugin/resource_provider.go b/vendor/github.com/hashicorp/terraform/plugin/resource_provider.go
index 473f786..d6a433c 100644
--- a/vendor/github.com/hashicorp/terraform/plugin/resource_provider.go
+++ b/vendor/github.com/hashicorp/terraform/plugin/resource_provider.go
@@ -41,6 +41,24 @@ func (p *ResourceProvider) Stop() error {
41 return err 41 return err
42} 42}
43 43
44func (p *ResourceProvider) GetSchema(req *terraform.ProviderSchemaRequest) (*terraform.ProviderSchema, error) {
45 var result ResourceProviderGetSchemaResponse
46 args := &ResourceProviderGetSchemaArgs{
47 Req: req,
48 }
49
50 err := p.Client.Call("Plugin.GetSchema", args, &result)
51 if err != nil {
52 return nil, err
53 }
54
55 if result.Error != nil {
56 err = result.Error
57 }
58
59 return result.Schema, err
60}
61
44func (p *ResourceProvider) Input( 62func (p *ResourceProvider) Input(
45 input terraform.UIInput, 63 input terraform.UIInput,
46 c *terraform.ResourceConfig) (*terraform.ResourceConfig, error) { 64 c *terraform.ResourceConfig) (*terraform.ResourceConfig, error) {
@@ -312,6 +330,15 @@ type ResourceProviderStopResponse struct {
312 Error *plugin.BasicError 330 Error *plugin.BasicError
313} 331}
314 332
333type ResourceProviderGetSchemaArgs struct {
334 Req *terraform.ProviderSchemaRequest
335}
336
337type ResourceProviderGetSchemaResponse struct {
338 Schema *terraform.ProviderSchema
339 Error *plugin.BasicError
340}
341
315type ResourceProviderConfigureResponse struct { 342type ResourceProviderConfigureResponse struct {
316 Error *plugin.BasicError 343 Error *plugin.BasicError
317} 344}
@@ -418,6 +445,18 @@ func (s *ResourceProviderServer) Stop(
418 return nil 445 return nil
419} 446}
420 447
448func (s *ResourceProviderServer) GetSchema(
449 args *ResourceProviderGetSchemaArgs,
450 result *ResourceProviderGetSchemaResponse,
451) error {
452 schema, err := s.Provider.GetSchema(args.Req)
453 result.Schema = schema
454 if err != nil {
455 result.Error = plugin.NewBasicError(err)
456 }
457 return nil
458}
459
421func (s *ResourceProviderServer) Input( 460func (s *ResourceProviderServer) Input(
422 args *ResourceProviderInputArgs, 461 args *ResourceProviderInputArgs,
423 reply *ResourceProviderInputResponse) error { 462 reply *ResourceProviderInputResponse) error {
diff --git a/vendor/github.com/hashicorp/terraform/registry/client.go b/vendor/github.com/hashicorp/terraform/registry/client.go
new file mode 100644
index 0000000..a18e6b8
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/registry/client.go
@@ -0,0 +1,227 @@
1package registry
2
3import (
4 "encoding/json"
5 "fmt"
6 "io/ioutil"
7 "log"
8 "net/http"
9 "net/url"
10 "path"
11 "strings"
12 "time"
13
14 "github.com/hashicorp/terraform/httpclient"
15 "github.com/hashicorp/terraform/registry/regsrc"
16 "github.com/hashicorp/terraform/registry/response"
17 "github.com/hashicorp/terraform/svchost"
18 "github.com/hashicorp/terraform/svchost/disco"
19 "github.com/hashicorp/terraform/version"
20)
21
22const (
23 xTerraformGet = "X-Terraform-Get"
24 xTerraformVersion = "X-Terraform-Version"
25 requestTimeout = 10 * time.Second
26 serviceID = "modules.v1"
27)
28
29var tfVersion = version.String()
30
31// Client provides methods to query Terraform Registries.
32type Client struct {
33 // this is the client to be used for all requests.
34 client *http.Client
35
36 // services is a required *disco.Disco, which may have services and
37 // credentials pre-loaded.
38 services *disco.Disco
39}
40
41// NewClient returns a new initialized registry client.
42func NewClient(services *disco.Disco, client *http.Client) *Client {
43 if services == nil {
44 services = disco.New()
45 }
46
47 if client == nil {
48 client = httpclient.New()
49 client.Timeout = requestTimeout
50 }
51
52 services.Transport = client.Transport
53
54 return &Client{
55 client: client,
56 services: services,
57 }
58}
59
60// Discover queries the host, and returns the url for the registry.
61func (c *Client) Discover(host svchost.Hostname) (*url.URL, error) {
62 service, err := c.services.DiscoverServiceURL(host, serviceID)
63 if err != nil {
64 return nil, err
65 }
66 if !strings.HasSuffix(service.Path, "/") {
67 service.Path += "/"
68 }
69 return service, nil
70}
71
72// Versions queries the registry for a module, and returns the available versions.
73func (c *Client) Versions(module *regsrc.Module) (*response.ModuleVersions, error) {
74 host, err := module.SvcHost()
75 if err != nil {
76 return nil, err
77 }
78
79 service, err := c.Discover(host)
80 if err != nil {
81 return nil, err
82 }
83
84 p, err := url.Parse(path.Join(module.Module(), "versions"))
85 if err != nil {
86 return nil, err
87 }
88
89 service = service.ResolveReference(p)
90
91 log.Printf("[DEBUG] fetching module versions from %q", service)
92
93 req, err := http.NewRequest("GET", service.String(), nil)
94 if err != nil {
95 return nil, err
96 }
97
98 c.addRequestCreds(host, req)
99 req.Header.Set(xTerraformVersion, tfVersion)
100
101 resp, err := c.client.Do(req)
102 if err != nil {
103 return nil, err
104 }
105 defer resp.Body.Close()
106
107 switch resp.StatusCode {
108 case http.StatusOK:
109 // OK
110 case http.StatusNotFound:
111 return nil, &errModuleNotFound{addr: module}
112 default:
113 return nil, fmt.Errorf("error looking up module versions: %s", resp.Status)
114 }
115
116 var versions response.ModuleVersions
117
118 dec := json.NewDecoder(resp.Body)
119 if err := dec.Decode(&versions); err != nil {
120 return nil, err
121 }
122
123 for _, mod := range versions.Modules {
124 for _, v := range mod.Versions {
125 log.Printf("[DEBUG] found available version %q for %s", v.Version, mod.Source)
126 }
127 }
128
129 return &versions, nil
130}
131
132func (c *Client) addRequestCreds(host svchost.Hostname, req *http.Request) {
133 creds, err := c.services.CredentialsForHost(host)
134 if err != nil {
135 log.Printf("[WARN] Failed to get credentials for %s: %s (ignoring)", host, err)
136 return
137 }
138
139 if creds != nil {
140 creds.PrepareRequest(req)
141 }
142}
143
144// Location finds the download location for a specific module version.
145// This returns a string, because the final location may contain special go-getter syntax.
146func (c *Client) Location(module *regsrc.Module, version string) (string, error) {
147 host, err := module.SvcHost()
148 if err != nil {
149 return "", err
150 }
151
152 service, err := c.Discover(host)
153 if err != nil {
154 return "", err
155 }
156
157 var p *url.URL
158 if version == "" {
159 p, err = url.Parse(path.Join(module.Module(), "download"))
160 } else {
161 p, err = url.Parse(path.Join(module.Module(), version, "download"))
162 }
163 if err != nil {
164 return "", err
165 }
166 download := service.ResolveReference(p)
167
168 log.Printf("[DEBUG] looking up module location from %q", download)
169
170 req, err := http.NewRequest("GET", download.String(), nil)
171 if err != nil {
172 return "", err
173 }
174
175 c.addRequestCreds(host, req)
176 req.Header.Set(xTerraformVersion, tfVersion)
177
178 resp, err := c.client.Do(req)
179 if err != nil {
180 return "", err
181 }
182 defer resp.Body.Close()
183
184 // there should be no body, but save it for logging
185 body, err := ioutil.ReadAll(resp.Body)
186 if err != nil {
187 return "", fmt.Errorf("error reading response body from registry: %s", err)
188 }
189
190 switch resp.StatusCode {
191 case http.StatusOK, http.StatusNoContent:
192 // OK
193 case http.StatusNotFound:
194 return "", fmt.Errorf("module %q version %q not found", module, version)
195 default:
196 // anything else is an error:
197 return "", fmt.Errorf("error getting download location for %q: %s resp:%s", module, resp.Status, body)
198 }
199
200 // the download location is in the X-Terraform-Get header
201 location := resp.Header.Get(xTerraformGet)
202 if location == "" {
203 return "", fmt.Errorf("failed to get download URL for %q: %s resp:%s", module, resp.Status, body)
204 }
205
206 // If location looks like it's trying to be a relative URL, treat it as
207 // one.
208 //
209 // We don't do this for just _any_ location, since the X-Terraform-Get
210 // header is a go-getter location rather than a URL, and so not all
211// possible values will parse reasonably as URLs.
212 //
213 // When used in conjunction with go-getter we normally require this header
214 // to be an absolute URL, but we are more liberal here because third-party
215 // registry implementations may not "know" their own absolute URLs if
216 // e.g. they are running behind a reverse proxy frontend, or such.
217 if strings.HasPrefix(location, "/") || strings.HasPrefix(location, "./") || strings.HasPrefix(location, "../") {
218 locationURL, err := url.Parse(location)
219 if err != nil {
220 return "", fmt.Errorf("invalid relative URL for %q: %s", module, err)
221 }
222 locationURL = download.ResolveReference(locationURL)
223 location = locationURL.String()
224 }
225
226 return location, nil
227}
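
A hedged sketch of querying the registry with the new client, including the not-found check added in errors.go below; the module address is just an example:

package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/terraform/registry"
	"github.com/hashicorp/terraform/registry/regsrc"
)

func main() {
	client := registry.NewClient(nil, nil) // default discovery and httpclient.New()

	mod, err := regsrc.ParseModuleSource("hashicorp/consul/aws")
	if err != nil {
		log.Fatal(err)
	}

	versions, err := client.Versions(mod)
	if registry.IsModuleNotFound(err) {
		log.Fatalf("module %s is not in the registry", mod)
	}
	if err != nil {
		log.Fatal(err)
	}

	for _, m := range versions.Modules {
		for _, v := range m.Versions {
			fmt.Println(m.Source, v.Version)
		}
	}
}
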
diff --git a/vendor/github.com/hashicorp/terraform/registry/errors.go b/vendor/github.com/hashicorp/terraform/registry/errors.go
new file mode 100644
index 0000000..b8dcd31
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/registry/errors.go
@@ -0,0 +1,23 @@
1package registry
2
3import (
4 "fmt"
5
6 "github.com/hashicorp/terraform/registry/regsrc"
7)
8
9type errModuleNotFound struct {
10 addr *regsrc.Module
11}
12
13func (e *errModuleNotFound) Error() string {
14 return fmt.Sprintf("module %s not found", e.addr)
15}
16
17// IsModuleNotFound returns true only if the given error is a "module not found"
18// error. This allows callers to recognize this particular error condition
19// as distinct from operational errors such as poor network connectivity.
20func IsModuleNotFound(err error) bool {
21 _, ok := err.(*errModuleNotFound)
22 return ok
23}
diff --git a/vendor/github.com/hashicorp/terraform/registry/regsrc/friendly_host.go b/vendor/github.com/hashicorp/terraform/registry/regsrc/friendly_host.go
new file mode 100644
index 0000000..14b4dce
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/registry/regsrc/friendly_host.go
@@ -0,0 +1,140 @@
1package regsrc
2
3import (
4 "regexp"
5 "strings"
6
7 "github.com/hashicorp/terraform/svchost"
8)
9
10var (
11 // InvalidHostString is a placeholder returned when a raw host can't be
12 // converted by IDNA spec. It will never be returned for any host for which
13 // Valid() is true.
14 InvalidHostString = "<invalid host>"
15
16 // urlLabelEndSubRe is a sub-expression that matches any character that's
17 // allowed at the start or end of a URL label according to RFC1123.
18 urlLabelEndSubRe = "[0-9A-Za-z]"
19
 20	// urlLabelMidSubRe is a sub-expression that matches any character that's
 21	// allowed in a non-start or non-end position of a URL label according to RFC1123.
22 urlLabelMidSubRe = "[0-9A-Za-z-]"
23
24 // urlLabelUnicodeSubRe is a sub-expression that matches any non-ascii char
25 // in an IDN (Unicode) display URL. It's not strict - there are only ~15k
26 // valid Unicode points in IDN RFC (some with conditions). We are just going
27 // with being liberal with matching and then erroring if we fail to convert
28 // to punycode later (which validates chars fully). This at least ensures
29 // ascii chars dissalowed by the RC1123 parts above don't become legal
30 // again.
31 urlLabelUnicodeSubRe = "[^[:ascii:]]"
32
33 // hostLabelSubRe is the sub-expression that matches a valid hostname label.
34 // It does not anchor the start or end so it can be composed into more
35 // complex RegExps below. Note that for sanity we don't handle disallowing
36 // raw punycode in this regexp (esp. since re2 doesn't support negative
 37	// lookbehind, but we can capture its presence here to check later).
38 hostLabelSubRe = "" +
39 // Match valid initial char, or unicode char
40 "(?:" + urlLabelEndSubRe + "|" + urlLabelUnicodeSubRe + ")" +
41 // Optionally, match 0 to 61 valid URL or Unicode chars,
42 // followed by one valid end char or unicode char
43 "(?:" +
44 "(?:" + urlLabelMidSubRe + "|" + urlLabelUnicodeSubRe + "){0,61}" +
45 "(?:" + urlLabelEndSubRe + "|" + urlLabelUnicodeSubRe + ")" +
46 ")?"
47
48 // hostSubRe is the sub-expression that matches a valid host prefix.
49 // Allows custom port.
50 hostSubRe = hostLabelSubRe + "(?:\\." + hostLabelSubRe + ")+(?::\\d+)?"
51
52 // hostRe is a regexp that matches a valid host prefix. Additional
53 // validation of unicode strings is needed for matches.
54 hostRe = regexp.MustCompile("^" + hostSubRe + "$")
55)
56
57// FriendlyHost describes a registry instance identified in source strings by a
58// simple bare hostname like registry.terraform.io.
59type FriendlyHost struct {
60 Raw string
61}
62
63func NewFriendlyHost(host string) *FriendlyHost {
64 return &FriendlyHost{Raw: host}
65}
66
67// ParseFriendlyHost attempts to parse a valid "friendly host" prefix from the
68// given string. If no valid prefix is found, host will be nil and rest will
69// contain the full source string. The host prefix must terminate at the end of
70// the input or at the first / character. If one or more characters exist after
71// the first /, they will be returned as rest (without the / delimiter).
72// Hostnames containing punycode WILL be parsed successfully since they may have
73// come from an internal normalized source string, however should be considered
74// invalid if the string came from a user directly. This must be checked
75// explicitly for user-input strings by calling Valid() on the
76// returned host.
77func ParseFriendlyHost(source string) (host *FriendlyHost, rest string) {
78 parts := strings.SplitN(source, "/", 2)
79
80 if hostRe.MatchString(parts[0]) {
81 host = &FriendlyHost{Raw: parts[0]}
82 if len(parts) == 2 {
83 rest = parts[1]
84 }
85 return
86 }
87
88 // No match, return whole string as rest along with nil host
89 rest = source
90 return
91}
92
93// Valid returns whether the host prefix is considered valid in any case.
94// Examples of invalid prefixes might include ones that don't conform to the host
95// name specifications. Note that IDN prefixes containing punycode are not valid
96// input which we expect to always be in user-input or normalised display form.
97func (h *FriendlyHost) Valid() bool {
98 return svchost.IsValid(h.Raw)
99}
100
101// Display returns the host formatted for display to the user in CLI or web
102// output.
103func (h *FriendlyHost) Display() string {
104 return svchost.ForDisplay(h.Raw)
105}
106
107// Normalized returns the host formatted for internal reference or comparison.
108func (h *FriendlyHost) Normalized() string {
109 host, err := svchost.ForComparison(h.Raw)
110 if err != nil {
111 return InvalidHostString
112 }
113 return string(host)
114}
115
116// String returns the host formatted as the user originally typed it assuming it
117// was parsed from user input.
118func (h *FriendlyHost) String() string {
119 return h.Raw
120}
121
122// Equal compares the FriendlyHost against another instance taking normalization
123// into account. Invalid hosts cannot be compared and will always return false.
124func (h *FriendlyHost) Equal(other *FriendlyHost) bool {
125 if other == nil {
126 return false
127 }
128
129 otherHost, err := svchost.ForComparison(other.Raw)
130 if err != nil {
131 return false
132 }
133
134 host, err := svchost.ForComparison(h.Raw)
135 if err != nil {
136 return false
137 }
138
139 return otherHost == host
140}
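
A small sketch of ParseFriendlyHost splitting a host prefix from a source string; the hostname is illustrative:

package main

import (
	"fmt"

	"github.com/hashicorp/terraform/registry/regsrc"
)

func main() {
	host, rest := regsrc.ParseFriendlyHost("registry.example.com/acme/network/aws")
	if host != nil && host.Valid() {
		fmt.Println("host:", host.Display()) // registry.example.com
		fmt.Println("rest:", rest)           // acme/network/aws
	}
}
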
diff --git a/vendor/github.com/hashicorp/terraform/registry/regsrc/module.go b/vendor/github.com/hashicorp/terraform/registry/regsrc/module.go
new file mode 100644
index 0000000..325706e
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/registry/regsrc/module.go
@@ -0,0 +1,205 @@
1package regsrc
2
3import (
4 "errors"
5 "fmt"
6 "regexp"
7 "strings"
8
9 "github.com/hashicorp/terraform/svchost"
10)
11
12var (
13 ErrInvalidModuleSource = errors.New("not a valid registry module source")
14
15 // nameSubRe is the sub-expression that matches a valid module namespace or
16 // name. It's strictly a super-set of what GitHub allows for user/org and
17 // repo names respectively, but more restrictive than our original repo-name
18 // regex which allowed periods but could cause ambiguity with hostname
19 // prefixes. It does not anchor the start or end so it can be composed into
20 // more complex RegExps below. Alphanumeric with - and _ allowed in non
21 // leading or trailing positions. Max length 64 chars. (GitHub username is
22 // 38 max.)
23 nameSubRe = "[0-9A-Za-z](?:[0-9A-Za-z-_]{0,62}[0-9A-Za-z])?"
24
25 // providerSubRe is the sub-expression that matches a valid provider. It
26 // does not anchor the start or end so it can be composed into more complex
27 // RegExps below. Only lowercase chars and digits are supported in practice.
28 // Max length 64 chars.
29 providerSubRe = "[0-9a-z]{1,64}"
30
31 // moduleSourceRe is a regular expression that matches the basic
32 // namespace/name/provider[//...] format for registry sources. It assumes
33 // any FriendlyHost prefix has already been removed if present.
34 moduleSourceRe = regexp.MustCompile(
35 fmt.Sprintf("^(%s)\\/(%s)\\/(%s)(?:\\/\\/(.*))?$",
36 nameSubRe, nameSubRe, providerSubRe))
37
38 // NameRe is a regular expression defining the format allowed for namespace
39 // or name fields in module registry implementations.
40 NameRe = regexp.MustCompile("^" + nameSubRe + "$")
41
42 // ProviderRe is a regular expression defining the format allowed for
43 // provider fields in module registry implementations.
44 ProviderRe = regexp.MustCompile("^" + providerSubRe + "$")
45
46 // these hostnames are not allowed as registry sources, because they are
47 // already special case module sources in terraform.
48 disallowed = map[string]bool{
49 "github.com": true,
50 "bitbucket.org": true,
51 }
52)
53
54// Module describes a Terraform Registry Module source.
55type Module struct {
56 // RawHost is the friendly host prefix if one was present. It might be nil
57 // if the original source had no host prefix which implies
58 // PublicRegistryHost but is distinct from having an actual pointer to
59 // PublicRegistryHost since it encodes the fact the original string didn't
60 // include a host prefix at all which is significant for recovering actual
61 // input not just normalized form. Most callers should access it with Host()
62 // which will return public registry host instance if it's nil.
63 RawHost *FriendlyHost
64 RawNamespace string
65 RawName string
66 RawProvider string
67 RawSubmodule string
68}
69
70// NewModule constructs a new module source from separate parts. Pass empty
71// string if host or submodule are not needed.
72func NewModule(host, namespace, name, provider, submodule string) (*Module, error) {
73 m := &Module{
74 RawNamespace: namespace,
75 RawName: name,
76 RawProvider: provider,
77 RawSubmodule: submodule,
78 }
79 if host != "" {
80 h := NewFriendlyHost(host)
81 if h != nil {
82 fmt.Println("HOST:", h)
83 if !h.Valid() || disallowed[h.Display()] {
84 return nil, ErrInvalidModuleSource
85 }
86 }
87 m.RawHost = h
88 }
89 return m, nil
90}
91
92// ParseModuleSource attempts to parse source as a Terraform registry module
93// source. If the string is not found to be in a valid format,
94// ErrInvalidModuleSource is returned. Note that this can only be used on
95// "input" strings, e.g. either ones supplied by the user or potentially
96// normalised but in Display form (unicode). It will fail to parse a source with
97// a punycoded domain since this is not permitted input from a user. If you have
98// an already normalized string internally, you can skip parsing and compare it
99// directly against the normalized form of the subject using the normal
100// string equality operator.
101func ParseModuleSource(source string) (*Module, error) {
102 // See if there is a friendly host prefix.
103 host, rest := ParseFriendlyHost(source)
104 if host != nil {
105 if !host.Valid() || disallowed[host.Display()] {
106 return nil, ErrInvalidModuleSource
107 }
108 }
109
110 matches := moduleSourceRe.FindStringSubmatch(rest)
111 if len(matches) < 4 {
112 return nil, ErrInvalidModuleSource
113 }
114
115 m := &Module{
116 RawHost: host,
117 RawNamespace: matches[1],
118 RawName: matches[2],
119 RawProvider: matches[3],
120 }
121
122 if len(matches) == 5 {
123 m.RawSubmodule = matches[4]
124 }
125
126 return m, nil
127}
128
129// Display returns the source formatted for display to the user in CLI or web
130// output.
131func (m *Module) Display() string {
132 return m.formatWithPrefix(m.normalizedHostPrefix(m.Host().Display()), false)
133}
134
135// Normalized returns the source formatted for internal reference or comparison.
136func (m *Module) Normalized() string {
137 return m.formatWithPrefix(m.normalizedHostPrefix(m.Host().Normalized()), false)
138}
139
140// String returns the source formatted as the user originally typed it assuming
141// it was parsed from user input.
142func (m *Module) String() string {
143 // Don't normalize public registry hostname - leave it exactly like the user
144 // input it.
145 hostPrefix := ""
146 if m.RawHost != nil {
147 hostPrefix = m.RawHost.String() + "/"
148 }
149 return m.formatWithPrefix(hostPrefix, true)
150}
151
152// Equal compares the module source against another instance taking
153// normalization into account.
154func (m *Module) Equal(other *Module) bool {
155 return m.Normalized() == other.Normalized()
156}
157
158// Host returns the FriendlyHost object describing which registry this module is
159// in. If the original source string had no host component this will return the
160// PublicRegistryHost.
161func (m *Module) Host() *FriendlyHost {
162 if m.RawHost == nil {
163 return PublicRegistryHost
164 }
165 return m.RawHost
166}
167
168func (m *Module) normalizedHostPrefix(host string) string {
169 if m.Host().Equal(PublicRegistryHost) {
170 return ""
171 }
172 return host + "/"
173}
174
175func (m *Module) formatWithPrefix(hostPrefix string, preserveCase bool) string {
176 suffix := ""
177 if m.RawSubmodule != "" {
178 suffix = "//" + m.RawSubmodule
179 }
180 str := fmt.Sprintf("%s%s/%s/%s%s", hostPrefix, m.RawNamespace, m.RawName,
181 m.RawProvider, suffix)
182
183 // lower case by default
184 if !preserveCase {
185 return strings.ToLower(str)
186 }
187 return str
188}
189
190// Module returns just the registry ID of the module, without a hostname or
191// suffix.
192func (m *Module) Module() string {
193 return fmt.Sprintf("%s/%s/%s", m.RawNamespace, m.RawName, m.RawProvider)
194}
195
196// SvcHost returns the svchost.Hostname for this module. Since FriendlyHost may
197// contain an invalid hostname, this also returns an error indicating if it
198// could be converted to a svchost.Hostname. If no host is specified, the
199// default PublicRegistryHost is returned.
200func (m *Module) SvcHost() (svchost.Hostname, error) {
201 if m.RawHost == nil {
202 return svchost.ForComparison(PublicRegistryHost.Raw)
203 }
204 return svchost.ForComparison(m.RawHost.Raw)
205}
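
A sketch of ParseModuleSource handling a source with a submodule path; the address is an example only:

package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/terraform/registry/regsrc"
)

func main() {
	m, err := regsrc.ParseModuleSource("hashicorp/consul/aws//modules/consul-cluster")
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(m.Host().Display()) // registry.terraform.io (public registry default)
	fmt.Println(m.Module())         // hashicorp/consul/aws
	fmt.Println(m.RawSubmodule)     // modules/consul-cluster
}
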
diff --git a/vendor/github.com/hashicorp/terraform/registry/regsrc/regsrc.go b/vendor/github.com/hashicorp/terraform/registry/regsrc/regsrc.go
new file mode 100644
index 0000000..c430bf1
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/registry/regsrc/regsrc.go
@@ -0,0 +1,8 @@
1// Package regsrc provides helpers for working with source strings that identify
2// resources within a Terraform registry.
3package regsrc
4
5var (
6 // PublicRegistryHost is a FriendlyHost that represents the public registry.
7 PublicRegistryHost = NewFriendlyHost("registry.terraform.io")
8)
diff --git a/vendor/github.com/hashicorp/terraform/registry/response/module.go b/vendor/github.com/hashicorp/terraform/registry/response/module.go
new file mode 100644
index 0000000..3bd2b3d
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/registry/response/module.go
@@ -0,0 +1,93 @@
1package response
2
3import (
4 "time"
5)
6
7// Module is the response structure with the data for a single module version.
8type Module struct {
9 ID string `json:"id"`
10
11 //---------------------------------------------------------------
12 // Metadata about the overall module.
13
14 Owner string `json:"owner"`
15 Namespace string `json:"namespace"`
16 Name string `json:"name"`
17 Version string `json:"version"`
18 Provider string `json:"provider"`
19 Description string `json:"description"`
20 Source string `json:"source"`
21 PublishedAt time.Time `json:"published_at"`
22 Downloads int `json:"downloads"`
23 Verified bool `json:"verified"`
24}
25
26// ModuleDetail represents a module in full detail.
27type ModuleDetail struct {
28 Module
29
30 //---------------------------------------------------------------
31 // Metadata about the overall module. This is only available when
32 // requesting the specific module (not in list responses).
33
34 // Root is the root module.
35 Root *ModuleSubmodule `json:"root"`
36
37 // Submodules are the other submodules that are available within
38 // this module.
39 Submodules []*ModuleSubmodule `json:"submodules"`
40
41 //---------------------------------------------------------------
42 // The fields below are only set when requesting this specific
43 // module. They are available to easily know all available versions
44 // and providers without multiple API calls.
45
46 Providers []string `json:"providers"` // All available providers
47 Versions []string `json:"versions"` // All versions
48}
49
50// ModuleSubmodule is the metadata about a specific submodule within
51// a module. This includes the root module as a special case.
52type ModuleSubmodule struct {
53 Path string `json:"path"`
54 Readme string `json:"readme"`
55 Empty bool `json:"empty"`
56
57 Inputs []*ModuleInput `json:"inputs"`
58 Outputs []*ModuleOutput `json:"outputs"`
59 Dependencies []*ModuleDep `json:"dependencies"`
60 Resources []*ModuleResource `json:"resources"`
61}
62
63// ModuleInput is an input for a module.
64type ModuleInput struct {
65 Name string `json:"name"`
66 Description string `json:"description"`
67 Default string `json:"default"`
68}
69
70// ModuleOutput is an output for a module.
71type ModuleOutput struct {
72 Name string `json:"name"`
73 Description string `json:"description"`
74}
75
76// ModuleDep describes a dependency of a module.
77type ModuleDep struct {
78 Name string `json:"name"`
79 Source string `json:"source"`
80 Version string `json:"version"`
81}
82
83// ModuleProviderDep describes a provider dependency of a module.
84type ModuleProviderDep struct {
85 Name string `json:"name"`
86 Version string `json:"version"`
87}
88
89// ModuleResource describes a resource managed by a module.
90type ModuleResource struct {
91 Name string `json:"name"`
92 Type string `json:"type"`
93}
diff --git a/vendor/github.com/hashicorp/terraform/registry/response/module_list.go b/vendor/github.com/hashicorp/terraform/registry/response/module_list.go
new file mode 100644
index 0000000..9783748
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/registry/response/module_list.go
@@ -0,0 +1,7 @@
1package response
2
3// ModuleList is the response structure for a pageable list of modules.
4type ModuleList struct {
5 Meta PaginationMeta `json:"meta"`
6 Modules []*Module `json:"modules"`
7}
diff --git a/vendor/github.com/hashicorp/terraform/registry/response/module_provider.go b/vendor/github.com/hashicorp/terraform/registry/response/module_provider.go
new file mode 100644
index 0000000..e48499d
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/registry/response/module_provider.go
@@ -0,0 +1,14 @@
1package response
2
3// ModuleProvider represents a single provider for modules.
4type ModuleProvider struct {
5 Name string `json:"name"`
6 Downloads int `json:"downloads"`
7 ModuleCount int `json:"module_count"`
8}
9
10// ModuleProviderList is the response structure for a pageable list of ModuleProviders.
11type ModuleProviderList struct {
12 Meta PaginationMeta `json:"meta"`
13 Providers []*ModuleProvider `json:"providers"`
14}
diff --git a/vendor/github.com/hashicorp/terraform/registry/response/module_versions.go b/vendor/github.com/hashicorp/terraform/registry/response/module_versions.go
new file mode 100644
index 0000000..f69e975
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/registry/response/module_versions.go
@@ -0,0 +1,32 @@
1package response
2
3// ModuleVersions is the response format that contains all metadata about module
4// versions needed for the Terraform CLI to resolve version constraints. See RFC
5// TF-042 for details on this format.
6type ModuleVersions struct {
7 Modules []*ModuleProviderVersions `json:"modules"`
8}
9
10// ModuleProviderVersions is the response format for a single module instance,
11// containing metadata about all versions and their dependencies.
12type ModuleProviderVersions struct {
13 Source string `json:"source"`
14 Versions []*ModuleVersion `json:"versions"`
15}
16
17// ModuleVersion is the output metadata for a given version needed by CLI to
18// resolve candidate versions to satisfy requirements.
19type ModuleVersion struct {
20 Version string `json:"version"`
21 Root VersionSubmodule `json:"root"`
22 Submodules []*VersionSubmodule `json:"submodules"`
23}
24
25// VersionSubmodule is the output metadata for a submodule within a given
26// version needed by CLI to resolve candidate versions to satisfy requirements.
27// When representing the Root in JSON the path is omitted.
28type VersionSubmodule struct {
29 Path string `json:"path,omitempty"`
30 Providers []*ModuleProviderDep `json:"providers"`
31 Dependencies []*ModuleDep `json:"dependencies"`
32}
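A quick sketch of how these response structures map onto the registry's JSON wire format; the payload below is hand-written for illustration and the module coordinates are invented:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/hashicorp/terraform/registry/response"
)

func main() {
	payload := []byte(`{
	  "modules": [
	    {
	      "source": "hashicorp/consul/aws",
	      "versions": [
	        {"version": "0.1.0", "root": {"providers": [], "dependencies": []}, "submodules": []}
	      ]
	    }
	  ]
	}`)

	var versions response.ModuleVersions
	if err := json.Unmarshal(payload, &versions); err != nil {
		panic(err)
	}
	fmt.Println(versions.Modules[0].Source)              // hashicorp/consul/aws
	fmt.Println(versions.Modules[0].Versions[0].Version) // 0.1.0
}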
diff --git a/vendor/github.com/hashicorp/terraform/registry/response/pagination.go b/vendor/github.com/hashicorp/terraform/registry/response/pagination.go
new file mode 100644
index 0000000..75a9254
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/registry/response/pagination.go
@@ -0,0 +1,65 @@
1package response
2
3import (
4 "net/url"
5 "strconv"
6)
7
8// PaginationMeta is a structure included in responses for pagination.
9type PaginationMeta struct {
10 Limit int `json:"limit"`
11 CurrentOffset int `json:"current_offset"`
12 NextOffset *int `json:"next_offset,omitempty"`
13 PrevOffset *int `json:"prev_offset,omitempty"`
14 NextURL string `json:"next_url,omitempty"`
15 PrevURL string `json:"prev_url,omitempty"`
16}
17
18// NewPaginationMeta populates pagination metadata from result parameters.
19func NewPaginationMeta(offset, limit int, hasMore bool, currentURL string) PaginationMeta {
20 pm := PaginationMeta{
21 Limit: limit,
22 CurrentOffset: offset,
23 }
24
25	// Calculate the next/prev offsets, leaving them nil when there is no valid page.
26 nextOffset := offset + limit
27 if hasMore {
28 pm.NextOffset = &nextOffset
29 }
30
31 prevOffset := offset - limit
32 if prevOffset < 0 {
33 prevOffset = 0
34 }
35 if prevOffset < offset {
36 pm.PrevOffset = &prevOffset
37 }
38
39	// If a current URL was provided, populate the next/prev URLs. Parsing errors are intentionally
40	// swallowed for now; the API should catch missing URLs if we're called with a bad URL argument.
41 if currentURL != "" && pm.NextOffset != nil {
42 pm.NextURL, _ = setQueryParam(currentURL, "offset", *pm.NextOffset, 0)
43 }
44 if currentURL != "" && pm.PrevOffset != nil {
45 pm.PrevURL, _ = setQueryParam(currentURL, "offset", *pm.PrevOffset, 0)
46 }
47
48 return pm
49}
50
51func setQueryParam(baseURL, key string, val, defaultVal int) (string, error) {
52 u, err := url.Parse(baseURL)
53 if err != nil {
54 return "", err
55 }
56 q := u.Query()
57 if val == defaultVal {
58 // elide param if it's the default value
59 q.Del(key)
60 } else {
61 q.Set(key, strconv.Itoa(val))
62 }
63 u.RawQuery = q.Encode()
64 return u.String(), nil
65}
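A usage sketch for NewPaginationMeta, requesting the second page of a listing; the URL is an invented example:

package main

import (
	"fmt"

	"github.com/hashicorp/terraform/registry/response"
)

func main() {
	// offset 10, page size 10, and more results remain after this page.
	meta := response.NewPaginationMeta(10, 10, true, "https://registry.example.com/v1/modules?limit=10&offset=10")

	fmt.Println(*meta.NextOffset) // 20
	fmt.Println(*meta.PrevOffset) // 0
	fmt.Println(meta.NextURL)     // same URL with offset=20 (limit preserved)
	fmt.Println(meta.PrevURL)     // offset parameter elided, since 0 is the default
}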
diff --git a/vendor/github.com/hashicorp/terraform/registry/response/redirect.go b/vendor/github.com/hashicorp/terraform/registry/response/redirect.go
new file mode 100644
index 0000000..d5eb49b
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/registry/response/redirect.go
@@ -0,0 +1,6 @@
1package response
2
3// Redirect causes the frontend to perform a window redirect.
4type Redirect struct {
5 URL string `json:"url"`
6}
diff --git a/vendor/github.com/hashicorp/terraform/svchost/auth/cache.go b/vendor/github.com/hashicorp/terraform/svchost/auth/cache.go
new file mode 100644
index 0000000..4f0d168
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/svchost/auth/cache.go
@@ -0,0 +1,45 @@
1package auth
2
3import (
4 "github.com/hashicorp/terraform/svchost"
5)
6
7// CachingCredentialsSource creates a new credentials source that wraps another
8// and caches its results in memory, on a per-hostname basis.
9//
10// No means is provided for expiration of cached credentials, so a caching
11// credentials source should have a limited lifetime (one Terraform operation,
12// for example) to ensure that time-limited credentials don't expire before
13// their cache entries do.
14func CachingCredentialsSource(source CredentialsSource) CredentialsSource {
15 return &cachingCredentialsSource{
16 source: source,
17 cache: map[svchost.Hostname]HostCredentials{},
18 }
19}
20
21type cachingCredentialsSource struct {
22 source CredentialsSource
23 cache map[svchost.Hostname]HostCredentials
24}
25
26// ForHost passes the given hostname on to the wrapped credentials source and
27// caches the result to return for future requests with the same hostname.
28//
29// Both credentials and non-credentials (nil) responses are cached.
30//
31// No cache entry is created if the wrapped source returns an error, to allow
32// the caller to retry the failing operation.
33func (s *cachingCredentialsSource) ForHost(host svchost.Hostname) (HostCredentials, error) {
34 if cache, cached := s.cache[host]; cached {
35 return cache, nil
36 }
37
38 result, err := s.source.ForHost(host)
39 if err != nil {
40 return result, err
41 }
42
43 s.cache[host] = result
44 return result, nil
45}
diff --git a/vendor/github.com/hashicorp/terraform/svchost/auth/credentials.go b/vendor/github.com/hashicorp/terraform/svchost/auth/credentials.go
new file mode 100644
index 0000000..0372c16
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/svchost/auth/credentials.go
@@ -0,0 +1,63 @@
1// Package auth contains types and functions to manage authentication
2// credentials for service hosts.
3package auth
4
5import (
6 "net/http"
7
8 "github.com/hashicorp/terraform/svchost"
9)
10
11// Credentials is a list of CredentialsSource objects that can be tried in
12// turn until one returns credentials for a host, or one returns an error.
13//
14// A Credentials is itself a CredentialsSource, wrapping its members.
15// In principle one CredentialsSource can be nested inside another, though
16// there is no good reason to do so.
17type Credentials []CredentialsSource
18
19// NoCredentials is an empty CredentialsSource that always returns nil
20// when asked for credentials.
21var NoCredentials CredentialsSource = Credentials{}
22
23// A CredentialsSource is an object that may be able to provide credentials
24// for a given host.
25//
26// Credentials lookups are not guaranteed to be concurrency-safe. Callers
27// using these facilities in concurrent code must use external concurrency
28// primitives to prevent race conditions.
29type CredentialsSource interface {
30 // ForHost returns a non-nil HostCredentials if the source has credentials
31 // available for the host, and a nil HostCredentials if it does not.
32 //
33 // If an error is returned, progress through a list of CredentialsSources
34 // is halted and the error is returned to the user.
35 ForHost(host svchost.Hostname) (HostCredentials, error)
36}
37
38// HostCredentials represents a single set of credentials for a particular
39// host.
40type HostCredentials interface {
41 // PrepareRequest modifies the given request in-place to apply the
42 // receiving credentials. The usual behavior of this method is to
43 // add some sort of Authorization header to the request.
44 PrepareRequest(req *http.Request)
45
46 // Token returns the authentication token.
47 Token() string
48}
49
50// ForHost iterates over the contained CredentialsSource objects and
51// tries to obtain credentials for the given host from each one in turn.
52//
53// If any source returns either a non-nil HostCredentials or a non-nil error
54// then this result is returned. Otherwise, the result is nil, nil.
55func (c Credentials) ForHost(host svchost.Hostname) (HostCredentials, error) {
56 for _, source := range c {
57 creds, err := source.ForHost(host)
58 if creds != nil || err != nil {
59 return creds, err
60 }
61 }
62 return nil, nil
63}
diff --git a/vendor/github.com/hashicorp/terraform/svchost/auth/from_map.go b/vendor/github.com/hashicorp/terraform/svchost/auth/from_map.go
new file mode 100644
index 0000000..f91006a
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/svchost/auth/from_map.go
@@ -0,0 +1,18 @@
1package auth
2
3// HostCredentialsFromMap converts a map of key-value pairs from a credentials
4// definition provided by the user (e.g. in a config file, or via a credentials
5// helper) into a HostCredentials object if possible, or returns nil if
6// no credentials could be extracted from the map.
7//
8// This function ignores map keys it is unfamiliar with, to allow for future
9// expansion of the credentials map format for new credential types.
10func HostCredentialsFromMap(m map[string]interface{}) HostCredentials {
11 if m == nil {
12 return nil
13 }
14 if token, ok := m["token"].(string); ok {
15 return HostCredentialsToken(token)
16 }
17 return nil
18}
diff --git a/vendor/github.com/hashicorp/terraform/svchost/auth/helper_program.go b/vendor/github.com/hashicorp/terraform/svchost/auth/helper_program.go
new file mode 100644
index 0000000..d72ffe3
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/svchost/auth/helper_program.go
@@ -0,0 +1,80 @@
1package auth
2
3import (
4 "bytes"
5 "encoding/json"
6 "fmt"
7 "os/exec"
8 "path/filepath"
9
10 "github.com/hashicorp/terraform/svchost"
11)
12
13type helperProgramCredentialsSource struct {
14 executable string
15 args []string
16}
17
18// HelperProgramCredentialsSource returns a CredentialsSource that runs the
19// given program with the given arguments in order to obtain credentials.
20//
21// The given executable path must be an absolute path; it is the caller's
22// responsibility to validate and process a relative path or other input
23// provided by an end-user. If the given path is not absolute, this
24// function will panic.
25//
26// When credentials are requested, the program will be run in a child process
27// with the given arguments along with two additional arguments added to the
28// end of the list: the literal string "get", followed by the requested
29// hostname in ASCII compatibility form (punycode form).
30func HelperProgramCredentialsSource(executable string, args ...string) CredentialsSource {
31 if !filepath.IsAbs(executable) {
32 panic("NewCredentialsSourceHelperProgram requires absolute path to executable")
33 }
34
35 fullArgs := make([]string, len(args)+1)
36 fullArgs[0] = executable
37 copy(fullArgs[1:], args)
38
39 return &helperProgramCredentialsSource{
40 executable: executable,
41 args: fullArgs,
42 }
43}
44
45func (s *helperProgramCredentialsSource) ForHost(host svchost.Hostname) (HostCredentials, error) {
46 args := make([]string, len(s.args), len(s.args)+2)
47 copy(args, s.args)
48 args = append(args, "get")
49 args = append(args, string(host))
50
51 outBuf := bytes.Buffer{}
52 errBuf := bytes.Buffer{}
53
54 cmd := exec.Cmd{
55 Path: s.executable,
56 Args: args,
57 Stdin: nil,
58 Stdout: &outBuf,
59 Stderr: &errBuf,
60 }
61 err := cmd.Run()
62 if _, isExitErr := err.(*exec.ExitError); isExitErr {
63 errText := errBuf.String()
64 if errText == "" {
65 // Shouldn't happen for a well-behaved helper program
66 return nil, fmt.Errorf("error in %s, but it produced no error message", s.executable)
67 }
68 return nil, fmt.Errorf("error in %s: %s", s.executable, errText)
69 } else if err != nil {
70 return nil, fmt.Errorf("failed to run %s: %s", s.executable, err)
71 }
72
73 var m map[string]interface{}
74 err = json.Unmarshal(outBuf.Bytes(), &m)
75 if err != nil {
76 return nil, fmt.Errorf("malformed output from %s: %s", s.executable, err)
77 }
78
79 return HostCredentialsFromMap(m), nil
80}
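For context, the helper program invoked by this source is any external executable that receives "get" and the punycode hostname as its final arguments and prints a JSON object on stdout. A stand-alone sketch of such a helper, with a placeholder token, might look like this:

package main

import (
	"encoding/json"
	"fmt"
	"os"
)

func main() {
	// Terraform appends "get" and the requested hostname to the configured arguments.
	if len(os.Args) < 3 || os.Args[len(os.Args)-2] != "get" {
		fmt.Fprintln(os.Stderr, "usage: credentials-helper ... get <hostname>")
		os.Exit(1)
	}
	hostname := os.Args[len(os.Args)-1]
	_ = hostname // a real helper would look the token up in a secure store keyed by hostname

	// Only the "token" key is currently understood by HostCredentialsFromMap.
	json.NewEncoder(os.Stdout).Encode(map[string]interface{}{"token": "placeholder-token"})
}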
diff --git a/vendor/github.com/hashicorp/terraform/svchost/auth/static.go b/vendor/github.com/hashicorp/terraform/svchost/auth/static.go
new file mode 100644
index 0000000..5373fdd
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/svchost/auth/static.go
@@ -0,0 +1,28 @@
1package auth
2
3import (
4 "github.com/hashicorp/terraform/svchost"
5)
6
7// StaticCredentialsSource is a credentials source that retrieves credentials
8// from the provided map. It returns nil if a requested hostname is not
9// present in the map.
10//
11// The caller should not modify the given map after passing it to this function.
12func StaticCredentialsSource(creds map[svchost.Hostname]map[string]interface{}) CredentialsSource {
13 return staticCredentialsSource(creds)
14}
15
16type staticCredentialsSource map[svchost.Hostname]map[string]interface{}
17
18func (s staticCredentialsSource) ForHost(host svchost.Hostname) (HostCredentials, error) {
19 if s == nil {
20 return nil, nil
21 }
22
23 if m, exists := s[host]; exists {
24 return HostCredentialsFromMap(m), nil
25 }
26
27 return nil, nil
28}
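The sources above compose: a Credentials list can layer a static in-memory source behind a per-hostname cache. A sketch, with an invented hostname and token:

package main

import (
	"fmt"

	"github.com/hashicorp/terraform/svchost"
	"github.com/hashicorp/terraform/svchost/auth"
)

func main() {
	host, err := svchost.ForComparison("registry.example.com")
	if err != nil {
		panic(err)
	}

	// Additional sources could be appended to the Credentials list; they are tried in order.
	src := auth.CachingCredentialsSource(auth.Credentials{
		auth.StaticCredentialsSource(map[svchost.Hostname]map[string]interface{}{
			host: {"token": "example-token"},
		}),
	})

	creds, err := src.ForHost(host)
	if err != nil {
		panic(err)
	}
	if creds != nil {
		fmt.Println(creds.Token()) // example-token
	}
}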
diff --git a/vendor/github.com/hashicorp/terraform/svchost/auth/token_credentials.go b/vendor/github.com/hashicorp/terraform/svchost/auth/token_credentials.go
new file mode 100644
index 0000000..9358bcb
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/svchost/auth/token_credentials.go
@@ -0,0 +1,25 @@
1package auth
2
3import (
4 "net/http"
5)
6
7// HostCredentialsToken is a HostCredentials implementation that represents a
8// single "bearer token", to be sent to the server via an Authorization header
9// with the auth type set to "Bearer".
10type HostCredentialsToken string
11
12// PrepareRequest alters the given HTTP request by setting its Authorization
13// header to the string "Bearer " followed by the encapsulated authentication
14// token.
15func (tc HostCredentialsToken) PrepareRequest(req *http.Request) {
16 if req.Header == nil {
17 req.Header = http.Header{}
18 }
19 req.Header.Set("Authorization", "Bearer "+string(tc))
20}
21
22// Token returns the authentication token.
23func (tc HostCredentialsToken) Token() string {
24 return string(tc)
25}
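What PrepareRequest does in practice, in a short sketch (URL and token are invented):

package main

import (
	"fmt"
	"net/http"

	"github.com/hashicorp/terraform/svchost/auth"
)

func main() {
	creds := auth.HostCredentialsToken("example-token")

	req, err := http.NewRequest("GET", "https://registry.example.com/v1/modules", nil)
	if err != nil {
		panic(err)
	}
	creds.PrepareRequest(req)

	fmt.Println(req.Header.Get("Authorization")) // Bearer example-token
}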
diff --git a/vendor/github.com/hashicorp/terraform/svchost/disco/disco.go b/vendor/github.com/hashicorp/terraform/svchost/disco/disco.go
new file mode 100644
index 0000000..1963cbd
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/svchost/disco/disco.go
@@ -0,0 +1,259 @@
1// Package disco handles Terraform's remote service discovery protocol.
2//
3// This protocol allows mapping from a service hostname, as produced by the
4// svchost package, to a set of services supported by that host and the
5// endpoint information for each supported service.
6package disco
7
8import (
9 "encoding/json"
10 "errors"
11 "fmt"
12 "io"
13 "io/ioutil"
14 "log"
15 "mime"
16 "net/http"
17 "net/url"
18 "time"
19
20 cleanhttp "github.com/hashicorp/go-cleanhttp"
21 "github.com/hashicorp/terraform/httpclient"
22 "github.com/hashicorp/terraform/svchost"
23 "github.com/hashicorp/terraform/svchost/auth"
24)
25
26const (
27 // Fixed path to the discovery manifest.
28 discoPath = "/.well-known/terraform.json"
29
30 // Arbitrary-but-small number to prevent runaway redirect loops.
31 maxRedirects = 3
32
33 // Arbitrary-but-small time limit to prevent UI "hangs" during discovery.
34 discoTimeout = 11 * time.Second
35
36 // 1MB - to prevent abusive services from using loads of our memory.
37 maxDiscoDocBytes = 1 * 1024 * 1024
38)
39
40// httpTransport is overridden during tests, to skip TLS verification.
41var httpTransport = cleanhttp.DefaultPooledTransport()
42
43// Disco is the main type in this package, which allows discovery on given
44// hostnames and caches the results by hostname to avoid repeated requests
45// for the same information.
46type Disco struct {
47 hostCache map[svchost.Hostname]*Host
48 credsSrc auth.CredentialsSource
49
50 // Transport is a custom http.RoundTripper to use.
51 Transport http.RoundTripper
52}
53
54// New returns a new initialized discovery object.
55func New() *Disco {
56 return NewWithCredentialsSource(nil)
57}
58
59// NewWithCredentialsSource returns a new discovery object initialized with
60// the given credentials source.
61func NewWithCredentialsSource(credsSrc auth.CredentialsSource) *Disco {
62 return &Disco{
63 hostCache: make(map[svchost.Hostname]*Host),
64 credsSrc: credsSrc,
65 Transport: httpTransport,
66 }
67}
68
69// SetCredentialsSource provides a credentials source that will be used to
70// add credentials to outgoing discovery requests, where available.
71//
72// If this method is never called, no outgoing discovery requests will have
73// credentials.
74func (d *Disco) SetCredentialsSource(src auth.CredentialsSource) {
75 d.credsSrc = src
76}
77
78// CredentialsForHost returns a non-nil HostCredentials if the embedded source has
79// credentials available for the host, and a nil HostCredentials if it does not.
80func (d *Disco) CredentialsForHost(hostname svchost.Hostname) (auth.HostCredentials, error) {
81 if d.credsSrc == nil {
82 return nil, nil
83 }
84 return d.credsSrc.ForHost(hostname)
85}
86
87// ForceHostServices provides a pre-defined set of services for a given
88// host, which prevents the receiver from attempting network-based discovery
89// for the given host. Instead, the given services map will be returned
90// verbatim.
91//
92// When providing "forced" services, any relative URLs are resolved against
93// the initial discovery URL that would have been used for network-based
94// discovery, yielding the same results as if the given map were published
95// at the host's default discovery URL, though using absolute URLs is strongly
96// recommended to make the configured behavior more explicit.
97func (d *Disco) ForceHostServices(hostname svchost.Hostname, services map[string]interface{}) {
98 if services == nil {
99 services = map[string]interface{}{}
100 }
101
102 d.hostCache[hostname] = &Host{
103 discoURL: &url.URL{
104 Scheme: "https",
105 Host: string(hostname),
106 Path: discoPath,
107 },
108 hostname: hostname.ForDisplay(),
109 services: services,
110 transport: d.Transport,
111 }
112}
113
114// Discover runs the discovery protocol against the given hostname (which must
115// already have been validated and prepared with svchost.ForComparison) and
116// returns an object describing the services available at that host.
117//
118// If a given hostname supports no Terraform services at all, a non-nil but
119// empty Host object is returned. When giving feedback to the end user about
120// such situations, we say "host <name> does not provide a <service> service",
121// regardless of whether that is due to that service specifically being absent
122// or due to the host not providing Terraform services at all, since we don't
123// wish to expose the detail of whole-host discovery to an end-user.
124func (d *Disco) Discover(hostname svchost.Hostname) (*Host, error) {
125 if host, cached := d.hostCache[hostname]; cached {
126 return host, nil
127 }
128
129 host, err := d.discover(hostname)
130 if err != nil {
131 return nil, err
132 }
133 d.hostCache[hostname] = host
134
135 return host, nil
136}
137
138// DiscoverServiceURL is a convenience wrapper for discovery on a given
139// hostname and then looking up a particular service in the result.
140func (d *Disco) DiscoverServiceURL(hostname svchost.Hostname, serviceID string) (*url.URL, error) {
141 host, err := d.Discover(hostname)
142 if err != nil {
143 return nil, err
144 }
145 return host.ServiceURL(serviceID)
146}
147
148// discover implements the actual discovery process, with its result cached
149// by the public-facing Discover method.
150func (d *Disco) discover(hostname svchost.Hostname) (*Host, error) {
151 discoURL := &url.URL{
152 Scheme: "https",
153 Host: hostname.String(),
154 Path: discoPath,
155 }
156
157 client := &http.Client{
158 Transport: d.Transport,
159 Timeout: discoTimeout,
160
161 CheckRedirect: func(req *http.Request, via []*http.Request) error {
162 log.Printf("[DEBUG] Service discovery redirected to %s", req.URL)
163 if len(via) > maxRedirects {
164 return errors.New("too many redirects") // this error will never actually be seen
165 }
166 return nil
167 },
168 }
169
170 req := &http.Request{
171 Header: make(http.Header),
172 Method: "GET",
173 URL: discoURL,
174 }
175 req.Header.Set("Accept", "application/json")
176 req.Header.Set("User-Agent", httpclient.UserAgentString())
177
178 creds, err := d.CredentialsForHost(hostname)
179 if err != nil {
180 log.Printf("[WARN] Failed to get credentials for %s: %s (ignoring)", hostname, err)
181 }
182 if creds != nil {
183 // Update the request to include credentials.
184 creds.PrepareRequest(req)
185 }
186
187 log.Printf("[DEBUG] Service discovery for %s at %s", hostname, discoURL)
188
189 resp, err := client.Do(req)
190 if err != nil {
191 return nil, fmt.Errorf("Failed to request discovery document: %v", err)
192 }
193 defer resp.Body.Close()
194
195 host := &Host{
196 // Use the discovery URL from resp.Request in
197 // case the client followed any redirects.
198 discoURL: resp.Request.URL,
199 hostname: hostname.ForDisplay(),
200 transport: d.Transport,
201 }
202
203 // Return the host without any services.
204 if resp.StatusCode == 404 {
205 return host, nil
206 }
207
208 if resp.StatusCode != 200 {
209 return nil, fmt.Errorf("Failed to request discovery document: %s", resp.Status)
210 }
211
212 contentType := resp.Header.Get("Content-Type")
213 mediaType, _, err := mime.ParseMediaType(contentType)
214 if err != nil {
215 return nil, fmt.Errorf("Discovery URL has a malformed Content-Type %q", contentType)
216 }
217 if mediaType != "application/json" {
218 return nil, fmt.Errorf("Discovery URL returned an unsupported Content-Type %q", mediaType)
219 }
220
221 // This doesn't catch chunked encoding, because ContentLength is -1 in that case.
222 if resp.ContentLength > maxDiscoDocBytes {
223 // Size limit here is not a contractual requirement and so we may
224 // adjust it over time if we find a different limit is warranted.
225 return nil, fmt.Errorf(
226 "Discovery doc response is too large (got %d bytes; limit %d)",
227 resp.ContentLength, maxDiscoDocBytes,
228 )
229 }
230
231 // If the response is using chunked encoding then we can't predict its
232 // size, but we'll at least prevent reading the entire thing into memory.
233 lr := io.LimitReader(resp.Body, maxDiscoDocBytes)
234
235 servicesBytes, err := ioutil.ReadAll(lr)
236 if err != nil {
237 return nil, fmt.Errorf("Error reading discovery document body: %v", err)
238 }
239
240 var services map[string]interface{}
241 err = json.Unmarshal(servicesBytes, &services)
242 if err != nil {
243 return nil, fmt.Errorf("Failed to decode discovery document as a JSON object: %v", err)
244 }
245 host.services = services
246
247 return host, nil
248}
249
250// Forget invalidates any cached record of the given hostname. If the host
251// has no cache entry then this is a no-op.
252func (d *Disco) Forget(hostname svchost.Hostname) {
253 delete(d.hostCache, hostname)
254}
255
256// ForgetAll is like Forget, but for all of the hostnames that have cache entries.
257func (d *Disco) ForgetAll() {
258 d.hostCache = make(map[svchost.Hostname]*Host)
259}
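A discovery usage sketch; the hostname is an invented example and "modules.v1" is assumed here to be the module registry service identifier (it is not defined in this file):

package main

import (
	"fmt"

	"github.com/hashicorp/terraform/svchost"
	"github.com/hashicorp/terraform/svchost/disco"
)

func main() {
	host, err := svchost.ForComparison("registry.example.com")
	if err != nil {
		panic(err)
	}

	d := disco.New()
	// Fetches https://registry.example.com/.well-known/terraform.json, caching the result per hostname.
	serviceURL, err := d.DiscoverServiceURL(host, "modules.v1")
	if err != nil {
		panic(err)
	}
	fmt.Println(serviceURL.String())
}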
diff --git a/vendor/github.com/hashicorp/terraform/svchost/disco/host.go b/vendor/github.com/hashicorp/terraform/svchost/disco/host.go
new file mode 100644
index 0000000..ab9514c
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/svchost/disco/host.go
@@ -0,0 +1,264 @@
1package disco
2
3import (
4 "encoding/json"
5 "fmt"
6 "log"
7 "net/http"
8 "net/url"
9 "os"
10 "strconv"
11 "strings"
12 "time"
13
14 "github.com/hashicorp/go-version"
15 "github.com/hashicorp/terraform/httpclient"
16)
17
18const versionServiceID = "versions.v1"
19
20// Host represents a service discovered host.
21type Host struct {
22 discoURL *url.URL
23 hostname string
24 services map[string]interface{}
25 transport http.RoundTripper
26}
27
28// Constraints represents the version constraints of a service.
29type Constraints struct {
30 Service string `json:"service"`
31 Product string `json:"product"`
32 Minimum string `json:"minimum"`
33 Maximum string `json:"maximum"`
34 Excluding []string `json:"excluding"`
35}
36
37// ErrServiceNotProvided is returned when the service is not provided.
38type ErrServiceNotProvided struct {
39 hostname string
40 service string
41}
42
43// Error returns a customized error message.
44func (e *ErrServiceNotProvided) Error() string {
45 if e.hostname == "" {
46 return fmt.Sprintf("host does not provide a %s service", e.service)
47 }
48 return fmt.Sprintf("host %s does not provide a %s service", e.hostname, e.service)
49}
50
51// ErrVersionNotSupported is returned when the version is not supported.
52type ErrVersionNotSupported struct {
53 hostname string
54 service string
55 version string
56}
57
58// Error returns a customized error message.
59func (e *ErrVersionNotSupported) Error() string {
60 if e.hostname == "" {
61 return fmt.Sprintf("host does not support %s version %s", e.service, e.version)
62 }
63 return fmt.Sprintf("host %s does not support %s version %s", e.hostname, e.service, e.version)
64}
65
66// ErrNoVersionConstraints is returned when checkpoint was disabled
67// or the endpoint to query for version constraints was unavailable.
68type ErrNoVersionConstraints struct {
69 disabled bool
70}
71
72// Error returns a customized error message.
73func (e *ErrNoVersionConstraints) Error() string {
74 if e.disabled {
75 return "checkpoint disabled"
76 }
77 return "unable to contact versions service"
78}
79
80// ServiceURL returns the URL associated with the given service identifier,
81// which should be of the form "servicename.vN".
82//
83// A non-nil result is always an absolute URL with a scheme of either HTTPS
84// or HTTP.
85func (h *Host) ServiceURL(id string) (*url.URL, error) {
86 svc, ver, err := parseServiceID(id)
87 if err != nil {
88 return nil, err
89 }
90
91 // No services supported for an empty Host.
92 if h == nil || h.services == nil {
93 return nil, &ErrServiceNotProvided{service: svc}
94 }
95
96 urlStr, ok := h.services[id].(string)
97 if !ok {
98 // See if we have a matching service as that would indicate
99 // the service is supported, but not the requested version.
100 for serviceID := range h.services {
101 if strings.HasPrefix(serviceID, svc+".") {
102 return nil, &ErrVersionNotSupported{
103 hostname: h.hostname,
104 service: svc,
105 version: ver.Original(),
106 }
107 }
108 }
109
110 // No discovered services match the requested service.
111 return nil, &ErrServiceNotProvided{hostname: h.hostname, service: svc}
112 }
113
114 u, err := url.Parse(urlStr)
115 if err != nil {
116 return nil, fmt.Errorf("Failed to parse service URL: %v", err)
117 }
118
119 // Make relative URLs absolute using our discovery URL.
120 if !u.IsAbs() {
121 u = h.discoURL.ResolveReference(u)
122 }
123
124 if u.Scheme != "https" && u.Scheme != "http" {
125 return nil, fmt.Errorf("Service URL is using an unsupported scheme: %s", u.Scheme)
126 }
127 if u.User != nil {
128 return nil, fmt.Errorf("Embedded username/password information is not permitted")
129 }
130
131 // Fragment part is irrelevant, since we're not a browser.
132 u.Fragment = ""
133
134 return h.discoURL.ResolveReference(u), nil
135}
136
137// VersionConstraints returns the constraints for a given service identifier
138// (which should be of the form "servicename.vN") and product.
139//
140// When an exact (service and version) match is found, the constraints for
141// that service are returned.
142//
143// When the requested version is not provided but the service is, we will
144// search for all alternative versions. If multiple alternative versions
145// are found, the constraints of the latest available version are returned.
146//
147// When a service is not provided at all, an error will be returned instead.
148//
149// When checkpoint is disabled or when a 404 is returned after making the
150// HTTP call, an ErrNoVersionConstraints error will be returned.
151func (h *Host) VersionConstraints(id, product string) (*Constraints, error) {
152 svc, _, err := parseServiceID(id)
153 if err != nil {
154 return nil, err
155 }
156
157 // Return early if checkpoint is disabled.
158 if disabled := os.Getenv("CHECKPOINT_DISABLE"); disabled != "" {
159 return nil, &ErrNoVersionConstraints{disabled: true}
160 }
161
162 // No services supported for an empty Host.
163 if h == nil || h.services == nil {
164 return nil, &ErrServiceNotProvided{service: svc}
165 }
166
167 // Try to get the service URL for the version service and
168 // return early if the service isn't provided by the host.
169 u, err := h.ServiceURL(versionServiceID)
170 if err != nil {
171 return nil, err
172 }
173
174 // Check if we have an exact (service and version) match.
175 if _, ok := h.services[id].(string); !ok {
176 // If we don't have an exact match, we search for all matching
177 // services and then use the service ID of the latest version.
178 var services []string
179 for serviceID := range h.services {
180 if strings.HasPrefix(serviceID, svc+".") {
181 services = append(services, serviceID)
182 }
183 }
184
185 if len(services) == 0 {
186 // No discovered services match the requested service.
187 return nil, &ErrServiceNotProvided{hostname: h.hostname, service: svc}
188 }
189
190 // Set id to the latest service ID we found.
191 var latest *version.Version
192 for _, serviceID := range services {
193 if _, ver, err := parseServiceID(serviceID); err == nil {
194 if latest == nil || latest.LessThan(ver) {
195 id = serviceID
196 latest = ver
197 }
198 }
199 }
200 }
201
202	// Set a default timeout of 1 second for the versions request (CHECKPOINT_TIMEOUT is in milliseconds).
203 timeout := 1000
204 if v, err := strconv.Atoi(os.Getenv("CHECKPOINT_TIMEOUT")); err == nil {
205 timeout = v
206 }
207
208 client := &http.Client{
209 Transport: h.transport,
210 Timeout: time.Duration(timeout) * time.Millisecond,
211 }
212
213 // Prepare the service URL by setting the service and product.
214 v := u.Query()
215 v.Set("product", product)
216 u.Path += id
217 u.RawQuery = v.Encode()
218
219 // Create a new request.
220 req, err := http.NewRequest("GET", u.String(), nil)
221 if err != nil {
222 return nil, fmt.Errorf("Failed to create version constraints request: %v", err)
223 }
224 req.Header.Set("Accept", "application/json")
225 req.Header.Set("User-Agent", httpclient.UserAgentString())
226
227 log.Printf("[DEBUG] Retrieve version constraints for service %s and product %s", id, product)
228
229 resp, err := client.Do(req)
230 if err != nil {
231 return nil, fmt.Errorf("Failed to request version constraints: %v", err)
232 }
233 defer resp.Body.Close()
234
235 if resp.StatusCode == 404 {
236 return nil, &ErrNoVersionConstraints{disabled: false}
237 }
238
239 if resp.StatusCode != 200 {
240 return nil, fmt.Errorf("Failed to request version constraints: %s", resp.Status)
241 }
242
243 // Parse the constraints from the response body.
244 result := &Constraints{}
245 if err := json.NewDecoder(resp.Body).Decode(result); err != nil {
246 return nil, fmt.Errorf("Error parsing version constraints: %v", err)
247 }
248
249 return result, nil
250}
251
252func parseServiceID(id string) (string, *version.Version, error) {
253 parts := strings.SplitN(id, ".", 2)
254 if len(parts) != 2 {
255 return "", nil, fmt.Errorf("Invalid service ID format (i.e. service.vN): %s", id)
256 }
257
258 version, err := version.NewVersion(parts[1])
259 if err != nil {
260 return "", nil, fmt.Errorf("Invalid service version: %v", err)
261 }
262
263 return parts[0], version, nil
264}
diff --git a/vendor/github.com/hashicorp/terraform/svchost/label_iter.go b/vendor/github.com/hashicorp/terraform/svchost/label_iter.go
new file mode 100644
index 0000000..af8ccba
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/svchost/label_iter.go
@@ -0,0 +1,69 @@
1package svchost
2
3import (
4 "strings"
5)
6
7// A labelIter allows iterating over domain name labels.
8//
9// This type is copied from golang.org/x/net/idna, where it is used
10// to segment hostnames into their separate labels for analysis. We use
11// it for the same purpose here, in ForComparison.
12type labelIter struct {
13 orig string
14 slice []string
15 curStart int
16 curEnd int
17 i int
18}
19
20func (l *labelIter) reset() {
21 l.curStart = 0
22 l.curEnd = 0
23 l.i = 0
24}
25
26func (l *labelIter) done() bool {
27 return l.curStart >= len(l.orig)
28}
29
30func (l *labelIter) result() string {
31 if l.slice != nil {
32 return strings.Join(l.slice, ".")
33 }
34 return l.orig
35}
36
37func (l *labelIter) label() string {
38 if l.slice != nil {
39 return l.slice[l.i]
40 }
41 p := strings.IndexByte(l.orig[l.curStart:], '.')
42 l.curEnd = l.curStart + p
43 if p == -1 {
44 l.curEnd = len(l.orig)
45 }
46 return l.orig[l.curStart:l.curEnd]
47}
48
49// next sets the value to the next label. It skips the last label if it is empty.
50func (l *labelIter) next() {
51 l.i++
52 if l.slice != nil {
53 if l.i >= len(l.slice) || l.i == len(l.slice)-1 && l.slice[l.i] == "" {
54 l.curStart = len(l.orig)
55 }
56 } else {
57 l.curStart = l.curEnd + 1
58 if l.curStart == len(l.orig)-1 && l.orig[l.curStart] == '.' {
59 l.curStart = len(l.orig)
60 }
61 }
62}
63
64func (l *labelIter) set(s string) {
65 if l.slice == nil {
66 l.slice = strings.Split(l.orig, ".")
67 }
68 l.slice[l.i] = s
69}
diff --git a/vendor/github.com/hashicorp/terraform/svchost/svchost.go b/vendor/github.com/hashicorp/terraform/svchost/svchost.go
new file mode 100644
index 0000000..4eded14
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/svchost/svchost.go
@@ -0,0 +1,207 @@
1// Package svchost deals with the representations of the so-called "friendly
2// hostnames" that we use to represent systems that provide Terraform-native
3// remote services, such as module registry, remote operations, etc.
4//
5// Friendly hostnames are specified such that, as much as possible, they
6// are consistent with how web browsers think of hostnames, so that users
7// can bring their intuitions about how hostnames behave when they access
8// a Terraform Enterprise instance's web UI (or indeed any other website)
9// and have this behave in a similar way.
10package svchost
11
12import (
13 "errors"
14 "fmt"
15 "strconv"
16 "strings"
17
18 "golang.org/x/net/idna"
19)
20
21// Hostname is a specialized name for string that indicates that the string
22// has been converted to (or was already in) the storage and comparison form.
23//
24// Hostname values are not suitable for display in the user-interface. Use
25// the ForDisplay method to obtain a form suitable for display in the UI.
26//
27// Unlike user-supplied hostnames, strings of type Hostname (assuming they
28// were constructed by a function within this package) can be compared for
29// equality using the standard Go == operator.
30type Hostname string
31
32// acePrefix is the ASCII Compatible Encoding prefix, used to indicate that
33// a domain name label is in "punycode" form.
34const acePrefix = "xn--"
35
36// displayProfile is a very liberal idna profile that we use to do
37// normalization for display without imposing validation rules.
38var displayProfile = idna.New(
39 idna.MapForLookup(),
40 idna.Transitional(true),
41)
42
43// ForDisplay takes a user-specified hostname and returns a normalized form of
44// it suitable for display in the UI.
45//
46// If the input is so invalid that no normalization can be performed then
47// this will return the input, assuming that the caller still wants to
48// display _something_. This function is, however, more tolerant than the
49// other functions in this package and will make a best effort to prepare
50// _any_ given hostname for display.
51//
52// For validation, use either IsValid (for explicit validation) or
53// ForComparison (which implicitly validates, returning an error if invalid).
54func ForDisplay(given string) string {
55 var portPortion string
56 if colonPos := strings.Index(given, ":"); colonPos != -1 {
57 given, portPortion = given[:colonPos], given[colonPos:]
58 }
59 portPortion, _ = normalizePortPortion(portPortion)
60
61 ascii, err := displayProfile.ToASCII(given)
62 if err != nil {
63 return given + portPortion
64 }
65 display, err := displayProfile.ToUnicode(ascii)
66 if err != nil {
67 return given + portPortion
68 }
69 return display + portPortion
70}
71
72// IsValid returns true if the given user-specified hostname is a valid
73// service hostname.
74//
75// Validity is determined by complying with the RFC 5891 requirements for
76// names that are valid for domain lookup (section 5), with the additional
77// requirement that user-supplied forms must not _already_ contain
78// Punycode segments.
79func IsValid(given string) bool {
80 _, err := ForComparison(given)
81 return err == nil
82}
83
84// ForComparison takes a user-specified hostname and returns a normalized
85// form of it suitable for storage and comparison. The result is not suitable
86// for display to end-users because it uses Punycode to represent non-ASCII
87// characters, and this form is unreadable for non-ASCII-speaking humans.
88//
89// The result is typed as Hostname -- a specialized name for string -- so that
90// other APIs can make it clear within the type system whether they expect a
91// user-specified or display-form hostname or a value already normalized for
92// comparison.
93//
94// The returned Hostname is not valid if the returned error is non-nil.
95func ForComparison(given string) (Hostname, error) {
96 var portPortion string
97 if colonPos := strings.Index(given, ":"); colonPos != -1 {
98 given, portPortion = given[:colonPos], given[colonPos:]
99 }
100
101 var err error
102 portPortion, err = normalizePortPortion(portPortion)
103 if err != nil {
104 return Hostname(""), err
105 }
106
107 if given == "" {
108 return Hostname(""), fmt.Errorf("empty string is not a valid hostname")
109 }
110
111 // First we'll apply our additional constraint that Punycode must not
112 // be given directly by the user. This is not an IDN specification
113 // requirement, but we prohibit it to force users to use human-readable
114 // hostname forms within Terraform configuration.
115 labels := labelIter{orig: given}
116 for ; !labels.done(); labels.next() {
117 label := labels.label()
118 if label == "" {
119 return Hostname(""), fmt.Errorf(
120 "hostname contains empty label (two consecutive periods)",
121 )
122 }
123 if strings.HasPrefix(label, acePrefix) {
124 return Hostname(""), fmt.Errorf(
125 "hostname label %q specified in punycode format; service hostnames must be given in unicode",
126 label,
127 )
128 }
129 }
130
131 result, err := idna.Lookup.ToASCII(given)
132 if err != nil {
133 return Hostname(""), err
134 }
135 return Hostname(result + portPortion), nil
136}
137
138// ForDisplay returns a version of the receiver that is appropriate for display
139// in the UI. This includes converting any punycode labels to their
140// corresponding Unicode characters.
141//
142// A round-trip through ForComparison and this ForDisplay method does not
143// guarantee the same result as calling this package's top-level ForDisplay
144// function, since a round-trip through the Hostname type implies stricter
145// handling than we do when doing basic display-only processing.
146func (h Hostname) ForDisplay() string {
147 given := string(h)
148 var portPortion string
149 if colonPos := strings.Index(given, ":"); colonPos != -1 {
150 given, portPortion = given[:colonPos], given[colonPos:]
151 }
152 // We don't normalize the port portion here because we assume it's
153 // already been normalized on the way in.
154
155 result, err := idna.Lookup.ToUnicode(given)
156 if err != nil {
157 // Should never happen, since type Hostname indicates that a string
158 // passed through our validation rules.
159 panic(fmt.Errorf("ForDisplay called on invalid Hostname: %s", err))
160 }
161 return result + portPortion
162}
163
164func (h Hostname) String() string {
165 return string(h)
166}
167
168func (h Hostname) GoString() string {
169 return fmt.Sprintf("svchost.Hostname(%q)", string(h))
170}
171
172// normalizePortPortion attempts to normalize the "port portion" of a hostname,
173// which begins with the first colon in the hostname and should be followed
174// by a string of decimal digits.
175//
176// If the port portion is valid, a normalized version of it is returned along
177// with a nil error.
178//
179// If the port portion is invalid, the input string is returned verbatim along
180// with a non-nil error.
181//
182// An empty string is a valid port portion representing the absence of a port.
183// If non-empty, the first character must be a colon.
184func normalizePortPortion(s string) (string, error) {
185 if s == "" {
186 return s, nil
187 }
188
189 if s[0] != ':' {
190 // should never happen, since caller tends to guarantee the presence
191 // of a colon due to how it's extracted from the string.
192 return s, errors.New("port portion is missing its initial colon")
193 }
194
195 numStr := s[1:]
196 num, err := strconv.Atoi(numStr)
197 if err != nil {
198 return s, errors.New("port portion contains non-digit characters")
199 }
200 if num == 443 {
201 return "", nil // ":443" is the default
202 }
203 if num > 65535 {
204 return s, errors.New("port number is greater than 65535")
205 }
206 return fmt.Sprintf(":%d", num), nil
207}
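A brief sketch of the normalization behavior described above, using an invented hostname:

package main

import (
	"fmt"

	"github.com/hashicorp/terraform/svchost"
)

func main() {
	// Mixed case and the default HTTPS port are both normalized away.
	host, err := svchost.ForComparison("Registry.Example.Com:443")
	if err != nil {
		panic(err)
	}
	fmt.Println(host)              // registry.example.com
	fmt.Println(host.ForDisplay()) // registry.example.com

	// Punycode supplied directly by the user is rejected.
	_, err = svchost.ForComparison("xn--caf-dma.example.com")
	fmt.Println(err != nil) // true
}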
diff --git a/vendor/github.com/hashicorp/terraform/terraform/context.go b/vendor/github.com/hashicorp/terraform/terraform/context.go
index a814a85..f133cc2 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/context.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/context.go
@@ -8,11 +8,13 @@ import (
8 "strings" 8 "strings"
9 "sync" 9 "sync"
10 10
11 "github.com/hashicorp/terraform/tfdiags"
12
11 "github.com/hashicorp/go-multierror" 13 "github.com/hashicorp/go-multierror"
12 "github.com/hashicorp/hcl" 14 "github.com/hashicorp/hcl"
13 "github.com/hashicorp/terraform/config" 15 "github.com/hashicorp/terraform/config"
14 "github.com/hashicorp/terraform/config/module" 16 "github.com/hashicorp/terraform/config/module"
15 "github.com/hashicorp/terraform/helper/experiment" 17 "github.com/hashicorp/terraform/version"
16) 18)
17 19
18// InputMode defines what sort of input will be asked for when Input 20// InputMode defines what sort of input will be asked for when Input
@@ -123,7 +125,7 @@ type Context struct {
123func NewContext(opts *ContextOpts) (*Context, error) { 125func NewContext(opts *ContextOpts) (*Context, error) {
124 // Validate the version requirement if it is given 126 // Validate the version requirement if it is given
125 if opts.Module != nil { 127 if opts.Module != nil {
126 if err := checkRequiredVersion(opts.Module); err != nil { 128 if err := CheckRequiredVersion(opts.Module); err != nil {
127 return nil, err 129 return nil, err
128 } 130 }
129 } 131 }
@@ -143,19 +145,14 @@ func NewContext(opts *ContextOpts) (*Context, error) {
143 145
144 // If our state is from the future, then error. Callers can avoid 146 // If our state is from the future, then error. Callers can avoid
145 // this error by explicitly setting `StateFutureAllowed`. 147 // this error by explicitly setting `StateFutureAllowed`.
146 if !opts.StateFutureAllowed && state.FromFutureTerraform() { 148 if err := CheckStateVersion(state); err != nil && !opts.StateFutureAllowed {
147 return nil, fmt.Errorf( 149 return nil, err
148 "Terraform doesn't allow running any operations against a state\n"+
149 "that was written by a future Terraform version. The state is\n"+
150 "reporting it is written by Terraform '%s'.\n\n"+
151 "Please run at least that version of Terraform to continue.",
152 state.TFVersion)
153 } 150 }
154 151
155 // Explicitly reset our state version to our current version so that 152 // Explicitly reset our state version to our current version so that
156 // any operations we do will write out that our latest version 153 // any operations we do will write out that our latest version
157 // has run. 154 // has run.
158 state.TFVersion = Version 155 state.TFVersion = version.Version
159 156
160 // Determine parallelism, default to 10. We do this both to limit 157 // Determine parallelism, default to 10. We do this both to limit
161 // CPU pressure but also to have an extra guard against rate throttling 158 // CPU pressure but also to have an extra guard against rate throttling
@@ -465,7 +462,7 @@ func (c *Context) Input(mode InputMode) error {
465 } 462 }
466 463
467 // Do the walk 464 // Do the walk
468 if _, err := c.walk(graph, nil, walkInput); err != nil { 465 if _, err := c.walk(graph, walkInput); err != nil {
469 return err 466 return err
470 } 467 }
471 } 468 }
@@ -490,6 +487,13 @@ func (c *Context) Input(mode InputMode) error {
490func (c *Context) Apply() (*State, error) { 487func (c *Context) Apply() (*State, error) {
491 defer c.acquireRun("apply")() 488 defer c.acquireRun("apply")()
492 489
490 // Check there are no empty target parameter values
491 for _, target := range c.targets {
492 if target == "" {
493 return nil, fmt.Errorf("Target parameter must not have empty value")
494 }
495 }
496
493 // Copy our own state 497 // Copy our own state
494 c.state = c.state.DeepCopy() 498 c.state = c.state.DeepCopy()
495 499
@@ -506,7 +510,7 @@ func (c *Context) Apply() (*State, error) {
506 } 510 }
507 511
508 // Walk the graph 512 // Walk the graph
509 walker, err := c.walk(graph, graph, operation) 513 walker, err := c.walk(graph, operation)
510 if len(walker.ValidationErrors) > 0 { 514 if len(walker.ValidationErrors) > 0 {
511 err = multierror.Append(err, walker.ValidationErrors...) 515 err = multierror.Append(err, walker.ValidationErrors...)
512 } 516 }
@@ -527,19 +531,27 @@ func (c *Context) Apply() (*State, error) {
527func (c *Context) Plan() (*Plan, error) { 531func (c *Context) Plan() (*Plan, error) {
528 defer c.acquireRun("plan")() 532 defer c.acquireRun("plan")()
529 533
534 // Check there are no empty target parameter values
535 for _, target := range c.targets {
536 if target == "" {
537 return nil, fmt.Errorf("Target parameter must not have empty value")
538 }
539 }
540
530 p := &Plan{ 541 p := &Plan{
531 Module: c.module, 542 Module: c.module,
532 Vars: c.variables, 543 Vars: c.variables,
533 State: c.state, 544 State: c.state,
534 Targets: c.targets, 545 Targets: c.targets,
535 546
536 TerraformVersion: VersionString(), 547 TerraformVersion: version.String(),
537 ProviderSHA256s: c.providerSHA256s, 548 ProviderSHA256s: c.providerSHA256s,
538 } 549 }
539 550
540 var operation walkOperation 551 var operation walkOperation
541 if c.destroy { 552 if c.destroy {
542 operation = walkPlanDestroy 553 operation = walkPlanDestroy
554 p.Destroy = true
543 } else { 555 } else {
544 // Set our state to be something temporary. We do this so that 556 // Set our state to be something temporary. We do this so that
545 // the plan can update a fake state so that variables work, then 557 // the plan can update a fake state so that variables work, then
@@ -575,7 +587,7 @@ func (c *Context) Plan() (*Plan, error) {
575 } 587 }
576 588
577 // Do the walk 589 // Do the walk
578 walker, err := c.walk(graph, graph, operation) 590 walker, err := c.walk(graph, operation)
579 if err != nil { 591 if err != nil {
580 return nil, err 592 return nil, err
581 } 593 }
@@ -630,7 +642,7 @@ func (c *Context) Refresh() (*State, error) {
630 } 642 }
631 643
632 // Do the walk 644 // Do the walk
633 if _, err := c.walk(graph, graph, walkRefresh); err != nil { 645 if _, err := c.walk(graph, walkRefresh); err != nil {
634 return nil, err 646 return nil, err
635 } 647 }
636 648
@@ -670,29 +682,27 @@ func (c *Context) Stop() {
670} 682}
671 683
672// Validate validates the configuration and returns any warnings or errors. 684// Validate validates the configuration and returns any warnings or errors.
673func (c *Context) Validate() ([]string, []error) { 685func (c *Context) Validate() tfdiags.Diagnostics {
674 defer c.acquireRun("validate")() 686 defer c.acquireRun("validate")()
675 687
676 var errs error 688 var diags tfdiags.Diagnostics
677 689
678 // Validate the configuration itself 690 // Validate the configuration itself
679 if err := c.module.Validate(); err != nil { 691 diags = diags.Append(c.module.Validate())
680 errs = multierror.Append(errs, err)
681 }
682 692
683 // This only needs to be done for the root module, since inter-module 693 // This only needs to be done for the root module, since inter-module
684 // variables are validated in the module tree. 694 // variables are validated in the module tree.
685 if config := c.module.Config(); config != nil { 695 if config := c.module.Config(); config != nil {
686 // Validate the user variables 696 // Validate the user variables
687 if err := smcUserVariables(config, c.variables); len(err) > 0 { 697 for _, err := range smcUserVariables(config, c.variables) {
688 errs = multierror.Append(errs, err...) 698 diags = diags.Append(err)
689 } 699 }
690 } 700 }
691 701
692 // If we have errors at this point, the graphing has no chance, 702 // If we have errors at this point, the graphing has no chance,
693 // so just bail early. 703 // so just bail early.
694 if errs != nil { 704 if diags.HasErrors() {
695 return nil, []error{errs} 705 return diags
696 } 706 }
697 707
698 // Build the graph so we can walk it and run Validate on nodes. 708 // Build the graph so we can walk it and run Validate on nodes.
@@ -701,24 +711,29 @@ func (c *Context) Validate() ([]string, []error) {
701 // graph again later after Planning. 711 // graph again later after Planning.
702 graph, err := c.Graph(GraphTypeValidate, nil) 712 graph, err := c.Graph(GraphTypeValidate, nil)
703 if err != nil { 713 if err != nil {
704 return nil, []error{err} 714 diags = diags.Append(err)
715 return diags
705 } 716 }
706 717
707 // Walk 718 // Walk
708 walker, err := c.walk(graph, graph, walkValidate) 719 walker, err := c.walk(graph, walkValidate)
709 if err != nil { 720 if err != nil {
710 return nil, multierror.Append(errs, err).Errors 721 diags = diags.Append(err)
711 } 722 }
712 723
713 // Return the result
714 rerrs := multierror.Append(errs, walker.ValidationErrors...)
715
716 sort.Strings(walker.ValidationWarnings) 724 sort.Strings(walker.ValidationWarnings)
717 sort.Slice(rerrs.Errors, func(i, j int) bool { 725 sort.Slice(walker.ValidationErrors, func(i, j int) bool {
718 return rerrs.Errors[i].Error() < rerrs.Errors[j].Error() 726 return walker.ValidationErrors[i].Error() < walker.ValidationErrors[j].Error()
719 }) 727 })
720 728
721 return walker.ValidationWarnings, rerrs.Errors 729 for _, warn := range walker.ValidationWarnings {
730 diags = diags.Append(tfdiags.SimpleWarning(warn))
731 }
732 for _, err := range walker.ValidationErrors {
733 diags = diags.Append(err)
734 }
735
736 return diags
722} 737}
723 738
724// Module returns the module tree associated with this context. 739// Module returns the module tree associated with this context.
@@ -792,33 +807,11 @@ func (c *Context) releaseRun() {
792 c.runContext = nil 807 c.runContext = nil
793} 808}
794 809
795func (c *Context) walk( 810func (c *Context) walk(graph *Graph, operation walkOperation) (*ContextGraphWalker, error) {
796 graph, shadow *Graph, operation walkOperation) (*ContextGraphWalker, error) {
797 // Keep track of the "real" context which is the context that does 811 // Keep track of the "real" context which is the context that does
798 // the real work: talking to real providers, modifying real state, etc. 812 // the real work: talking to real providers, modifying real state, etc.
799 realCtx := c 813 realCtx := c
800 814
801 // If we don't want shadowing, remove it
802 if !experiment.Enabled(experiment.X_shadow) {
803 shadow = nil
804 }
805
806 // Just log this so we can see it in a debug log
807 if !c.shadow {
808 log.Printf("[WARN] terraform: shadow graph disabled")
809 shadow = nil
810 }
811
812 // If we have a shadow graph, walk that as well
813 var shadowCtx *Context
814 var shadowCloser Shadow
815 if shadow != nil {
816 // Build the shadow context. In the process, override the real context
817 // with the one that is wrapped so that the shadow context can verify
818 // the results of the real.
819 realCtx, shadowCtx, shadowCloser = newShadowContext(c)
820 }
821
822 log.Printf("[DEBUG] Starting graph walk: %s", operation.String()) 815 log.Printf("[DEBUG] Starting graph walk: %s", operation.String())
823 816
824 walker := &ContextGraphWalker{ 817 walker := &ContextGraphWalker{
@@ -837,90 +830,6 @@ func (c *Context) walk(
837 close(watchStop) 830 close(watchStop)
838 <-watchWait 831 <-watchWait
839 832
840 // If we have a shadow graph and we interrupted the real graph, then
841 // we just close the shadow and never verify it. It is non-trivial to
842 // recreate the exact execution state up until an interruption so this
843 // isn't supported with shadows at the moment.
844 if shadowCloser != nil && c.sh.Stopped() {
845 // Ignore the error result, there is nothing we could care about
846 shadowCloser.CloseShadow()
847
848 // Set it to nil so we don't do anything
849 shadowCloser = nil
850 }
851
852 // If we have a shadow graph, wait for that to complete.
853 if shadowCloser != nil {
854 // Build the graph walker for the shadow. We also wrap this in
855 // a panicwrap so that panics are captured. For the shadow graph,
856 // we just want panics to be normal errors rather than to crash
857 // Terraform.
858 shadowWalker := GraphWalkerPanicwrap(&ContextGraphWalker{
859 Context: shadowCtx,
860 Operation: operation,
861 })
862
863 // Kick off the shadow walk. This will block on any operations
864 // on the real walk so it is fine to start first.
865 log.Printf("[INFO] Starting shadow graph walk: %s", operation.String())
866 shadowCh := make(chan error)
867 go func() {
868 shadowCh <- shadow.Walk(shadowWalker)
869 }()
870
871 // Notify the shadow that we're done
872 if err := shadowCloser.CloseShadow(); err != nil {
873 c.shadowErr = multierror.Append(c.shadowErr, err)
874 }
875
876 // Wait for the walk to end
877 log.Printf("[DEBUG] Waiting for shadow graph to complete...")
878 shadowWalkErr := <-shadowCh
879
880 // Get any shadow errors
881 if err := shadowCloser.ShadowError(); err != nil {
882 c.shadowErr = multierror.Append(c.shadowErr, err)
883 }
884
885 // Verify the contexts (compare)
886 if err := shadowContextVerify(realCtx, shadowCtx); err != nil {
887 c.shadowErr = multierror.Append(c.shadowErr, err)
888 }
889
890 // At this point, if we're supposed to fail on error, then
891 // we PANIC. Some tests just verify that there is an error,
892 // so simply appending it to realErr and returning could hide
893 // shadow problems.
894 //
895 // This must be done BEFORE appending shadowWalkErr since the
896 // shadowWalkErr may include expected errors.
897 //
898 // We only do this if we don't have a real error. In the case of
899 // a real error, we can't guarantee what nodes were and weren't
900 // traversed in parallel scenarios so we can't guarantee no
901 // shadow errors.
902 if c.shadowErr != nil && contextFailOnShadowError && realErr == nil {
903 panic(multierror.Prefix(c.shadowErr, "shadow graph:"))
904 }
905
906 // Now, if we have a walk error, we append that through
907 if shadowWalkErr != nil {
908 c.shadowErr = multierror.Append(c.shadowErr, shadowWalkErr)
909 }
910
911 if c.shadowErr == nil {
912 log.Printf("[INFO] Shadow graph success!")
913 } else {
914 log.Printf("[ERROR] Shadow graph error: %s", c.shadowErr)
915
916 // If we're supposed to fail on shadow errors, then report it
917 if contextFailOnShadowError {
918 realErr = multierror.Append(realErr, multierror.Prefix(
919 c.shadowErr, "shadow graph:"))
920 }
921 }
922 }
923
924 return walker, realErr 833 return walker, realErr
925} 834}
926 835
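The Validate and walk changes above replace the old ([]string, []error) return with a single tfdiags.Diagnostics value and drop the shadow-graph machinery. A minimal, self-contained sketch of the diagnostics pattern follows; the helper name validateExample is made up, and only Append, HasErrors, SimpleWarning and Description are taken from the tfdiags calls visible in the diff.

package main

import (
	"errors"
	"fmt"
	"sort"

	"github.com/hashicorp/terraform/tfdiags"
)

// validateExample mirrors the new shape of Context.Validate: warnings and
// errors are folded into one tfdiags.Diagnostics value instead of the old
// ([]string, []error) pair.
func validateExample(warnings []string, errs []error) tfdiags.Diagnostics {
	var diags tfdiags.Diagnostics

	sort.Strings(warnings)
	for _, warn := range warnings {
		diags = diags.Append(tfdiags.SimpleWarning(warn))
	}
	for _, err := range errs {
		diags = diags.Append(err)
	}
	return diags
}

func main() {
	diags := validateExample(
		[]string{"provider \"aws\" is implied but not configured"},
		[]error{errors.New("variable \"region\" is not set")},
	)
	fmt.Println("has errors:", diags.HasErrors())
	for _, d := range diags {
		fmt.Println(d.Description().Summary)
	}
}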
diff --git a/vendor/github.com/hashicorp/terraform/terraform/context_import.go b/vendor/github.com/hashicorp/terraform/terraform/context_import.go
index f1d5776..e940143 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/context_import.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/context_import.go
@@ -66,7 +66,7 @@ func (c *Context) Import(opts *ImportOpts) (*State, error) {
66 } 66 }
67 67
68 // Walk it 68 // Walk it
69 if _, err := c.walk(graph, nil, walkImport); err != nil { 69 if _, err := c.walk(graph, walkImport); err != nil {
70 return c.state, err 70 return c.state, err
71 } 71 }
72 72
diff --git a/vendor/github.com/hashicorp/terraform/terraform/diff.go b/vendor/github.com/hashicorp/terraform/terraform/diff.go
index fd1687e..d6dc550 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/diff.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/diff.go
@@ -23,6 +23,12 @@ const (
23 DiffUpdate 23 DiffUpdate
24 DiffDestroy 24 DiffDestroy
25 DiffDestroyCreate 25 DiffDestroyCreate
26
27 // DiffRefresh is only used in the UI for displaying diffs.
28 // Managed resource reads never appear in plan, and when data source
29 // reads appear they are represented as DiffCreate in core before
30 // transforming to DiffRefresh in the UI layer.
31 DiffRefresh // TODO: Actually use DiffRefresh in core too, for less confusion
26) 32)
27 33
28// multiVal matches the index key to a flatmapped set, list or map 34// multiVal matches the index key to a flatmapped set, list or map
@@ -831,7 +837,14 @@ func (d *InstanceDiff) Same(d2 *InstanceDiff) (bool, string) {
831 } 837 }
832 } 838 }
833 839
834 // TODO: check for the same value if not computed 840 // We don't compare the values because we can't currently actually
 841 // guarantee to generate the same value for two diffs created from
842 // the same state+config: we have some pesky interpolation functions
843 // that do not behave as pure functions (uuid, timestamp) and so they
844 // can be different each time a diff is produced.
845 // FIXME: Re-organize our config handling so that we don't re-evaluate
846 // expressions when we produce a second comparison diff during
847 // apply (for EvalCompareDiff).
835 } 848 }
836 849
 837 // Check for leftover attributes 850 // Check for leftover attributes
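The new comment in InstanceDiff.Same points at impure interpolation functions such as uuid() and timestamp(). The following standalone snippet (not Terraform code) illustrates why two evaluations of the same expression can disagree, which is the reason value comparison was dropped here.

package main

import (
	"fmt"
	"time"
)

// timestampLike stands in for the timestamp() interpolation function, which is
// not a pure function: calling it twice for the "same" expression yields two
// different values, so two diffs built from identical state+config can differ.
func timestampLike() string {
	return time.Now().UTC().Format(time.RFC3339Nano)
}

func main() {
	first := timestampLike()
	time.Sleep(time.Millisecond)
	second := timestampLike()
	fmt.Println(first == second) // false, even though the expression is identical
}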
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval.go b/vendor/github.com/hashicorp/terraform/terraform/eval.go
index 3cb088a..10d9c22 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/eval.go
@@ -49,11 +49,11 @@ func EvalRaw(n EvalNode, ctx EvalContext) (interface{}, error) {
49 path = strings.Join(ctx.Path(), ".") 49 path = strings.Join(ctx.Path(), ".")
50 } 50 }
51 51
52 log.Printf("[DEBUG] %s: eval: %T", path, n) 52 log.Printf("[TRACE] %s: eval: %T", path, n)
53 output, err := n.Eval(ctx) 53 output, err := n.Eval(ctx)
54 if err != nil { 54 if err != nil {
55 if _, ok := err.(EvalEarlyExitError); ok { 55 if _, ok := err.(EvalEarlyExitError); ok {
56 log.Printf("[DEBUG] %s: eval: %T, err: %s", path, n, err) 56 log.Printf("[TRACE] %s: eval: %T, err: %s", path, n, err)
57 } else { 57 } else {
58 log.Printf("[ERROR] %s: eval: %T, err: %s", path, n, err) 58 log.Printf("[ERROR] %s: eval: %T, err: %s", path, n, err)
59 } 59 }
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_apply.go b/vendor/github.com/hashicorp/terraform/terraform/eval_apply.go
index 2f6a497..b9b4806 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_apply.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/eval_apply.go
@@ -112,7 +112,7 @@ func (n *EvalApplyPre) Eval(ctx EvalContext) (interface{}, error) {
112 } 112 }
113 state.init() 113 state.init()
114 114
115 { 115 if resourceHasUserVisibleApply(n.Info) {
116 // Call post-apply hook 116 // Call post-apply hook
117 err := ctx.Hook(func(h Hook) (HookAction, error) { 117 err := ctx.Hook(func(h Hook) (HookAction, error) {
118 return h.PreApply(n.Info, state, diff) 118 return h.PreApply(n.Info, state, diff)
@@ -136,7 +136,7 @@ type EvalApplyPost struct {
136func (n *EvalApplyPost) Eval(ctx EvalContext) (interface{}, error) { 136func (n *EvalApplyPost) Eval(ctx EvalContext) (interface{}, error) {
137 state := *n.State 137 state := *n.State
138 138
139 { 139 if resourceHasUserVisibleApply(n.Info) {
140 // Call post-apply hook 140 // Call post-apply hook
141 err := ctx.Hook(func(h Hook) (HookAction, error) { 141 err := ctx.Hook(func(h Hook) (HookAction, error) {
142 return h.PostApply(n.Info, state, *n.Error) 142 return h.PostApply(n.Info, state, *n.Error)
@@ -149,6 +149,22 @@ func (n *EvalApplyPost) Eval(ctx EvalContext) (interface{}, error) {
149 return nil, *n.Error 149 return nil, *n.Error
150} 150}
151 151
152// resourceHasUserVisibleApply returns true if the given resource is one where
153// apply actions should be exposed to the user.
154//
155// Certain resources do apply actions only as an implementation detail, so
156// these should not be advertised to code outside of this package.
157func resourceHasUserVisibleApply(info *InstanceInfo) bool {
158 addr := info.ResourceAddress()
159
160 // Only managed resources have user-visible apply actions.
161 // In particular, this excludes data resources since we "apply" these
162 // only as an implementation detail of removing them from state when
163 // they are destroyed. (When reading, they don't get here at all because
164 // we present them as "Refresh" actions.)
165 return addr.Mode == config.ManagedResourceMode
166}
167
152// EvalApplyProvisioners is an EvalNode implementation that executes 168// EvalApplyProvisioners is an EvalNode implementation that executes
153// the provisioners for a resource. 169// the provisioners for a resource.
154// 170//
@@ -211,11 +227,8 @@ func (n *EvalApplyProvisioners) Eval(ctx EvalContext) (interface{}, error) {
211 state.Tainted = true 227 state.Tainted = true
212 } 228 }
213 229
214 if n.Error != nil { 230 *n.Error = multierror.Append(*n.Error, err)
215 *n.Error = multierror.Append(*n.Error, err) 231 return nil, err
216 } else {
217 return nil, err
218 }
219 } 232 }
220 233
221 { 234 {
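resourceHasUserVisibleApply gates the PreApply/PostApply hooks on the resource mode. A stripped-down stand-in (using its own mode type rather than config.ResourceMode) shows the intent:

package main

import "fmt"

// Only managed resources get user-visible apply hooks; data resources are
// "applied" purely as an implementation detail of removing them from state.
type resourceMode int

const (
	managedResourceMode resourceMode = iota
	dataResourceMode
)

func hasUserVisibleApply(mode resourceMode) bool {
	return mode == managedResourceMode
}

func main() {
	fmt.Println(hasUserVisibleApply(managedResourceMode)) // true: hooks fire
	fmt.Println(hasUserVisibleApply(dataResourceMode))    // false: hooks are skipped
}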
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_context.go b/vendor/github.com/hashicorp/terraform/terraform/eval_context.go
index a1f815b..86481de 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_context.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/eval_context.go
@@ -22,11 +22,11 @@ type EvalContext interface {
22 // Input is the UIInput object for interacting with the UI. 22 // Input is the UIInput object for interacting with the UI.
23 Input() UIInput 23 Input() UIInput
24 24
25 // InitProvider initializes the provider with the given name and 25 // InitProvider initializes the provider with the given type and name, and
26 // returns the implementation of the resource provider or an error. 26 // returns the implementation of the resource provider or an error.
27 // 27 //
28 // It is an error to initialize the same provider more than once. 28 // It is an error to initialize the same provider more than once.
29 InitProvider(string) (ResourceProvider, error) 29 InitProvider(typ string, name string) (ResourceProvider, error)
30 30
31 // Provider gets the provider instance with the given name (already 31 // Provider gets the provider instance with the given name (already
32 // initialized) or returns nil if the provider isn't initialized. 32 // initialized) or returns nil if the provider isn't initialized.
@@ -40,8 +40,6 @@ type EvalContext interface {
40 // is used to store the provider configuration for inheritance lookups 40 // is used to store the provider configuration for inheritance lookups
41 // with ParentProviderConfig(). 41 // with ParentProviderConfig().
42 ConfigureProvider(string, *ResourceConfig) error 42 ConfigureProvider(string, *ResourceConfig) error
43 SetProviderConfig(string, *ResourceConfig) error
44 ParentProviderConfig(string) *ResourceConfig
45 43
46 // ProviderInput and SetProviderInput are used to configure providers 44 // ProviderInput and SetProviderInput are used to configure providers
47 // from user input. 45 // from user input.
@@ -69,6 +67,13 @@ type EvalContext interface {
69 // that is currently being acted upon. 67 // that is currently being acted upon.
70 Interpolate(*config.RawConfig, *Resource) (*ResourceConfig, error) 68 Interpolate(*config.RawConfig, *Resource) (*ResourceConfig, error)
71 69
70 // InterpolateProvider takes a ProviderConfig and interpolates it with the
71 // stored interpolation scope. Since provider configurations can be
72 // inherited, the interpolation scope may be different from the current
 73 // context path. Interpolation is otherwise executed the same as in the
 74 // Interpolate method.
75 InterpolateProvider(*config.ProviderConfig, *Resource) (*ResourceConfig, error)
76
72 // SetVariables sets the variables for the module within 77 // SetVariables sets the variables for the module within
73 // this context with the name n. This function call is additive: 78 // this context with the name n. This function call is additive:
74 // the second parameter is merged with any previous call. 79 // the second parameter is merged with any previous call.
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_context_builtin.go b/vendor/github.com/hashicorp/terraform/terraform/eval_context_builtin.go
index 3dcfb22..1b6ee5a 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_context_builtin.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/eval_context_builtin.go
@@ -4,7 +4,6 @@ import (
4 "context" 4 "context"
5 "fmt" 5 "fmt"
6 "log" 6 "log"
7 "strings"
8 "sync" 7 "sync"
9 8
10 "github.com/hashicorp/terraform/config" 9 "github.com/hashicorp/terraform/config"
@@ -34,7 +33,6 @@ type BuiltinEvalContext struct {
34 Hooks []Hook 33 Hooks []Hook
35 InputValue UIInput 34 InputValue UIInput
36 ProviderCache map[string]ResourceProvider 35 ProviderCache map[string]ResourceProvider
37 ProviderConfigCache map[string]*ResourceConfig
38 ProviderInputConfig map[string]map[string]interface{} 36 ProviderInputConfig map[string]map[string]interface{}
39 ProviderLock *sync.Mutex 37 ProviderLock *sync.Mutex
40 ProvisionerCache map[string]ResourceProvisioner 38 ProvisionerCache map[string]ResourceProvisioner
@@ -80,12 +78,12 @@ func (ctx *BuiltinEvalContext) Input() UIInput {
80 return ctx.InputValue 78 return ctx.InputValue
81} 79}
82 80
83func (ctx *BuiltinEvalContext) InitProvider(n string) (ResourceProvider, error) { 81func (ctx *BuiltinEvalContext) InitProvider(typeName, name string) (ResourceProvider, error) {
84 ctx.once.Do(ctx.init) 82 ctx.once.Do(ctx.init)
85 83
86 // If we already initialized, it is an error 84 // If we already initialized, it is an error
87 if p := ctx.Provider(n); p != nil { 85 if p := ctx.Provider(name); p != nil {
88 return nil, fmt.Errorf("Provider '%s' already initialized", n) 86 return nil, fmt.Errorf("Provider '%s' already initialized", name)
89 } 87 }
90 88
91 // Warning: make sure to acquire these locks AFTER the call to Provider 89 // Warning: make sure to acquire these locks AFTER the call to Provider
@@ -93,18 +91,12 @@ func (ctx *BuiltinEvalContext) InitProvider(n string) (ResourceProvider, error)
93 ctx.ProviderLock.Lock() 91 ctx.ProviderLock.Lock()
94 defer ctx.ProviderLock.Unlock() 92 defer ctx.ProviderLock.Unlock()
95 93
96 providerPath := make([]string, len(ctx.Path())+1) 94 p, err := ctx.Components.ResourceProvider(typeName, name)
97 copy(providerPath, ctx.Path())
98 providerPath[len(providerPath)-1] = n
99 key := PathCacheKey(providerPath)
100
101 typeName := strings.SplitN(n, ".", 2)[0]
102 p, err := ctx.Components.ResourceProvider(typeName, key)
103 if err != nil { 95 if err != nil {
104 return nil, err 96 return nil, err
105 } 97 }
106 98
107 ctx.ProviderCache[key] = p 99 ctx.ProviderCache[name] = p
108 return p, nil 100 return p, nil
109} 101}
110 102
@@ -114,11 +106,7 @@ func (ctx *BuiltinEvalContext) Provider(n string) ResourceProvider {
114 ctx.ProviderLock.Lock() 106 ctx.ProviderLock.Lock()
115 defer ctx.ProviderLock.Unlock() 107 defer ctx.ProviderLock.Unlock()
116 108
117 providerPath := make([]string, len(ctx.Path())+1) 109 return ctx.ProviderCache[n]
118 copy(providerPath, ctx.Path())
119 providerPath[len(providerPath)-1] = n
120
121 return ctx.ProviderCache[PathCacheKey(providerPath)]
122} 110}
123 111
124func (ctx *BuiltinEvalContext) CloseProvider(n string) error { 112func (ctx *BuiltinEvalContext) CloseProvider(n string) error {
@@ -127,15 +115,11 @@ func (ctx *BuiltinEvalContext) CloseProvider(n string) error {
127 ctx.ProviderLock.Lock() 115 ctx.ProviderLock.Lock()
128 defer ctx.ProviderLock.Unlock() 116 defer ctx.ProviderLock.Unlock()
129 117
130 providerPath := make([]string, len(ctx.Path())+1)
131 copy(providerPath, ctx.Path())
132 providerPath[len(providerPath)-1] = n
133
134 var provider interface{} 118 var provider interface{}
135 provider = ctx.ProviderCache[PathCacheKey(providerPath)] 119 provider = ctx.ProviderCache[n]
136 if provider != nil { 120 if provider != nil {
137 if p, ok := provider.(ResourceProviderCloser); ok { 121 if p, ok := provider.(ResourceProviderCloser); ok {
138 delete(ctx.ProviderCache, PathCacheKey(providerPath)) 122 delete(ctx.ProviderCache, n)
139 return p.Close() 123 return p.Close()
140 } 124 }
141 } 125 }
@@ -149,28 +133,9 @@ func (ctx *BuiltinEvalContext) ConfigureProvider(
149 if p == nil { 133 if p == nil {
150 return fmt.Errorf("Provider '%s' not initialized", n) 134 return fmt.Errorf("Provider '%s' not initialized", n)
151 } 135 }
152
153 if err := ctx.SetProviderConfig(n, cfg); err != nil {
154 return nil
155 }
156
157 return p.Configure(cfg) 136 return p.Configure(cfg)
158} 137}
159 138
160func (ctx *BuiltinEvalContext) SetProviderConfig(
161 n string, cfg *ResourceConfig) error {
162 providerPath := make([]string, len(ctx.Path())+1)
163 copy(providerPath, ctx.Path())
164 providerPath[len(providerPath)-1] = n
165
166 // Save the configuration
167 ctx.ProviderLock.Lock()
168 ctx.ProviderConfigCache[PathCacheKey(providerPath)] = cfg
169 ctx.ProviderLock.Unlock()
170
171 return nil
172}
173
174func (ctx *BuiltinEvalContext) ProviderInput(n string) map[string]interface{} { 139func (ctx *BuiltinEvalContext) ProviderInput(n string) map[string]interface{} {
175 ctx.ProviderLock.Lock() 140 ctx.ProviderLock.Lock()
176 defer ctx.ProviderLock.Unlock() 141 defer ctx.ProviderLock.Unlock()
@@ -203,27 +168,6 @@ func (ctx *BuiltinEvalContext) SetProviderInput(n string, c map[string]interface
203 ctx.ProviderLock.Unlock() 168 ctx.ProviderLock.Unlock()
204} 169}
205 170
206func (ctx *BuiltinEvalContext) ParentProviderConfig(n string) *ResourceConfig {
207 ctx.ProviderLock.Lock()
208 defer ctx.ProviderLock.Unlock()
209
210 // Make a copy of the path so we can safely edit it
211 path := ctx.Path()
212 pathCopy := make([]string, len(path)+1)
213 copy(pathCopy, path)
214
215 // Go up the tree.
216 for i := len(path) - 1; i >= 0; i-- {
217 pathCopy[i+1] = n
218 k := PathCacheKey(pathCopy[:i+2])
219 if v, ok := ctx.ProviderConfigCache[k]; ok {
220 return v
221 }
222 }
223
224 return nil
225}
226
227func (ctx *BuiltinEvalContext) InitProvisioner( 171func (ctx *BuiltinEvalContext) InitProvisioner(
228 n string) (ResourceProvisioner, error) { 172 n string) (ResourceProvisioner, error) {
229 ctx.once.Do(ctx.init) 173 ctx.once.Do(ctx.init)
@@ -289,6 +233,7 @@ func (ctx *BuiltinEvalContext) CloseProvisioner(n string) error {
289 233
290func (ctx *BuiltinEvalContext) Interpolate( 234func (ctx *BuiltinEvalContext) Interpolate(
291 cfg *config.RawConfig, r *Resource) (*ResourceConfig, error) { 235 cfg *config.RawConfig, r *Resource) (*ResourceConfig, error) {
236
292 if cfg != nil { 237 if cfg != nil {
293 scope := &InterpolationScope{ 238 scope := &InterpolationScope{
294 Path: ctx.Path(), 239 Path: ctx.Path(),
@@ -311,6 +256,35 @@ func (ctx *BuiltinEvalContext) Interpolate(
311 return result, nil 256 return result, nil
312} 257}
313 258
259func (ctx *BuiltinEvalContext) InterpolateProvider(
260 pc *config.ProviderConfig, r *Resource) (*ResourceConfig, error) {
261
262 var cfg *config.RawConfig
263
264 if pc != nil && pc.RawConfig != nil {
265 scope := &InterpolationScope{
266 Path: ctx.Path(),
267 Resource: r,
268 }
269
270 cfg = pc.RawConfig
271
272 vs, err := ctx.Interpolater.Values(scope, cfg.Variables)
273 if err != nil {
274 return nil, err
275 }
276
277 // Do the interpolation
278 if err := cfg.Interpolate(vs); err != nil {
279 return nil, err
280 }
281 }
282
283 result := NewResourceConfig(cfg)
284 result.interpolateForce()
285 return result, nil
286}
287
314func (ctx *BuiltinEvalContext) Path() []string { 288func (ctx *BuiltinEvalContext) Path() []string {
315 return ctx.PathValue 289 return ctx.PathValue
316} 290}
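The BuiltinEvalContext changes drop the path-derived PathCacheKey and key the provider cache by name alone. A toy, self-contained version of that cache behaviour (the real cache stores ResourceProvider values, not strings):

package main

import "fmt"

type providerCache map[string]string

// initProvider mimics InitProvider's contract: a second initialization of the
// same provider name is an error, and lookups use the bare name rather than a
// module-path-qualified key.
func initProvider(cache providerCache, name string) (string, error) {
	if _, ok := cache[name]; ok {
		return "", fmt.Errorf("Provider '%s' already initialized", name)
	}
	cache[name] = "instance-of-" + name
	return cache[name], nil
}

func main() {
	cache := providerCache{}
	p, _ := initProvider(cache, "aws")
	fmt.Println(p)
	if _, err := initProvider(cache, "aws"); err != nil {
		fmt.Println(err)
	}
}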
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_context_mock.go b/vendor/github.com/hashicorp/terraform/terraform/eval_context_mock.go
index 4f90d5b..6464517 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_context_mock.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/eval_context_mock.go
@@ -45,14 +45,6 @@ type MockEvalContext struct {
45 ConfigureProviderConfig *ResourceConfig 45 ConfigureProviderConfig *ResourceConfig
46 ConfigureProviderError error 46 ConfigureProviderError error
47 47
48 SetProviderConfigCalled bool
49 SetProviderConfigName string
50 SetProviderConfigConfig *ResourceConfig
51
52 ParentProviderConfigCalled bool
53 ParentProviderConfigName string
54 ParentProviderConfigConfig *ResourceConfig
55
56 InitProvisionerCalled bool 48 InitProvisionerCalled bool
57 InitProvisionerName string 49 InitProvisionerName string
58 InitProvisionerProvisioner ResourceProvisioner 50 InitProvisionerProvisioner ResourceProvisioner
@@ -72,6 +64,12 @@ type MockEvalContext struct {
72 InterpolateConfigResult *ResourceConfig 64 InterpolateConfigResult *ResourceConfig
73 InterpolateError error 65 InterpolateError error
74 66
67 InterpolateProviderCalled bool
68 InterpolateProviderConfig *config.ProviderConfig
69 InterpolateProviderResource *Resource
70 InterpolateProviderConfigResult *ResourceConfig
71 InterpolateProviderError error
72
75 PathCalled bool 73 PathCalled bool
76 PathPath []string 74 PathPath []string
77 75
@@ -109,7 +107,7 @@ func (c *MockEvalContext) Input() UIInput {
109 return c.InputInput 107 return c.InputInput
110} 108}
111 109
112func (c *MockEvalContext) InitProvider(n string) (ResourceProvider, error) { 110func (c *MockEvalContext) InitProvider(t, n string) (ResourceProvider, error) {
113 c.InitProviderCalled = true 111 c.InitProviderCalled = true
114 c.InitProviderName = n 112 c.InitProviderName = n
115 return c.InitProviderProvider, c.InitProviderError 113 return c.InitProviderProvider, c.InitProviderError
@@ -134,20 +132,6 @@ func (c *MockEvalContext) ConfigureProvider(n string, cfg *ResourceConfig) error
134 return c.ConfigureProviderError 132 return c.ConfigureProviderError
135} 133}
136 134
137func (c *MockEvalContext) SetProviderConfig(
138 n string, cfg *ResourceConfig) error {
139 c.SetProviderConfigCalled = true
140 c.SetProviderConfigName = n
141 c.SetProviderConfigConfig = cfg
142 return nil
143}
144
145func (c *MockEvalContext) ParentProviderConfig(n string) *ResourceConfig {
146 c.ParentProviderConfigCalled = true
147 c.ParentProviderConfigName = n
148 return c.ParentProviderConfigConfig
149}
150
151func (c *MockEvalContext) ProviderInput(n string) map[string]interface{} { 135func (c *MockEvalContext) ProviderInput(n string) map[string]interface{} {
152 c.ProviderInputCalled = true 136 c.ProviderInputCalled = true
153 c.ProviderInputName = n 137 c.ProviderInputName = n
@@ -186,6 +170,14 @@ func (c *MockEvalContext) Interpolate(
186 return c.InterpolateConfigResult, c.InterpolateError 170 return c.InterpolateConfigResult, c.InterpolateError
187} 171}
188 172
173func (c *MockEvalContext) InterpolateProvider(
174 config *config.ProviderConfig, resource *Resource) (*ResourceConfig, error) {
175 c.InterpolateProviderCalled = true
176 c.InterpolateProviderConfig = config
177 c.InterpolateProviderResource = resource
178 return c.InterpolateProviderConfigResult, c.InterpolateError
179}
180
189func (c *MockEvalContext) Path() []string { 181func (c *MockEvalContext) Path() []string {
190 c.PathCalled = true 182 c.PathCalled = true
191 return c.PathPath 183 return c.PathPath
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_diff.go b/vendor/github.com/hashicorp/terraform/terraform/eval_diff.go
index c35f908..26205ce 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_diff.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/eval_diff.go
@@ -6,6 +6,7 @@ import (
6 "strings" 6 "strings"
7 7
8 "github.com/hashicorp/terraform/config" 8 "github.com/hashicorp/terraform/config"
9 "github.com/hashicorp/terraform/version"
9) 10)
10 11
11// EvalCompareDiff is an EvalNode implementation that compares two diffs 12// EvalCompareDiff is an EvalNode implementation that compares two diffs
@@ -60,7 +61,7 @@ func (n *EvalCompareDiff) Eval(ctx EvalContext) (interface{}, error) {
60 "\n"+ 61 "\n"+
61 "Also include as much context as you can about your config, state, "+ 62 "Also include as much context as you can about your config, state, "+
62 "and the steps you performed to trigger this error.\n", 63 "and the steps you performed to trigger this error.\n",
63 n.Info.Id, Version, n.Info.Id, reason, one, two) 64 n.Info.Id, version.Version, n.Info.Id, reason, one, two)
64 } 65 }
65 66
66 return nil, nil 67 return nil, nil
@@ -255,11 +256,15 @@ func (n *EvalDiff) processIgnoreChanges(diff *InstanceDiff) error {
255 containers := groupContainers(diff) 256 containers := groupContainers(diff)
256 keep := map[string]bool{} 257 keep := map[string]bool{}
257 for _, v := range containers { 258 for _, v := range containers {
258 if v.keepDiff() { 259 if v.keepDiff(ignorableAttrKeys) {
259 // At least one key has changes, so list all the sibling keys 260 // At least one key has changes, so list all the sibling keys
260 // to keep in the diff. 261 // to keep in the diff
261 for k := range v { 262 for k := range v {
262 keep[k] = true 263 keep[k] = true
264 // this key may have been added by the user to ignore, but
265 // if it's a subkey in a container, we need to un-ignore it
 266 // to keep the complete container.
267 delete(ignorableAttrKeys, k)
263 } 268 }
264 } 269 }
265 } 270 }
@@ -291,10 +296,17 @@ func (n *EvalDiff) processIgnoreChanges(diff *InstanceDiff) error {
291// a group of key-*ResourceAttrDiff pairs from the same flatmapped container 296// a group of key-*ResourceAttrDiff pairs from the same flatmapped container
292type flatAttrDiff map[string]*ResourceAttrDiff 297type flatAttrDiff map[string]*ResourceAttrDiff
293 298
294// we need to keep all keys if any of them have a diff 299// we need to keep all keys if any of them have a diff that's not ignored
295func (f flatAttrDiff) keepDiff() bool { 300func (f flatAttrDiff) keepDiff(ignoreChanges map[string]bool) bool {
296 for _, v := range f { 301 for k, v := range f {
297 if !v.Empty() && !v.NewComputed { 302 ignore := false
303 for attr := range ignoreChanges {
304 if strings.HasPrefix(k, attr) {
305 ignore = true
306 }
307 }
308
309 if !v.Empty() && !v.NewComputed && !ignore {
298 return true 310 return true
299 } 311 }
300 } 312 }
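The processIgnoreChanges rework passes the ignorable keys into keepDiff so that containers whose only changes are ignored get dropped from the diff. A trimmed-down version of that check, with plain bools standing in for *ResourceAttrDiff:

package main

import (
	"fmt"
	"strings"
)

// keepDiff keeps a flatmapped container only when at least one key has a real
// change that is not covered by an ignore_changes prefix.
func keepDiff(changed map[string]bool, ignorePrefixes map[string]bool) bool {
	for key, hasChange := range changed {
		ignore := false
		for prefix := range ignorePrefixes {
			if strings.HasPrefix(key, prefix) {
				ignore = true
			}
		}
		if hasChange && !ignore {
			return true
		}
	}
	return false
}

func main() {
	attrs := map[string]bool{"tags.%": true, "tags.Name": true}
	fmt.Println(keepDiff(attrs, map[string]bool{"tags": true})) // false: everything ignored
	fmt.Println(keepDiff(attrs, nil))                           // true: a real change remains
}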
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_interpolate.go b/vendor/github.com/hashicorp/terraform/terraform/eval_interpolate.go
index 6825ff5..6a78a6b 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_interpolate.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/eval_interpolate.go
@@ -1,18 +1,50 @@
1package terraform 1package terraform
2 2
3import "github.com/hashicorp/terraform/config" 3import (
4 "log"
5
6 "github.com/hashicorp/terraform/config"
7)
4 8
5// EvalInterpolate is an EvalNode implementation that takes a raw 9// EvalInterpolate is an EvalNode implementation that takes a raw
6// configuration and interpolates it. 10// configuration and interpolates it.
7type EvalInterpolate struct { 11type EvalInterpolate struct {
8 Config *config.RawConfig 12 Config *config.RawConfig
9 Resource *Resource 13 Resource *Resource
10 Output **ResourceConfig 14 Output **ResourceConfig
15 ContinueOnErr bool
11} 16}
12 17
13func (n *EvalInterpolate) Eval(ctx EvalContext) (interface{}, error) { 18func (n *EvalInterpolate) Eval(ctx EvalContext) (interface{}, error) {
14 rc, err := ctx.Interpolate(n.Config, n.Resource) 19 rc, err := ctx.Interpolate(n.Config, n.Resource)
15 if err != nil { 20 if err != nil {
21 if n.ContinueOnErr {
22 log.Printf("[WARN] Interpolation %q failed: %s", n.Config.Key, err)
23 return nil, EvalEarlyExitError{}
24 }
25 return nil, err
26 }
27
28 if n.Output != nil {
29 *n.Output = rc
30 }
31
32 return nil, nil
33}
34
35// EvalInterpolateProvider is an EvalNode implementation that takes a
36// ProviderConfig and interpolates it. Provider configurations are the only
37// "inherited" type of configuration we have, and the original raw config may
38// have a different interpolation scope.
39type EvalInterpolateProvider struct {
40 Config *config.ProviderConfig
41 Resource *Resource
42 Output **ResourceConfig
43}
44
45func (n *EvalInterpolateProvider) Eval(ctx EvalContext) (interface{}, error) {
46 rc, err := ctx.InterpolateProvider(n.Config, n.Resource)
47 if err != nil {
16 return nil, err 48 return nil, err
17 } 49 }
18 50
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_local.go b/vendor/github.com/hashicorp/terraform/terraform/eval_local.go
new file mode 100644
index 0000000..a4b2a50
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/terraform/eval_local.go
@@ -0,0 +1,86 @@
1package terraform
2
3import (
4 "fmt"
5
6 "github.com/hashicorp/terraform/config"
7)
8
9// EvalLocal is an EvalNode implementation that evaluates the
10// expression for a local value and writes it into a transient part of
11// the state.
12type EvalLocal struct {
13 Name string
14 Value *config.RawConfig
15}
16
17func (n *EvalLocal) Eval(ctx EvalContext) (interface{}, error) {
18 cfg, err := ctx.Interpolate(n.Value, nil)
19 if err != nil {
20 return nil, fmt.Errorf("local.%s: %s", n.Name, err)
21 }
22
23 state, lock := ctx.State()
24 if state == nil {
25 return nil, fmt.Errorf("cannot write local value to nil state")
26 }
27
28 // Get a write lock so we can access the state
29 lock.Lock()
30 defer lock.Unlock()
31
32 // Look for the module state. If we don't have one, create it.
33 mod := state.ModuleByPath(ctx.Path())
34 if mod == nil {
35 mod = state.AddModule(ctx.Path())
36 }
37
38 // Get the value from the config
39 var valueRaw interface{} = config.UnknownVariableValue
40 if cfg != nil {
41 var ok bool
42 valueRaw, ok = cfg.Get("value")
43 if !ok {
44 valueRaw = ""
45 }
46 if cfg.IsComputed("value") {
47 valueRaw = config.UnknownVariableValue
48 }
49 }
50
51 if mod.Locals == nil {
52 // initialize
53 mod.Locals = map[string]interface{}{}
54 }
55 mod.Locals[n.Name] = valueRaw
56
57 return nil, nil
58}
59
60// EvalDeleteLocal is an EvalNode implementation that deletes a Local value
61// from the state. Locals aren't persisted, but we don't need to evaluate them
62// during destroy.
63type EvalDeleteLocal struct {
64 Name string
65}
66
67func (n *EvalDeleteLocal) Eval(ctx EvalContext) (interface{}, error) {
68 state, lock := ctx.State()
69 if state == nil {
70 return nil, nil
71 }
72
73 // Get a write lock so we can access this instance
74 lock.Lock()
75 defer lock.Unlock()
76
77 // Look for the module state. If we don't have one, create it.
78 mod := state.ModuleByPath(ctx.Path())
79 if mod == nil {
80 return nil, nil
81 }
82
83 delete(mod.Locals, n.Name)
84
85 return nil, nil
86}
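The new eval_local.go writes each local into a transient Locals map on the module state, storing the unknown sentinel when the value is still computed. A stand-in sketch of that bookkeeping (moduleState is a made-up stand-in for the relevant slice of terraform.ModuleState; the sentinel string is the one used by config.UnknownVariableValue):

package main

import "fmt"

// Sentinel stored when a local's expression cannot be resolved yet.
const unknownVariableValue = "74D93920-ED26-11E3-AC10-0800200C9A66"

// Locals are transient: they are rewritten on every walk and removed by
// EvalDeleteLocal rather than persisted.
type moduleState struct {
	Locals map[string]interface{}
}

func writeLocal(mod *moduleState, name string, value interface{}, computed bool) {
	if mod.Locals == nil {
		mod.Locals = map[string]interface{}{}
	}
	if computed {
		value = unknownVariableValue
	}
	mod.Locals[name] = value
}

func main() {
	mod := &moduleState{}
	writeLocal(mod, "instance_name", "web-01", false)
	writeLocal(mod, "derived", nil, true)
	fmt.Println(mod.Locals)
}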
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_output.go b/vendor/github.com/hashicorp/terraform/terraform/eval_output.go
index cf61781..a834627 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_output.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/eval_output.go
@@ -41,15 +41,16 @@ type EvalWriteOutput struct {
41 Name string 41 Name string
42 Sensitive bool 42 Sensitive bool
43 Value *config.RawConfig 43 Value *config.RawConfig
44 // ContinueOnErr allows interpolation to fail during Input
45 ContinueOnErr bool
44} 46}
45 47
46// TODO: test 48// TODO: test
47func (n *EvalWriteOutput) Eval(ctx EvalContext) (interface{}, error) { 49func (n *EvalWriteOutput) Eval(ctx EvalContext) (interface{}, error) {
50 // This has to run before we have a state lock, since interpolation also
51 // reads the state
48 cfg, err := ctx.Interpolate(n.Value, nil) 52 cfg, err := ctx.Interpolate(n.Value, nil)
49 if err != nil { 53 // handle the error after we have the module from the state
50 // Log error but continue anyway
51 log.Printf("[WARN] Output interpolation %q failed: %s", n.Name, err)
52 }
53 54
54 state, lock := ctx.State() 55 state, lock := ctx.State()
55 if state == nil { 56 if state == nil {
@@ -59,13 +60,27 @@ func (n *EvalWriteOutput) Eval(ctx EvalContext) (interface{}, error) {
59 // Get a write lock so we can access this instance 60 // Get a write lock so we can access this instance
60 lock.Lock() 61 lock.Lock()
61 defer lock.Unlock() 62 defer lock.Unlock()
62
63 // Look for the module state. If we don't have one, create it. 63 // Look for the module state. If we don't have one, create it.
64 mod := state.ModuleByPath(ctx.Path()) 64 mod := state.ModuleByPath(ctx.Path())
65 if mod == nil { 65 if mod == nil {
66 mod = state.AddModule(ctx.Path()) 66 mod = state.AddModule(ctx.Path())
67 } 67 }
68 68
69 // handling the interpolation error
70 if err != nil {
71 if n.ContinueOnErr || flagWarnOutputErrors {
72 log.Printf("[ERROR] Output interpolation %q failed: %s", n.Name, err)
73 // if we're continuing, make sure the output is included, and
74 // marked as unknown
75 mod.Outputs[n.Name] = &OutputState{
76 Type: "string",
77 Value: config.UnknownVariableValue,
78 }
79 return nil, EvalEarlyExitError{}
80 }
81 return nil, err
82 }
83
69 // Get the value from the config 84 // Get the value from the config
70 var valueRaw interface{} = config.UnknownVariableValue 85 var valueRaw interface{} = config.UnknownVariableValue
71 if cfg != nil { 86 if cfg != nil {
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_provider.go b/vendor/github.com/hashicorp/terraform/terraform/eval_provider.go
index 092fd18..61f6ff9 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_provider.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/eval_provider.go
@@ -6,17 +6,6 @@ import (
6 "github.com/hashicorp/terraform/config" 6 "github.com/hashicorp/terraform/config"
7) 7)
8 8
9// EvalSetProviderConfig sets the parent configuration for a provider
10// without configuring that provider, validating it, etc.
11type EvalSetProviderConfig struct {
12 Provider string
13 Config **ResourceConfig
14}
15
16func (n *EvalSetProviderConfig) Eval(ctx EvalContext) (interface{}, error) {
17 return nil, ctx.SetProviderConfig(n.Provider, *n.Config)
18}
19
20// EvalBuildProviderConfig outputs a *ResourceConfig that is properly 9// EvalBuildProviderConfig outputs a *ResourceConfig that is properly
21// merged with parents and inputs on top of what is configured in the file. 10// merged with parents and inputs on top of what is configured in the file.
22type EvalBuildProviderConfig struct { 11type EvalBuildProviderConfig struct {
@@ -28,7 +17,7 @@ type EvalBuildProviderConfig struct {
28func (n *EvalBuildProviderConfig) Eval(ctx EvalContext) (interface{}, error) { 17func (n *EvalBuildProviderConfig) Eval(ctx EvalContext) (interface{}, error) {
29 cfg := *n.Config 18 cfg := *n.Config
30 19
31 // If we have a configuration set, then merge that in 20 // If we have an Input configuration set, then merge that in
32 if input := ctx.ProviderInput(n.Provider); input != nil { 21 if input := ctx.ProviderInput(n.Provider); input != nil {
33 // "input" is a map of the subset of config values that were known 22 // "input" is a map of the subset of config values that were known
34 // during the input walk, set by EvalInputProvider. Note that 23 // during the input walk, set by EvalInputProvider. Note that
@@ -40,13 +29,7 @@ func (n *EvalBuildProviderConfig) Eval(ctx EvalContext) (interface{}, error) {
40 return nil, err 29 return nil, err
41 } 30 }
42 31
43 merged := cfg.raw.Merge(rc) 32 merged := rc.Merge(cfg.raw)
44 cfg = NewResourceConfig(merged)
45 }
46
47 // Get the parent configuration if there is one
48 if parent := ctx.ParentProviderConfig(n.Provider); parent != nil {
49 merged := cfg.raw.Merge(parent.raw)
50 cfg = NewResourceConfig(merged) 33 cfg = NewResourceConfig(merged)
51 } 34 }
52 35
@@ -69,11 +52,12 @@ func (n *EvalConfigProvider) Eval(ctx EvalContext) (interface{}, error) {
69// and returns nothing. The provider can be retrieved again with the 52// and returns nothing. The provider can be retrieved again with the
70// EvalGetProvider node. 53// EvalGetProvider node.
71type EvalInitProvider struct { 54type EvalInitProvider struct {
72 Name string 55 TypeName string
56 Name string
73} 57}
74 58
75func (n *EvalInitProvider) Eval(ctx EvalContext) (interface{}, error) { 59func (n *EvalInitProvider) Eval(ctx EvalContext) (interface{}, error) {
76 return ctx.InitProvider(n.Name) 60 return ctx.InitProvider(n.TypeName, n.Name)
77} 61}
78 62
79// EvalCloseProvider is an EvalNode implementation that closes provider 63// EvalCloseProvider is an EvalNode implementation that closes provider
@@ -116,12 +100,8 @@ type EvalInputProvider struct {
116} 100}
117 101
118func (n *EvalInputProvider) Eval(ctx EvalContext) (interface{}, error) { 102func (n *EvalInputProvider) Eval(ctx EvalContext) (interface{}, error) {
119 // If we already configured this provider, then don't do this again
120 if v := ctx.ProviderInput(n.Name); v != nil {
121 return nil, nil
122 }
123
124 rc := *n.Config 103 rc := *n.Config
104 orig := rc.DeepCopy()
125 105
126 // Wrap the input into a namespace 106 // Wrap the input into a namespace
127 input := &PrefixUIInput{ 107 input := &PrefixUIInput{
@@ -138,27 +118,20 @@ func (n *EvalInputProvider) Eval(ctx EvalContext) (interface{}, error) {
138 "Error configuring %s: %s", n.Name, err) 118 "Error configuring %s: %s", n.Name, err)
139 } 119 }
140 120
141 // Set the input that we received so that child modules don't attempt 121 // We only store values that have changed through Input.
 142 // to ask for input again. 122 // The goal is to cache input responses, not to provide a complete
123 // config for other providers.
124 confMap := make(map[string]interface{})
143 if config != nil && len(config.Config) > 0 { 125 if config != nil && len(config.Config) > 0 {
 144 // This repository of provider input results on the context doesn't 126 // any values that weren't in the original ResourceConfig will be cached
145 // retain config.ComputedKeys, so we need to filter those out here 127 for k, v := range config.Config {
146 // in order that later users of this data won't try to use the unknown 128 if _, ok := orig.Config[k]; !ok {
147 // value placeholder as if it were a literal value. This map is just 129 confMap[k] = v
148 // of known values we've been able to complete so far; dynamic stuff
149 // will be merged in by EvalBuildProviderConfig on subsequent
150 // (post-input) walks.
151 confMap := config.Config
152 if config.ComputedKeys != nil {
153 for _, key := range config.ComputedKeys {
154 delete(confMap, key)
155 } 130 }
156 } 131 }
157
158 ctx.SetProviderInput(n.Name, confMap)
159 } else {
160 ctx.SetProviderInput(n.Name, map[string]interface{}{})
161 } 132 }
162 133
134 ctx.SetProviderInput(n.Name, confMap)
135
163 return nil, nil 136 return nil, nil
164} 137}
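EvalInputProvider now caches only the delta that the input walk added on top of the written configuration, instead of a filtered copy of the whole config. The core of that filtering, as a standalone function (inputDelta is a made-up name):

package main

import "fmt"

// inputDelta returns only the keys that were answered during input and were
// not already present in the original provider configuration, which is what
// gets handed to SetProviderInput after this change.
func inputDelta(original, answered map[string]interface{}) map[string]interface{} {
	delta := make(map[string]interface{})
	for key, value := range answered {
		if _, ok := original[key]; !ok {
			delta[key] = value
		}
	}
	return delta
}

func main() {
	original := map[string]interface{}{"region": "us-east-1"}
	answered := map[string]interface{}{"region": "us-east-1", "access_key": "prompted-value"}
	fmt.Println(inputDelta(original, answered)) // map[access_key:prompted-value]
}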
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_state.go b/vendor/github.com/hashicorp/terraform/terraform/eval_state.go
index 126a0e6..1182690 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_state.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/eval_state.go
@@ -1,6 +1,8 @@
1package terraform 1package terraform
2 2
3import "fmt" 3import (
4 "fmt"
5)
4 6
5// EvalReadState is an EvalNode implementation that reads the 7// EvalReadState is an EvalNode implementation that reads the
6// primary InstanceState for a specific resource out of the state. 8// primary InstanceState for a specific resource out of the state.
@@ -212,37 +214,6 @@ func writeInstanceToState(
212 return nil, nil 214 return nil, nil
213} 215}
214 216
215// EvalClearPrimaryState is an EvalNode implementation that clears the primary
216// instance from a resource state.
217type EvalClearPrimaryState struct {
218 Name string
219}
220
221func (n *EvalClearPrimaryState) Eval(ctx EvalContext) (interface{}, error) {
222 state, lock := ctx.State()
223
224 // Get a read lock so we can access this instance
225 lock.RLock()
226 defer lock.RUnlock()
227
228 // Look for the module state. If we don't have one, then it doesn't matter.
229 mod := state.ModuleByPath(ctx.Path())
230 if mod == nil {
231 return nil, nil
232 }
233
234 // Look for the resource state. If we don't have one, then it is okay.
235 rs := mod.Resources[n.Name]
236 if rs == nil {
237 return nil, nil
238 }
239
240 // Clear primary from the resource state
241 rs.Primary = nil
242
243 return nil, nil
244}
245
246// EvalDeposeState is an EvalNode implementation that takes the primary 217// EvalDeposeState is an EvalNode implementation that takes the primary
247// out of a state and makes it Deposed. This is done at the beginning of 218// out of a state and makes it Deposed. This is done at the beginning of
248// create-before-destroy calls so that the create can create while preserving 219// create-before-destroy calls so that the create can create while preserving
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_validate.go b/vendor/github.com/hashicorp/terraform/terraform/eval_validate.go
index 478aa64..3e5a84c 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_validate.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/eval_validate.go
@@ -144,16 +144,20 @@ func (n *EvalValidateProvisioner) validateConnConfig(connConfig *ResourceConfig)
144 144
145 // For type=ssh only (enforced in ssh communicator) 145 // For type=ssh only (enforced in ssh communicator)
146 PrivateKey interface{} `mapstructure:"private_key"` 146 PrivateKey interface{} `mapstructure:"private_key"`
147 HostKey interface{} `mapstructure:"host_key"`
147 Agent interface{} `mapstructure:"agent"` 148 Agent interface{} `mapstructure:"agent"`
148 BastionHost interface{} `mapstructure:"bastion_host"` 149 BastionHost interface{} `mapstructure:"bastion_host"`
150 BastionHostKey interface{} `mapstructure:"bastion_host_key"`
149 BastionPort interface{} `mapstructure:"bastion_port"` 151 BastionPort interface{} `mapstructure:"bastion_port"`
150 BastionUser interface{} `mapstructure:"bastion_user"` 152 BastionUser interface{} `mapstructure:"bastion_user"`
151 BastionPassword interface{} `mapstructure:"bastion_password"` 153 BastionPassword interface{} `mapstructure:"bastion_password"`
152 BastionPrivateKey interface{} `mapstructure:"bastion_private_key"` 154 BastionPrivateKey interface{} `mapstructure:"bastion_private_key"`
155 AgentIdentity interface{} `mapstructure:"agent_identity"`
153 156
154 // For type=winrm only (enforced in winrm communicator) 157 // For type=winrm only (enforced in winrm communicator)
155 HTTPS interface{} `mapstructure:"https"` 158 HTTPS interface{} `mapstructure:"https"`
156 Insecure interface{} `mapstructure:"insecure"` 159 Insecure interface{} `mapstructure:"insecure"`
160 NTLM interface{} `mapstructure:"use_ntlm"`
157 CACert interface{} `mapstructure:"cacert"` 161 CACert interface{} `mapstructure:"cacert"`
158 } 162 }
159 163
diff --git a/vendor/github.com/hashicorp/terraform/terraform/evaltree_provider.go b/vendor/github.com/hashicorp/terraform/terraform/evaltree_provider.go
index 00392ef..0c3da48 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/evaltree_provider.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/evaltree_provider.go
@@ -1,17 +1,24 @@
1package terraform 1package terraform
2 2
3import ( 3import (
4 "strings"
5
4 "github.com/hashicorp/terraform/config" 6 "github.com/hashicorp/terraform/config"
5) 7)
6 8
7// ProviderEvalTree returns the evaluation tree for initializing and 9// ProviderEvalTree returns the evaluation tree for initializing and
8// configuring providers. 10// configuring providers.
9func ProviderEvalTree(n string, config *config.RawConfig) EvalNode { 11func ProviderEvalTree(n *NodeApplyableProvider, config *config.ProviderConfig) EvalNode {
10 var provider ResourceProvider 12 var provider ResourceProvider
11 var resourceConfig *ResourceConfig 13 var resourceConfig *ResourceConfig
12 14
15 typeName := strings.SplitN(n.NameValue, ".", 2)[0]
16
13 seq := make([]EvalNode, 0, 5) 17 seq := make([]EvalNode, 0, 5)
14 seq = append(seq, &EvalInitProvider{Name: n}) 18 seq = append(seq, &EvalInitProvider{
19 TypeName: typeName,
20 Name: n.Name(),
21 })
15 22
16 // Input stuff 23 // Input stuff
17 seq = append(seq, &EvalOpFilter{ 24 seq = append(seq, &EvalOpFilter{
@@ -19,20 +26,20 @@ func ProviderEvalTree(n string, config *config.RawConfig) EvalNode {
19 Node: &EvalSequence{ 26 Node: &EvalSequence{
20 Nodes: []EvalNode{ 27 Nodes: []EvalNode{
21 &EvalGetProvider{ 28 &EvalGetProvider{
22 Name: n, 29 Name: n.Name(),
23 Output: &provider, 30 Output: &provider,
24 }, 31 },
25 &EvalInterpolate{ 32 &EvalInterpolateProvider{
26 Config: config, 33 Config: config,
27 Output: &resourceConfig, 34 Output: &resourceConfig,
28 }, 35 },
29 &EvalBuildProviderConfig{ 36 &EvalBuildProviderConfig{
30 Provider: n, 37 Provider: n.NameValue,
31 Config: &resourceConfig, 38 Config: &resourceConfig,
32 Output: &resourceConfig, 39 Output: &resourceConfig,
33 }, 40 },
34 &EvalInputProvider{ 41 &EvalInputProvider{
35 Name: n, 42 Name: n.NameValue,
36 Provider: &provider, 43 Provider: &provider,
37 Config: &resourceConfig, 44 Config: &resourceConfig,
38 }, 45 },
@@ -45,15 +52,15 @@ func ProviderEvalTree(n string, config *config.RawConfig) EvalNode {
45 Node: &EvalSequence{ 52 Node: &EvalSequence{
46 Nodes: []EvalNode{ 53 Nodes: []EvalNode{
47 &EvalGetProvider{ 54 &EvalGetProvider{
48 Name: n, 55 Name: n.Name(),
49 Output: &provider, 56 Output: &provider,
50 }, 57 },
51 &EvalInterpolate{ 58 &EvalInterpolateProvider{
52 Config: config, 59 Config: config,
53 Output: &resourceConfig, 60 Output: &resourceConfig,
54 }, 61 },
55 &EvalBuildProviderConfig{ 62 &EvalBuildProviderConfig{
56 Provider: n, 63 Provider: n.NameValue,
57 Config: &resourceConfig, 64 Config: &resourceConfig,
58 Output: &resourceConfig, 65 Output: &resourceConfig,
59 }, 66 },
@@ -61,10 +68,6 @@ func ProviderEvalTree(n string, config *config.RawConfig) EvalNode {
61 Provider: &provider, 68 Provider: &provider,
62 Config: &resourceConfig, 69 Config: &resourceConfig,
63 }, 70 },
64 &EvalSetProviderConfig{
65 Provider: n,
66 Config: &resourceConfig,
67 },
68 }, 71 },
69 }, 72 },
70 }) 73 })
@@ -75,22 +78,18 @@ func ProviderEvalTree(n string, config *config.RawConfig) EvalNode {
75 Node: &EvalSequence{ 78 Node: &EvalSequence{
76 Nodes: []EvalNode{ 79 Nodes: []EvalNode{
77 &EvalGetProvider{ 80 &EvalGetProvider{
78 Name: n, 81 Name: n.Name(),
79 Output: &provider, 82 Output: &provider,
80 }, 83 },
81 &EvalInterpolate{ 84 &EvalInterpolateProvider{
82 Config: config, 85 Config: config,
83 Output: &resourceConfig, 86 Output: &resourceConfig,
84 }, 87 },
85 &EvalBuildProviderConfig{ 88 &EvalBuildProviderConfig{
86 Provider: n, 89 Provider: n.NameValue,
87 Config: &resourceConfig, 90 Config: &resourceConfig,
88 Output: &resourceConfig, 91 Output: &resourceConfig,
89 }, 92 },
90 &EvalSetProviderConfig{
91 Provider: n,
92 Config: &resourceConfig,
93 },
94 }, 93 },
95 }, 94 },
96 }) 95 })
@@ -102,7 +101,7 @@ func ProviderEvalTree(n string, config *config.RawConfig) EvalNode {
102 Node: &EvalSequence{ 101 Node: &EvalSequence{
103 Nodes: []EvalNode{ 102 Nodes: []EvalNode{
104 &EvalConfigProvider{ 103 &EvalConfigProvider{
105 Provider: n, 104 Provider: n.Name(),
106 Config: &resourceConfig, 105 Config: &resourceConfig,
107 }, 106 },
108 }, 107 },
diff --git a/vendor/github.com/hashicorp/terraform/terraform/features.go b/vendor/github.com/hashicorp/terraform/terraform/features.go
new file mode 100644
index 0000000..97c77bd
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/terraform/features.go
@@ -0,0 +1,7 @@
1package terraform
2
3import "os"
4
5// This file holds feature flags for the next release
6
7var flagWarnOutputErrors = os.Getenv("TF_WARN_OUTPUT_ERRORS") != ""
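features.go introduces a release-scoped feature flag read straight from the environment. The same pattern in isolation; as the eval_output.go hunk above shows, a non-empty TF_WARN_OUTPUT_ERRORS makes output interpolation failures non-fatal and records the affected outputs as unknown.

package main

import (
	"fmt"
	"os"
)

// Read once at package init, exactly like the vendored flag.
var flagWarnOutputErrors = os.Getenv("TF_WARN_OUTPUT_ERRORS") != ""

func main() {
	fmt.Println("warn-only output errors:", flagWarnOutputErrors)
}

Running with TF_WARN_OUTPUT_ERRORS=1 set in the environment flips the flag on.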
diff --git a/vendor/github.com/hashicorp/terraform/terraform/graph.go b/vendor/github.com/hashicorp/terraform/terraform/graph.go
index 48ce6a3..735ec4e 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/graph.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/graph.go
@@ -70,7 +70,7 @@ func (g *Graph) walk(walker GraphWalker) error {
70 // Walk the graph. 70 // Walk the graph.
71 var walkFn dag.WalkFunc 71 var walkFn dag.WalkFunc
72 walkFn = func(v dag.Vertex) (rerr error) { 72 walkFn = func(v dag.Vertex) (rerr error) {
73 log.Printf("[DEBUG] vertex '%s.%s': walking", path, dag.VertexName(v)) 73 log.Printf("[TRACE] vertex '%s.%s': walking", path, dag.VertexName(v))
74 g.DebugVisitInfo(v, g.debugName) 74 g.DebugVisitInfo(v, g.debugName)
75 75
76 // If we have a panic wrap GraphWalker and a panic occurs, recover 76 // If we have a panic wrap GraphWalker and a panic occurs, recover
@@ -118,7 +118,7 @@ func (g *Graph) walk(walker GraphWalker) error {
118 118
119 // Allow the walker to change our tree if needed. Eval, 119 // Allow the walker to change our tree if needed. Eval,
120 // then callback with the output. 120 // then callback with the output.
121 log.Printf("[DEBUG] vertex '%s.%s': evaluating", path, dag.VertexName(v)) 121 log.Printf("[TRACE] vertex '%s.%s': evaluating", path, dag.VertexName(v))
122 122
123 g.DebugVertexInfo(v, fmt.Sprintf("evaluating %T(%s)", v, path)) 123 g.DebugVertexInfo(v, fmt.Sprintf("evaluating %T(%s)", v, path))
124 124
@@ -132,7 +132,7 @@ func (g *Graph) walk(walker GraphWalker) error {
132 // If the node is dynamically expanded, then expand it 132 // If the node is dynamically expanded, then expand it
133 if ev, ok := v.(GraphNodeDynamicExpandable); ok { 133 if ev, ok := v.(GraphNodeDynamicExpandable); ok {
134 log.Printf( 134 log.Printf(
135 "[DEBUG] vertex '%s.%s': expanding/walking dynamic subgraph", 135 "[TRACE] vertex '%s.%s': expanding/walking dynamic subgraph",
136 path, 136 path,
137 dag.VertexName(v)) 137 dag.VertexName(v))
138 138
@@ -154,7 +154,7 @@ func (g *Graph) walk(walker GraphWalker) error {
154 // If the node has a subgraph, then walk the subgraph 154 // If the node has a subgraph, then walk the subgraph
155 if sn, ok := v.(GraphNodeSubgraph); ok { 155 if sn, ok := v.(GraphNodeSubgraph); ok {
156 log.Printf( 156 log.Printf(
157 "[DEBUG] vertex '%s.%s': walking subgraph", 157 "[TRACE] vertex '%s.%s': walking subgraph",
158 path, 158 path,
159 dag.VertexName(v)) 159 dag.VertexName(v))
160 160
diff --git a/vendor/github.com/hashicorp/terraform/terraform/graph_builder_apply.go b/vendor/github.com/hashicorp/terraform/terraform/graph_builder_apply.go
index 38a90f2..0c2b233 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/graph_builder_apply.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/graph_builder_apply.go
@@ -87,12 +87,8 @@ func (b *ApplyGraphBuilder) Steps() []GraphTransformer {
87 // Attach the state 87 // Attach the state
88 &AttachStateTransformer{State: b.State}, 88 &AttachStateTransformer{State: b.State},
89 89
90 // Create all the providers 90 // add providers
91 &MissingProviderTransformer{Providers: b.Providers, Concrete: concreteProvider}, 91 TransformProviders(b.Providers, concreteProvider, b.Module),
92 &ProviderTransformer{},
93 &DisableProviderTransformer{},
94 &ParentProviderTransformer{},
95 &AttachProviderConfigTransformer{Module: b.Module},
96 92
97 // Destruction ordering 93 // Destruction ordering
98 &DestroyEdgeTransformer{Module: b.Module, State: b.State}, 94 &DestroyEdgeTransformer{Module: b.Module, State: b.State},
@@ -108,15 +104,36 @@ func (b *ApplyGraphBuilder) Steps() []GraphTransformer {
108 // Add root variables 104 // Add root variables
109 &RootVariableTransformer{Module: b.Module}, 105 &RootVariableTransformer{Module: b.Module},
110 106
107 // Add the local values
108 &LocalTransformer{Module: b.Module},
109
111 // Add the outputs 110 // Add the outputs
112 &OutputTransformer{Module: b.Module}, 111 &OutputTransformer{Module: b.Module},
113 112
114 // Add module variables 113 // Add module variables
115 &ModuleVariableTransformer{Module: b.Module}, 114 &ModuleVariableTransformer{Module: b.Module},
116 115
116 // Remove modules no longer present in the config
117 &RemovedModuleTransformer{Module: b.Module, State: b.State},
118
117 // Connect references so ordering is correct 119 // Connect references so ordering is correct
118 &ReferenceTransformer{}, 120 &ReferenceTransformer{},
119 121
122 // Handle destroy time transformations for output and local values.
123 // Reverse the edges from outputs and locals, so that
124 // interpolations don't fail during destroy.
125 // Create a destroy node for outputs to remove them from the state.
126 // Prune unreferenced values, which may have interpolations that can't
127 // be resolved.
128 GraphTransformIf(
129 func() bool { return b.Destroy },
130 GraphTransformMulti(
131 &DestroyValueReferenceTransformer{},
132 &DestroyOutputTransformer{},
133 &PruneUnusedValuesTransformer{},
134 ),
135 ),
136
120 // Add the node to fix the state count boundaries 137 // Add the node to fix the state count boundaries
121 &CountBoundaryTransformer{}, 138 &CountBoundaryTransformer{},
122 139
diff --git a/vendor/github.com/hashicorp/terraform/terraform/graph_builder_import.go b/vendor/github.com/hashicorp/terraform/terraform/graph_builder_import.go
index 7070c59..07a1eaf 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/graph_builder_import.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/graph_builder_import.go
@@ -52,12 +52,7 @@ func (b *ImportGraphBuilder) Steps() []GraphTransformer {
52 // Add the import steps 52 // Add the import steps
53 &ImportStateTransformer{Targets: b.ImportTargets}, 53 &ImportStateTransformer{Targets: b.ImportTargets},
54 54
55 // Provider-related transformations 55 TransformProviders(b.Providers, concreteProvider, mod),
56 &MissingProviderTransformer{Providers: b.Providers, Concrete: concreteProvider},
57 &ProviderTransformer{},
58 &DisableProviderTransformer{},
59 &ParentProviderTransformer{},
60 &AttachProviderConfigTransformer{Module: mod},
61 56
62 // This validates that the providers only depend on variables 57 // This validates that the providers only depend on variables
63 &ImportProviderValidateTransformer{}, 58 &ImportProviderValidateTransformer{},
diff --git a/vendor/github.com/hashicorp/terraform/terraform/graph_builder_plan.go b/vendor/github.com/hashicorp/terraform/terraform/graph_builder_plan.go
index 4b29bbb..f8dd0fc 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/graph_builder_plan.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/graph_builder_plan.go
@@ -71,6 +71,9 @@ func (b *PlanGraphBuilder) Steps() []GraphTransformer {
71 Module: b.Module, 71 Module: b.Module,
72 }, 72 },
73 73
74 // Add the local values
75 &LocalTransformer{Module: b.Module},
76
74 // Add the outputs 77 // Add the outputs
75 &OutputTransformer{Module: b.Module}, 78 &OutputTransformer{Module: b.Module},
76 79
@@ -81,6 +84,12 @@ func (b *PlanGraphBuilder) Steps() []GraphTransformer {
81 Module: b.Module, 84 Module: b.Module,
82 }, 85 },
83 86
87 // Create orphan output nodes
88 &OrphanOutputTransformer{
89 Module: b.Module,
90 State: b.State,
91 },
92
84 // Attach the configuration to any resources 93 // Attach the configuration to any resources
85 &AttachResourceConfigTransformer{Module: b.Module}, 94 &AttachResourceConfigTransformer{Module: b.Module},
86 95
@@ -90,12 +99,7 @@ func (b *PlanGraphBuilder) Steps() []GraphTransformer {
90 // Add root variables 99 // Add root variables
91 &RootVariableTransformer{Module: b.Module}, 100 &RootVariableTransformer{Module: b.Module},
92 101
93 // Create all the providers 102 TransformProviders(b.Providers, b.ConcreteProvider, b.Module),
94 &MissingProviderTransformer{Providers: b.Providers, Concrete: b.ConcreteProvider},
95 &ProviderTransformer{},
96 &DisableProviderTransformer{},
97 &ParentProviderTransformer{},
98 &AttachProviderConfigTransformer{Module: b.Module},
99 103
100 // Provisioner-related transformations. Only add these if requested. 104 // Provisioner-related transformations. Only add these if requested.
101 GraphTransformIf( 105 GraphTransformIf(
@@ -107,7 +111,12 @@ func (b *PlanGraphBuilder) Steps() []GraphTransformer {
107 ), 111 ),
108 112
109 // Add module variables 113 // Add module variables
110 &ModuleVariableTransformer{Module: b.Module}, 114 &ModuleVariableTransformer{
115 Module: b.Module,
116 },
117
118 // Remove modules no longer present in the config
119 &RemovedModuleTransformer{Module: b.Module, State: b.State},
111 120
112 // Connect so that the references are ready for targeting. We'll 121 // Connect so that the references are ready for targeting. We'll
113 // have to connect again later for providers and so on. 122 // have to connect again later for providers and so on.
diff --git a/vendor/github.com/hashicorp/terraform/terraform/graph_builder_refresh.go b/vendor/github.com/hashicorp/terraform/terraform/graph_builder_refresh.go
index 3d3e968..9638d4c 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/graph_builder_refresh.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/graph_builder_refresh.go
@@ -126,12 +126,10 @@ func (b *RefreshGraphBuilder) Steps() []GraphTransformer {
126 // Add root variables 126 // Add root variables
127 &RootVariableTransformer{Module: b.Module}, 127 &RootVariableTransformer{Module: b.Module},
128 128
129 // Create all the providers 129 TransformProviders(b.Providers, concreteProvider, b.Module),
130 &MissingProviderTransformer{Providers: b.Providers, Concrete: concreteProvider}, 130
131 &ProviderTransformer{}, 131 // Add the local values
132 &DisableProviderTransformer{}, 132 &LocalTransformer{Module: b.Module},
133 &ParentProviderTransformer{},
134 &AttachProviderConfigTransformer{Module: b.Module},
135 133
136 // Add the outputs 134 // Add the outputs
137 &OutputTransformer{Module: b.Module}, 135 &OutputTransformer{Module: b.Module},
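
The import, plan, and refresh graph builders above all replace their inline list of provider transformers with a single TransformProviders call. The helper itself is not part of this diff; the following is a minimal sketch (living inside the same terraform package), assuming it merely bundles the transformers the builders previously listed inline into one GraphTransformer:

// Sketch only: the vendored TransformProviders is defined elsewhere in this
// package and is not shown in this diff. The assumption is that it groups the
// provider-related transformers that each graph builder used to list inline.
func TransformProviders(providers []string, concrete ConcreteProviderNodeFunc, mod *module.Tree) GraphTransformer {
	return GraphTransformMulti(
		&MissingProviderTransformer{Providers: providers, Concrete: concrete},
		&ProviderTransformer{},
		&DisableProviderTransformer{},
		&ParentProviderTransformer{},
		&AttachProviderConfigTransformer{Module: mod},
	)
}
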
diff --git a/vendor/github.com/hashicorp/terraform/terraform/graph_walk_context.go b/vendor/github.com/hashicorp/terraform/terraform/graph_walk_context.go
index e63b460..89f376e 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/graph_walk_context.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/graph_walk_context.go
@@ -32,7 +32,6 @@ type ContextGraphWalker struct {
32 interpolaterVars map[string]map[string]interface{} 32 interpolaterVars map[string]map[string]interface{}
33 interpolaterVarLock sync.Mutex 33 interpolaterVarLock sync.Mutex
34 providerCache map[string]ResourceProvider 34 providerCache map[string]ResourceProvider
35 providerConfigCache map[string]*ResourceConfig
36 providerLock sync.Mutex 35 providerLock sync.Mutex
37 provisionerCache map[string]ResourceProvisioner 36 provisionerCache map[string]ResourceProvisioner
38 provisionerLock sync.Mutex 37 provisionerLock sync.Mutex
@@ -73,7 +72,6 @@ func (w *ContextGraphWalker) EnterPath(path []string) EvalContext {
73 InputValue: w.Context.uiInput, 72 InputValue: w.Context.uiInput,
74 Components: w.Context.components, 73 Components: w.Context.components,
75 ProviderCache: w.providerCache, 74 ProviderCache: w.providerCache,
76 ProviderConfigCache: w.providerConfigCache,
77 ProviderInputConfig: w.Context.providerInputConfig, 75 ProviderInputConfig: w.Context.providerInputConfig,
78 ProviderLock: &w.providerLock, 76 ProviderLock: &w.providerLock,
79 ProvisionerCache: w.provisionerCache, 77 ProvisionerCache: w.provisionerCache,
@@ -151,7 +149,6 @@ func (w *ContextGraphWalker) ExitEvalTree(
151func (w *ContextGraphWalker) init() { 149func (w *ContextGraphWalker) init() {
152 w.contexts = make(map[string]*BuiltinEvalContext, 5) 150 w.contexts = make(map[string]*BuiltinEvalContext, 5)
153 w.providerCache = make(map[string]ResourceProvider, 5) 151 w.providerCache = make(map[string]ResourceProvider, 5)
154 w.providerConfigCache = make(map[string]*ResourceConfig, 5)
155 w.provisionerCache = make(map[string]ResourceProvisioner, 5) 152 w.provisionerCache = make(map[string]ResourceProvisioner, 5)
156 w.interpolaterVars = make(map[string]map[string]interface{}, 5) 153 w.interpolaterVars = make(map[string]map[string]interface{}, 5)
157} 154}
diff --git a/vendor/github.com/hashicorp/terraform/terraform/graphtype_string.go b/vendor/github.com/hashicorp/terraform/terraform/graphtype_string.go
index e97b485..95ef4e9 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/graphtype_string.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/graphtype_string.go
@@ -2,7 +2,7 @@
2 2
3package terraform 3package terraform
4 4
5import "fmt" 5import "strconv"
6 6
7const _GraphType_name = "GraphTypeInvalidGraphTypeLegacyGraphTypeRefreshGraphTypePlanGraphTypePlanDestroyGraphTypeApplyGraphTypeInputGraphTypeValidate" 7const _GraphType_name = "GraphTypeInvalidGraphTypeLegacyGraphTypeRefreshGraphTypePlanGraphTypePlanDestroyGraphTypeApplyGraphTypeInputGraphTypeValidate"
8 8
@@ -10,7 +10,7 @@ var _GraphType_index = [...]uint8{0, 16, 31, 47, 60, 80, 94, 108, 125}
10 10
11func (i GraphType) String() string { 11func (i GraphType) String() string {
12 if i >= GraphType(len(_GraphType_index)-1) { 12 if i >= GraphType(len(_GraphType_index)-1) {
13 return fmt.Sprintf("GraphType(%d)", i) 13 return "GraphType(" + strconv.FormatInt(int64(i), 10) + ")"
14 } 14 }
15 return _GraphType_name[_GraphType_index[i]:_GraphType_index[i+1]] 15 return _GraphType_name[_GraphType_index[i]:_GraphType_index[i+1]]
16} 16}
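
The regenerated stringer files drop the fmt dependency and build the out-of-range fallback with strconv instead. A self-contained illustration of the same pattern, with the name table truncated to two entries for brevity:

package main

import (
	"fmt"
	"strconv"
)

type GraphType int

const _GraphType_name = "GraphTypeInvalidGraphTypeLegacy"

var _GraphType_index = [...]uint8{0, 16, 31}

func (i GraphType) String() string {
	// Out-of-range values fall back to a numeric form without needing fmt.
	if i >= GraphType(len(_GraphType_index)-1) {
		return "GraphType(" + strconv.FormatInt(int64(i), 10) + ")"
	}
	return _GraphType_name[_GraphType_index[i]:_GraphType_index[i+1]]
}

func main() {
	fmt.Println(GraphType(0).String()) // GraphTypeInvalid
	fmt.Println(GraphType(7).String()) // GraphType(7)
}
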
diff --git a/vendor/github.com/hashicorp/terraform/terraform/instancetype_string.go b/vendor/github.com/hashicorp/terraform/terraform/instancetype_string.go
index f69267c..b8e7d1f 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/instancetype_string.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/instancetype_string.go
@@ -2,7 +2,7 @@
2 2
3package terraform 3package terraform
4 4
5import "fmt" 5import "strconv"
6 6
7const _InstanceType_name = "TypeInvalidTypePrimaryTypeTaintedTypeDeposed" 7const _InstanceType_name = "TypeInvalidTypePrimaryTypeTaintedTypeDeposed"
8 8
@@ -10,7 +10,7 @@ var _InstanceType_index = [...]uint8{0, 11, 22, 33, 44}
10 10
11func (i InstanceType) String() string { 11func (i InstanceType) String() string {
12 if i < 0 || i >= InstanceType(len(_InstanceType_index)-1) { 12 if i < 0 || i >= InstanceType(len(_InstanceType_index)-1) {
13 return fmt.Sprintf("InstanceType(%d)", i) 13 return "InstanceType(" + strconv.FormatInt(int64(i), 10) + ")"
14 } 14 }
15 return _InstanceType_name[_InstanceType_index[i]:_InstanceType_index[i+1]] 15 return _InstanceType_name[_InstanceType_index[i]:_InstanceType_index[i+1]]
16} 16}
diff --git a/vendor/github.com/hashicorp/terraform/terraform/interpolate.go b/vendor/github.com/hashicorp/terraform/terraform/interpolate.go
index 22ddce6..4f4e178 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/interpolate.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/interpolate.go
@@ -90,6 +90,8 @@ func (i *Interpolater) Values(
90 err = i.valueSimpleVar(scope, n, v, result) 90 err = i.valueSimpleVar(scope, n, v, result)
91 case *config.TerraformVariable: 91 case *config.TerraformVariable:
92 err = i.valueTerraformVar(scope, n, v, result) 92 err = i.valueTerraformVar(scope, n, v, result)
93 case *config.LocalVariable:
94 err = i.valueLocalVar(scope, n, v, result)
93 case *config.UserVariable: 95 case *config.UserVariable:
94 err = i.valueUserVar(scope, n, v, result) 96 err = i.valueUserVar(scope, n, v, result)
95 default: 97 default:
@@ -140,7 +142,6 @@ func (i *Interpolater) valueModuleVar(
140 n string, 142 n string,
141 v *config.ModuleVariable, 143 v *config.ModuleVariable,
142 result map[string]ast.Variable) error { 144 result map[string]ast.Variable) error {
143
144 // Build the path to the child module we want 145 // Build the path to the child module we want
145 path := make([]string, len(scope.Path), len(scope.Path)+1) 146 path := make([]string, len(scope.Path), len(scope.Path)+1)
146 copy(path, scope.Path) 147 copy(path, scope.Path)
@@ -317,7 +318,6 @@ func (i *Interpolater) valueTerraformVar(
317 n string, 318 n string,
318 v *config.TerraformVariable, 319 v *config.TerraformVariable,
319 result map[string]ast.Variable) error { 320 result map[string]ast.Variable) error {
320
321 // "env" is supported for backward compatibility, but it's deprecated and 321 // "env" is supported for backward compatibility, but it's deprecated and
322 // so we won't advertise it as being allowed in the error message. It will 322 // so we won't advertise it as being allowed in the error message. It will
323 // be removed in a future version of Terraform. 323 // be removed in a future version of Terraform.
@@ -335,6 +335,59 @@ func (i *Interpolater) valueTerraformVar(
335 return nil 335 return nil
336} 336}
337 337
338func (i *Interpolater) valueLocalVar(
339 scope *InterpolationScope,
340 n string,
341 v *config.LocalVariable,
342 result map[string]ast.Variable,
343) error {
344 i.StateLock.RLock()
345 defer i.StateLock.RUnlock()
346
347 modTree := i.Module
348 if len(scope.Path) > 1 {
349 modTree = i.Module.Child(scope.Path[1:])
350 }
351
 352 // Get the local value from the configuration so we can verify
 353 // that it has actually been declared and so we can access its
 354 // configuration if we need to.
355 var cl *config.Local
356 for _, l := range modTree.Config().Locals {
357 if l.Name == v.Name {
358 cl = l
359 break
360 }
361 }
362
363 if cl == nil {
364 return fmt.Errorf("%s: no local value of this name has been declared", n)
365 }
366
367 // Get the relevant module
368 module := i.State.ModuleByPath(scope.Path)
369 if module == nil {
370 result[n] = unknownVariable()
371 return nil
372 }
373
374 rawV, exists := module.Locals[v.Name]
375 if !exists {
376 result[n] = unknownVariable()
377 return nil
378 }
379
380 varV, err := hil.InterfaceToVariable(rawV)
381 if err != nil {
382 // Should never happen, since interpolation should always produce
383 // something we can feed back in to interpolation.
384 return fmt.Errorf("%s: %s", n, err)
385 }
386
387 result[n] = varV
388 return nil
389}
390
338func (i *Interpolater) valueUserVar( 391func (i *Interpolater) valueUserVar(
339 scope *InterpolationScope, 392 scope *InterpolationScope,
340 n string, 393 n string,
@@ -465,6 +518,16 @@ func (i *Interpolater) computeResourceVariable(
465 return &v, err 518 return &v, err
466 } 519 }
467 520
521 // special case for the "id" field which is usually also an attribute
522 if v.Field == "id" && r.Primary.ID != "" {
523 // This is usually pulled from the attributes, but is sometimes missing
524 // during destroy. We can return the ID field in this case.
525 // FIXME: there should only be one ID to rule them all.
526 log.Printf("[WARN] resource %s missing 'id' attribute", v.ResourceId())
527 v, err := hil.InterfaceToVariable(r.Primary.ID)
528 return &v, err
529 }
530
468 // computed list or map attribute 531 // computed list or map attribute
469 _, isList = r.Primary.Attributes[v.Field+".#"] 532 _, isList = r.Primary.Attributes[v.Field+".#"]
470 _, isMap = r.Primary.Attributes[v.Field+".%"] 533 _, isMap = r.Primary.Attributes[v.Field+".%"]
@@ -602,6 +665,11 @@ func (i *Interpolater) computeResourceMultiVariable(
602 continue 665 continue
603 } 666 }
604 667
668 if v.Field == "id" && r.Primary.ID != "" {
669 log.Printf("[WARN] resource %s missing 'id' attribute", v.ResourceId())
670 values = append(values, r.Primary.ID)
671 }
672
605 // computed list or map attribute 673 // computed list or map attribute
606 _, isList := r.Primary.Attributes[v.Field+".#"] 674 _, isList := r.Primary.Attributes[v.Field+".#"]
607 _, isMap := r.Primary.Attributes[v.Field+".%"] 675 _, isMap := r.Primary.Attributes[v.Field+".%"]
@@ -646,7 +714,6 @@ func (i *Interpolater) computeResourceMultiVariable(
646func (i *Interpolater) interpolateComplexTypeAttribute( 714func (i *Interpolater) interpolateComplexTypeAttribute(
647 resourceID string, 715 resourceID string,
648 attributes map[string]string) (ast.Variable, error) { 716 attributes map[string]string) (ast.Variable, error) {
649
650 // We can now distinguish between lists and maps in state by the count field: 717 // We can now distinguish between lists and maps in state by the count field:
651 // - lists (and by extension, sets) use the traditional .# notation 718 // - lists (and by extension, sets) use the traditional .# notation
652 // - maps use the newer .% notation 719 // - maps use the newer .% notation
@@ -722,7 +789,8 @@ func (i *Interpolater) resourceCountMax(
722 // If we're NOT applying, then we assume we can read the count 789 // If we're NOT applying, then we assume we can read the count
723 // from the state. Plan and so on may not have any state yet so 790 // from the state. Plan and so on may not have any state yet so
724 // we do a full interpolation. 791 // we do a full interpolation.
725 if i.Operation != walkApply { 792 // Don't forget walkDestroy, which is a special case of walkApply
793 if !(i.Operation == walkApply || i.Operation == walkDestroy) {
726 if cr == nil { 794 if cr == nil {
727 return 0, nil 795 return 0, nil
728 } 796 }
@@ -753,7 +821,13 @@ func (i *Interpolater) resourceCountMax(
753 // use "cr.Count()" but that doesn't work if the count is interpolated 821 // use "cr.Count()" but that doesn't work if the count is interpolated
754 // and we can't guarantee that so we instead depend on the state. 822 // and we can't guarantee that so we instead depend on the state.
755 max := -1 823 max := -1
756 for k, _ := range ms.Resources { 824 for k, s := range ms.Resources {
825 // This resource may have been just removed, in which case the Primary
826 // may be nil, or just empty.
827 if s == nil || s.Primary == nil || len(s.Primary.Attributes) == 0 {
828 continue
829 }
830
757 // Get the index number for this resource 831 // Get the index number for this resource
758 index := "" 832 index := ""
759 if k == id { 833 if k == id {
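
The new valueLocalVar reads a local's raw value out of the module state (module.Locals) and converts it back into a HIL variable with hil.InterfaceToVariable. A minimal standalone illustration of that conversion, using made-up values:

package main

import (
	"fmt"

	"github.com/hashicorp/hil"
)

func main() {
	// Local values are stored in state as plain Go values; interpolation turns
	// them back into HIL variables, as valueLocalVar does above. The map here
	// is purely illustrative data.
	raw := map[string]interface{}{"region": "us-east-1", "cidr": "10.0.0.0/16"}
	v, err := hil.InterfaceToVariable(raw)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%v => %#v\n", v.Type, v.Value)
}
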
diff --git a/vendor/github.com/hashicorp/terraform/terraform/module_dependencies.go b/vendor/github.com/hashicorp/terraform/terraform/module_dependencies.go
index b9f44a0..4594cb6 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/module_dependencies.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/module_dependencies.go
@@ -17,7 +17,6 @@ import (
17// present in the configuration. This is guaranteed not to happen for any 17// present in the configuration. This is guaranteed not to happen for any
18// configuration that has passed a call to Config.Validate(). 18// configuration that has passed a call to Config.Validate().
19func ModuleTreeDependencies(root *module.Tree, state *State) *moduledeps.Module { 19func ModuleTreeDependencies(root *module.Tree, state *State) *moduledeps.Module {
20
21 // First we walk the configuration tree to build the overall structure 20 // First we walk the configuration tree to build the overall structure
22 // and capture the explicit/implicit/inherited provider dependencies. 21 // and capture the explicit/implicit/inherited provider dependencies.
23 deps := moduleTreeConfigDependencies(root, nil) 22 deps := moduleTreeConfigDependencies(root, nil)
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_data_refresh.go b/vendor/github.com/hashicorp/terraform/terraform/node_data_refresh.go
index 45129b3..d5ca641 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_data_refresh.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/node_data_refresh.go
@@ -27,6 +27,7 @@ func (n *NodeRefreshableDataResource) DynamicExpand(ctx EvalContext) (*Graph, er
27 concreteResource := func(a *NodeAbstractResource) dag.Vertex { 27 concreteResource := func(a *NodeAbstractResource) dag.Vertex {
28 // Add the config and state since we don't do that via transforms 28 // Add the config and state since we don't do that via transforms
29 a.Config = n.Config 29 a.Config = n.Config
30 a.ResolvedProvider = n.ResolvedProvider
30 31
31 return &NodeRefreshableDataResourceInstance{ 32 return &NodeRefreshableDataResourceInstance{
32 NodeAbstractResource: a, 33 NodeAbstractResource: a,
@@ -107,7 +108,9 @@ func (n *NodeRefreshableDataResourceInstance) EvalTree() EvalNode {
107 // Get the state if we have it, if not we build it 108 // Get the state if we have it, if not we build it
108 rs := n.ResourceState 109 rs := n.ResourceState
109 if rs == nil { 110 if rs == nil {
110 rs = &ResourceState{} 111 rs = &ResourceState{
112 Provider: n.ResolvedProvider,
113 }
111 } 114 }
112 115
113 // If the config isn't empty we update the state 116 // If the config isn't empty we update the state
@@ -145,7 +148,7 @@ func (n *NodeRefreshableDataResourceInstance) EvalTree() EvalNode {
145 &EvalWriteState{ 148 &EvalWriteState{
146 Name: stateId, 149 Name: stateId,
147 ResourceType: rs.Type, 150 ResourceType: rs.Type,
148 Provider: rs.Provider, 151 Provider: n.ResolvedProvider,
149 Dependencies: rs.Dependencies, 152 Dependencies: rs.Dependencies,
150 State: &state, // state is nil here 153 State: &state, // state is nil here
151 }, 154 },
@@ -185,7 +188,7 @@ func (n *NodeRefreshableDataResourceInstance) EvalTree() EvalNode {
185 // provider configurations that need this data during 188 // provider configurations that need this data during
186 // refresh/plan. 189 // refresh/plan.
187 &EvalGetProvider{ 190 &EvalGetProvider{
188 Name: n.ProvidedBy()[0], 191 Name: n.ResolvedProvider,
189 Output: &provider, 192 Output: &provider,
190 }, 193 },
191 194
@@ -207,7 +210,7 @@ func (n *NodeRefreshableDataResourceInstance) EvalTree() EvalNode {
207 &EvalWriteState{ 210 &EvalWriteState{
208 Name: stateId, 211 Name: stateId,
209 ResourceType: rs.Type, 212 ResourceType: rs.Type,
210 Provider: rs.Provider, 213 Provider: n.ResolvedProvider,
211 Dependencies: rs.Dependencies, 214 Dependencies: rs.Dependencies,
212 State: &state, 215 State: &state,
213 }, 216 },
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_local.go b/vendor/github.com/hashicorp/terraform/terraform/node_local.go
new file mode 100644
index 0000000..d387222
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/terraform/node_local.go
@@ -0,0 +1,66 @@
1package terraform
2
3import (
4 "fmt"
5 "strings"
6
7 "github.com/hashicorp/terraform/config"
8)
9
10// NodeLocal represents a named local value in a particular module.
11//
12// Local value nodes only have one operation, common to all walk types:
13// evaluate the result and place it in state.
14type NodeLocal struct {
15 PathValue []string
16 Config *config.Local
17}
18
19func (n *NodeLocal) Name() string {
20 result := fmt.Sprintf("local.%s", n.Config.Name)
21 if len(n.PathValue) > 1 {
22 result = fmt.Sprintf("%s.%s", modulePrefixStr(n.PathValue), result)
23 }
24
25 return result
26}
27
28// GraphNodeSubPath
29func (n *NodeLocal) Path() []string {
30 return n.PathValue
31}
32
33// RemovableIfNotTargeted
34func (n *NodeLocal) RemoveIfNotTargeted() bool {
35 return true
36}
37
38// GraphNodeReferenceable
39func (n *NodeLocal) ReferenceableName() []string {
40 name := fmt.Sprintf("local.%s", n.Config.Name)
41 return []string{name}
42}
43
44// GraphNodeReferencer
45func (n *NodeLocal) References() []string {
46 var result []string
47 result = append(result, ReferencesFromConfig(n.Config.RawConfig)...)
48 for _, v := range result {
49 split := strings.Split(v, "/")
50 for i, s := range split {
51 split[i] = s + ".destroy"
52 }
53
54 result = append(result, strings.Join(split, "/"))
55 }
56
57 return result
58}
59
60// GraphNodeEvalable
61func (n *NodeLocal) EvalTree() EvalNode {
62 return &EvalLocal{
63 Name: n.Config.Name,
64 Value: n.Config.RawConfig,
65 }
66}
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_module_destroy.go b/vendor/github.com/hashicorp/terraform/terraform/node_module_destroy.go
deleted file mode 100644
index 319df1e..0000000
--- a/vendor/github.com/hashicorp/terraform/terraform/node_module_destroy.go
+++ /dev/null
@@ -1,29 +0,0 @@
1package terraform
2
3import (
4 "fmt"
5)
6
7// NodeDestroyableModule represents a module destruction.
8type NodeDestroyableModuleVariable struct {
9 PathValue []string
10}
11
12func (n *NodeDestroyableModuleVariable) Name() string {
13 result := "plan-destroy"
14 if len(n.PathValue) > 1 {
15 result = fmt.Sprintf("%s.%s", modulePrefixStr(n.PathValue), result)
16 }
17
18 return result
19}
20
21// GraphNodeSubPath
22func (n *NodeDestroyableModuleVariable) Path() []string {
23 return n.PathValue
24}
25
26// GraphNodeEvalable
27func (n *NodeDestroyableModuleVariable) EvalTree() EvalNode {
28 return &EvalDiffDestroyModule{Path: n.PathValue}
29}
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_module_removed.go b/vendor/github.com/hashicorp/terraform/terraform/node_module_removed.go
new file mode 100644
index 0000000..bb3e5ee
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/terraform/node_module_removed.go
@@ -0,0 +1,77 @@
1package terraform
2
3import (
4 "fmt"
5 "log"
6 "reflect"
7)
8
9// NodeModuleRemoved represents a module that is no longer in the
10// config.
11type NodeModuleRemoved struct {
12 PathValue []string
13}
14
15func (n *NodeModuleRemoved) Name() string {
16 return fmt.Sprintf("%s (removed)", modulePrefixStr(n.PathValue))
17}
18
19// GraphNodeSubPath
20func (n *NodeModuleRemoved) Path() []string {
21 return n.PathValue
22}
23
24// GraphNodeEvalable
25func (n *NodeModuleRemoved) EvalTree() EvalNode {
26 return &EvalOpFilter{
27 Ops: []walkOperation{walkRefresh, walkApply, walkDestroy},
28 Node: &EvalDeleteModule{
29 PathValue: n.PathValue,
30 },
31 }
32}
33
34func (n *NodeModuleRemoved) ReferenceGlobal() bool {
35 return true
36}
37
38func (n *NodeModuleRemoved) References() []string {
39 return []string{modulePrefixStr(n.PathValue)}
40}
41
42// EvalDeleteModule is an EvalNode implementation that removes an empty module
43// entry from the state.
44type EvalDeleteModule struct {
45 PathValue []string
46}
47
48func (n *EvalDeleteModule) Eval(ctx EvalContext) (interface{}, error) {
49 state, lock := ctx.State()
50 if state == nil {
51 return nil, nil
52 }
53
54 // Get a write lock so we can access this instance
55 lock.Lock()
56 defer lock.Unlock()
57
58 // Make sure we have a clean state
59 // Destroyed resources aren't deleted, they're written with an ID of "".
60 state.prune()
61
62 // find the module and delete it
63 for i, m := range state.Modules {
64 if reflect.DeepEqual(m.Path, n.PathValue) {
65 if !m.Empty() {
66 // a targeted apply may leave module resources even without a config,
67 // so just log this and return.
68 log.Printf("[DEBUG] cannot remove module %s, not empty", modulePrefixStr(n.PathValue))
69 break
70 }
71 state.Modules = append(state.Modules[:i], state.Modules[i+1:]...)
72 break
73 }
74 }
75
76 return nil, nil
77}
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_module_variable.go b/vendor/github.com/hashicorp/terraform/terraform/node_module_variable.go
index 13fe8fc..66ff7d5 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_module_variable.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/node_module_variable.go
@@ -92,11 +92,24 @@ func (n *NodeApplyableModuleVariable) EvalTree() EvalNode {
92 // within the variables mapping. 92 // within the variables mapping.
93 var config *ResourceConfig 93 var config *ResourceConfig
94 variables := make(map[string]interface{}) 94 variables := make(map[string]interface{})
95
95 return &EvalSequence{ 96 return &EvalSequence{
96 Nodes: []EvalNode{ 97 Nodes: []EvalNode{
97 &EvalInterpolate{ 98 &EvalOpFilter{
98 Config: n.Value, 99 Ops: []walkOperation{walkInput},
99 Output: &config, 100 Node: &EvalInterpolate{
101 Config: n.Value,
102 Output: &config,
103 ContinueOnErr: true,
104 },
105 },
106 &EvalOpFilter{
107 Ops: []walkOperation{walkRefresh, walkPlan, walkApply,
108 walkDestroy, walkValidate},
109 Node: &EvalInterpolate{
110 Config: n.Value,
111 Output: &config,
112 },
100 }, 113 },
101 114
102 &EvalVariableBlock{ 115 &EvalVariableBlock{
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_output.go b/vendor/github.com/hashicorp/terraform/terraform/node_output.go
index 9017a63..83e9925 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_output.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/node_output.go
@@ -69,12 +69,22 @@ func (n *NodeApplyableOutput) References() []string {
69 69
70// GraphNodeEvalable 70// GraphNodeEvalable
71func (n *NodeApplyableOutput) EvalTree() EvalNode { 71func (n *NodeApplyableOutput) EvalTree() EvalNode {
72 return &EvalOpFilter{ 72 return &EvalSequence{
73 Ops: []walkOperation{walkRefresh, walkPlan, walkApply, 73 Nodes: []EvalNode{
74 walkDestroy, walkInput, walkValidate}, 74 &EvalOpFilter{
75 Node: &EvalSequence{ 75 // Don't let interpolation errors stop Input, since it happens
76 Nodes: []EvalNode{ 76 // before Refresh.
77 &EvalWriteOutput{ 77 Ops: []walkOperation{walkInput},
78 Node: &EvalWriteOutput{
79 Name: n.Config.Name,
80 Sensitive: n.Config.Sensitive,
81 Value: n.Config.RawConfig,
82 ContinueOnErr: true,
83 },
84 },
85 &EvalOpFilter{
86 Ops: []walkOperation{walkRefresh, walkPlan, walkApply, walkValidate, walkDestroy, walkPlanDestroy},
87 Node: &EvalWriteOutput{
78 Name: n.Config.Name, 88 Name: n.Config.Name,
79 Sensitive: n.Config.Sensitive, 89 Sensitive: n.Config.Sensitive,
80 Value: n.Config.RawConfig, 90 Value: n.Config.RawConfig,
@@ -83,3 +93,61 @@ func (n *NodeApplyableOutput) EvalTree() EvalNode {
83 }, 93 },
84 } 94 }
85} 95}
96
 97// NodeDestroyableOutput represents an output that is "destroyable":
98// its application will remove the output from the state.
99type NodeDestroyableOutput struct {
100 PathValue []string
101 Config *config.Output // Config is the output in the config
102}
103
104func (n *NodeDestroyableOutput) Name() string {
105 result := fmt.Sprintf("output.%s (destroy)", n.Config.Name)
106 if len(n.PathValue) > 1 {
107 result = fmt.Sprintf("%s.%s", modulePrefixStr(n.PathValue), result)
108 }
109
110 return result
111}
112
113// GraphNodeSubPath
114func (n *NodeDestroyableOutput) Path() []string {
115 return n.PathValue
116}
117
118// RemovableIfNotTargeted
119func (n *NodeDestroyableOutput) RemoveIfNotTargeted() bool {
120 // We need to add this so that this node will be removed if
121 // it isn't targeted or a dependency of a target.
122 return true
123}
124
125// This will keep the destroy node in the graph if its corresponding output
126// node is also in the destroy graph.
127func (n *NodeDestroyableOutput) TargetDownstream(targetedDeps, untargetedDeps *dag.Set) bool {
128 return true
129}
130
131// GraphNodeReferencer
132func (n *NodeDestroyableOutput) References() []string {
133 var result []string
134 result = append(result, n.Config.DependsOn...)
135 result = append(result, ReferencesFromConfig(n.Config.RawConfig)...)
136 for _, v := range result {
137 split := strings.Split(v, "/")
138 for i, s := range split {
139 split[i] = s + ".destroy"
140 }
141
142 result = append(result, strings.Join(split, "/"))
143 }
144
145 return result
146}
147
148// GraphNodeEvalable
149func (n *NodeDestroyableOutput) EvalTree() EvalNode {
150 return &EvalDeleteOutput{
151 Name: n.Config.Name,
152 }
153}
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_output_orphan.go b/vendor/github.com/hashicorp/terraform/terraform/node_output_orphan.go
index 636a15d..0fd1554 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_output_orphan.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/node_output_orphan.go
@@ -19,6 +19,11 @@ func (n *NodeOutputOrphan) Name() string {
19 return result 19 return result
20} 20}
21 21
22// GraphNodeReferenceable
23func (n *NodeOutputOrphan) ReferenceableName() []string {
24 return []string{"output." + n.OutputName}
25}
26
22// GraphNodeSubPath 27// GraphNodeSubPath
23func (n *NodeOutputOrphan) Path() []string { 28func (n *NodeOutputOrphan) Path() []string {
24 return n.PathValue 29 return n.PathValue
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_provider.go b/vendor/github.com/hashicorp/terraform/terraform/node_provider.go
index 8e2c176..2071ab1 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_provider.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/node_provider.go
@@ -7,5 +7,5 @@ type NodeApplyableProvider struct {
7 7
8// GraphNodeEvalable 8// GraphNodeEvalable
9func (n *NodeApplyableProvider) EvalTree() EvalNode { 9func (n *NodeApplyableProvider) EvalTree() EvalNode {
10 return ProviderEvalTree(n.NameValue, n.ProviderConfig()) 10 return ProviderEvalTree(n, n.ProviderConfig())
11} 11}
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_provider_abstract.go b/vendor/github.com/hashicorp/terraform/terraform/node_provider_abstract.go
index 6cc8365..9e490f7 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_provider_abstract.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/node_provider_abstract.go
@@ -2,6 +2,7 @@ package terraform
2 2
3import ( 3import (
4 "fmt" 4 "fmt"
5 "strings"
5 6
6 "github.com/hashicorp/terraform/config" 7 "github.com/hashicorp/terraform/config"
7 "github.com/hashicorp/terraform/dag" 8 "github.com/hashicorp/terraform/dag"
@@ -24,13 +25,22 @@ type NodeAbstractProvider struct {
24 Config *config.ProviderConfig 25 Config *config.ProviderConfig
25} 26}
26 27
27func (n *NodeAbstractProvider) Name() string { 28func ResolveProviderName(name string, path []string) string {
28 result := fmt.Sprintf("provider.%s", n.NameValue) 29 if strings.Contains(name, "provider.") {
29 if len(n.PathValue) > 1 { 30 // already resolved
30 result = fmt.Sprintf("%s.%s", modulePrefixStr(n.PathValue), result) 31 return name
32 }
33
34 name = fmt.Sprintf("provider.%s", name)
35 if len(path) >= 1 {
36 name = fmt.Sprintf("%s.%s", modulePrefixStr(path), name)
31 } 37 }
32 38
33 return result 39 return name
40}
41
42func (n *NodeAbstractProvider) Name() string {
43 return ResolveProviderName(n.NameValue, n.PathValue)
34} 44}
35 45
36// GraphNodeSubPath 46// GraphNodeSubPath
@@ -60,12 +70,12 @@ func (n *NodeAbstractProvider) ProviderName() string {
60} 70}
61 71
62// GraphNodeProvider 72// GraphNodeProvider
63func (n *NodeAbstractProvider) ProviderConfig() *config.RawConfig { 73func (n *NodeAbstractProvider) ProviderConfig() *config.ProviderConfig {
64 if n.Config == nil { 74 if n.Config == nil {
65 return nil 75 return nil
66 } 76 }
67 77
68 return n.Config.RawConfig 78 return n.Config
69} 79}
70 80
71// GraphNodeAttachProvider 81// GraphNodeAttachProvider
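
ResolveProviderName turns a bare provider name into a fully qualified address and passes already-qualified names through unchanged. A simplified, self-contained sketch of the rule follows; the module-prefix rendering is an assumption here, and the vendored modulePrefixStr may differ in edge cases:

package main

import (
	"fmt"
	"strings"
)

// Simplified sketch of the resolution rule introduced above. It assumes the
// root module path is []string{"root"} and that a child path such as
// []string{"root", "child"} renders as "module.child".
func resolveProviderName(name string, path []string) string {
	if strings.Contains(name, "provider.") {
		return name // already fully qualified
	}
	name = "provider." + name
	if len(path) > 1 {
		parts := make([]string, 0, (len(path)-1)*2)
		for _, p := range path[1:] {
			parts = append(parts, "module", p)
		}
		name = strings.Join(parts, ".") + "." + name
	}
	return name
}

func main() {
	fmt.Println(resolveProviderName("aws", []string{"root"}))          // provider.aws
	fmt.Println(resolveProviderName("aws", []string{"root", "child"})) // module.child.provider.aws
	fmt.Println(resolveProviderName("provider.aws", nil))              // provider.aws
}
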
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_provider_disabled.go b/vendor/github.com/hashicorp/terraform/terraform/node_provider_disabled.go
index 25e7e62..a00bc46 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_provider_disabled.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/node_provider_disabled.go
@@ -20,7 +20,7 @@ func (n *NodeDisabledProvider) EvalTree() EvalNode {
20 var resourceConfig *ResourceConfig 20 var resourceConfig *ResourceConfig
21 return &EvalSequence{ 21 return &EvalSequence{
22 Nodes: []EvalNode{ 22 Nodes: []EvalNode{
23 &EvalInterpolate{ 23 &EvalInterpolateProvider{
24 Config: n.ProviderConfig(), 24 Config: n.ProviderConfig(),
25 Output: &resourceConfig, 25 Output: &resourceConfig,
26 }, 26 },
@@ -29,10 +29,6 @@ func (n *NodeDisabledProvider) EvalTree() EvalNode {
29 Config: &resourceConfig, 29 Config: &resourceConfig,
30 Output: &resourceConfig, 30 Output: &resourceConfig,
31 }, 31 },
32 &EvalSetProviderConfig{
33 Provider: n.ProviderName(),
34 Config: &resourceConfig,
35 },
36 }, 32 },
37 } 33 }
38} 34}
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_resource_abstract.go b/vendor/github.com/hashicorp/terraform/terraform/node_resource_abstract.go
index 50bb707..73509c8 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_resource_abstract.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/node_resource_abstract.go
@@ -33,6 +33,9 @@ type NodeAbstractResource struct {
33 ResourceState *ResourceState // ResourceState is the ResourceState for this 33 ResourceState *ResourceState // ResourceState is the ResourceState for this
34 34
35 Targets []ResourceAddress // Set from GraphNodeTargetable 35 Targets []ResourceAddress // Set from GraphNodeTargetable
36
37 // The address of the provider this resource will use
38 ResolvedProvider string
36} 39}
37 40
38func (n *NodeAbstractResource) Name() string { 41func (n *NodeAbstractResource) Name() string {
@@ -170,20 +173,24 @@ func (n *NodeAbstractResource) StateReferences() []string {
170 return deps 173 return deps
171} 174}
172 175
176func (n *NodeAbstractResource) SetProvider(p string) {
177 n.ResolvedProvider = p
178}
179
173// GraphNodeProviderConsumer 180// GraphNodeProviderConsumer
174func (n *NodeAbstractResource) ProvidedBy() []string { 181func (n *NodeAbstractResource) ProvidedBy() string {
175 // If we have a config we prefer that above all else 182 // If we have a config we prefer that above all else
176 if n.Config != nil { 183 if n.Config != nil {
177 return []string{resourceProvider(n.Config.Type, n.Config.Provider)} 184 return resourceProvider(n.Config.Type, n.Config.Provider)
178 } 185 }
179 186
180 // If we have state, then we will use the provider from there 187 // If we have state, then we will use the provider from there
181 if n.ResourceState != nil && n.ResourceState.Provider != "" { 188 if n.ResourceState != nil && n.ResourceState.Provider != "" {
182 return []string{n.ResourceState.Provider} 189 return n.ResourceState.Provider
183 } 190 }
184 191
185 // Use our type 192 // Use our type
186 return []string{resourceProvider(n.Addr.Type, "")} 193 return resourceProvider(n.Addr.Type, "")
187} 194}
188 195
189// GraphNodeProvisionerConsumer 196// GraphNodeProvisionerConsumer
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_resource_apply.go b/vendor/github.com/hashicorp/terraform/terraform/node_resource_apply.go
index 3599782..40ee1cf 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_resource_apply.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/node_resource_apply.go
@@ -124,6 +124,27 @@ func (n *NodeApplyableResource) evalTreeDataResource(
124 Then: EvalNoop{}, 124 Then: EvalNoop{},
125 }, 125 },
126 126
127 // Normally we interpolate count as a preparation step before
128 // a DynamicExpand, but an apply graph has pre-expanded nodes
129 // and so the count would otherwise never be interpolated.
130 //
131 // This is redundant when there are multiple instances created
132 // from the same config (count > 1) but harmless since the
133 // underlying structures have mutexes to make this concurrency-safe.
134 //
135 // In most cases this isn't actually needed because we dealt with
136 // all of the counts during the plan walk, but we do it here
137 // for completeness because other code assumes that the
138 // final count is always available during interpolation.
139 //
140 // Here we are just populating the interpolated value in-place
141 // inside this RawConfig object, like we would in
142 // NodeAbstractCountResource.
143 &EvalInterpolate{
144 Config: n.Config.RawCount,
145 ContinueOnErr: true,
146 },
147
127 // We need to re-interpolate the config here, rather than 148 // We need to re-interpolate the config here, rather than
128 // just using the diff's values directly, because we've 149 // just using the diff's values directly, because we've
129 // potentially learned more variable values during the 150 // potentially learned more variable values during the
@@ -135,7 +156,7 @@ func (n *NodeApplyableResource) evalTreeDataResource(
135 }, 156 },
136 157
137 &EvalGetProvider{ 158 &EvalGetProvider{
138 Name: n.ProvidedBy()[0], 159 Name: n.ResolvedProvider,
139 Output: &provider, 160 Output: &provider,
140 }, 161 },
141 162
@@ -158,7 +179,7 @@ func (n *NodeApplyableResource) evalTreeDataResource(
158 &EvalWriteState{ 179 &EvalWriteState{
159 Name: stateId, 180 Name: stateId,
160 ResourceType: n.Config.Type, 181 ResourceType: n.Config.Type,
161 Provider: n.Config.Provider, 182 Provider: n.ResolvedProvider,
162 Dependencies: stateDeps, 183 Dependencies: stateDeps,
163 State: &state, 184 State: &state,
164 }, 185 },
@@ -236,13 +257,35 @@ func (n *NodeApplyableResource) evalTreeManagedResource(
236 }, 257 },
237 }, 258 },
238 259
260 // Normally we interpolate count as a preparation step before
261 // a DynamicExpand, but an apply graph has pre-expanded nodes
262 // and so the count would otherwise never be interpolated.
263 //
264 // This is redundant when there are multiple instances created
265 // from the same config (count > 1) but harmless since the
266 // underlying structures have mutexes to make this concurrency-safe.
267 //
268 // In most cases this isn't actually needed because we dealt with
269 // all of the counts during the plan walk, but we need to do this
270 // in order to support interpolation of resource counts from
271 // apply-time-interpolated expressions, such as those in
272 // "provisioner" blocks.
273 //
274 // Here we are just populating the interpolated value in-place
275 // inside this RawConfig object, like we would in
276 // NodeAbstractCountResource.
277 &EvalInterpolate{
278 Config: n.Config.RawCount,
279 ContinueOnErr: true,
280 },
281
239 &EvalInterpolate{ 282 &EvalInterpolate{
240 Config: n.Config.RawConfig.Copy(), 283 Config: n.Config.RawConfig.Copy(),
241 Resource: resource, 284 Resource: resource,
242 Output: &resourceConfig, 285 Output: &resourceConfig,
243 }, 286 },
244 &EvalGetProvider{ 287 &EvalGetProvider{
245 Name: n.ProvidedBy()[0], 288 Name: n.ResolvedProvider,
246 Output: &provider, 289 Output: &provider,
247 }, 290 },
248 &EvalReadState{ 291 &EvalReadState{
@@ -283,7 +326,7 @@ func (n *NodeApplyableResource) evalTreeManagedResource(
283 }, 326 },
284 327
285 &EvalGetProvider{ 328 &EvalGetProvider{
286 Name: n.ProvidedBy()[0], 329 Name: n.ResolvedProvider,
287 Output: &provider, 330 Output: &provider,
288 }, 331 },
289 &EvalReadState{ 332 &EvalReadState{
@@ -308,7 +351,7 @@ func (n *NodeApplyableResource) evalTreeManagedResource(
308 &EvalWriteState{ 351 &EvalWriteState{
309 Name: stateId, 352 Name: stateId,
310 ResourceType: n.Config.Type, 353 ResourceType: n.Config.Type,
311 Provider: n.Config.Provider, 354 Provider: n.ResolvedProvider,
312 Dependencies: stateDeps, 355 Dependencies: stateDeps,
313 State: &state, 356 State: &state,
314 }, 357 },
@@ -332,7 +375,7 @@ func (n *NodeApplyableResource) evalTreeManagedResource(
332 Else: &EvalWriteState{ 375 Else: &EvalWriteState{
333 Name: stateId, 376 Name: stateId,
334 ResourceType: n.Config.Type, 377 ResourceType: n.Config.Type,
335 Provider: n.Config.Provider, 378 Provider: n.ResolvedProvider,
336 Dependencies: stateDeps, 379 Dependencies: stateDeps,
337 State: &state, 380 State: &state,
338 }, 381 },
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_resource_destroy.go b/vendor/github.com/hashicorp/terraform/terraform/node_resource_destroy.go
index c2efd2c..657bbee 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_resource_destroy.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/node_resource_destroy.go
@@ -102,8 +102,9 @@ func (n *NodeDestroyResource) DynamicExpand(ctx EvalContext) (*Graph, error) {
102 102
103 // We want deposed resources in the state to be destroyed 103 // We want deposed resources in the state to be destroyed
104 steps = append(steps, &DeposedTransformer{ 104 steps = append(steps, &DeposedTransformer{
105 State: state, 105 State: state,
106 View: n.Addr.stateId(), 106 View: n.Addr.stateId(),
107 ResolvedProvider: n.ResolvedProvider,
107 }) 108 })
108 109
109 // Target 110 // Target
@@ -148,7 +149,9 @@ func (n *NodeDestroyResource) EvalTree() EvalNode {
148 // Get our state 149 // Get our state
149 rs := n.ResourceState 150 rs := n.ResourceState
150 if rs == nil { 151 if rs == nil {
151 rs = &ResourceState{} 152 rs = &ResourceState{
153 Provider: n.ResolvedProvider,
154 }
152 } 155 }
153 156
154 var diffApply *InstanceDiff 157 var diffApply *InstanceDiff
@@ -188,7 +191,7 @@ func (n *NodeDestroyResource) EvalTree() EvalNode {
188 &EvalInstanceInfo{Info: info}, 191 &EvalInstanceInfo{Info: info},
189 192
190 &EvalGetProvider{ 193 &EvalGetProvider{
191 Name: n.ProvidedBy()[0], 194 Name: n.ResolvedProvider,
192 Output: &provider, 195 Output: &provider,
193 }, 196 },
194 &EvalReadState{ 197 &EvalReadState{
@@ -272,7 +275,7 @@ func (n *NodeDestroyResource) EvalTree() EvalNode {
272 &EvalWriteState{ 275 &EvalWriteState{
273 Name: stateId, 276 Name: stateId,
274 ResourceType: n.Addr.Type, 277 ResourceType: n.Addr.Type,
275 Provider: rs.Provider, 278 Provider: n.ResolvedProvider,
276 Dependencies: rs.Dependencies, 279 Dependencies: rs.Dependencies,
277 State: &state, 280 State: &state,
278 }, 281 },
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_resource_plan.go b/vendor/github.com/hashicorp/terraform/terraform/node_resource_plan.go
index 52bbf88..1afae7a 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_resource_plan.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/node_resource_plan.go
@@ -27,6 +27,7 @@ func (n *NodePlannableResource) DynamicExpand(ctx EvalContext) (*Graph, error) {
27 concreteResource := func(a *NodeAbstractResource) dag.Vertex { 27 concreteResource := func(a *NodeAbstractResource) dag.Vertex {
28 // Add the config and state since we don't do that via transforms 28 // Add the config and state since we don't do that via transforms
29 a.Config = n.Config 29 a.Config = n.Config
30 a.ResolvedProvider = n.ResolvedProvider
30 31
31 return &NodePlannableResourceInstance{ 32 return &NodePlannableResourceInstance{
32 NodeAbstractResource: a, 33 NodeAbstractResource: a,
@@ -37,6 +38,7 @@ func (n *NodePlannableResource) DynamicExpand(ctx EvalContext) (*Graph, error) {
37 concreteResourceOrphan := func(a *NodeAbstractResource) dag.Vertex { 38 concreteResourceOrphan := func(a *NodeAbstractResource) dag.Vertex {
38 // Add the config and state since we don't do that via transforms 39 // Add the config and state since we don't do that via transforms
39 a.Config = n.Config 40 a.Config = n.Config
41 a.ResolvedProvider = n.ResolvedProvider
40 42
41 return &NodePlannableResourceOrphan{ 43 return &NodePlannableResourceOrphan{
42 NodeAbstractResource: a, 44 NodeAbstractResource: a,
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_resource_plan_instance.go b/vendor/github.com/hashicorp/terraform/terraform/node_resource_plan_instance.go
index b529569..7d9fcdd 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_resource_plan_instance.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/node_resource_plan_instance.go
@@ -97,7 +97,7 @@ func (n *NodePlannableResourceInstance) evalTreeDataResource(
97 }, 97 },
98 98
99 &EvalGetProvider{ 99 &EvalGetProvider{
100 Name: n.ProvidedBy()[0], 100 Name: n.ResolvedProvider,
101 Output: &provider, 101 Output: &provider,
102 }, 102 },
103 103
@@ -112,7 +112,7 @@ func (n *NodePlannableResourceInstance) evalTreeDataResource(
112 &EvalWriteState{ 112 &EvalWriteState{
113 Name: stateId, 113 Name: stateId,
114 ResourceType: n.Config.Type, 114 ResourceType: n.Config.Type,
115 Provider: n.Config.Provider, 115 Provider: n.ResolvedProvider,
116 Dependencies: stateDeps, 116 Dependencies: stateDeps,
117 State: &state, 117 State: &state,
118 }, 118 },
@@ -143,7 +143,7 @@ func (n *NodePlannableResourceInstance) evalTreeManagedResource(
143 Output: &resourceConfig, 143 Output: &resourceConfig,
144 }, 144 },
145 &EvalGetProvider{ 145 &EvalGetProvider{
146 Name: n.ProvidedBy()[0], 146 Name: n.ResolvedProvider,
147 Output: &provider, 147 Output: &provider,
148 }, 148 },
149 // Re-run validation to catch any errors we missed, e.g. type 149 // Re-run validation to catch any errors we missed, e.g. type
@@ -177,7 +177,7 @@ func (n *NodePlannableResourceInstance) evalTreeManagedResource(
177 &EvalWriteState{ 177 &EvalWriteState{
178 Name: stateId, 178 Name: stateId,
179 ResourceType: n.Config.Type, 179 ResourceType: n.Config.Type,
180 Provider: n.Config.Provider, 180 Provider: n.ResolvedProvider,
181 Dependencies: stateDeps, 181 Dependencies: stateDeps,
182 State: &state, 182 State: &state,
183 }, 183 },
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_resource_refresh.go b/vendor/github.com/hashicorp/terraform/terraform/node_resource_refresh.go
index cd4fe92..697bd49 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_resource_refresh.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/node_resource_refresh.go
@@ -30,6 +30,7 @@ func (n *NodeRefreshableManagedResource) DynamicExpand(ctx EvalContext) (*Graph,
30 concreteResource := func(a *NodeAbstractResource) dag.Vertex { 30 concreteResource := func(a *NodeAbstractResource) dag.Vertex {
31 // Add the config and state since we don't do that via transforms 31 // Add the config and state since we don't do that via transforms
32 a.Config = n.Config 32 a.Config = n.Config
33 a.ResolvedProvider = n.ResolvedProvider
33 34
34 return &NodeRefreshableManagedResourceInstance{ 35 return &NodeRefreshableManagedResourceInstance{
35 NodeAbstractResource: a, 36 NodeAbstractResource: a,
@@ -149,7 +150,7 @@ func (n *NodeRefreshableManagedResourceInstance) evalTreeManagedResource() EvalN
149 return &EvalSequence{ 150 return &EvalSequence{
150 Nodes: []EvalNode{ 151 Nodes: []EvalNode{
151 &EvalGetProvider{ 152 &EvalGetProvider{
152 Name: n.ProvidedBy()[0], 153 Name: n.ResolvedProvider,
153 Output: &provider, 154 Output: &provider,
154 }, 155 },
155 &EvalReadState{ 156 &EvalReadState{
@@ -165,7 +166,7 @@ func (n *NodeRefreshableManagedResourceInstance) evalTreeManagedResource() EvalN
165 &EvalWriteState{ 166 &EvalWriteState{
166 Name: stateId, 167 Name: stateId,
167 ResourceType: n.ResourceState.Type, 168 ResourceType: n.ResourceState.Type,
168 Provider: n.ResourceState.Provider, 169 Provider: n.ResolvedProvider,
169 Dependencies: n.ResourceState.Dependencies, 170 Dependencies: n.ResourceState.Dependencies,
170 State: &state, 171 State: &state,
171 }, 172 },
@@ -212,15 +213,21 @@ func (n *NodeRefreshableManagedResourceInstance) evalTreeManagedResourceNoState(
212 // Determine the dependencies for the state. 213 // Determine the dependencies for the state.
213 stateDeps := n.StateReferences() 214 stateDeps := n.StateReferences()
214 215
216 // n.Config can be nil if the config and state don't match
217 var raw *config.RawConfig
218 if n.Config != nil {
219 raw = n.Config.RawConfig.Copy()
220 }
221
215 return &EvalSequence{ 222 return &EvalSequence{
216 Nodes: []EvalNode{ 223 Nodes: []EvalNode{
217 &EvalInterpolate{ 224 &EvalInterpolate{
218 Config: n.Config.RawConfig.Copy(), 225 Config: raw,
219 Resource: resource, 226 Resource: resource,
220 Output: &resourceConfig, 227 Output: &resourceConfig,
221 }, 228 },
222 &EvalGetProvider{ 229 &EvalGetProvider{
223 Name: n.ProvidedBy()[0], 230 Name: n.ResolvedProvider,
224 Output: &provider, 231 Output: &provider,
225 }, 232 },
226 // Re-run validation to catch any errors we missed, e.g. type 233 // Re-run validation to catch any errors we missed, e.g. type
@@ -250,7 +257,7 @@ func (n *NodeRefreshableManagedResourceInstance) evalTreeManagedResourceNoState(
250 &EvalWriteState{ 257 &EvalWriteState{
251 Name: stateID, 258 Name: stateID,
252 ResourceType: n.Config.Type, 259 ResourceType: n.Config.Type,
253 Provider: n.Config.Provider, 260 Provider: n.ResolvedProvider,
254 Dependencies: stateDeps, 261 Dependencies: stateDeps,
255 State: &state, 262 State: &state,
256 }, 263 },
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_resource_validate.go b/vendor/github.com/hashicorp/terraform/terraform/node_resource_validate.go
index f528f24..0df223d 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_resource_validate.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/node_resource_validate.go
@@ -39,6 +39,7 @@ func (n *NodeValidatableResource) DynamicExpand(ctx EvalContext) (*Graph, error)
39 concreteResource := func(a *NodeAbstractResource) dag.Vertex { 39 concreteResource := func(a *NodeAbstractResource) dag.Vertex {
40 // Add the config and state since we don't do that via transforms 40 // Add the config and state since we don't do that via transforms
41 a.Config = n.Config 41 a.Config = n.Config
42 a.ResolvedProvider = n.ResolvedProvider
42 43
43 return &NodeValidatableResourceInstance{ 44 return &NodeValidatableResourceInstance{
44 NodeAbstractResource: a, 45 NodeAbstractResource: a,
@@ -108,7 +109,7 @@ func (n *NodeValidatableResourceInstance) EvalTree() EvalNode {
108 Config: &n.Config.RawConfig, 109 Config: &n.Config.RawConfig,
109 }, 110 },
110 &EvalGetProvider{ 111 &EvalGetProvider{
111 Name: n.ProvidedBy()[0], 112 Name: n.ResolvedProvider,
112 Output: &provider, 113 Output: &provider,
113 }, 114 },
114 &EvalInterpolate{ 115 &EvalInterpolate{
diff --git a/vendor/github.com/hashicorp/terraform/terraform/path.go b/vendor/github.com/hashicorp/terraform/terraform/path.go
index ca99685..51dd412 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/path.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/path.go
@@ -1,24 +1,10 @@
1package terraform 1package terraform
2 2
3import ( 3import (
4 "crypto/md5" 4 "strings"
5 "encoding/hex"
6) 5)
7 6
8// PathCacheKey returns a cache key for a module path. 7// PathCacheKey returns a cache key for a module path.
9//
10// TODO: test
11func PathCacheKey(path []string) string { 8func PathCacheKey(path []string) string {
12 // There is probably a better way to do this, but this is working for now. 9 return strings.Join(path, "|")
13 // We just create an MD5 hash of all the MD5 hashes of all the path
14 // elements. This gets us the property that it is unique per ordering.
15 hash := md5.New()
16 for _, p := range path {
17 single := md5.Sum([]byte(p))
18 if _, err := hash.Write(single[:]); err != nil {
19 panic(err)
20 }
21 }
22
23 return hex.EncodeToString(hash.Sum(nil))
24} 10}
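
PathCacheKey no longer hashes the path with MD5; the key is simply the path joined with "|", which stays unique per ordering as long as path segments never contain that character. For example:

package main

import (
	"fmt"
	"strings"
)

// Same rule as the new PathCacheKey: human-readable and unique per ordering,
// assuming module path segments never contain "|".
func pathCacheKey(path []string) string {
	return strings.Join(path, "|")
}

func main() {
	fmt.Println(pathCacheKey([]string{"root", "network", "subnets"})) // root|network|subnets
}
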
diff --git a/vendor/github.com/hashicorp/terraform/terraform/plan.go b/vendor/github.com/hashicorp/terraform/terraform/plan.go
index 51d6652..30db195 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/plan.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/plan.go
@@ -10,6 +10,7 @@ import (
10 "sync" 10 "sync"
11 11
12 "github.com/hashicorp/terraform/config/module" 12 "github.com/hashicorp/terraform/config/module"
13 "github.com/hashicorp/terraform/version"
13) 14)
14 15
15func init() { 16func init() {
@@ -26,18 +27,54 @@ func init() {
26// necessary to make a change: the state, diff, config, backend config, etc. 27// necessary to make a change: the state, diff, config, backend config, etc.
27// This is so that it can run alone without any other data. 28// This is so that it can run alone without any other data.
28type Plan struct { 29type Plan struct {
29 Diff *Diff 30 // Diff describes the resource actions that must be taken when this
30 Module *module.Tree 31 // plan is applied.
31 State *State 32 Diff *Diff
32 Vars map[string]interface{} 33
34 // Module represents the entire configuration that was present when this
35 // plan was created.
36 Module *module.Tree
37
38 // State is the Terraform state that was current when this plan was
39 // created.
40 //
41 // It is not allowed to apply a plan that has a stale state, since its
42 // diff could be outdated.
43 State *State
44
45 // Vars retains the variables that were set when creating the plan, so
46 // that the same variables can be applied during apply.
47 Vars map[string]interface{}
48
49 // Targets, if non-empty, contains a set of resource address strings that
50 // identify graph nodes that were selected as targets for plan.
51 //
52 // When targets are set, any graph node that is not directly targeted or
53 // indirectly targeted via dependencies is excluded from the graph.
33 Targets []string 54 Targets []string
34 55
56 // TerraformVersion is the version of Terraform that was used to create
57 // this plan.
58 //
59 // It is not allowed to apply a plan created with a different version of
60 // Terraform, since the other fields of this structure may be interpreted
61 // in different ways between versions.
35 TerraformVersion string 62 TerraformVersion string
36 ProviderSHA256s map[string][]byte 63
64 // ProviderSHA256s is a map giving the SHA256 hashes of the exact binaries
65 // used as plugins for each provider during plan.
66 //
67 // These must match between plan and apply to ensure that the diff is
68 // correctly interpreted, since different provider versions may have
69 // different attributes or attribute value constraints.
70 ProviderSHA256s map[string][]byte
37 71
38 // Backend is the backend that this plan should use and store data with. 72 // Backend is the backend that this plan should use and store data with.
39 Backend *BackendState 73 Backend *BackendState
40 74
75 // Destroy indicates that this plan was created for a full destroy operation
76 Destroy bool
77
41 once sync.Once 78 once sync.Once
42} 79}
43 80
@@ -67,6 +104,7 @@ func (p *Plan) contextOpts(base *ContextOpts) (*ContextOpts, error) {
67 opts.Module = p.Module 104 opts.Module = p.Module
68 opts.Targets = p.Targets 105 opts.Targets = p.Targets
69 opts.ProviderSHA256s = p.ProviderSHA256s 106 opts.ProviderSHA256s = p.ProviderSHA256s
107 opts.Destroy = p.Destroy
70 108
71 if opts.State == nil { 109 if opts.State == nil {
72 opts.State = p.State 110 opts.State = p.State
@@ -79,10 +117,10 @@ func (p *Plan) contextOpts(base *ContextOpts) (*ContextOpts, error) {
79 // the state, there is little chance that these aren't actually equal. 117 // the state, there is little chance that these aren't actually equal.
80 // Log the error condition for reference, but continue with the state 118 // Log the error condition for reference, but continue with the state
81 // we have. 119 // we have.
82 log.Println("[WARNING] Plan state and ContextOpts state are not equal") 120 log.Println("[WARN] Plan state and ContextOpts state are not equal")
83 } 121 }
84 122
85 thisVersion := VersionString() 123 thisVersion := version.String()
86 if p.TerraformVersion != "" && p.TerraformVersion != thisVersion { 124 if p.TerraformVersion != "" && p.TerraformVersion != thisVersion {
87 return nil, fmt.Errorf( 125 return nil, fmt.Errorf(
88 "plan was created with a different version of Terraform (created with %s, but running %s)", 126 "plan was created with a different version of Terraform (created with %s, but running %s)",
diff --git a/vendor/github.com/hashicorp/terraform/terraform/resource.go b/vendor/github.com/hashicorp/terraform/terraform/resource.go
index 0acf0be..2f5ebb5 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/resource.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/resource.go
@@ -88,6 +88,46 @@ func (i *InstanceInfo) HumanId() string {
88 i.Id) 88 i.Id)
89} 89}
90 90
91// ResourceAddress returns the address of the resource that the receiver is describing.
92func (i *InstanceInfo) ResourceAddress() *ResourceAddress {
93 // GROSS: for tainted and deposed instances, their status gets appended
94 // to i.Id to create a unique id for the graph node. Historically these
95 // ids were displayed to the user, so it's designed to be human-readable:
96 // "aws_instance.bar.0 (deposed #0)"
97 //
98 // So here we detect such suffixes and try to interpret them back to
99 // their original meaning so we can then produce a ResourceAddress
100 // with a suitable InstanceType.
101 id := i.Id
102 instanceType := TypeInvalid
103 if idx := strings.Index(id, " ("); idx != -1 {
104 remain := id[idx:]
105 id = id[:idx]
106
107 switch {
108 case strings.Contains(remain, "tainted"):
109 instanceType = TypeTainted
110 case strings.Contains(remain, "deposed"):
111 instanceType = TypeDeposed
112 }
113 }
114
115 addr, err := parseResourceAddressInternal(id)
116 if err != nil {
117 // should never happen, since that would indicate a bug in the
118 // code that constructed this InstanceInfo.
119 panic(fmt.Errorf("InstanceInfo has invalid Id %s", id))
120 }
121 if len(i.ModulePath) > 1 {
122 addr.Path = i.ModulePath[1:] // trim off "root" prefix, which is implied
123 }
124 if instanceType != TypeInvalid {
125 addr.InstanceTypeSet = true
126 addr.InstanceType = instanceType
127 }
128 return addr
129}
130
91func (i *InstanceInfo) uniqueId() string { 131func (i *InstanceInfo) uniqueId() string {
92 prefix := i.HumanId() 132 prefix := i.HumanId()
93 if v := i.uniqueExtra; v != "" { 133 if v := i.uniqueExtra; v != "" {
@@ -306,7 +346,7 @@ func (c *ResourceConfig) get(
306 if err != nil { 346 if err != nil {
307 return nil, false 347 return nil, false
308 } 348 }
309 if i >= int64(cv.Len()) { 349 if int(i) < 0 || int(i) >= cv.Len() {
310 return nil, false 350 return nil, false
311 } 351 }
312 current = cv.Index(int(i)).Interface() 352 current = cv.Index(int(i)).Interface()
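
The new InstanceInfo.ResourceAddress recovers a structured address from the human-readable instance id, including the "(tainted)"/"(deposed ...)" suffixes described in its comment. A rough usage sketch with a hypothetical id; the expected field values are inferred from the code above:

package main

import (
	"fmt"

	"github.com/hashicorp/terraform/terraform"
)

func main() {
	// Hypothetical instance id in the suffixed form the comment describes.
	info := &terraform.InstanceInfo{
		Id:         "aws_instance.web.0 (deposed #0)",
		ModulePath: []string{"root", "child"},
	}
	addr := info.ResourceAddress()
	fmt.Println(addr.Type, addr.Name, addr.Index, addr.InstanceType == terraform.TypeDeposed)
	// expected: aws_instance web 0 true (with addr.Path trimmed to []string{"child"})
}
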
diff --git a/vendor/github.com/hashicorp/terraform/terraform/resource_address.go b/vendor/github.com/hashicorp/terraform/terraform/resource_address.go
index 8badca8..a64f5d8 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/resource_address.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/resource_address.go
@@ -42,9 +42,9 @@ func (r *ResourceAddress) Copy() *ResourceAddress {
42 Type: r.Type, 42 Type: r.Type,
43 Mode: r.Mode, 43 Mode: r.Mode,
44 } 44 }
45 for _, p := range r.Path { 45
46 n.Path = append(n.Path, p) 46 n.Path = append(n.Path, r.Path...)
47 } 47
48 return n 48 return n
49} 49}
50 50
@@ -362,40 +362,41 @@ func (addr *ResourceAddress) Less(other *ResourceAddress) bool {
362 362
363 switch { 363 switch {
364 364
365 case len(addr.Path) < len(other.Path): 365 case len(addr.Path) != len(other.Path):
366 return true 366 return len(addr.Path) < len(other.Path)
367 367
368 case !reflect.DeepEqual(addr.Path, other.Path): 368 case !reflect.DeepEqual(addr.Path, other.Path):
369 // If the two paths are the same length but don't match, we'll just 369 // If the two paths are the same length but don't match, we'll just
370 // cheat and compare the string forms since it's easier than 370 // cheat and compare the string forms since it's easier than
371 // comparing all of the path segments in turn. 371 // comparing all of the path segments in turn, and lexicographic
372 // comparison is correct for the module path portion.
372 addrStr := addr.String() 373 addrStr := addr.String()
373 otherStr := other.String() 374 otherStr := other.String()
374 return addrStr < otherStr 375 return addrStr < otherStr
375 376
376 case addr.Mode == config.DataResourceMode && other.Mode != config.DataResourceMode: 377 case addr.Mode != other.Mode:
377 return true 378 return addr.Mode == config.DataResourceMode
378 379
379 case addr.Type < other.Type: 380 case addr.Type != other.Type:
380 return true 381 return addr.Type < other.Type
381 382
382 case addr.Name < other.Name: 383 case addr.Name != other.Name:
383 return true 384 return addr.Name < other.Name
384 385
385 case addr.Index < other.Index: 386 case addr.Index != other.Index:
386 // Since "Index" is -1 for an un-indexed address, this also conveniently 387 // Since "Index" is -1 for an un-indexed address, this also conveniently
387 // sorts unindexed addresses before indexed ones, should they both 388 // sorts unindexed addresses before indexed ones, should they both
388 // appear for some reason. 389 // appear for some reason.
389 return true 390 return addr.Index < other.Index
390 391
391 case other.InstanceTypeSet && !addr.InstanceTypeSet: 392 case addr.InstanceTypeSet != other.InstanceTypeSet:
392 return true 393 return !addr.InstanceTypeSet
393 394
394 case addr.InstanceType < other.InstanceType: 395 case addr.InstanceType != other.InstanceType:
395 // InstanceType is actually an enum, so this is just an arbitrary 396 // InstanceType is actually an enum, so this is just an arbitrary
396 // sort based on the enum numeric values, and thus not particularly 397 // sort based on the enum numeric values, and thus not particularly
397 // meaningful. 398 // meaningful.
398 return true 399 return addr.InstanceType < other.InstanceType
399 400
400 default: 401 default:
401 return false 402 return false
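
The rewritten Less above turns every branch into a symmetric tie-break (the old form could report both a.Less(b) and b.Less(a) as true when an earlier field differed in the other direction), so it can drive a standard library sort directly; a short sketch, with the "sort" import assumed:

    // Sketch only: order addresses using the comparison defined above.
    func sortResourceAddresses(addrs []*ResourceAddress) {
        sort.Slice(addrs, func(i, j int) bool {
            return addrs[i].Less(addrs[j])
        })
    }
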
diff --git a/vendor/github.com/hashicorp/terraform/terraform/resource_provider.go b/vendor/github.com/hashicorp/terraform/terraform/resource_provider.go
index 7d78f67..93fd14f 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/resource_provider.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/resource_provider.go
@@ -21,6 +21,15 @@ type ResourceProvider interface {
21 * Functions related to the provider 21 * Functions related to the provider
22 *********************************************************************/ 22 *********************************************************************/
23 23
24 // ProviderSchema returns the config schema for the main provider
25 // configuration, as would appear in a "provider" block in the
26 // configuration files.
27 //
28 // Currently not all providers support schema. Callers must therefore
29 // first call Resources and DataSources and ensure that at least one
30 // resource or data source has the SchemaAvailable flag set.
31 GetSchema(*ProviderSchemaRequest) (*ProviderSchema, error)
32
24 // Input is called to ask the provider to ask the user for input 33 // Input is called to ask the provider to ask the user for input
 25 	// for completing the configuration if necessary. 34 	// for completing the configuration if necessary.
26 // 35 //
@@ -183,11 +192,25 @@ type ResourceProviderCloser interface {
183type ResourceType struct { 192type ResourceType struct {
184 Name string // Name of the resource, example "instance" (no provider prefix) 193 Name string // Name of the resource, example "instance" (no provider prefix)
185 Importable bool // Whether this resource supports importing 194 Importable bool // Whether this resource supports importing
195
196 // SchemaAvailable is set if the provider supports the ProviderSchema,
197 // ResourceTypeSchema and DataSourceSchema methods. Although it is
198 // included on each resource type, it's actually a provider-wide setting
199 // that's smuggled here only because that avoids a breaking change to
200 // the plugin protocol.
201 SchemaAvailable bool
186} 202}
187 203
188// DataSource is a data source that a resource provider implements. 204// DataSource is a data source that a resource provider implements.
189type DataSource struct { 205type DataSource struct {
190 Name string 206 Name string
207
208 // SchemaAvailable is set if the provider supports the ProviderSchema,
209 // ResourceTypeSchema and DataSourceSchema methods. Although it is
 210	// included on each data source, it's actually a provider-wide setting
211 // that's smuggled here only because that avoids a breaking change to
212 // the plugin protocol.
213 SchemaAvailable bool
191} 214}
192 215
193// ResourceProviderResolver is an interface implemented by objects that are 216// ResourceProviderResolver is an interface implemented by objects that are
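
Per the new GetSchema doc comment, a caller is expected to confirm via Resources and DataSources that the provider advertises SchemaAvailable before requesting a schema; a hedged sketch of that check (the helper name is illustrative):

    // Sketch only: fetch a provider schema only when the provider advertises support.
    func providerSchemaIfAvailable(p ResourceProvider, req *ProviderSchemaRequest) (*ProviderSchema, error) {
        available := false
        for _, rt := range p.Resources() {
            if rt.SchemaAvailable {
                available = true
                break
            }
        }
        if !available {
            for _, ds := range p.DataSources() {
                if ds.SchemaAvailable {
                    available = true
                    break
                }
            }
        }
        if !available {
            return nil, nil // provider predates schema support
        }
        return p.GetSchema(req)
    }
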
diff --git a/vendor/github.com/hashicorp/terraform/terraform/resource_provider_mock.go b/vendor/github.com/hashicorp/terraform/terraform/resource_provider_mock.go
index f531533..4000e3d 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/resource_provider_mock.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/resource_provider_mock.go
@@ -1,6 +1,8 @@
1package terraform 1package terraform
2 2
3import "sync" 3import (
4 "sync"
5)
4 6
5// MockResourceProvider implements ResourceProvider but mocks out all the 7// MockResourceProvider implements ResourceProvider but mocks out all the
6// calls for testing purposes. 8// calls for testing purposes.
@@ -12,6 +14,10 @@ type MockResourceProvider struct {
12 14
13 CloseCalled bool 15 CloseCalled bool
14 CloseError error 16 CloseError error
17 GetSchemaCalled bool
18 GetSchemaRequest *ProviderSchemaRequest
19 GetSchemaReturn *ProviderSchema
20 GetSchemaReturnError error
15 InputCalled bool 21 InputCalled bool
16 InputInput UIInput 22 InputInput UIInput
17 InputConfig *ResourceConfig 23 InputConfig *ResourceConfig
@@ -92,8 +98,19 @@ func (p *MockResourceProvider) Close() error {
92 return p.CloseError 98 return p.CloseError
93} 99}
94 100
101func (p *MockResourceProvider) GetSchema(req *ProviderSchemaRequest) (*ProviderSchema, error) {
102 p.Lock()
103 defer p.Unlock()
104
105 p.GetSchemaCalled = true
106 p.GetSchemaRequest = req
107 return p.GetSchemaReturn, p.GetSchemaReturnError
108}
109
95func (p *MockResourceProvider) Input( 110func (p *MockResourceProvider) Input(
96 input UIInput, c *ResourceConfig) (*ResourceConfig, error) { 111 input UIInput, c *ResourceConfig) (*ResourceConfig, error) {
112 p.Lock()
113 defer p.Unlock()
97 p.InputCalled = true 114 p.InputCalled = true
98 p.InputInput = input 115 p.InputInput = input
99 p.InputConfig = c 116 p.InputConfig = c
@@ -186,6 +203,7 @@ func (p *MockResourceProvider) Diff(
186 p.DiffInfo = info 203 p.DiffInfo = info
187 p.DiffState = state 204 p.DiffState = state
188 p.DiffDesired = desired 205 p.DiffDesired = desired
206
189 if p.DiffFn != nil { 207 if p.DiffFn != nil {
190 return p.DiffFn(info, state, desired) 208 return p.DiffFn(info, state, desired)
191 } 209 }
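
A sketch of how the new GetSchema plumbing on MockResourceProvider might be driven from a test; the resource type name is a placeholder, and the "testing" and configschema imports are assumed:

    // Sketch only: assert that the mock records a GetSchema call.
    func TestMockProviderGetSchema(t *testing.T) {
        p := new(MockResourceProvider)
        p.GetSchemaReturn = &ProviderSchema{
            ResourceTypes: map[string]*configschema.Block{},
        }

        req := &ProviderSchemaRequest{ResourceTypes: []string{"aws_instance"}}
        if _, err := p.GetSchema(req); err != nil {
            t.Fatal(err)
        }
        if !p.GetSchemaCalled || p.GetSchemaRequest != req {
            t.Fatal("GetSchema call was not recorded")
        }
    }
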
diff --git a/vendor/github.com/hashicorp/terraform/terraform/schemas.go b/vendor/github.com/hashicorp/terraform/terraform/schemas.go
new file mode 100644
index 0000000..ec46efc
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/terraform/schemas.go
@@ -0,0 +1,34 @@
1package terraform
2
3import (
4 "github.com/hashicorp/terraform/config/configschema"
5)
6
7type Schemas struct {
8 Providers ProviderSchemas
9}
10
11// ProviderSchemas is a map from provider names to provider schemas.
12//
13// The names in this map are the direct plugin name (e.g. "aws") rather than
14// any alias name (e.g. "aws.foo"), since.
15type ProviderSchemas map[string]*ProviderSchema
16
17// ProviderSchema represents the schema for a provider's own configuration
18// and the configuration for some or all of its resources and data sources.
19//
20// The completeness of this structure depends on how it was constructed.
21// When constructed for a configuration, it will generally include only
22// resource types and data sources used by that configuration.
23type ProviderSchema struct {
24 Provider *configschema.Block
25 ResourceTypes map[string]*configschema.Block
26 DataSources map[string]*configschema.Block
27}
28
29// ProviderSchemaRequest is used to describe to a ResourceProvider which
30// aspects of schema are required, when calling the GetSchema method.
31type ProviderSchemaRequest struct {
32 ResourceTypes []string
33 DataSources []string
34}
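
A brief sketch of consuming the Schemas structures defined above once a provider schema has been fetched; the provider and resource type names are illustrative:

    // Sketch only: look up the schema block for one resource type, if known.
    func resourceTypeSchema(s *Schemas, provider, resType string) *configschema.Block {
        ps, ok := s.Providers[provider] // plugin name such as "aws", never an alias
        if !ok || ps == nil {
            return nil
        }
        return ps.ResourceTypes[resType] // nil when the type was not requested or is unknown
    }
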
diff --git a/vendor/github.com/hashicorp/terraform/terraform/shadow.go b/vendor/github.com/hashicorp/terraform/terraform/shadow.go
deleted file mode 100644
index 4632559..0000000
--- a/vendor/github.com/hashicorp/terraform/terraform/shadow.go
+++ /dev/null
@@ -1,28 +0,0 @@
1package terraform
2
3// Shadow is the interface that any "shadow" structures must implement.
4//
5// A shadow structure is an interface implementation (typically) that
6// shadows a real implementation and verifies that the same behavior occurs
7// on both. The semantics of this behavior are up to the interface itself.
8//
9// A shadow NEVER modifies real values or state. It must always be safe to use.
10//
11// For example, a ResourceProvider shadow ensures that the same operations
12// are done on the same resources with the same configurations.
13//
14// The typical usage of a shadow following this interface is to complete
15// the real operations, then call CloseShadow which tells the shadow that
16// the real side is done. Then, once the shadow is also complete, call
17// ShadowError to find any errors that may have been caught.
18type Shadow interface {
19 // CloseShadow tells the shadow that the REAL implementation is
20 // complete. Therefore, any calls that would block should now return
21 // immediately since no more changes will happen to the real side.
22 CloseShadow() error
23
24 // ShadowError returns the errors that the shadow has found.
25 // This should be called AFTER CloseShadow and AFTER the shadow is
26 // known to be complete (no more calls to it).
27 ShadowError() error
28}
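
The removed Shadow interface prescribed a strict call order; for readers of the deletion, the usage its comments describe amounts to roughly:

    // Sketch only: the close-then-collect-errors sequence documented above.
    func drainShadow(s Shadow) error {
        // Signal that the real side finished so blocked shadow calls return.
        if err := s.CloseShadow(); err != nil {
            return err
        }
        // Only meaningful after CloseShadow and once the shadow itself is complete.
        return s.ShadowError()
    }
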
diff --git a/vendor/github.com/hashicorp/terraform/terraform/shadow_components.go b/vendor/github.com/hashicorp/terraform/terraform/shadow_components.go
deleted file mode 100644
index 116cf84..0000000
--- a/vendor/github.com/hashicorp/terraform/terraform/shadow_components.go
+++ /dev/null
@@ -1,273 +0,0 @@
1package terraform
2
3import (
4 "fmt"
5 "sync"
6
7 "github.com/hashicorp/go-multierror"
8 "github.com/hashicorp/terraform/helper/shadow"
9)
10
11// newShadowComponentFactory creates a shadowed contextComponentFactory
12// so that requests to create new components result in both a real and
13// shadow side.
14func newShadowComponentFactory(
15 f contextComponentFactory) (contextComponentFactory, *shadowComponentFactory) {
16 // Create the shared data
17 shared := &shadowComponentFactoryShared{contextComponentFactory: f}
18
19 // Create the real side
20 real := &shadowComponentFactory{
21 shadowComponentFactoryShared: shared,
22 }
23
24 // Create the shadow
25 shadow := &shadowComponentFactory{
26 shadowComponentFactoryShared: shared,
27 Shadow: true,
28 }
29
30 return real, shadow
31}
32
33// shadowComponentFactory is the shadow side. Any components created
34// with this factory are fake and will not cause real work to happen.
35//
36// Unlike other shadowers, the shadow component factory will allow the
37// shadow to create _any_ component even if it is never requested on the
38// real side. This is because errors will happen later downstream as function
39// calls are made to the shadows that are never matched on the real side.
40type shadowComponentFactory struct {
41 *shadowComponentFactoryShared
42
43 Shadow bool // True if this should return the shadow
44 lock sync.Mutex
45}
46
47func (f *shadowComponentFactory) ResourceProvider(
48 n, uid string) (ResourceProvider, error) {
49 f.lock.Lock()
50 defer f.lock.Unlock()
51
52 real, shadow, err := f.shadowComponentFactoryShared.ResourceProvider(n, uid)
53 var result ResourceProvider = real
54 if f.Shadow {
55 result = shadow
56 }
57
58 return result, err
59}
60
61func (f *shadowComponentFactory) ResourceProvisioner(
62 n, uid string) (ResourceProvisioner, error) {
63 f.lock.Lock()
64 defer f.lock.Unlock()
65
66 real, shadow, err := f.shadowComponentFactoryShared.ResourceProvisioner(n, uid)
67 var result ResourceProvisioner = real
68 if f.Shadow {
69 result = shadow
70 }
71
72 return result, err
73}
74
75// CloseShadow is called when the _real_ side is complete. This will cause
76// all future blocking operations to return immediately on the shadow to
77// ensure the shadow also completes.
78func (f *shadowComponentFactory) CloseShadow() error {
79 // If we aren't the shadow, just return
80 if !f.Shadow {
81 return nil
82 }
83
84 // Lock ourselves so we don't modify state
85 f.lock.Lock()
86 defer f.lock.Unlock()
87
88 // Grab our shared state
89 shared := f.shadowComponentFactoryShared
90
 91	// If we're already closed, it's an error
92 if shared.closed {
93 return fmt.Errorf("component factory shadow already closed")
94 }
95
96 // Close all the providers and provisioners and return the error
97 var result error
98 for _, n := range shared.providerKeys {
99 _, shadow, err := shared.ResourceProvider(n, n)
100 if err == nil && shadow != nil {
101 if err := shadow.CloseShadow(); err != nil {
102 result = multierror.Append(result, err)
103 }
104 }
105 }
106
107 for _, n := range shared.provisionerKeys {
108 _, shadow, err := shared.ResourceProvisioner(n, n)
109 if err == nil && shadow != nil {
110 if err := shadow.CloseShadow(); err != nil {
111 result = multierror.Append(result, err)
112 }
113 }
114 }
115
116 // Mark ourselves as closed
117 shared.closed = true
118
119 return result
120}
121
122func (f *shadowComponentFactory) ShadowError() error {
123 // If we aren't the shadow, just return
124 if !f.Shadow {
125 return nil
126 }
127
128 // Lock ourselves so we don't modify state
129 f.lock.Lock()
130 defer f.lock.Unlock()
131
132 // Grab our shared state
133 shared := f.shadowComponentFactoryShared
134
 135	// If we're not closed, it's an error
136 if !shared.closed {
137 return fmt.Errorf("component factory must be closed to retrieve errors")
138 }
139
140 // Close all the providers and provisioners and return the error
141 var result error
142 for _, n := range shared.providerKeys {
143 _, shadow, err := shared.ResourceProvider(n, n)
144 if err == nil && shadow != nil {
145 if err := shadow.ShadowError(); err != nil {
146 result = multierror.Append(result, err)
147 }
148 }
149 }
150
151 for _, n := range shared.provisionerKeys {
152 _, shadow, err := shared.ResourceProvisioner(n, n)
153 if err == nil && shadow != nil {
154 if err := shadow.ShadowError(); err != nil {
155 result = multierror.Append(result, err)
156 }
157 }
158 }
159
160 return result
161}
162
163// shadowComponentFactoryShared is shared data between the two factories.
164//
165// It is NOT SAFE to run any function on this struct in parallel. Lock
166// access to this struct.
167type shadowComponentFactoryShared struct {
168 contextComponentFactory
169
170 closed bool
171 providers shadow.KeyedValue
172 providerKeys []string
173 provisioners shadow.KeyedValue
174 provisionerKeys []string
175}
176
177// shadowResourceProviderFactoryEntry is the entry that is stored in
178// the Shadows key/value for a provider.
179type shadowComponentFactoryProviderEntry struct {
180 Real ResourceProvider
181 Shadow shadowResourceProvider
182 Err error
183}
184
185type shadowComponentFactoryProvisionerEntry struct {
186 Real ResourceProvisioner
187 Shadow shadowResourceProvisioner
188 Err error
189}
190
191func (f *shadowComponentFactoryShared) ResourceProvider(
192 n, uid string) (ResourceProvider, shadowResourceProvider, error) {
193 // Determine if we already have a value
194 raw, ok := f.providers.ValueOk(uid)
195 if !ok {
196 // Build the entry
197 var entry shadowComponentFactoryProviderEntry
198
199 // No value, initialize. Create the original
200 p, err := f.contextComponentFactory.ResourceProvider(n, uid)
201 if err != nil {
202 entry.Err = err
203 p = nil // Just to be sure
204 }
205
206 if p != nil {
207 // Create the shadow
208 real, shadow := newShadowResourceProvider(p)
209 entry.Real = real
210 entry.Shadow = shadow
211
212 if f.closed {
213 shadow.CloseShadow()
214 }
215 }
216
217 // Store the value
218 f.providers.SetValue(uid, &entry)
219 f.providerKeys = append(f.providerKeys, uid)
220 raw = &entry
221 }
222
223 // Read the entry
224 entry, ok := raw.(*shadowComponentFactoryProviderEntry)
225 if !ok {
226 return nil, nil, fmt.Errorf("Unknown value for shadow provider: %#v", raw)
227 }
228
229 // Return
230 return entry.Real, entry.Shadow, entry.Err
231}
232
233func (f *shadowComponentFactoryShared) ResourceProvisioner(
234 n, uid string) (ResourceProvisioner, shadowResourceProvisioner, error) {
235 // Determine if we already have a value
236 raw, ok := f.provisioners.ValueOk(uid)
237 if !ok {
238 // Build the entry
239 var entry shadowComponentFactoryProvisionerEntry
240
241 // No value, initialize. Create the original
242 p, err := f.contextComponentFactory.ResourceProvisioner(n, uid)
243 if err != nil {
244 entry.Err = err
245 p = nil // Just to be sure
246 }
247
248 if p != nil {
249 // For now, just create a mock since we don't support provisioners yet
250 real, shadow := newShadowResourceProvisioner(p)
251 entry.Real = real
252 entry.Shadow = shadow
253
254 if f.closed {
255 shadow.CloseShadow()
256 }
257 }
258
259 // Store the value
260 f.provisioners.SetValue(uid, &entry)
261 f.provisionerKeys = append(f.provisionerKeys, uid)
262 raw = &entry
263 }
264
265 // Read the entry
266 entry, ok := raw.(*shadowComponentFactoryProvisionerEntry)
267 if !ok {
268 return nil, nil, fmt.Errorf("Unknown value for shadow provisioner: %#v", raw)
269 }
270
271 // Return
272 return entry.Real, entry.Shadow, entry.Err
273}
diff --git a/vendor/github.com/hashicorp/terraform/terraform/shadow_context.go b/vendor/github.com/hashicorp/terraform/terraform/shadow_context.go
deleted file mode 100644
index 5588af2..0000000
--- a/vendor/github.com/hashicorp/terraform/terraform/shadow_context.go
+++ /dev/null
@@ -1,158 +0,0 @@
1package terraform
2
3import (
4 "fmt"
5 "strings"
6
7 "github.com/hashicorp/go-multierror"
8 "github.com/mitchellh/copystructure"
9)
10
11// newShadowContext creates a new context that will shadow the given context
12// when walking the graph. The resulting context should be used _only once_
13// for a graph walk.
14//
15// The returned Shadow should be closed after the graph walk with the
16// real context is complete. Errors from the shadow can be retrieved there.
17//
18// Most importantly, any operations done on the shadow context (the returned
19// context) will NEVER affect the real context. All structures are deep
20// copied, no real providers or resources are used, etc.
21func newShadowContext(c *Context) (*Context, *Context, Shadow) {
22 // Copy the targets
23 targetRaw, err := copystructure.Copy(c.targets)
24 if err != nil {
25 panic(err)
26 }
27
28 // Copy the variables
29 varRaw, err := copystructure.Copy(c.variables)
30 if err != nil {
31 panic(err)
32 }
33
34 // Copy the provider inputs
35 providerInputRaw, err := copystructure.Copy(c.providerInputConfig)
36 if err != nil {
37 panic(err)
38 }
39
40 // The factories
41 componentsReal, componentsShadow := newShadowComponentFactory(c.components)
42
43 // Create the shadow
44 shadow := &Context{
45 components: componentsShadow,
46 destroy: c.destroy,
47 diff: c.diff.DeepCopy(),
48 hooks: nil,
49 meta: c.meta,
50 module: c.module,
51 state: c.state.DeepCopy(),
52 targets: targetRaw.([]string),
53 variables: varRaw.(map[string]interface{}),
54
55 // NOTE(mitchellh): This is not going to work for shadows that are
56 // testing that input results in the proper end state. At the time
57 // of writing, input is not used in any state-changing graph
58 // walks anyways, so this checks nothing. We set it to this to avoid
59 // any panics but even a "nil" value worked here.
60 uiInput: new(MockUIInput),
61
62 // Hardcoded to 4 since parallelism in the shadow doesn't matter
63 // a ton since we're doing far less compared to the real side
64 // and our operations are MUCH faster.
65 parallelSem: NewSemaphore(4),
66 providerInputConfig: providerInputRaw.(map[string]map[string]interface{}),
67 }
68
69 // Create the real context. This is effectively just a copy of
70 // the context given except we need to modify some of the values
71 // to point to the real side of a shadow so the shadow can compare values.
72 real := &Context{
73 // The fields below are changed.
74 components: componentsReal,
75
76 // The fields below are direct copies
77 destroy: c.destroy,
78 diff: c.diff,
79 // diffLock - no copy
80 hooks: c.hooks,
81 meta: c.meta,
82 module: c.module,
83 sh: c.sh,
84 state: c.state,
85 // stateLock - no copy
86 targets: c.targets,
87 uiInput: c.uiInput,
88 variables: c.variables,
89
90 // l - no copy
91 parallelSem: c.parallelSem,
92 providerInputConfig: c.providerInputConfig,
93 runContext: c.runContext,
94 runContextCancel: c.runContextCancel,
95 shadowErr: c.shadowErr,
96 }
97
98 return real, shadow, &shadowContextCloser{
99 Components: componentsShadow,
100 }
101}
102
103// shadowContextVerify takes the real and shadow context and verifies they
104// have equal diffs and states.
105func shadowContextVerify(real, shadow *Context) error {
106 var result error
107
108 // The states compared must be pruned so they're minimal/clean
109 real.state.prune()
110 shadow.state.prune()
111
112 // Compare the states
113 if !real.state.Equal(shadow.state) {
114 result = multierror.Append(result, fmt.Errorf(
115 "Real and shadow states do not match! "+
116 "Real state:\n\n%s\n\n"+
117 "Shadow state:\n\n%s\n\n",
118 real.state, shadow.state))
119 }
120
121 // Compare the diffs
122 if !real.diff.Equal(shadow.diff) {
123 result = multierror.Append(result, fmt.Errorf(
124 "Real and shadow diffs do not match! "+
125 "Real diff:\n\n%s\n\n"+
126 "Shadow diff:\n\n%s\n\n",
127 real.diff, shadow.diff))
128 }
129
130 return result
131}
132
133// shadowContextCloser is the io.Closer returned by newShadowContext that
134// closes all the shadows and returns the results.
135type shadowContextCloser struct {
136 Components *shadowComponentFactory
137}
138
139// Close closes the shadow context.
140func (c *shadowContextCloser) CloseShadow() error {
141 return c.Components.CloseShadow()
142}
143
144func (c *shadowContextCloser) ShadowError() error {
145 err := c.Components.ShadowError()
146 if err == nil {
147 return nil
148 }
149
150 // This is a sad edge case: if the configuration contains uuid() at
151 // any point, we cannot reason aboyt the shadow execution. Tested
152 // with Context2Plan_shadowUuid.
153 if strings.Contains(err.Error(), "uuid()") {
154 err = nil
155 }
156
157 return err
158}
diff --git a/vendor/github.com/hashicorp/terraform/terraform/shadow_resource_provider.go b/vendor/github.com/hashicorp/terraform/terraform/shadow_resource_provider.go
deleted file mode 100644
index 9741d7e..0000000
--- a/vendor/github.com/hashicorp/terraform/terraform/shadow_resource_provider.go
+++ /dev/null
@@ -1,815 +0,0 @@
1package terraform
2
3import (
4 "fmt"
5 "log"
6 "sync"
7
8 "github.com/hashicorp/go-multierror"
9 "github.com/hashicorp/terraform/helper/shadow"
10)
11
12// shadowResourceProvider implements ResourceProvider for the shadow
13// eval context defined in eval_context_shadow.go.
14//
15// This is used to verify behavior with a real provider. This shouldn't
16// be used directly.
17type shadowResourceProvider interface {
18 ResourceProvider
19 Shadow
20}
21
22// newShadowResourceProvider creates a new shadowed ResourceProvider.
23//
24// This will assume a well behaved real ResourceProvider. For example,
25// it assumes that the `Resources` call underneath doesn't change values
26// since once it is called on the real provider, it will be cached and
27// returned in the shadow since number of calls to that shouldn't affect
28// actual behavior.
29//
30// However, with calls like Apply, call order is taken into account,
31// parameters are checked for equality, etc.
32func newShadowResourceProvider(p ResourceProvider) (ResourceProvider, shadowResourceProvider) {
33 // Create the shared data
34 shared := shadowResourceProviderShared{}
35
36 // Create the real provider that does actual work
37 real := &shadowResourceProviderReal{
38 ResourceProvider: p,
39 Shared: &shared,
40 }
41
42 // Create the shadow that watches the real value
43 shadow := &shadowResourceProviderShadow{
44 Shared: &shared,
45
46 resources: p.Resources(),
47 dataSources: p.DataSources(),
48 }
49
50 return real, shadow
51}
52
53// shadowResourceProviderReal is the real resource provider. Function calls
54// to this will perform real work. This records the parameters and return
55// values and call order for the shadow to reproduce.
56type shadowResourceProviderReal struct {
57 ResourceProvider
58
59 Shared *shadowResourceProviderShared
60}
61
62func (p *shadowResourceProviderReal) Close() error {
63 var result error
64 if c, ok := p.ResourceProvider.(ResourceProviderCloser); ok {
65 result = c.Close()
66 }
67
68 p.Shared.CloseErr.SetValue(result)
69 return result
70}
71
72func (p *shadowResourceProviderReal) Input(
73 input UIInput, c *ResourceConfig) (*ResourceConfig, error) {
74 cCopy := c.DeepCopy()
75
76 result, err := p.ResourceProvider.Input(input, c)
77 p.Shared.Input.SetValue(&shadowResourceProviderInput{
78 Config: cCopy,
79 Result: result.DeepCopy(),
80 ResultErr: err,
81 })
82
83 return result, err
84}
85
86func (p *shadowResourceProviderReal) Validate(c *ResourceConfig) ([]string, []error) {
87 warns, errs := p.ResourceProvider.Validate(c)
88 p.Shared.Validate.SetValue(&shadowResourceProviderValidate{
89 Config: c.DeepCopy(),
90 ResultWarn: warns,
91 ResultErr: errs,
92 })
93
94 return warns, errs
95}
96
97func (p *shadowResourceProviderReal) Configure(c *ResourceConfig) error {
98 cCopy := c.DeepCopy()
99
100 err := p.ResourceProvider.Configure(c)
101 p.Shared.Configure.SetValue(&shadowResourceProviderConfigure{
102 Config: cCopy,
103 Result: err,
104 })
105
106 return err
107}
108
109func (p *shadowResourceProviderReal) Stop() error {
110 return p.ResourceProvider.Stop()
111}
112
113func (p *shadowResourceProviderReal) ValidateResource(
114 t string, c *ResourceConfig) ([]string, []error) {
115 key := t
116 configCopy := c.DeepCopy()
117
118 // Real operation
119 warns, errs := p.ResourceProvider.ValidateResource(t, c)
120
121 // Initialize to ensure we always have a wrapper with a lock
122 p.Shared.ValidateResource.Init(
123 key, &shadowResourceProviderValidateResourceWrapper{})
124
125 // Get the result
126 raw := p.Shared.ValidateResource.Value(key)
127 wrapper, ok := raw.(*shadowResourceProviderValidateResourceWrapper)
128 if !ok {
129 // If this fails then we just continue with our day... the shadow
 130	// will fail too, but there isn't much we can do.
131 log.Printf(
132 "[ERROR] unknown value in ValidateResource shadow value: %#v", raw)
133 return warns, errs
134 }
135
136 // Lock the wrapper for writing and record our call
137 wrapper.Lock()
138 defer wrapper.Unlock()
139
140 wrapper.Calls = append(wrapper.Calls, &shadowResourceProviderValidateResource{
141 Config: configCopy,
142 Warns: warns,
143 Errors: errs,
144 })
145
146 // With it locked, call SetValue again so that it triggers WaitForChange
147 p.Shared.ValidateResource.SetValue(key, wrapper)
148
149 // Return the result
150 return warns, errs
151}
152
153func (p *shadowResourceProviderReal) Apply(
154 info *InstanceInfo,
155 state *InstanceState,
156 diff *InstanceDiff) (*InstanceState, error) {
 157	// These have to be copied before the call since the call can modify
158 stateCopy := state.DeepCopy()
159 diffCopy := diff.DeepCopy()
160
161 result, err := p.ResourceProvider.Apply(info, state, diff)
162 p.Shared.Apply.SetValue(info.uniqueId(), &shadowResourceProviderApply{
163 State: stateCopy,
164 Diff: diffCopy,
165 Result: result.DeepCopy(),
166 ResultErr: err,
167 })
168
169 return result, err
170}
171
172func (p *shadowResourceProviderReal) Diff(
173 info *InstanceInfo,
174 state *InstanceState,
175 desired *ResourceConfig) (*InstanceDiff, error) {
 176	// These have to be copied before the call since the call can modify
177 stateCopy := state.DeepCopy()
178 desiredCopy := desired.DeepCopy()
179
180 result, err := p.ResourceProvider.Diff(info, state, desired)
181 p.Shared.Diff.SetValue(info.uniqueId(), &shadowResourceProviderDiff{
182 State: stateCopy,
183 Desired: desiredCopy,
184 Result: result.DeepCopy(),
185 ResultErr: err,
186 })
187
188 return result, err
189}
190
191func (p *shadowResourceProviderReal) Refresh(
192 info *InstanceInfo,
193 state *InstanceState) (*InstanceState, error) {
 194	// These have to be copied before the call since the call can modify
195 stateCopy := state.DeepCopy()
196
197 result, err := p.ResourceProvider.Refresh(info, state)
198 p.Shared.Refresh.SetValue(info.uniqueId(), &shadowResourceProviderRefresh{
199 State: stateCopy,
200 Result: result.DeepCopy(),
201 ResultErr: err,
202 })
203
204 return result, err
205}
206
207func (p *shadowResourceProviderReal) ValidateDataSource(
208 t string, c *ResourceConfig) ([]string, []error) {
209 key := t
210 configCopy := c.DeepCopy()
211
212 // Real operation
213 warns, errs := p.ResourceProvider.ValidateDataSource(t, c)
214
215 // Initialize
216 p.Shared.ValidateDataSource.Init(
217 key, &shadowResourceProviderValidateDataSourceWrapper{})
218
219 // Get the result
220 raw := p.Shared.ValidateDataSource.Value(key)
221 wrapper, ok := raw.(*shadowResourceProviderValidateDataSourceWrapper)
222 if !ok {
223 // If this fails then we just continue with our day... the shadow
 224	// will fail too, but there isn't much we can do.
225 log.Printf(
226 "[ERROR] unknown value in ValidateDataSource shadow value: %#v", raw)
227 return warns, errs
228 }
229
230 // Lock the wrapper for writing and record our call
231 wrapper.Lock()
232 defer wrapper.Unlock()
233
234 wrapper.Calls = append(wrapper.Calls, &shadowResourceProviderValidateDataSource{
235 Config: configCopy,
236 Warns: warns,
237 Errors: errs,
238 })
239
240 // Set it
241 p.Shared.ValidateDataSource.SetValue(key, wrapper)
242
243 // Return the result
244 return warns, errs
245}
246
247func (p *shadowResourceProviderReal) ReadDataDiff(
248 info *InstanceInfo,
249 desired *ResourceConfig) (*InstanceDiff, error) {
 250	// These have to be copied before the call since the call can modify
251 desiredCopy := desired.DeepCopy()
252
253 result, err := p.ResourceProvider.ReadDataDiff(info, desired)
254 p.Shared.ReadDataDiff.SetValue(info.uniqueId(), &shadowResourceProviderReadDataDiff{
255 Desired: desiredCopy,
256 Result: result.DeepCopy(),
257 ResultErr: err,
258 })
259
260 return result, err
261}
262
263func (p *shadowResourceProviderReal) ReadDataApply(
264 info *InstanceInfo,
265 diff *InstanceDiff) (*InstanceState, error) {
 266	// These have to be copied before the call since the call can modify
267 diffCopy := diff.DeepCopy()
268
269 result, err := p.ResourceProvider.ReadDataApply(info, diff)
270 p.Shared.ReadDataApply.SetValue(info.uniqueId(), &shadowResourceProviderReadDataApply{
271 Diff: diffCopy,
272 Result: result.DeepCopy(),
273 ResultErr: err,
274 })
275
276 return result, err
277}
278
279// shadowResourceProviderShadow is the shadow resource provider. Function
280// calls never affect real resources. This is paired with the "real" side
281// which must be called properly to enable recording.
282type shadowResourceProviderShadow struct {
283 Shared *shadowResourceProviderShared
284
285 // Cached values that are expected to not change
286 resources []ResourceType
287 dataSources []DataSource
288
289 Error error // Error is the list of errors from the shadow
290 ErrorLock sync.Mutex
291}
292
293type shadowResourceProviderShared struct {
294 // NOTE: Anytime a value is added here, be sure to add it to
295 // the Close() method so that it is closed.
296
297 CloseErr shadow.Value
298 Input shadow.Value
299 Validate shadow.Value
300 Configure shadow.Value
301 ValidateResource shadow.KeyedValue
302 Apply shadow.KeyedValue
303 Diff shadow.KeyedValue
304 Refresh shadow.KeyedValue
305 ValidateDataSource shadow.KeyedValue
306 ReadDataDiff shadow.KeyedValue
307 ReadDataApply shadow.KeyedValue
308}
309
310func (p *shadowResourceProviderShared) Close() error {
311 return shadow.Close(p)
312}
313
314func (p *shadowResourceProviderShadow) CloseShadow() error {
315 err := p.Shared.Close()
316 if err != nil {
317 err = fmt.Errorf("close error: %s", err)
318 }
319
320 return err
321}
322
323func (p *shadowResourceProviderShadow) ShadowError() error {
324 return p.Error
325}
326
327func (p *shadowResourceProviderShadow) Resources() []ResourceType {
328 return p.resources
329}
330
331func (p *shadowResourceProviderShadow) DataSources() []DataSource {
332 return p.dataSources
333}
334
335func (p *shadowResourceProviderShadow) Close() error {
336 v := p.Shared.CloseErr.Value()
337 if v == nil {
338 return nil
339 }
340
341 return v.(error)
342}
343
344func (p *shadowResourceProviderShadow) Input(
345 input UIInput, c *ResourceConfig) (*ResourceConfig, error) {
346 // Get the result of the input call
347 raw := p.Shared.Input.Value()
348 if raw == nil {
349 return nil, nil
350 }
351
352 result, ok := raw.(*shadowResourceProviderInput)
353 if !ok {
354 p.ErrorLock.Lock()
355 defer p.ErrorLock.Unlock()
356 p.Error = multierror.Append(p.Error, fmt.Errorf(
357 "Unknown 'input' shadow value: %#v", raw))
358 return nil, nil
359 }
360
361 // Compare the parameters, which should be identical
362 if !c.Equal(result.Config) {
363 p.ErrorLock.Lock()
364 p.Error = multierror.Append(p.Error, fmt.Errorf(
365 "Input had unequal configurations (real, then shadow):\n\n%#v\n\n%#v",
366 result.Config, c))
367 p.ErrorLock.Unlock()
368 }
369
370 // Return the results
371 return result.Result, result.ResultErr
372}
373
374func (p *shadowResourceProviderShadow) Validate(c *ResourceConfig) ([]string, []error) {
375 // Get the result of the validate call
376 raw := p.Shared.Validate.Value()
377 if raw == nil {
378 return nil, nil
379 }
380
381 result, ok := raw.(*shadowResourceProviderValidate)
382 if !ok {
383 p.ErrorLock.Lock()
384 defer p.ErrorLock.Unlock()
385 p.Error = multierror.Append(p.Error, fmt.Errorf(
386 "Unknown 'validate' shadow value: %#v", raw))
387 return nil, nil
388 }
389
390 // Compare the parameters, which should be identical
391 if !c.Equal(result.Config) {
392 p.ErrorLock.Lock()
393 p.Error = multierror.Append(p.Error, fmt.Errorf(
394 "Validate had unequal configurations (real, then shadow):\n\n%#v\n\n%#v",
395 result.Config, c))
396 p.ErrorLock.Unlock()
397 }
398
399 // Return the results
400 return result.ResultWarn, result.ResultErr
401}
402
403func (p *shadowResourceProviderShadow) Configure(c *ResourceConfig) error {
404 // Get the result of the call
405 raw := p.Shared.Configure.Value()
406 if raw == nil {
407 return nil
408 }
409
410 result, ok := raw.(*shadowResourceProviderConfigure)
411 if !ok {
412 p.ErrorLock.Lock()
413 defer p.ErrorLock.Unlock()
414 p.Error = multierror.Append(p.Error, fmt.Errorf(
415 "Unknown 'configure' shadow value: %#v", raw))
416 return nil
417 }
418
419 // Compare the parameters, which should be identical
420 if !c.Equal(result.Config) {
421 p.ErrorLock.Lock()
422 p.Error = multierror.Append(p.Error, fmt.Errorf(
423 "Configure had unequal configurations (real, then shadow):\n\n%#v\n\n%#v",
424 result.Config, c))
425 p.ErrorLock.Unlock()
426 }
427
428 // Return the results
429 return result.Result
430}
431
432// Stop returns immediately.
433func (p *shadowResourceProviderShadow) Stop() error {
434 return nil
435}
436
437func (p *shadowResourceProviderShadow) ValidateResource(t string, c *ResourceConfig) ([]string, []error) {
438 // Unique key
439 key := t
440
441 // Get the initial value
442 raw := p.Shared.ValidateResource.Value(key)
443
444 // Find a validation with our configuration
445 var result *shadowResourceProviderValidateResource
446 for {
447 // Get the value
448 if raw == nil {
449 p.ErrorLock.Lock()
450 defer p.ErrorLock.Unlock()
451 p.Error = multierror.Append(p.Error, fmt.Errorf(
452 "Unknown 'ValidateResource' call for %q:\n\n%#v",
453 key, c))
454 return nil, nil
455 }
456
457 wrapper, ok := raw.(*shadowResourceProviderValidateResourceWrapper)
458 if !ok {
459 p.ErrorLock.Lock()
460 defer p.ErrorLock.Unlock()
461 p.Error = multierror.Append(p.Error, fmt.Errorf(
462 "Unknown 'ValidateResource' shadow value for %q: %#v", key, raw))
463 return nil, nil
464 }
465
466 // Look for the matching call with our configuration
467 wrapper.RLock()
468 for _, call := range wrapper.Calls {
469 if call.Config.Equal(c) {
470 result = call
471 break
472 }
473 }
474 wrapper.RUnlock()
475
476 // If we found a result, exit
477 if result != nil {
478 break
479 }
480
481 // Wait for a change so we can get the wrapper again
482 raw = p.Shared.ValidateResource.WaitForChange(key)
483 }
484
485 return result.Warns, result.Errors
486}
487
488func (p *shadowResourceProviderShadow) Apply(
489 info *InstanceInfo,
490 state *InstanceState,
491 diff *InstanceDiff) (*InstanceState, error) {
492 // Unique key
493 key := info.uniqueId()
494 raw := p.Shared.Apply.Value(key)
495 if raw == nil {
496 p.ErrorLock.Lock()
497 defer p.ErrorLock.Unlock()
498 p.Error = multierror.Append(p.Error, fmt.Errorf(
499 "Unknown 'apply' call for %q:\n\n%#v\n\n%#v",
500 key, state, diff))
501 return nil, nil
502 }
503
504 result, ok := raw.(*shadowResourceProviderApply)
505 if !ok {
506 p.ErrorLock.Lock()
507 defer p.ErrorLock.Unlock()
508 p.Error = multierror.Append(p.Error, fmt.Errorf(
509 "Unknown 'apply' shadow value for %q: %#v", key, raw))
510 return nil, nil
511 }
512
513 // Compare the parameters, which should be identical
514 if !state.Equal(result.State) {
515 p.ErrorLock.Lock()
516 p.Error = multierror.Append(p.Error, fmt.Errorf(
517 "Apply %q: state had unequal states (real, then shadow):\n\n%#v\n\n%#v",
518 key, result.State, state))
519 p.ErrorLock.Unlock()
520 }
521
522 if !diff.Equal(result.Diff) {
523 p.ErrorLock.Lock()
524 p.Error = multierror.Append(p.Error, fmt.Errorf(
525 "Apply %q: unequal diffs (real, then shadow):\n\n%#v\n\n%#v",
526 key, result.Diff, diff))
527 p.ErrorLock.Unlock()
528 }
529
530 return result.Result, result.ResultErr
531}
532
533func (p *shadowResourceProviderShadow) Diff(
534 info *InstanceInfo,
535 state *InstanceState,
536 desired *ResourceConfig) (*InstanceDiff, error) {
537 // Unique key
538 key := info.uniqueId()
539 raw := p.Shared.Diff.Value(key)
540 if raw == nil {
541 p.ErrorLock.Lock()
542 defer p.ErrorLock.Unlock()
543 p.Error = multierror.Append(p.Error, fmt.Errorf(
544 "Unknown 'diff' call for %q:\n\n%#v\n\n%#v",
545 key, state, desired))
546 return nil, nil
547 }
548
549 result, ok := raw.(*shadowResourceProviderDiff)
550 if !ok {
551 p.ErrorLock.Lock()
552 defer p.ErrorLock.Unlock()
553 p.Error = multierror.Append(p.Error, fmt.Errorf(
554 "Unknown 'diff' shadow value for %q: %#v", key, raw))
555 return nil, nil
556 }
557
558 // Compare the parameters, which should be identical
559 if !state.Equal(result.State) {
560 p.ErrorLock.Lock()
561 p.Error = multierror.Append(p.Error, fmt.Errorf(
562 "Diff %q had unequal states (real, then shadow):\n\n%#v\n\n%#v",
563 key, result.State, state))
564 p.ErrorLock.Unlock()
565 }
566 if !desired.Equal(result.Desired) {
567 p.ErrorLock.Lock()
568 p.Error = multierror.Append(p.Error, fmt.Errorf(
569 "Diff %q had unequal states (real, then shadow):\n\n%#v\n\n%#v",
570 key, result.Desired, desired))
571 p.ErrorLock.Unlock()
572 }
573
574 return result.Result, result.ResultErr
575}
576
577func (p *shadowResourceProviderShadow) Refresh(
578 info *InstanceInfo,
579 state *InstanceState) (*InstanceState, error) {
580 // Unique key
581 key := info.uniqueId()
582 raw := p.Shared.Refresh.Value(key)
583 if raw == nil {
584 p.ErrorLock.Lock()
585 defer p.ErrorLock.Unlock()
586 p.Error = multierror.Append(p.Error, fmt.Errorf(
587 "Unknown 'refresh' call for %q:\n\n%#v",
588 key, state))
589 return nil, nil
590 }
591
592 result, ok := raw.(*shadowResourceProviderRefresh)
593 if !ok {
594 p.ErrorLock.Lock()
595 defer p.ErrorLock.Unlock()
596 p.Error = multierror.Append(p.Error, fmt.Errorf(
597 "Unknown 'refresh' shadow value: %#v", raw))
598 return nil, nil
599 }
600
601 // Compare the parameters, which should be identical
602 if !state.Equal(result.State) {
603 p.ErrorLock.Lock()
604 p.Error = multierror.Append(p.Error, fmt.Errorf(
605 "Refresh %q had unequal states (real, then shadow):\n\n%#v\n\n%#v",
606 key, result.State, state))
607 p.ErrorLock.Unlock()
608 }
609
610 return result.Result, result.ResultErr
611}
612
613func (p *shadowResourceProviderShadow) ValidateDataSource(
614 t string, c *ResourceConfig) ([]string, []error) {
615 // Unique key
616 key := t
617
618 // Get the initial value
619 raw := p.Shared.ValidateDataSource.Value(key)
620
621 // Find a validation with our configuration
622 var result *shadowResourceProviderValidateDataSource
623 for {
624 // Get the value
625 if raw == nil {
626 p.ErrorLock.Lock()
627 defer p.ErrorLock.Unlock()
628 p.Error = multierror.Append(p.Error, fmt.Errorf(
629 "Unknown 'ValidateDataSource' call for %q:\n\n%#v",
630 key, c))
631 return nil, nil
632 }
633
634 wrapper, ok := raw.(*shadowResourceProviderValidateDataSourceWrapper)
635 if !ok {
636 p.ErrorLock.Lock()
637 defer p.ErrorLock.Unlock()
638 p.Error = multierror.Append(p.Error, fmt.Errorf(
639 "Unknown 'ValidateDataSource' shadow value: %#v", raw))
640 return nil, nil
641 }
642
643 // Look for the matching call with our configuration
644 wrapper.RLock()
645 for _, call := range wrapper.Calls {
646 if call.Config.Equal(c) {
647 result = call
648 break
649 }
650 }
651 wrapper.RUnlock()
652
653 // If we found a result, exit
654 if result != nil {
655 break
656 }
657
658 // Wait for a change so we can get the wrapper again
659 raw = p.Shared.ValidateDataSource.WaitForChange(key)
660 }
661
662 return result.Warns, result.Errors
663}
664
665func (p *shadowResourceProviderShadow) ReadDataDiff(
666 info *InstanceInfo,
667 desired *ResourceConfig) (*InstanceDiff, error) {
668 // Unique key
669 key := info.uniqueId()
670 raw := p.Shared.ReadDataDiff.Value(key)
671 if raw == nil {
672 p.ErrorLock.Lock()
673 defer p.ErrorLock.Unlock()
674 p.Error = multierror.Append(p.Error, fmt.Errorf(
675 "Unknown 'ReadDataDiff' call for %q:\n\n%#v",
676 key, desired))
677 return nil, nil
678 }
679
680 result, ok := raw.(*shadowResourceProviderReadDataDiff)
681 if !ok {
682 p.ErrorLock.Lock()
683 defer p.ErrorLock.Unlock()
684 p.Error = multierror.Append(p.Error, fmt.Errorf(
685 "Unknown 'ReadDataDiff' shadow value for %q: %#v", key, raw))
686 return nil, nil
687 }
688
689 // Compare the parameters, which should be identical
690 if !desired.Equal(result.Desired) {
691 p.ErrorLock.Lock()
692 p.Error = multierror.Append(p.Error, fmt.Errorf(
693 "ReadDataDiff %q had unequal configs (real, then shadow):\n\n%#v\n\n%#v",
694 key, result.Desired, desired))
695 p.ErrorLock.Unlock()
696 }
697
698 return result.Result, result.ResultErr
699}
700
701func (p *shadowResourceProviderShadow) ReadDataApply(
702 info *InstanceInfo,
703 d *InstanceDiff) (*InstanceState, error) {
704 // Unique key
705 key := info.uniqueId()
706 raw := p.Shared.ReadDataApply.Value(key)
707 if raw == nil {
708 p.ErrorLock.Lock()
709 defer p.ErrorLock.Unlock()
710 p.Error = multierror.Append(p.Error, fmt.Errorf(
711 "Unknown 'ReadDataApply' call for %q:\n\n%#v",
712 key, d))
713 return nil, nil
714 }
715
716 result, ok := raw.(*shadowResourceProviderReadDataApply)
717 if !ok {
718 p.ErrorLock.Lock()
719 defer p.ErrorLock.Unlock()
720 p.Error = multierror.Append(p.Error, fmt.Errorf(
721 "Unknown 'ReadDataApply' shadow value for %q: %#v", key, raw))
722 return nil, nil
723 }
724
725 // Compare the parameters, which should be identical
726 if !d.Equal(result.Diff) {
727 p.ErrorLock.Lock()
728 p.Error = multierror.Append(p.Error, fmt.Errorf(
729 "ReadDataApply: unequal diffs (real, then shadow):\n\n%#v\n\n%#v",
730 result.Diff, d))
731 p.ErrorLock.Unlock()
732 }
733
734 return result.Result, result.ResultErr
735}
736
737func (p *shadowResourceProviderShadow) ImportState(info *InstanceInfo, id string) ([]*InstanceState, error) {
738 panic("import not supported by shadow graph")
739}
740
741// The structs for the various function calls are put below. These structs
742// are used to carry call information across the real/shadow boundaries.
743
744type shadowResourceProviderInput struct {
745 Config *ResourceConfig
746 Result *ResourceConfig
747 ResultErr error
748}
749
750type shadowResourceProviderValidate struct {
751 Config *ResourceConfig
752 ResultWarn []string
753 ResultErr []error
754}
755
756type shadowResourceProviderConfigure struct {
757 Config *ResourceConfig
758 Result error
759}
760
761type shadowResourceProviderValidateResourceWrapper struct {
762 sync.RWMutex
763
764 Calls []*shadowResourceProviderValidateResource
765}
766
767type shadowResourceProviderValidateResource struct {
768 Config *ResourceConfig
769 Warns []string
770 Errors []error
771}
772
773type shadowResourceProviderApply struct {
774 State *InstanceState
775 Diff *InstanceDiff
776 Result *InstanceState
777 ResultErr error
778}
779
780type shadowResourceProviderDiff struct {
781 State *InstanceState
782 Desired *ResourceConfig
783 Result *InstanceDiff
784 ResultErr error
785}
786
787type shadowResourceProviderRefresh struct {
788 State *InstanceState
789 Result *InstanceState
790 ResultErr error
791}
792
793type shadowResourceProviderValidateDataSourceWrapper struct {
794 sync.RWMutex
795
796 Calls []*shadowResourceProviderValidateDataSource
797}
798
799type shadowResourceProviderValidateDataSource struct {
800 Config *ResourceConfig
801 Warns []string
802 Errors []error
803}
804
805type shadowResourceProviderReadDataDiff struct {
806 Desired *ResourceConfig
807 Result *InstanceDiff
808 ResultErr error
809}
810
811type shadowResourceProviderReadDataApply struct {
812 Diff *InstanceDiff
813 Result *InstanceState
814 ResultErr error
815}
diff --git a/vendor/github.com/hashicorp/terraform/terraform/shadow_resource_provisioner.go b/vendor/github.com/hashicorp/terraform/terraform/shadow_resource_provisioner.go
deleted file mode 100644
index 60a4908..0000000
--- a/vendor/github.com/hashicorp/terraform/terraform/shadow_resource_provisioner.go
+++ /dev/null
@@ -1,282 +0,0 @@
1package terraform
2
3import (
4 "fmt"
5 "io"
6 "log"
7 "sync"
8
9 "github.com/hashicorp/go-multierror"
10 "github.com/hashicorp/terraform/helper/shadow"
11)
12
13// shadowResourceProvisioner implements ResourceProvisioner for the shadow
14// eval context defined in eval_context_shadow.go.
15//
16// This is used to verify behavior with a real provisioner. This shouldn't
17// be used directly.
18type shadowResourceProvisioner interface {
19 ResourceProvisioner
20 Shadow
21}
22
23// newShadowResourceProvisioner creates a new shadowed ResourceProvisioner.
24func newShadowResourceProvisioner(
25 p ResourceProvisioner) (ResourceProvisioner, shadowResourceProvisioner) {
26 // Create the shared data
27 shared := shadowResourceProvisionerShared{
28 Validate: shadow.ComparedValue{
29 Func: shadowResourceProvisionerValidateCompare,
30 },
31 }
32
33 // Create the real provisioner that does actual work
34 real := &shadowResourceProvisionerReal{
35 ResourceProvisioner: p,
36 Shared: &shared,
37 }
38
39 // Create the shadow that watches the real value
40 shadow := &shadowResourceProvisionerShadow{
41 Shared: &shared,
42 }
43
44 return real, shadow
45}
46
47// shadowResourceProvisionerReal is the real resource provisioner. Function calls
48// to this will perform real work. This records the parameters and return
49// values and call order for the shadow to reproduce.
50type shadowResourceProvisionerReal struct {
51 ResourceProvisioner
52
53 Shared *shadowResourceProvisionerShared
54}
55
56func (p *shadowResourceProvisionerReal) Close() error {
57 var result error
58 if c, ok := p.ResourceProvisioner.(ResourceProvisionerCloser); ok {
59 result = c.Close()
60 }
61
62 p.Shared.CloseErr.SetValue(result)
63 return result
64}
65
66func (p *shadowResourceProvisionerReal) Validate(c *ResourceConfig) ([]string, []error) {
67 warns, errs := p.ResourceProvisioner.Validate(c)
68 p.Shared.Validate.SetValue(&shadowResourceProvisionerValidate{
69 Config: c,
70 ResultWarn: warns,
71 ResultErr: errs,
72 })
73
74 return warns, errs
75}
76
77func (p *shadowResourceProvisionerReal) Apply(
78 output UIOutput, s *InstanceState, c *ResourceConfig) error {
79 err := p.ResourceProvisioner.Apply(output, s, c)
80
 81	// Write the result, grab a lock for writing. This should never
82 // block long since the operations below don't block.
83 p.Shared.ApplyLock.Lock()
84 defer p.Shared.ApplyLock.Unlock()
85
86 key := s.ID
87 raw, ok := p.Shared.Apply.ValueOk(key)
88 if !ok {
89 // Setup a new value
90 raw = &shadow.ComparedValue{
91 Func: shadowResourceProvisionerApplyCompare,
92 }
93
94 // Set it
95 p.Shared.Apply.SetValue(key, raw)
96 }
97
98 compareVal, ok := raw.(*shadow.ComparedValue)
99 if !ok {
100 // Just log and return so that we don't cause the real side
101 // any side effects.
102 log.Printf("[ERROR] unknown value in 'apply': %#v", raw)
103 return err
104 }
105
106 // Write the resulting value
107 compareVal.SetValue(&shadowResourceProvisionerApply{
108 Config: c,
109 ResultErr: err,
110 })
111
112 return err
113}
114
115func (p *shadowResourceProvisionerReal) Stop() error {
116 return p.ResourceProvisioner.Stop()
117}
118
119// shadowResourceProvisionerShadow is the shadow resource provisioner. Function
120// calls never affect real resources. This is paired with the "real" side
121// which must be called properly to enable recording.
122type shadowResourceProvisionerShadow struct {
123 Shared *shadowResourceProvisionerShared
124
125 Error error // Error is the list of errors from the shadow
126 ErrorLock sync.Mutex
127}
128
129type shadowResourceProvisionerShared struct {
130 // NOTE: Anytime a value is added here, be sure to add it to
131 // the Close() method so that it is closed.
132
133 CloseErr shadow.Value
134 Validate shadow.ComparedValue
135 Apply shadow.KeyedValue
136 ApplyLock sync.Mutex // For writing only
137}
138
139func (p *shadowResourceProvisionerShared) Close() error {
140 closers := []io.Closer{
141 &p.CloseErr,
142 }
143
144 for _, c := range closers {
145 // This should never happen, but we don't panic because a panic
146 // could affect the real behavior of Terraform and a shadow should
147 // never be able to do that.
148 if err := c.Close(); err != nil {
149 return err
150 }
151 }
152
153 return nil
154}
155
156func (p *shadowResourceProvisionerShadow) CloseShadow() error {
157 err := p.Shared.Close()
158 if err != nil {
159 err = fmt.Errorf("close error: %s", err)
160 }
161
162 return err
163}
164
165func (p *shadowResourceProvisionerShadow) ShadowError() error {
166 return p.Error
167}
168
169func (p *shadowResourceProvisionerShadow) Close() error {
170 v := p.Shared.CloseErr.Value()
171 if v == nil {
172 return nil
173 }
174
175 return v.(error)
176}
177
178func (p *shadowResourceProvisionerShadow) Validate(c *ResourceConfig) ([]string, []error) {
179 // Get the result of the validate call
180 raw := p.Shared.Validate.Value(c)
181 if raw == nil {
182 return nil, nil
183 }
184
185 result, ok := raw.(*shadowResourceProvisionerValidate)
186 if !ok {
187 p.ErrorLock.Lock()
188 defer p.ErrorLock.Unlock()
189 p.Error = multierror.Append(p.Error, fmt.Errorf(
190 "Unknown 'validate' shadow value: %#v", raw))
191 return nil, nil
192 }
193
194 // We don't need to compare configurations because we key on the
195 // configuration so just return right away.
196 return result.ResultWarn, result.ResultErr
197}
198
199func (p *shadowResourceProvisionerShadow) Apply(
200 output UIOutput, s *InstanceState, c *ResourceConfig) error {
201 // Get the value based on the key
202 key := s.ID
203 raw := p.Shared.Apply.Value(key)
204 if raw == nil {
205 return nil
206 }
207
208 compareVal, ok := raw.(*shadow.ComparedValue)
209 if !ok {
210 p.ErrorLock.Lock()
211 defer p.ErrorLock.Unlock()
212 p.Error = multierror.Append(p.Error, fmt.Errorf(
213 "Unknown 'apply' shadow value: %#v", raw))
214 return nil
215 }
216
217 // With the compared value, we compare against our config
218 raw = compareVal.Value(c)
219 if raw == nil {
220 return nil
221 }
222
223 result, ok := raw.(*shadowResourceProvisionerApply)
224 if !ok {
225 p.ErrorLock.Lock()
226 defer p.ErrorLock.Unlock()
227 p.Error = multierror.Append(p.Error, fmt.Errorf(
228 "Unknown 'apply' shadow value: %#v", raw))
229 return nil
230 }
231
232 return result.ResultErr
233}
234
235func (p *shadowResourceProvisionerShadow) Stop() error {
236 // For the shadow, we always just return nil since a Stop indicates
237 // that we were interrupted and shadows are disabled during interrupts
238 // anyways.
239 return nil
240}
241
242// The structs for the various function calls are put below. These structs
243// are used to carry call information across the real/shadow boundaries.
244
245type shadowResourceProvisionerValidate struct {
246 Config *ResourceConfig
247 ResultWarn []string
248 ResultErr []error
249}
250
251type shadowResourceProvisionerApply struct {
252 Config *ResourceConfig
253 ResultErr error
254}
255
256func shadowResourceProvisionerValidateCompare(k, v interface{}) bool {
257 c, ok := k.(*ResourceConfig)
258 if !ok {
259 return false
260 }
261
262 result, ok := v.(*shadowResourceProvisionerValidate)
263 if !ok {
264 return false
265 }
266
267 return c.Equal(result.Config)
268}
269
270func shadowResourceProvisionerApplyCompare(k, v interface{}) bool {
271 c, ok := k.(*ResourceConfig)
272 if !ok {
273 return false
274 }
275
276 result, ok := v.(*shadowResourceProvisionerApply)
277 if !ok {
278 return false
279 }
280
281 return c.Equal(result.Config)
282}
diff --git a/vendor/github.com/hashicorp/terraform/terraform/state.go b/vendor/github.com/hashicorp/terraform/terraform/state.go
index 0c46194..04b14a6 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/state.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/state.go
@@ -9,6 +9,7 @@ import (
9 "io" 9 "io"
10 "io/ioutil" 10 "io/ioutil"
11 "log" 11 "log"
12 "os"
12 "reflect" 13 "reflect"
13 "sort" 14 "sort"
14 "strconv" 15 "strconv"
@@ -16,10 +17,12 @@ import (
16 "sync" 17 "sync"
17 18
18 "github.com/hashicorp/go-multierror" 19 "github.com/hashicorp/go-multierror"
20 "github.com/hashicorp/go-uuid"
19 "github.com/hashicorp/go-version" 21 "github.com/hashicorp/go-version"
20 "github.com/hashicorp/terraform/config" 22 "github.com/hashicorp/terraform/config"
21 "github.com/mitchellh/copystructure" 23 "github.com/mitchellh/copystructure"
22 "github.com/satori/go.uuid" 24
25 tfversion "github.com/hashicorp/terraform/version"
23) 26)
24 27
25const ( 28const (
@@ -664,7 +667,7 @@ func (s *State) FromFutureTerraform() bool {
664 } 667 }
665 668
666 v := version.Must(version.NewVersion(s.TFVersion)) 669 v := version.Must(version.NewVersion(s.TFVersion))
667 return SemVersion.LessThan(v) 670 return tfversion.SemVer.LessThan(v)
668} 671}
669 672
670func (s *State) Init() { 673func (s *State) Init() {
@@ -704,7 +707,11 @@ func (s *State) EnsureHasLineage() {
704 707
705func (s *State) ensureHasLineage() { 708func (s *State) ensureHasLineage() {
706 if s.Lineage == "" { 709 if s.Lineage == "" {
707 s.Lineage = uuid.NewV4().String() 710 lineage, err := uuid.GenerateUUID()
711 if err != nil {
712 panic(fmt.Errorf("Failed to generate lineage: %v", err))
713 }
714 s.Lineage = lineage
708 log.Printf("[DEBUG] New state was assigned lineage %q\n", s.Lineage) 715 log.Printf("[DEBUG] New state was assigned lineage %q\n", s.Lineage)
709 } else { 716 } else {
710 log.Printf("[TRACE] Preserving existing state lineage %q\n", s.Lineage) 717 log.Printf("[TRACE] Preserving existing state lineage %q\n", s.Lineage)
@@ -977,6 +984,10 @@ type ModuleState struct {
977 // always disjoint, so the path represents amodule tree 984 // always disjoint, so the path represents amodule tree
978 Path []string `json:"path"` 985 Path []string `json:"path"`
979 986
987 // Locals are kept only transiently in-memory, because we can always
988 // re-compute them.
989 Locals map[string]interface{} `json:"-"`
990
980 // Outputs declared by the module and maintained for each module 991 // Outputs declared by the module and maintained for each module
981 // even though only the root module technically needs to be kept. 992 // even though only the root module technically needs to be kept.
982 // This allows operators to inspect values at the boundaries. 993 // This allows operators to inspect values at the boundaries.
@@ -1083,7 +1094,7 @@ func (m *ModuleState) Orphans(c *config.Config) []string {
1083 defer m.Unlock() 1094 defer m.Unlock()
1084 1095
1085 keys := make(map[string]struct{}) 1096 keys := make(map[string]struct{})
1086 for k, _ := range m.Resources { 1097 for k := range m.Resources {
1087 keys[k] = struct{}{} 1098 keys[k] = struct{}{}
1088 } 1099 }
1089 1100
@@ -1091,7 +1102,7 @@ func (m *ModuleState) Orphans(c *config.Config) []string {
1091 for _, r := range c.Resources { 1102 for _, r := range c.Resources {
1092 delete(keys, r.Id()) 1103 delete(keys, r.Id())
1093 1104
1094 for k, _ := range keys { 1105 for k := range keys {
1095 if strings.HasPrefix(k, r.Id()+".") { 1106 if strings.HasPrefix(k, r.Id()+".") {
1096 delete(keys, k) 1107 delete(keys, k)
1097 } 1108 }
@@ -1100,7 +1111,32 @@ func (m *ModuleState) Orphans(c *config.Config) []string {
1100 } 1111 }
1101 1112
1102 result := make([]string, 0, len(keys)) 1113 result := make([]string, 0, len(keys))
1103 for k, _ := range keys { 1114 for k := range keys {
1115 result = append(result, k)
1116 }
1117
1118 return result
1119}
1120
1121// RemovedOutputs returns a list of outputs that are in the State but aren't
1122// present in the configuration itself.
1123func (m *ModuleState) RemovedOutputs(c *config.Config) []string {
1124 m.Lock()
1125 defer m.Unlock()
1126
1127 keys := make(map[string]struct{})
1128 for k := range m.Outputs {
1129 keys[k] = struct{}{}
1130 }
1131
1132 if c != nil {
1133 for _, o := range c.Outputs {
1134 delete(keys, o.Name)
1135 }
1136 }
1137
1138 result := make([]string, 0, len(keys))
1139 for k := range keys {
1104 result = append(result, k) 1140 result = append(result, k)
1105 } 1141 }
1106 1142
@@ -1308,6 +1344,10 @@ func (m *ModuleState) String() string {
1308 return buf.String() 1344 return buf.String()
1309} 1345}
1310 1346
1347func (m *ModuleState) Empty() bool {
1348 return len(m.Locals) == 0 && len(m.Outputs) == 0 && len(m.Resources) == 0
1349}
1350
1311// ResourceStateKey is a structured representation of the key used for the 1351// ResourceStateKey is a structured representation of the key used for the
1312// ModuleState.Resources mapping 1352// ModuleState.Resources mapping
1313type ResourceStateKey struct { 1353type ResourceStateKey struct {
@@ -1681,7 +1721,20 @@ func (s *InstanceState) Equal(other *InstanceState) bool {
1681 // We only do the deep check if both are non-nil. If one is nil 1721 // We only do the deep check if both are non-nil. If one is nil
1682 // we treat it as equal since their lengths are both zero (check 1722 // we treat it as equal since their lengths are both zero (check
1683 // above). 1723 // above).
1684 if !reflect.DeepEqual(s.Meta, other.Meta) { 1724 //
1725 // Since this can contain numeric values that may change types during
1726 // serialization, let's compare the serialized values.
1727 sMeta, err := json.Marshal(s.Meta)
1728 if err != nil {
1729 // marshaling primitives shouldn't ever error out
1730 panic(err)
1731 }
1732 otherMeta, err := json.Marshal(other.Meta)
1733 if err != nil {
1734 panic(err)
1735 }
1736
1737 if !bytes.Equal(sMeta, otherMeta) {
1685 return false 1738 return false
1686 } 1739 }
1687 } 1740 }
@@ -1824,11 +1877,19 @@ var ErrNoState = errors.New("no state")
1824// ReadState reads a state structure out of a reader in the format that 1877// ReadState reads a state structure out of a reader in the format that
1825// was written by WriteState. 1878// was written by WriteState.
1826func ReadState(src io.Reader) (*State, error) { 1879func ReadState(src io.Reader) (*State, error) {
1880 // check for a nil file specifically, since that produces a platform
1881 // specific error if we try to use it in a bufio.Reader.
1882 if f, ok := src.(*os.File); ok && f == nil {
1883 return nil, ErrNoState
1884 }
1885
1827 buf := bufio.NewReader(src) 1886 buf := bufio.NewReader(src)
1887
1828 if _, err := buf.Peek(1); err != nil { 1888 if _, err := buf.Peek(1); err != nil {
1829 // the error is either io.EOF or "invalid argument", and both are from 1889 if err == io.EOF {
1830 // an empty state. 1890 return nil, ErrNoState
1831 return nil, ErrNoState 1891 }
1892 return nil, err
1832 } 1893 }
1833 1894
1834 if err := testForV0State(buf); err != nil { 1895 if err := testForV0State(buf); err != nil {
@@ -1891,7 +1952,7 @@ func ReadState(src io.Reader) (*State, error) {
1891 result = v3State 1952 result = v3State
1892 default: 1953 default:
1893 return nil, fmt.Errorf("Terraform %s does not support state version %d, please update.", 1954 return nil, fmt.Errorf("Terraform %s does not support state version %d, please update.",
1894 SemVersion.String(), versionIdentifier.Version) 1955 tfversion.SemVer.String(), versionIdentifier.Version)
1895 } 1956 }
1896 1957
1897 // If we reached this place we must have a result set 1958 // If we reached this place we must have a result set
@@ -1935,7 +1996,7 @@ func ReadStateV2(jsonBytes []byte) (*State, error) {
1935 // version that we don't understand 1996 // version that we don't understand
1936 if state.Version > StateVersion { 1997 if state.Version > StateVersion {
1937 return nil, fmt.Errorf("Terraform %s does not support state version %d, please update.", 1998 return nil, fmt.Errorf("Terraform %s does not support state version %d, please update.",
1938 SemVersion.String(), state.Version) 1999 tfversion.SemVer.String(), state.Version)
1939 } 2000 }
1940 2001
1941 // Make sure the version is semantic 2002 // Make sure the version is semantic
@@ -1970,7 +2031,7 @@ func ReadStateV3(jsonBytes []byte) (*State, error) {
1970 // version that we don't understand 2031 // version that we don't understand
1971 if state.Version > StateVersion { 2032 if state.Version > StateVersion {
1972 return nil, fmt.Errorf("Terraform %s does not support state version %d, please update.", 2033 return nil, fmt.Errorf("Terraform %s does not support state version %d, please update.",
1973 SemVersion.String(), state.Version) 2034 tfversion.SemVer.String(), state.Version)
1974 } 2035 }
1975 2036
1976 // Make sure the version is semantic 2037 // Make sure the version is semantic
@@ -2126,6 +2187,19 @@ func (s moduleStateSort) Swap(i, j int) {
2126 s[i], s[j] = s[j], s[i] 2187 s[i], s[j] = s[j], s[i]
2127} 2188}
2128 2189
2190// CheckStateVersion returns an error if the state is not compatible with the
2191// current version of terraform.
2192func CheckStateVersion(state *State) error {
2193 if state == nil {
2194 return nil
2195 }
2196
2197 if state.FromFutureTerraform() {
2198 return fmt.Errorf(stateInvalidTerraformVersionErr, state.TFVersion)
2199 }
2200 return nil
2201}
2202
2129const stateValidateErrMultiModule = ` 2203const stateValidateErrMultiModule = `
2130Multiple modules with the same path: %s 2204Multiple modules with the same path: %s
2131 2205
@@ -2134,3 +2208,11 @@ in your state file that point to the same module. This will cause Terraform
2134to behave in unexpected and error prone ways and is invalid. Please back up 2208to behave in unexpected and error prone ways and is invalid. Please back up
2135and modify your state file manually to resolve this. 2209and modify your state file manually to resolve this.
2136` 2210`
2211
2212const stateInvalidTerraformVersionErr = `
2213Terraform doesn't allow running any operations against a state
2214that was written by a future Terraform version. The state is
2215reporting it is written by Terraform '%s'
2216
2217Please run at least that version of Terraform to continue.
2218`
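
Note on the InstanceState.Equal hunk above: it stops using reflect.DeepEqual on the Meta maps and instead compares their JSON encodings, so values that only differ in Go numeric type after a decode round-trip (int vs float64) still compare equal. A minimal standalone sketch of that idea, with illustrative names rather than the vendored API:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// metaEqual mirrors the comparison used in InstanceState.Equal above:
// marshal both maps to JSON and compare the bytes, so numeric values that
// changed Go type during (de)serialization still count as equal.
func metaEqual(a, b map[string]interface{}) bool {
	aj, err := json.Marshal(a)
	if err != nil {
		// marshaling primitives shouldn't ever error out
		panic(err)
	}
	bj, err := json.Marshal(b)
	if err != nil {
		panic(err)
	}
	return bytes.Equal(aj, bj)
}

func main() {
	before := map[string]interface{}{"schema_version": 1}
	after := map[string]interface{}{"schema_version": float64(1)} // as decoded from JSON
	fmt.Println(metaEqual(before, after)) // true, despite the differing Go types
}
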
diff --git a/vendor/github.com/hashicorp/terraform/terraform/test_failure b/vendor/github.com/hashicorp/terraform/terraform/test_failure
deleted file mode 100644
index 5d3ad1a..0000000
--- a/vendor/github.com/hashicorp/terraform/terraform/test_failure
+++ /dev/null
@@ -1,9 +0,0 @@
1--- FAIL: TestContext2Plan_moduleProviderInherit (0.01s)
2 context_plan_test.go:552: bad: []string{"child"}
3map[string]dag.Vertex{}
4"module.middle.null"
5map[string]dag.Vertex{}
6"module.middle.module.inner.null"
7map[string]dag.Vertex{}
8"aws"
9FAIL
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform.go b/vendor/github.com/hashicorp/terraform/terraform/transform.go
index f4a431a..0e47f20 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/transform.go
@@ -1,6 +1,8 @@
1package terraform 1package terraform
2 2
3import ( 3import (
4 "log"
5
4 "github.com/hashicorp/terraform/dag" 6 "github.com/hashicorp/terraform/dag"
5) 7)
6 8
@@ -40,6 +42,9 @@ func (t *graphTransformerMulti) Transform(g *Graph) error {
40 if err := t.Transform(g); err != nil { 42 if err := t.Transform(g); err != nil {
41 return err 43 return err
42 } 44 }
45 log.Printf(
46 "[TRACE] Graph after step %T:\n\n%s",
47 t, g.StringWithNodeTypes())
43 } 48 }
44 49
45 return nil 50 return nil
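
The transform.go change above logs the whole graph at TRACE level after every transformer runs, which makes it possible to attribute a bad node or edge to the step that introduced it. A small sketch of that pattern using stand-in Transformer and Graph types (not the vendored interfaces):

package main

import (
	"fmt"
	"log"
)

// Graph and Transformer are illustrative stand-ins for terraform's Graph and
// GraphTransformer.
type Graph struct{ Nodes []string }

func (g *Graph) String() string { return fmt.Sprint(g.Nodes) }

type Transformer interface{ Transform(*Graph) error }

type addNode struct{ Name string }

func (t addNode) Transform(g *Graph) error {
	g.Nodes = append(g.Nodes, t.Name)
	return nil
}

// runAll mirrors graphTransformerMulti.Transform: apply each step in order and
// log the resulting graph so regressions can be traced back to a single step.
func runAll(g *Graph, steps []Transformer) error {
	for _, t := range steps {
		if err := t.Transform(g); err != nil {
			return err
		}
		log.Printf("[TRACE] Graph after step %T:\n\n%s", t, g)
	}
	return nil
}

func main() {
	g := &Graph{}
	steps := []Transformer{addNode{"provider.aws"}, addNode{"aws_instance.web"}}
	if err := runAll(g, steps); err != nil {
		log.Fatal(err)
	}
}
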
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_attach_config_provider.go b/vendor/github.com/hashicorp/terraform/terraform/transform_attach_config_provider.go
index 10506ea..39cf097 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_attach_config_provider.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/transform_attach_config_provider.go
@@ -1,10 +1,7 @@
1package terraform 1package terraform
2 2
3import ( 3import (
4 "log"
5
6 "github.com/hashicorp/terraform/config" 4 "github.com/hashicorp/terraform/config"
7 "github.com/hashicorp/terraform/config/module"
8) 5)
9 6
10// GraphNodeAttachProvider is an interface that must be implemented by nodes 7// GraphNodeAttachProvider is an interface that must be implemented by nodes
@@ -19,62 +16,3 @@ type GraphNodeAttachProvider interface {
19 // Sets the configuration 16 // Sets the configuration
20 AttachProvider(*config.ProviderConfig) 17 AttachProvider(*config.ProviderConfig)
21} 18}
22
23// AttachProviderConfigTransformer goes through the graph and attaches
24// provider configuration structures to nodes that implement the interfaces
25// above.
26//
27// The attached configuration structures are directly from the configuration.
28// If they're going to be modified, a copy should be made.
29type AttachProviderConfigTransformer struct {
30 Module *module.Tree // Module is the root module for the config
31}
32
33func (t *AttachProviderConfigTransformer) Transform(g *Graph) error {
34 if err := t.attachProviders(g); err != nil {
35 return err
36 }
37
38 return nil
39}
40
41func (t *AttachProviderConfigTransformer) attachProviders(g *Graph) error {
42 // Go through and find GraphNodeAttachProvider
43 for _, v := range g.Vertices() {
44 // Only care about GraphNodeAttachProvider implementations
45 apn, ok := v.(GraphNodeAttachProvider)
46 if !ok {
47 continue
48 }
49
50 // Determine what we're looking for
51 path := normalizeModulePath(apn.Path())
52 path = path[1:]
53 name := apn.ProviderName()
54 log.Printf("[TRACE] Attach provider request: %#v %s", path, name)
55
56 // Get the configuration.
57 tree := t.Module.Child(path)
58 if tree == nil {
59 continue
60 }
61
62 // Go through the provider configs to find the matching config
63 for _, p := range tree.Config().ProviderConfigs {
64 // Build the name, which is "name.alias" if an alias exists
65 current := p.Name
66 if p.Alias != "" {
67 current += "." + p.Alias
68 }
69
70 // If the configs match then attach!
71 if current == name {
72 log.Printf("[TRACE] Attaching provider config: %#v", p)
73 apn.AttachProvider(p)
74 break
75 }
76 }
77 }
78
79 return nil
80}
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_deposed.go b/vendor/github.com/hashicorp/terraform/terraform/transform_deposed.go
index 2148cef..87a1f9c 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_deposed.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/transform_deposed.go
@@ -12,6 +12,9 @@ type DeposedTransformer struct {
12 // View, if non-empty, is the ModuleState.View used around the state 12 // View, if non-empty, is the ModuleState.View used around the state
13 // to find deposed resources. 13 // to find deposed resources.
14 View string 14 View string
15
16 	// The provider used by the resources which were deposed
17 ResolvedProvider string
15} 18}
16 19
17func (t *DeposedTransformer) Transform(g *Graph) error { 20func (t *DeposedTransformer) Transform(g *Graph) error {
@@ -33,14 +36,16 @@ func (t *DeposedTransformer) Transform(g *Graph) error {
33 if len(rs.Deposed) == 0 { 36 if len(rs.Deposed) == 0 {
34 continue 37 continue
35 } 38 }
39
36 deposed := rs.Deposed 40 deposed := rs.Deposed
37 41
38 for i, _ := range deposed { 42 for i, _ := range deposed {
39 g.Add(&graphNodeDeposedResource{ 43 g.Add(&graphNodeDeposedResource{
40 Index: i, 44 Index: i,
41 ResourceName: k, 45 ResourceName: k,
42 ResourceType: rs.Type, 46 ResourceType: rs.Type,
43 Provider: rs.Provider, 47 ProviderName: rs.Provider,
48 ResolvedProvider: t.ResolvedProvider,
44 }) 49 })
45 } 50 }
46 } 51 }
@@ -50,18 +55,23 @@ func (t *DeposedTransformer) Transform(g *Graph) error {
50 55
51// graphNodeDeposedResource is the graph vertex representing a deposed resource. 56// graphNodeDeposedResource is the graph vertex representing a deposed resource.
52type graphNodeDeposedResource struct { 57type graphNodeDeposedResource struct {
53 Index int 58 Index int
54 ResourceName string 59 ResourceName string
55 ResourceType string 60 ResourceType string
56 Provider string 61 ProviderName string
62 ResolvedProvider string
57} 63}
58 64
59func (n *graphNodeDeposedResource) Name() string { 65func (n *graphNodeDeposedResource) Name() string {
60 return fmt.Sprintf("%s (deposed #%d)", n.ResourceName, n.Index) 66 return fmt.Sprintf("%s (deposed #%d)", n.ResourceName, n.Index)
61} 67}
62 68
63func (n *graphNodeDeposedResource) ProvidedBy() []string { 69func (n *graphNodeDeposedResource) ProvidedBy() string {
64 return []string{resourceProvider(n.ResourceName, n.Provider)} 70 return resourceProvider(n.ResourceName, n.ProviderName)
71}
72
73func (n *graphNodeDeposedResource) SetProvider(p string) {
74 n.ResolvedProvider = p
65} 75}
66 76
67// GraphNodeEvalable impl. 77// GraphNodeEvalable impl.
@@ -81,7 +91,7 @@ func (n *graphNodeDeposedResource) EvalTree() EvalNode {
81 Node: &EvalSequence{ 91 Node: &EvalSequence{
82 Nodes: []EvalNode{ 92 Nodes: []EvalNode{
83 &EvalGetProvider{ 93 &EvalGetProvider{
84 Name: n.ProvidedBy()[0], 94 Name: n.ResolvedProvider,
85 Output: &provider, 95 Output: &provider,
86 }, 96 },
87 &EvalReadStateDeposed{ 97 &EvalReadStateDeposed{
@@ -98,7 +108,7 @@ func (n *graphNodeDeposedResource) EvalTree() EvalNode {
98 &EvalWriteStateDeposed{ 108 &EvalWriteStateDeposed{
99 Name: n.ResourceName, 109 Name: n.ResourceName,
100 ResourceType: n.ResourceType, 110 ResourceType: n.ResourceType,
101 Provider: n.Provider, 111 Provider: n.ResolvedProvider,
102 State: &state, 112 State: &state,
103 Index: n.Index, 113 Index: n.Index,
104 }, 114 },
@@ -114,7 +124,7 @@ func (n *graphNodeDeposedResource) EvalTree() EvalNode {
114 Node: &EvalSequence{ 124 Node: &EvalSequence{
115 Nodes: []EvalNode{ 125 Nodes: []EvalNode{
116 &EvalGetProvider{ 126 &EvalGetProvider{
117 Name: n.ProvidedBy()[0], 127 Name: n.ResolvedProvider,
118 Output: &provider, 128 Output: &provider,
119 }, 129 },
120 &EvalReadStateDeposed{ 130 &EvalReadStateDeposed{
@@ -147,7 +157,7 @@ func (n *graphNodeDeposedResource) EvalTree() EvalNode {
147 &EvalWriteStateDeposed{ 157 &EvalWriteStateDeposed{
148 Name: n.ResourceName, 158 Name: n.ResourceName,
149 ResourceType: n.ResourceType, 159 ResourceType: n.ResourceType,
150 Provider: n.Provider, 160 Provider: n.ResolvedProvider,
151 State: &state, 161 State: &state,
152 Index: n.Index, 162 Index: n.Index,
153 }, 163 },
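
The deposed-resource node above now follows the reworked GraphNodeProviderConsumer contract: ProvidedBy returns a single requested provider name, and the provider transformer later calls SetProvider with the fully resolved address, which the EvalTree uses instead of ProvidedBy()[0]. A rough sketch of that handshake with stand-in types (names here are illustrative only):

package main

import "fmt"

// providerConsumer stands in for GraphNodeProviderConsumer after this change:
// one requested provider name out, one resolved provider address back in.
type providerConsumer interface {
	ProvidedBy() string // e.g. "aws" or "aws.west"
	SetProvider(string) // e.g. "provider.aws.west"
}

type deposedNode struct {
	ProviderName     string
	ResolvedProvider string
}

func (n *deposedNode) ProvidedBy() string   { return n.ProviderName }
func (n *deposedNode) SetProvider(p string) { n.ResolvedProvider = p }

// resolve mimics what ProviderTransformer does for each consumer: look up the
// requested name among known providers and record the resolution on the node.
func resolve(c providerConsumer, providers map[string]string) error {
	addr, ok := providers[c.ProvidedBy()]
	if !ok {
		return fmt.Errorf("provider %s couldn't be found", c.ProvidedBy())
	}
	c.SetProvider(addr)
	return nil
}

func main() {
	n := &deposedNode{ProviderName: "aws"}
	if err := resolve(n, map[string]string{"aws": "provider.aws"}); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(n.ResolvedProvider) // provider.aws
}
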
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_destroy_edge.go b/vendor/github.com/hashicorp/terraform/terraform/transform_destroy_edge.go
index 22be1ab..a06ff29 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_destroy_edge.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/transform_destroy_edge.go
@@ -119,17 +119,15 @@ func (t *DestroyEdgeTransformer) Transform(g *Graph) error {
119 return &NodeApplyableProvider{NodeAbstractProvider: a} 119 return &NodeApplyableProvider{NodeAbstractProvider: a}
120 } 120 }
121 steps := []GraphTransformer{ 121 steps := []GraphTransformer{
122 // Add the local values
123 &LocalTransformer{Module: t.Module},
124
122 // Add outputs and metadata 125 // Add outputs and metadata
123 &OutputTransformer{Module: t.Module}, 126 &OutputTransformer{Module: t.Module},
124 &AttachResourceConfigTransformer{Module: t.Module}, 127 &AttachResourceConfigTransformer{Module: t.Module},
125 &AttachStateTransformer{State: t.State}, 128 &AttachStateTransformer{State: t.State},
126 129
127 // Add providers since they can affect destroy order as well 130 TransformProviders(nil, providerFn, t.Module),
128 &MissingProviderTransformer{AllowAny: true, Concrete: providerFn},
129 &ProviderTransformer{},
130 &DisableProviderTransformer{},
131 &ParentProviderTransformer{},
132 &AttachProviderConfigTransformer{Module: t.Module},
133 131
134 // Add all the variables. We can depend on resources through 132 // Add all the variables. We can depend on resources through
135 // variables due to module parameters, and we need to properly 133 // variables due to module parameters, and we need to properly
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_import_state.go b/vendor/github.com/hashicorp/terraform/terraform/transform_import_state.go
index 081df2f..fcbff65 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_import_state.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/transform_import_state.go
@@ -21,9 +21,9 @@ func (t *ImportStateTransformer) Transform(g *Graph) error {
21 } 21 }
22 22
23 nodes = append(nodes, &graphNodeImportState{ 23 nodes = append(nodes, &graphNodeImportState{
24 Addr: addr, 24 Addr: addr,
25 ID: target.ID, 25 ID: target.ID,
26 Provider: target.Provider, 26 ProviderName: target.Provider,
27 }) 27 })
28 } 28 }
29 29
@@ -36,9 +36,10 @@ func (t *ImportStateTransformer) Transform(g *Graph) error {
36} 36}
37 37
38type graphNodeImportState struct { 38type graphNodeImportState struct {
39 Addr *ResourceAddress // Addr is the resource address to import to 39 Addr *ResourceAddress // Addr is the resource address to import to
40 ID string // ID is the ID to import as 40 ID string // ID is the ID to import as
41 Provider string // Provider string 41 ProviderName string // Provider string
42 ResolvedProvider string // provider node address
42 43
43 states []*InstanceState 44 states []*InstanceState
44} 45}
@@ -47,8 +48,12 @@ func (n *graphNodeImportState) Name() string {
47 return fmt.Sprintf("%s (import id: %s)", n.Addr, n.ID) 48 return fmt.Sprintf("%s (import id: %s)", n.Addr, n.ID)
48} 49}
49 50
50func (n *graphNodeImportState) ProvidedBy() []string { 51func (n *graphNodeImportState) ProvidedBy() string {
51 return []string{resourceProvider(n.Addr.Type, n.Provider)} 52 return resourceProvider(n.Addr.Type, n.ProviderName)
53}
54
55func (n *graphNodeImportState) SetProvider(p string) {
56 n.ResolvedProvider = p
52} 57}
53 58
54// GraphNodeSubPath 59// GraphNodeSubPath
@@ -72,7 +77,7 @@ func (n *graphNodeImportState) EvalTree() EvalNode {
72 return &EvalSequence{ 77 return &EvalSequence{
73 Nodes: []EvalNode{ 78 Nodes: []EvalNode{
74 &EvalGetProvider{ 79 &EvalGetProvider{
75 Name: n.ProvidedBy()[0], 80 Name: n.ResolvedProvider,
76 Output: &provider, 81 Output: &provider,
77 }, 82 },
78 &EvalImportState{ 83 &EvalImportState{
@@ -149,10 +154,11 @@ func (n *graphNodeImportState) DynamicExpand(ctx EvalContext) (*Graph, error) {
149 // is safe. 154 // is safe.
150 for i, state := range n.states { 155 for i, state := range n.states {
151 g.Add(&graphNodeImportStateSub{ 156 g.Add(&graphNodeImportStateSub{
152 Target: addrs[i], 157 Target: addrs[i],
153 Path_: n.Path(), 158 Path_: n.Path(),
154 State: state, 159 State: state,
155 Provider: n.Provider, 160 ProviderName: n.ProviderName,
161 ResolvedProvider: n.ResolvedProvider,
156 }) 162 })
157 } 163 }
158 164
@@ -170,10 +176,11 @@ func (n *graphNodeImportState) DynamicExpand(ctx EvalContext) (*Graph, error) {
170// and is part of the subgraph. This node is responsible for refreshing 176// and is part of the subgraph. This node is responsible for refreshing
171// and adding a resource to the state once it is imported. 177// and adding a resource to the state once it is imported.
172type graphNodeImportStateSub struct { 178type graphNodeImportStateSub struct {
173 Target *ResourceAddress 179 Target *ResourceAddress
174 State *InstanceState 180 State *InstanceState
175 Path_ []string 181 Path_ []string
176 Provider string 182 ProviderName string
183 ResolvedProvider string
177} 184}
178 185
179func (n *graphNodeImportStateSub) Name() string { 186func (n *graphNodeImportStateSub) Name() string {
@@ -216,7 +223,7 @@ func (n *graphNodeImportStateSub) EvalTree() EvalNode {
216 return &EvalSequence{ 223 return &EvalSequence{
217 Nodes: []EvalNode{ 224 Nodes: []EvalNode{
218 &EvalGetProvider{ 225 &EvalGetProvider{
219 Name: resourceProvider(info.Type, n.Provider), 226 Name: n.ResolvedProvider,
220 Output: &provider, 227 Output: &provider,
221 }, 228 },
222 &EvalRefresh{ 229 &EvalRefresh{
@@ -233,7 +240,7 @@ func (n *graphNodeImportStateSub) EvalTree() EvalNode {
233 &EvalWriteState{ 240 &EvalWriteState{
234 Name: key.String(), 241 Name: key.String(),
235 ResourceType: info.Type, 242 ResourceType: info.Type,
236 Provider: resourceProvider(info.Type, n.Provider), 243 Provider: n.ResolvedProvider,
237 State: &state, 244 State: &state,
238 }, 245 },
239 }, 246 },
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_local.go b/vendor/github.com/hashicorp/terraform/terraform/transform_local.go
new file mode 100644
index 0000000..95ecfc0
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/terraform/transform_local.go
@@ -0,0 +1,40 @@
1package terraform
2
3import (
4 "github.com/hashicorp/terraform/config/module"
5)
6
7// LocalTransformer is a GraphTransformer that adds all the local values
8// from the configuration to the graph.
9type LocalTransformer struct {
10 Module *module.Tree
11}
12
13func (t *LocalTransformer) Transform(g *Graph) error {
14 return t.transformModule(g, t.Module)
15}
16
17func (t *LocalTransformer) transformModule(g *Graph, m *module.Tree) error {
18 if m == nil {
19 // Can't have any locals if there's no config
20 return nil
21 }
22
23 for _, local := range m.Config().Locals {
24 node := &NodeLocal{
25 PathValue: normalizeModulePath(m.Path()),
26 Config: local,
27 }
28
29 g.Add(node)
30 }
31
32 // Also populate locals for child modules
33 for _, c := range m.Children() {
34 if err := t.transformModule(g, c); err != nil {
35 return err
36 }
37 }
38
39 return nil
40}
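
The new transform_local.go above walks the module tree, adds one graph node per local value, and recurses into child modules. A standalone sketch of that walk with stand-in ModuleTree, LocalNode and Graph types (illustrative only, not the vendored module.Tree API):

package main

import "fmt"

// ModuleTree, LocalNode and Graph are illustrative stand-ins.
type ModuleTree struct {
	Path     []string
	Locals   []string
	Children []*ModuleTree
}

type LocalNode struct {
	Path []string
	Name string
}

type Graph struct{ nodes []LocalNode }

func (g *Graph) Add(n LocalNode) { g.nodes = append(g.nodes, n) }

// addLocals mirrors LocalTransformer.transformModule: add a node for every
// local value in this module, then do the same for each child module.
func addLocals(g *Graph, m *ModuleTree) {
	if m == nil {
		return // no config means no locals
	}
	for _, name := range m.Locals {
		g.Add(LocalNode{Path: m.Path, Name: name})
	}
	for _, c := range m.Children {
		addLocals(g, c)
	}
}

func main() {
	root := &ModuleTree{
		Path:   []string{"root"},
		Locals: []string{"common_tags"},
		Children: []*ModuleTree{
			{Path: []string{"root", "vpc"}, Locals: []string{"cidr"}},
		},
	}
	g := &Graph{}
	addLocals(g, root)
	fmt.Println(g.nodes) // [{[root] common_tags} {[root vpc] cidr}]
}
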
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_orphan_output.go b/vendor/github.com/hashicorp/terraform/terraform/transform_orphan_output.go
index 49568d5..aea2bd0 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_orphan_output.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/transform_orphan_output.go
@@ -21,43 +21,32 @@ func (t *OrphanOutputTransformer) Transform(g *Graph) error {
21 return nil 21 return nil
22 } 22 }
23 23
24 return t.transform(g, t.Module) 24 for _, ms := range t.State.Modules {
25} 25 if err := t.transform(g, ms); err != nil {
26 26 return err
27func (t *OrphanOutputTransformer) transform(g *Graph, m *module.Tree) error {
28 // Get our configuration, and recurse into children
29 var c *config.Config
30 if m != nil {
31 c = m.Config()
32 for _, child := range m.Children() {
33 if err := t.transform(g, child); err != nil {
34 return err
35 }
36 } 27 }
37 } 28 }
29 return nil
30}
38 31
39 // Get the state. If there is no state, then we have no orphans! 32func (t *OrphanOutputTransformer) transform(g *Graph, ms *ModuleState) error {
40 path := normalizeModulePath(m.Path()) 33 if ms == nil {
41 state := t.State.ModuleByPath(path)
42 if state == nil {
43 return nil 34 return nil
44 } 35 }
45 36
46 // Make a map of the valid outputs 37 path := normalizeModulePath(ms.Path)
47 valid := make(map[string]struct{})
48 for _, o := range c.Outputs {
49 valid[o.Name] = struct{}{}
50 }
51 38
52 // Go through the outputs and find the ones that aren't in our config. 39 // Get the config for this path, which is nil if the entire module has been
53 for n, _ := range state.Outputs { 40 // removed.
54 // If it is in the valid map, then ignore 41 var c *config.Config
55 if _, ok := valid[n]; ok { 42 if m := t.Module.Child(path[1:]); m != nil {
56 continue 43 c = m.Config()
57 } 44 }
58 45
59 // Orphan! 46 // add all the orphaned outputs to the graph
47 for _, n := range ms.RemovedOutputs(c) {
60 g.Add(&NodeOutputOrphan{OutputName: n, PathValue: path}) 48 g.Add(&NodeOutputOrphan{OutputName: n, PathValue: path})
49
61 } 50 }
62 51
63 return nil 52 return nil
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_output.go b/vendor/github.com/hashicorp/terraform/terraform/transform_output.go
index b260f4c..faa25e4 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_output.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/transform_output.go
@@ -1,7 +1,10 @@
1package terraform 1package terraform
2 2
3import ( 3import (
4 "log"
5
4 "github.com/hashicorp/terraform/config/module" 6 "github.com/hashicorp/terraform/config/module"
7 "github.com/hashicorp/terraform/dag"
5) 8)
6 9
7// OutputTransformer is a GraphTransformer that adds all the outputs 10// OutputTransformer is a GraphTransformer that adds all the outputs
@@ -41,11 +44,6 @@ func (t *OutputTransformer) transform(g *Graph, m *module.Tree) error {
41 44
42 // Add all outputs here 45 // Add all outputs here
43 for _, o := range os { 46 for _, o := range os {
44 // Build the node.
45 //
46 // NOTE: For now this is just an "applyable" output. As we build
47 // new graph builders for the other operations I suspect we'll
48 // find a way to parameterize this, require new transforms, etc.
49 node := &NodeApplyableOutput{ 47 node := &NodeApplyableOutput{
50 PathValue: normalizeModulePath(m.Path()), 48 PathValue: normalizeModulePath(m.Path()),
51 Config: o, 49 Config: o,
@@ -57,3 +55,41 @@ func (t *OutputTransformer) transform(g *Graph, m *module.Tree) error {
57 55
58 return nil 56 return nil
59} 57}
58
59// DestroyOutputTransformer is a GraphTransformer that adds nodes to delete
60// outputs during destroy. We need to do this to ensure that no stale outputs
61// are ever left in the state.
62type DestroyOutputTransformer struct {
63}
64
65func (t *DestroyOutputTransformer) Transform(g *Graph) error {
66 for _, v := range g.Vertices() {
67 output, ok := v.(*NodeApplyableOutput)
68 if !ok {
69 continue
70 }
71
72 // create the destroy node for this output
73 node := &NodeDestroyableOutput{
74 PathValue: output.PathValue,
75 Config: output.Config,
76 }
77
78 log.Printf("[TRACE] creating %s", node.Name())
79 g.Add(node)
80
81 deps, err := g.Descendents(v)
82 if err != nil {
83 return err
84 }
85
86 // the destroy node must depend on the eval node
87 deps.Add(v)
88
89 for _, d := range deps.List() {
90 log.Printf("[TRACE] %s depends on %s", node.Name(), dag.VertexName(d))
91 g.Connect(dag.BasicEdge(node, d))
92 }
93 }
94 return nil
95}
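
DestroyOutputTransformer above adds a NodeDestroyableOutput for every applyable output and connects it to the output node plus everything the output depends on, so the output is only removed from state after all of its dependencies have been handled. A toy sketch of that edge wiring over a minimal adjacency-list graph (stand-in types, not the vendored dag package):

package main

import "fmt"

// graph is a minimal stand-in for the dag package: edges[v] lists the
// vertices v depends on.
type graph struct{ edges map[string][]string }

func (g *graph) connect(from, to string) { g.edges[from] = append(g.edges[from], to) }

// descendants returns everything reachable from v (its transitive deps).
func (g *graph) descendants(v string) []string {
	var out []string
	seen := map[string]bool{}
	var walk func(string)
	walk = func(n string) {
		for _, d := range g.edges[n] {
			if !seen[d] {
				seen[d] = true
				out = append(out, d)
				walk(d)
			}
		}
	}
	walk(v)
	return out
}

func main() {
	g := &graph{edges: map[string][]string{}}
	g.connect("output.ip", "aws_instance.web") // the output reads the resource

	// Mirror the transformer: the destroy node depends on the output itself
	// and on every descendant of the output.
	destroy := "output.ip (destroy)"
	g.connect(destroy, "output.ip")
	for _, d := range g.descendants("output.ip") {
		g.connect(destroy, d)
	}
	fmt.Println(g.edges[destroy]) // [output.ip aws_instance.web]
}
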
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_provider.go b/vendor/github.com/hashicorp/terraform/terraform/transform_provider.go
index b9695d5..c4772b4 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_provider.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/transform_provider.go
@@ -1,19 +1,46 @@
1package terraform 1package terraform
2 2
3import ( 3import (
4 "errors"
4 "fmt" 5 "fmt"
5 "log" 6 "log"
6 "strings" 7 "strings"
7 8
8 "github.com/hashicorp/go-multierror" 9 "github.com/hashicorp/go-multierror"
10 "github.com/hashicorp/terraform/config"
11 "github.com/hashicorp/terraform/config/module"
9 "github.com/hashicorp/terraform/dag" 12 "github.com/hashicorp/terraform/dag"
10) 13)
11 14
15func TransformProviders(providers []string, concrete ConcreteProviderNodeFunc, mod *module.Tree) GraphTransformer {
16 return GraphTransformMulti(
17 // Add providers from the config
18 &ProviderConfigTransformer{
19 Module: mod,
20 Providers: providers,
21 Concrete: concrete,
22 },
23 // Add any remaining missing providers
24 &MissingProviderTransformer{
25 Providers: providers,
26 Concrete: concrete,
27 },
28 // Connect the providers
29 &ProviderTransformer{},
30 // Remove unused providers and proxies
31 &PruneProviderTransformer{},
32 // Connect provider to their parent provider nodes
33 &ParentProviderTransformer{},
34 )
35}
36
12// GraphNodeProvider is an interface that nodes that can be a provider 37// GraphNodeProvider is an interface that nodes that can be a provider
13// must implement. The ProviderName returned is the name of the provider 38// must implement.
14// they satisfy. 39// ProviderName returns the name of the provider this satisfies.
40// Name returns the full name of the provider in the config.
15type GraphNodeProvider interface { 41type GraphNodeProvider interface {
16 ProviderName() string 42 ProviderName() string
43 Name() string
17} 44}
18 45
19// GraphNodeCloseProvider is an interface that nodes that can be a close 46// GraphNodeCloseProvider is an interface that nodes that can be a close
@@ -25,9 +52,12 @@ type GraphNodeCloseProvider interface {
25 52
26// GraphNodeProviderConsumer is an interface that nodes that require 53// GraphNodeProviderConsumer is an interface that nodes that require
27// a provider must implement. ProvidedBy must return the name of the provider 54// a provider must implement. ProvidedBy must return the name of the provider
28// to use. 55// to use. This may be a provider by type, type.alias or a fully resolved
56// provider name
29type GraphNodeProviderConsumer interface { 57type GraphNodeProviderConsumer interface {
30 ProvidedBy() []string 58 ProvidedBy() string
59 // Set the resolved provider address for this resource.
60 SetProvider(string)
31} 61}
32 62
33// ProviderTransformer is a GraphTransformer that maps resources to 63// ProviderTransformer is a GraphTransformer that maps resources to
@@ -41,18 +71,52 @@ func (t *ProviderTransformer) Transform(g *Graph) error {
41 m := providerVertexMap(g) 71 m := providerVertexMap(g)
42 for _, v := range g.Vertices() { 72 for _, v := range g.Vertices() {
43 if pv, ok := v.(GraphNodeProviderConsumer); ok { 73 if pv, ok := v.(GraphNodeProviderConsumer); ok {
44 for _, p := range pv.ProvidedBy() { 74 p := pv.ProvidedBy()
45 target := m[providerMapKey(p, pv)] 75
46 if target == nil { 76 key := providerMapKey(p, pv)
47 println(fmt.Sprintf("%#v\n\n%#v", m, providerMapKey(p, pv))) 77 target := m[key]
48 err = multierror.Append(err, fmt.Errorf( 78
49 "%s: provider %s couldn't be found", 79 sp, ok := pv.(GraphNodeSubPath)
50 dag.VertexName(v), p)) 80 if !ok && target == nil {
51 continue 81 // no target, and no path to walk up
82 err = multierror.Append(err, fmt.Errorf(
83 "%s: provider %s couldn't be found",
84 dag.VertexName(v), p))
85 break
86 }
87
88 // if we don't have a provider at this level, walk up the path looking for one
89 for i := 1; target == nil; i++ {
90 path := normalizeModulePath(sp.Path())
91 if len(path) < i {
92 break
93 }
94
95 key = ResolveProviderName(p, path[:len(path)-i])
96 target = m[key]
97 if target != nil {
98 break
52 } 99 }
100 }
101
102 if target == nil {
103 err = multierror.Append(err, fmt.Errorf(
104 "%s: configuration for %s is not present; a provider configuration block is required for all operations",
105 dag.VertexName(v), p,
106 ))
107 break
108 }
53 109
54 g.Connect(dag.BasicEdge(v, target)) 110 // see if this in an inherited provider
111 if p, ok := target.(*graphNodeProxyProvider); ok {
112 g.Remove(p)
113 target = p.Target()
114 key = target.(GraphNodeProvider).Name()
55 } 115 }
116
117 log.Printf("[DEBUG] resource %s using provider %s", dag.VertexName(pv), key)
118 pv.SetProvider(key)
119 g.Connect(dag.BasicEdge(v, target))
56 } 120 }
57 } 121 }
58 122
@@ -67,36 +131,32 @@ type CloseProviderTransformer struct{}
67 131
68func (t *CloseProviderTransformer) Transform(g *Graph) error { 132func (t *CloseProviderTransformer) Transform(g *Graph) error {
69 pm := providerVertexMap(g) 133 pm := providerVertexMap(g)
70 cpm := closeProviderVertexMap(g) 134 cpm := make(map[string]*graphNodeCloseProvider)
71 var err error 135 var err error
72 for _, v := range g.Vertices() {
73 if pv, ok := v.(GraphNodeProviderConsumer); ok {
74 for _, p := range pv.ProvidedBy() {
75 key := p
76 source := cpm[key]
77
78 if source == nil {
79 // Create a new graphNodeCloseProvider and add it to the graph
80 source = &graphNodeCloseProvider{ProviderNameValue: p}
81 g.Add(source)
82
83 // Close node needs to depend on provider
84 provider, ok := pm[key]
85 if !ok {
86 err = multierror.Append(err, fmt.Errorf(
87 "%s: provider %s couldn't be found for closing",
88 dag.VertexName(v), p))
89 continue
90 }
91 g.Connect(dag.BasicEdge(source, provider))
92
93 // Make sure we also add the new graphNodeCloseProvider to the map
94 // so we don't create and add any duplicate graphNodeCloseProviders.
95 cpm[key] = source
96 }
97 136
98 // Close node depends on all nodes provided by the provider 137 for _, v := range pm {
99 g.Connect(dag.BasicEdge(source, v)) 138 p := v.(GraphNodeProvider)
139
140 	// get the close provider of this type if we already created it
141 closer := cpm[p.Name()]
142
143 if closer == nil {
144 // create a closer for this provider type
145 closer = &graphNodeCloseProvider{ProviderNameValue: p.Name()}
146 g.Add(closer)
147 cpm[p.Name()] = closer
148 }
149
150 // Close node depends on the provider itself
151 // this is added unconditionally, so it will connect to all instances
152 // of the provider. Extra edges will be removed by transitive
153 // reduction.
154 g.Connect(dag.BasicEdge(closer, p))
155
156 // connect all the provider's resources to the close node
157 for _, s := range g.UpEdges(p).List() {
158 if _, ok := s.(GraphNodeProviderConsumer); ok {
159 g.Connect(dag.BasicEdge(closer, s))
100 } 160 }
101 } 161 }
102 } 162 }
@@ -104,18 +164,14 @@ func (t *CloseProviderTransformer) Transform(g *Graph) error {
104 return err 164 return err
105} 165}
106 166
107// MissingProviderTransformer is a GraphTransformer that adds nodes 167// MissingProviderTransformer is a GraphTransformer that adds nodes for all
108// for missing providers into the graph. Specifically, it creates provider 168// required providers into the graph. Specifically, it creates provider
109// configuration nodes for all the providers that we support. These are 169// configuration nodes for all the providers that we support. These are pruned
110// pruned later during an optimization pass. 170// later during an optimization pass.
111type MissingProviderTransformer struct { 171type MissingProviderTransformer struct {
112 // Providers is the list of providers we support. 172 // Providers is the list of providers we support.
113 Providers []string 173 Providers []string
114 174
115 // AllowAny will not check that a provider is supported before adding
116 // it to the graph.
117 AllowAny bool
118
119 // Concrete, if set, overrides how the providers are made. 175 // Concrete, if set, overrides how the providers are made.
120 Concrete ConcreteProviderNodeFunc 176 Concrete ConcreteProviderNodeFunc
121} 177}
@@ -128,99 +184,57 @@ func (t *MissingProviderTransformer) Transform(g *Graph) error {
128 } 184 }
129 } 185 }
130 186
131 // Create a set of our supported providers 187 var err error
132 supported := make(map[string]struct{}, len(t.Providers))
133 for _, v := range t.Providers {
134 supported[v] = struct{}{}
135 }
136
137 // Get the map of providers we already have in our graph
138 m := providerVertexMap(g) 188 m := providerVertexMap(g)
139 189 for _, v := range g.Vertices() {
140 // Go through all the provider consumers and make sure we add
141 // that provider if it is missing. We use a for loop here instead
142 // of "range" since we'll modify check as we go to add more to check.
143 check := g.Vertices()
144 for i := 0; i < len(check); i++ {
145 v := check[i]
146
147 pv, ok := v.(GraphNodeProviderConsumer) 190 pv, ok := v.(GraphNodeProviderConsumer)
148 if !ok { 191 if !ok {
149 continue 192 continue
150 } 193 }
151 194
152 // If this node has a subpath, then we use that as a prefix 195 p := pv.ProvidedBy()
153 // into our map to check for an existing provider. 196 // this may be the resolved provider from the state, so we need to get
154 var path []string 197 // the base provider name.
155 if sp, ok := pv.(GraphNodeSubPath); ok { 198 parts := strings.SplitAfter(p, "provider.")
156 raw := normalizeModulePath(sp.Path()) 199 p = parts[len(parts)-1]
157 if len(raw) > len(rootModulePath) {
158 path = raw
159 }
160 }
161 200
162 for _, p := range pv.ProvidedBy() { 201 key := ResolveProviderName(p, nil)
163 key := providerMapKey(p, pv) 202 provider := m[key]
164 if _, ok := m[key]; ok {
165 // This provider already exists as a configure node
166 continue
167 }
168 203
169 // If the provider has an alias in it, we just want the type 204 // we already have it
170 ptype := p 205 if provider != nil {
171 if idx := strings.IndexRune(p, '.'); idx != -1 { 206 continue
172 ptype = p[:idx] 207 }
173 }
174 208
175 if !t.AllowAny { 209 // we don't implicitly create aliased providers
176 if _, ok := supported[ptype]; !ok { 210 if strings.Contains(p, ".") {
177 // If we don't support the provider type, skip it. 211 log.Println("[DEBUG] not adding missing provider alias:", p)
178 // Validation later will catch this as an error. 212 continue
179 continue 213 }
180 }
181 }
182 214
183 // Add the missing provider node to the graph 215 log.Println("[DEBUG] adding missing provider:", p)
184 v := t.Concrete(&NodeAbstractProvider{
185 NameValue: p,
186 PathValue: path,
187 }).(dag.Vertex)
188 if len(path) > 0 {
189 // We'll need the parent provider as well, so let's
190 // add a dummy node to check to make sure that we add
191 // that parent provider.
192 check = append(check, &graphNodeProviderConsumerDummy{
193 ProviderValue: p,
194 PathValue: path[:len(path)-1],
195 })
196 }
197 216
198 	m[key] = g.Add(v) 217 	// create the missing top-level provider
199 } 218 provider = t.Concrete(&NodeAbstractProvider{
219 NameValue: p,
220 }).(dag.Vertex)
221
222 m[key] = g.Add(provider)
200 } 223 }
201 224
202 return nil 225 return err
203} 226}
204 227
205// ParentProviderTransformer connects provider nodes to their parents. 228// ParentProviderTransformer connects provider nodes to their parents.
206// 229//
207// This works by finding nodes that are both GraphNodeProviders and 230// This works by finding nodes that are both GraphNodeProviders and
208// GraphNodeSubPath. It then connects the providers to their parent 231// GraphNodeSubPath. It then connects the providers to their parent
209// path. 232// path. The parent provider is always at the root level.
210type ParentProviderTransformer struct{} 233type ParentProviderTransformer struct{}
211 234
212func (t *ParentProviderTransformer) Transform(g *Graph) error { 235func (t *ParentProviderTransformer) Transform(g *Graph) error {
213 // Make a mapping of path to dag.Vertex, where path is: "path.name" 236 pm := providerVertexMap(g)
214 m := make(map[string]dag.Vertex) 237 for _, v := range g.Vertices() {
215
216 // Also create a map that maps a provider to its parent
217 parentMap := make(map[dag.Vertex]string)
218 for _, raw := range g.Vertices() {
219 // If it is the flat version, then make it the non-flat version.
220 // We eventually want to get rid of the flat version entirely so
221 // this is a stop-gap while it still exists.
222 var v dag.Vertex = raw
223
224 // Only care about providers 238 // Only care about providers
225 pn, ok := v.(GraphNodeProvider) 239 pn, ok := v.(GraphNodeProvider)
226 if !ok || pn.ProviderName() == "" { 240 if !ok || pn.ProviderName() == "" {
@@ -228,53 +242,48 @@ func (t *ParentProviderTransformer) Transform(g *Graph) error {
228 } 242 }
229 243
230 // Also require a subpath, if there is no subpath then we 244 // Also require a subpath, if there is no subpath then we
231 // just totally ignore it. The expectation of this transform is 245 // can't have a parent.
232 // that it is used with a graph builder that is already flattened. 246 if pn, ok := v.(GraphNodeSubPath); ok {
233 var path []string 247 if len(normalizeModulePath(pn.Path())) <= 1 {
234 if pn, ok := raw.(GraphNodeSubPath); ok { 248 continue
235 path = pn.Path() 249 }
236 }
237 path = normalizeModulePath(path)
238
239 // Build the key with path.name i.e. "child.subchild.aws"
240 key := fmt.Sprintf("%s.%s", strings.Join(path, "."), pn.ProviderName())
241 m[key] = raw
242
243 // Determine the parent if we're non-root. This is length 1 since
244 // the 0 index should be "root" since we normalize above.
245 if len(path) > 1 {
246 path = path[:len(path)-1]
247 key := fmt.Sprintf("%s.%s", strings.Join(path, "."), pn.ProviderName())
248 parentMap[raw] = key
249 } 250 }
250 }
251 251
252 	// Connect! 252 	// this provider may be disabled, but we can only get its name from
253 for v, key := range parentMap { 253 // the ProviderName string
254 if parent, ok := m[key]; ok { 254 name := ResolveProviderName(strings.SplitN(pn.ProviderName(), " ", 2)[0], nil)
255 parent := pm[name]
256 if parent != nil {
255 g.Connect(dag.BasicEdge(v, parent)) 257 g.Connect(dag.BasicEdge(v, parent))
256 } 258 }
257 }
258 259
260 }
259 return nil 261 return nil
260} 262}
261 263
262// PruneProviderTransformer is a GraphTransformer that prunes all the 264// PruneProviderTransformer removes any providers that are not actually used by
263// providers that aren't needed from the graph. A provider is unneeded if 265// anything, and provider proxies. This avoids the provider being initialized
264// no resource or module is using that provider. 266// and configured. This both saves resources but also avoids errors since
267// configuration may imply initialization which may require auth.
265type PruneProviderTransformer struct{} 268type PruneProviderTransformer struct{}
266 269
267func (t *PruneProviderTransformer) Transform(g *Graph) error { 270func (t *PruneProviderTransformer) Transform(g *Graph) error {
268 for _, v := range g.Vertices() { 271 for _, v := range g.Vertices() {
269 // We only care about the providers 272 // We only care about providers
270 if pn, ok := v.(GraphNodeProvider); !ok || pn.ProviderName() == "" { 273 pn, ok := v.(GraphNodeProvider)
274 if !ok || pn.ProviderName() == "" {
271 continue 275 continue
272 } 276 }
273 // Does anything depend on this? If not, then prune it. 277
274 if s := g.UpEdges(v); s.Len() == 0 { 278 // ProxyProviders will have up edges, but we're now done with them in the graph
275 if nv, ok := v.(dag.NamedVertex); ok { 279 if _, ok := v.(*graphNodeProxyProvider); ok {
276 log.Printf("[DEBUG] Pruning provider with no dependencies: %s", nv.Name()) 280 log.Printf("[DEBUG] pruning proxy provider %s", dag.VertexName(v))
277 } 281 g.Remove(v)
282 }
283
284 // Remove providers with no dependencies.
285 if g.UpEdges(v).Len() == 0 {
286 log.Printf("[DEBUG] pruning unused provider %s", dag.VertexName(v))
278 g.Remove(v) 287 g.Remove(v)
279 } 288 }
280 } 289 }
@@ -285,23 +294,26 @@ func (t *PruneProviderTransformer) Transform(g *Graph) error {
285// providerMapKey is a helper that gives us the key to use for the 294// providerMapKey is a helper that gives us the key to use for the
286// maps returned by things such as providerVertexMap. 295// maps returned by things such as providerVertexMap.
287func providerMapKey(k string, v dag.Vertex) string { 296func providerMapKey(k string, v dag.Vertex) string {
288 pathPrefix := "" 297 if strings.Contains(k, "provider.") {
289 if sp, ok := v.(GraphNodeSubPath); ok { 298 // this is already resolved
290 raw := normalizeModulePath(sp.Path()) 299 return k
291 if len(raw) > len(rootModulePath) {
292 pathPrefix = modulePrefixStr(raw) + "."
293 }
294 } 300 }
295 301
296 return pathPrefix + k 302 // we create a dummy provider to
303 var path []string
304 if sp, ok := v.(GraphNodeSubPath); ok {
305 path = normalizeModulePath(sp.Path())
306 }
307 return ResolveProviderName(k, path)
297} 308}
298 309
299func providerVertexMap(g *Graph) map[string]dag.Vertex { 310func providerVertexMap(g *Graph) map[string]dag.Vertex {
300 m := make(map[string]dag.Vertex) 311 m := make(map[string]dag.Vertex)
301 for _, v := range g.Vertices() { 312 for _, v := range g.Vertices() {
302 if pv, ok := v.(GraphNodeProvider); ok { 313 if pv, ok := v.(GraphNodeProvider); ok {
303 key := providerMapKey(pv.ProviderName(), v) 314 // TODO: The Name may have meta info, like " (disabled)"
304 m[key] = v 315 name := strings.SplitN(pv.Name(), " ", 2)[0]
316 m[name] = v
305 } 317 }
306 } 318 }
307 319
@@ -324,7 +336,7 @@ type graphNodeCloseProvider struct {
324} 336}
325 337
326func (n *graphNodeCloseProvider) Name() string { 338func (n *graphNodeCloseProvider) Name() string {
327 return fmt.Sprintf("provider.%s (close)", n.ProviderNameValue) 339 return n.ProviderNameValue + " (close)"
328} 340}
329 341
330// GraphNodeEvalable impl. 342// GraphNodeEvalable impl.
@@ -362,19 +374,233 @@ func (n *graphNodeCloseProvider) RemoveIfNotTargeted() bool {
362 return true 374 return true
363} 375}
364 376
365// graphNodeProviderConsumerDummy is a struct that never enters the real 377// graphNodeProxyProvider is a GraphNodeProvider implementation that is used to
366// graph (though it could to no ill effect). It implements 378// store the name and value of a provider node for inheritance between modules.
367// GraphNodeProviderConsumer and GraphNodeSubpath as a way to force 379// These nodes are only used to store the data while loading the provider
368// certain transformations. 380// configurations, and are removed after all the resources have been connected
369type graphNodeProviderConsumerDummy struct { 381// to their providers.
370 ProviderValue string 382type graphNodeProxyProvider struct {
371 PathValue []string 383 nameValue string
384 path []string
385 target GraphNodeProvider
386}
387
388func (n *graphNodeProxyProvider) ProviderName() string {
389 return n.Target().ProviderName()
390}
391
392func (n *graphNodeProxyProvider) Name() string {
393 return ResolveProviderName(n.nameValue, n.path)
394}
395
396// find the concrete provider instance
397func (n *graphNodeProxyProvider) Target() GraphNodeProvider {
398 switch t := n.target.(type) {
399 case *graphNodeProxyProvider:
400 return t.Target()
401 default:
402 return n.target
403 }
404}
405
406// ProviderConfigTransformer adds all provider nodes from the configuration and
407// attaches the configs.
408type ProviderConfigTransformer struct {
409 Providers []string
410 Concrete ConcreteProviderNodeFunc
411
412 // each provider node is stored here so that the proxy nodes can look up
413 // their targets by name.
414 providers map[string]GraphNodeProvider
415 	// record providers that can be overridden with a proxy
416 proxiable map[string]bool
417
418 // Module is the module to add resources from.
419 Module *module.Tree
372} 420}
373 421
374func (n *graphNodeProviderConsumerDummy) Path() []string { 422func (t *ProviderConfigTransformer) Transform(g *Graph) error {
375 return n.PathValue 423 // If no module is given, we don't do anything
424 if t.Module == nil {
425 return nil
426 }
427
428 // If the module isn't loaded, that is simply an error
429 if !t.Module.Loaded() {
430 return errors.New("module must be loaded for ProviderConfigTransformer")
431 }
432
433 t.providers = make(map[string]GraphNodeProvider)
434 t.proxiable = make(map[string]bool)
435
436 // Start the transformation process
437 if err := t.transform(g, t.Module); err != nil {
438 return err
439 }
440
441 // finally attach the configs to the new nodes
442 return t.attachProviderConfigs(g)
376} 443}
377 444
378func (n *graphNodeProviderConsumerDummy) ProvidedBy() []string { 445func (t *ProviderConfigTransformer) transform(g *Graph, m *module.Tree) error {
379 return []string{n.ProviderValue} 446 // If no config, do nothing
447 if m == nil {
448 return nil
449 }
450
451 // Add our resources
452 if err := t.transformSingle(g, m); err != nil {
453 return err
454 }
455
456 // Transform all the children.
457 for _, c := range m.Children() {
458 if err := t.transform(g, c); err != nil {
459 return err
460 }
461 }
462 return nil
463}
464
465func (t *ProviderConfigTransformer) transformSingle(g *Graph, m *module.Tree) error {
466 log.Printf("[TRACE] ProviderConfigTransformer: Starting for path: %v", m.Path())
467
468 // Get the configuration for this module
469 conf := m.Config()
470
471 // Build the path we're at
472 path := m.Path()
473 if len(path) > 0 {
474 path = append([]string{RootModuleName}, path...)
475 }
476
477 // add all providers from the configuration
478 for _, p := range conf.ProviderConfigs {
479 name := p.Name
480 if p.Alias != "" {
481 name += "." + p.Alias
482 }
483
484 v := t.Concrete(&NodeAbstractProvider{
485 NameValue: name,
486 PathValue: path,
487 })
488
489 // Add it to the graph
490 g.Add(v)
491 fullName := ResolveProviderName(name, path)
492 t.providers[fullName] = v.(GraphNodeProvider)
493 t.proxiable[fullName] = len(p.RawConfig.RawMap()) == 0
494 }
495
496 // Now replace the provider nodes with proxy nodes if a provider was being
497 // passed in, and create implicit proxies if there was no config. Any extra
498 // proxies will be removed in the prune step.
499 return t.addProxyProviders(g, m)
500}
501
502func (t *ProviderConfigTransformer) addProxyProviders(g *Graph, m *module.Tree) error {
503 path := m.Path()
504
505 // can't add proxies at the root
506 if len(path) == 0 {
507 return nil
508 }
509
510 parentPath := path[:len(path)-1]
511 parent := t.Module.Child(parentPath)
512 if parent == nil {
513 return nil
514 }
515
516 var parentCfg *config.Module
517 for _, mod := range parent.Config().Modules {
518 if mod.Name == m.Name() {
519 parentCfg = mod
520 break
521 }
522 }
523
524 if parentCfg == nil {
525 // this can't really happen during normal execution.
526 return fmt.Errorf("parent module config not found for %s", m.Name())
527 }
528
529 // Go through all the providers the parent is passing in, and add proxies to
530 // the parent provider nodes.
531 for name, parentName := range parentCfg.Providers {
532 fullName := ResolveProviderName(name, path)
533 fullParentName := ResolveProviderName(parentName, parentPath)
534
535 parentProvider := t.providers[fullParentName]
536
537 if parentProvider == nil {
538 return fmt.Errorf("missing provider %s", fullParentName)
539 }
540
541 proxy := &graphNodeProxyProvider{
542 nameValue: name,
543 path: path,
544 target: parentProvider,
545 }
546
547 concreteProvider := t.providers[fullName]
548
549 // replace the concrete node with the provider passed in
550 if concreteProvider != nil && t.proxiable[fullName] {
551 g.Replace(concreteProvider, proxy)
552 t.providers[fullName] = proxy
553 continue
554 }
555
556 // aliased providers can't be implicitly passed in
557 if strings.Contains(name, ".") {
558 continue
559 }
560
561 // There was no concrete provider, so add this as an implicit provider.
562 // The extra proxy will be pruned later if it's unused.
563 g.Add(proxy)
564 t.providers[fullName] = proxy
565 }
566 return nil
567}
568
569func (t *ProviderConfigTransformer) attachProviderConfigs(g *Graph) error {
570 for _, v := range g.Vertices() {
571 // Only care about GraphNodeAttachProvider implementations
572 apn, ok := v.(GraphNodeAttachProvider)
573 if !ok {
574 continue
575 }
576
577 // Determine what we're looking for
578 path := normalizeModulePath(apn.Path())[1:]
579 name := apn.ProviderName()
580 log.Printf("[TRACE] Attach provider request: %#v %s", path, name)
581
582 // Get the configuration.
583 tree := t.Module.Child(path)
584 if tree == nil {
585 continue
586 }
587
588 // Go through the provider configs to find the matching config
589 for _, p := range tree.Config().ProviderConfigs {
590 // Build the name, which is "name.alias" if an alias exists
591 current := p.Name
592 if p.Alias != "" {
593 current += "." + p.Alias
594 }
595
596 // If the configs match then attach!
597 if current == name {
598 log.Printf("[TRACE] Attaching provider config: %#v", p)
599 apn.AttachProvider(p)
600 break
601 }
602 }
603 }
604
605 return nil
380} 606}
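
The new ProviderConfigTransformer above records a graphNodeProxyProvider for each provider a parent module passes into a child; a proxy can point at another proxy in a deeper module, and Target() chases the chain down to the concrete provider node before resources are connected to it. A small sketch of that chain walk (the types below are stand-ins, not the vendored ones):

package main

import "fmt"

// provider is a stand-in for GraphNodeProvider.
type provider interface{ Name() string }

// concreteProvider represents a provider block that actually has config.
type concreteProvider struct{ name string }

func (p *concreteProvider) Name() string { return p.name }

// proxyProvider mirrors graphNodeProxyProvider: it only forwards to a target,
// which may itself be another proxy created for a deeper module.
type proxyProvider struct {
	name   string
	target provider
}

func (p *proxyProvider) Name() string { return p.name }

// Target chases nested proxies until it reaches the concrete provider.
func (p *proxyProvider) Target() provider {
	if t, ok := p.target.(*proxyProvider); ok {
		return t.Target()
	}
	return p.target
}

func main() {
	root := &concreteProvider{name: "provider.aws.west"}
	mid := &proxyProvider{name: "module.net.provider.aws", target: root}
	leaf := &proxyProvider{name: "module.net.module.subnets.provider.aws", target: mid}
	fmt.Println(leaf.Target().Name()) // provider.aws.west
}
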
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_provider_disable.go b/vendor/github.com/hashicorp/terraform/terraform/transform_provider_disable.go
deleted file mode 100644
index d9919f3..0000000
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_provider_disable.go
+++ /dev/null
@@ -1,50 +0,0 @@
1package terraform
2
3import (
4 "fmt"
5
6 "github.com/hashicorp/terraform/dag"
7)
8
9// DisableProviderTransformer "disables" any providers that are not actually
10// used by anything. This avoids the provider being initialized and configured.
11// This both saves resources but also avoids errors since configuration
12// may imply initialization which may require auth.
13type DisableProviderTransformer struct{}
14
15func (t *DisableProviderTransformer) Transform(g *Graph) error {
16 for _, v := range g.Vertices() {
17 // We only care about providers
18 pn, ok := v.(GraphNodeProvider)
19 if !ok || pn.ProviderName() == "" {
20 continue
21 }
22
23 // If we have dependencies, then don't disable
24 if g.UpEdges(v).Len() > 0 {
25 continue
26 }
27
28 // Get the path
29 var path []string
30 if pn, ok := v.(GraphNodeSubPath); ok {
31 path = pn.Path()
32 }
33
34 // Disable the provider by replacing it with a "disabled" provider
35 disabled := &NodeDisabledProvider{
36 NodeAbstractProvider: &NodeAbstractProvider{
37 NameValue: pn.ProviderName(),
38 PathValue: path,
39 },
40 }
41
42 if !g.Replace(v, disabled) {
43 panic(fmt.Sprintf(
44 "vertex disappeared from under us: %s",
45 dag.VertexName(v)))
46 }
47 }
48
49 return nil
50}
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_reference.go b/vendor/github.com/hashicorp/terraform/terraform/transform_reference.go
index c545235..be8c7f9 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_reference.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/transform_reference.go
@@ -76,6 +76,85 @@ func (t *ReferenceTransformer) Transform(g *Graph) error {
76 return nil 76 return nil
77} 77}
78 78
79// DestroyValueReferenceTransformer is a GraphTransformer that reverses the edges
80// for locals and outputs that depend on other nodes which will be
81// removed during destroy. If a destroy node is evaluated before the local or
82// output value, it will be removed from the state, and the later interpolation
83// will fail.
84type DestroyValueReferenceTransformer struct{}
85
86func (t *DestroyValueReferenceTransformer) Transform(g *Graph) error {
87 vs := g.Vertices()
88 for _, v := range vs {
89 switch v.(type) {
90 case *NodeApplyableOutput, *NodeLocal:
91 // OK
92 default:
93 continue
94 }
95
96 // reverse any outgoing edges so that the value is evaluated first.
97 for _, e := range g.EdgesFrom(v) {
98 target := e.Target()
99
100 // only destroy nodes will be evaluated in reverse
101 if _, ok := target.(GraphNodeDestroyer); !ok {
102 continue
103 }
104
105 log.Printf("[TRACE] output dep: %s", dag.VertexName(target))
106
107 g.RemoveEdge(e)
108 g.Connect(&DestroyEdge{S: target, T: v})
109 }
110 }
111
112 return nil
113}
114
115// PruneUnusedValuesTransformer is a GraphTransformer that removes local and
116// output values which are not referenced in the graph. Since outputs and
117// locals always need to be evaluated, if they reference a resource that is not
118// available in the state the interpolation could fail.
119type PruneUnusedValuesTransformer struct{}
120
121func (t *PruneUnusedValuesTransformer) Transform(g *Graph) error {
122 // this might need multiple runs in order to ensure that pruning a value
123 // doesn't affect a previously checked value.
124 for removed := 0; ; removed = 0 {
125 for _, v := range g.Vertices() {
126 switch v.(type) {
127 case *NodeApplyableOutput, *NodeLocal:
128 // OK
129 default:
130 continue
131 }
132
133 dependants := g.UpEdges(v)
134
135 switch dependants.Len() {
136 case 0:
137 // nothing at all depends on this
138 g.Remove(v)
139 removed++
140 case 1:
141 // because an output's destroy node always depends on the output,
142 // we need to check for the case of a single destroy node.
143 d := dependants.List()[0]
144 if _, ok := d.(*NodeDestroyableOutput); ok {
145 g.Remove(v)
146 removed++
147 }
148 }
149 }
150 if removed == 0 {
151 break
152 }
153 }
154
155 return nil
156}
157
79// ReferenceMap is a structure that can be used to efficiently check 158// ReferenceMap is a structure that can be used to efficiently check
80// for references on a graph. 159// for references on a graph.
81type ReferenceMap struct { 160type ReferenceMap struct {
@@ -96,6 +175,7 @@ func (m *ReferenceMap) References(v dag.Vertex) ([]dag.Vertex, []string) {
96 var matches []dag.Vertex 175 var matches []dag.Vertex
97 var missing []string 176 var missing []string
98 prefix := m.prefix(v) 177 prefix := m.prefix(v)
178
99 for _, ns := range rn.References() { 179 for _, ns := range rn.References() {
100 found := false 180 found := false
101 for _, n := range strings.Split(ns, "/") { 181 for _, n := range strings.Split(ns, "/") {
@@ -108,19 +188,14 @@ func (m *ReferenceMap) References(v dag.Vertex) ([]dag.Vertex, []string) {
108 // Mark that we found a match 188 // Mark that we found a match
109 found = true 189 found = true
110 190
111 // Make sure this isn't a self reference, which isn't included
112 selfRef := false
113 for _, p := range parents { 191 for _, p := range parents {
192 // don't include self-references
114 if p == v { 193 if p == v {
115 selfRef = true 194 continue
116 break
117 } 195 }
118 } 196 matches = append(matches, p)
119 if selfRef {
120 continue
121 } 197 }
122 198
123 matches = append(matches, parents...)
124 break 199 break
125 } 200 }
126 201
@@ -296,14 +371,21 @@ func ReferenceFromInterpolatedVar(v config.InterpolatedVariable) []string {
296 return []string{fmt.Sprintf("%s.%d/%s.N", id, idx, id)} 371 return []string{fmt.Sprintf("%s.%d/%s.N", id, idx, id)}
297 case *config.UserVariable: 372 case *config.UserVariable:
298 return []string{fmt.Sprintf("var.%s", v.Name)} 373 return []string{fmt.Sprintf("var.%s", v.Name)}
374 case *config.LocalVariable:
375 return []string{fmt.Sprintf("local.%s", v.Name)}
299 default: 376 default:
300 return nil 377 return nil
301 } 378 }
302} 379}
303 380
304func modulePrefixStr(p []string) string { 381func modulePrefixStr(p []string) string {
382 // strip "root"
383 if len(p) > 0 && p[0] == rootModulePath[0] {
384 p = p[1:]
385 }
386
305 parts := make([]string, 0, len(p)*2) 387 parts := make([]string, 0, len(p)*2)
306 for _, p := range p[1:] { 388 for _, p := range p {
307 parts = append(parts, "module", p) 389 parts = append(parts, "module", p)
308 } 390 }
309 391
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_removed_modules.go b/vendor/github.com/hashicorp/terraform/terraform/transform_removed_modules.go
new file mode 100644
index 0000000..2e05edb
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/terraform/transform_removed_modules.go
@@ -0,0 +1,32 @@
1package terraform
2
3import (
4 "log"
5
6 "github.com/hashicorp/terraform/config/module"
7)
8
9// RemovedModuleTransformer implements GraphTransformer to add nodes indicating
10// when a module was removed from the configuration.
11type RemovedModuleTransformer struct {
12 Module *module.Tree // root module
13 State *State
14}
15
16func (t *RemovedModuleTransformer) Transform(g *Graph) error {
17 // nothing to remove if there's no state!
18 if t.State == nil {
19 return nil
20 }
21
22 for _, m := range t.State.Modules {
23 c := t.Module.Child(m.Path[1:])
24 if c != nil {
25 continue
26 }
27
28 log.Printf("[DEBUG] module %s no longer in config\n", modulePrefixStr(m.Path))
29 g.Add(&NodeModuleRemoved{PathValue: m.Path})
30 }
31 return nil
32}
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_resource_count.go b/vendor/github.com/hashicorp/terraform/terraform/transform_resource_count.go
index cda35cb..e528b37 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_resource_count.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/transform_resource_count.go
@@ -37,7 +37,9 @@ func (t *ResourceCountTransformer) Transform(g *Graph) error {
37 addr.Index = index 37 addr.Index = index
38 38
39 // Build the abstract node and the concrete one 39 // Build the abstract node and the concrete one
40 abstract := &NodeAbstractResource{Addr: addr} 40 abstract := &NodeAbstractResource{
41 Addr: addr,
42 }
41 var node dag.Vertex = abstract 43 var node dag.Vertex = abstract
42 if f := t.Concrete; f != nil { 44 if f := t.Concrete; f != nil {
43 node = f(abstract) 45 node = f(abstract)
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_targets.go b/vendor/github.com/hashicorp/terraform/terraform/transform_targets.go
index 4f117b4..af6defe 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_targets.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/transform_targets.go
@@ -73,9 +73,11 @@ func (t *TargetsTransformer) Transform(g *Graph) error {
73 if _, ok := v.(GraphNodeResource); ok { 73 if _, ok := v.(GraphNodeResource); ok {
74 removable = true 74 removable = true
75 } 75 }
76
76 if vr, ok := v.(RemovableIfNotTargeted); ok { 77 if vr, ok := v.(RemovableIfNotTargeted); ok {
77 removable = vr.RemoveIfNotTargeted() 78 removable = vr.RemoveIfNotTargeted()
78 } 79 }
80
79 if removable && !targetedNodes.Include(v) { 81 if removable && !targetedNodes.Include(v) {
80 log.Printf("[DEBUG] Removing %q, filtered by targeting.", dag.VertexName(v)) 82 log.Printf("[DEBUG] Removing %q, filtered by targeting.", dag.VertexName(v))
81 g.Remove(v) 83 g.Remove(v)
@@ -135,7 +137,10 @@ func (t *TargetsTransformer) selectTargetedNodes(
135 } 137 }
136 } 138 }
137 } 139 }
140 return t.addDependencies(targetedNodes, g)
141}
138 142
143func (t *TargetsTransformer) addDependencies(targetedNodes *dag.Set, g *Graph) (*dag.Set, error) {
139 // Handle nodes that need to be included if their dependencies are included. 144 // Handle nodes that need to be included if their dependencies are included.
140 // This requires multiple passes since we need to catch transitive 145 // This requires multiple passes since we need to catch transitive
141 // dependencies if and only if they are via other nodes that also 146 // dependencies if and only if they are via other nodes that also
@@ -157,11 +162,6 @@ func (t *TargetsTransformer) selectTargetedNodes(
157 } 162 }
158 163
159 dependers = dependers.Filter(func(dv interface{}) bool { 164 dependers = dependers.Filter(func(dv interface{}) bool {
160 // Can ignore nodes that are already targeted
161 /*if targetedNodes.Include(dv) {
162 return false
163 }*/
164
165 _, ok := dv.(GraphNodeTargetDownstream) 165 _, ok := dv.(GraphNodeTargetDownstream)
166 return ok 166 return ok
167 }) 167 })
@@ -180,6 +180,7 @@ func (t *TargetsTransformer) selectTargetedNodes(
180 // depending on in case that informs its decision about whether 180 // depending on in case that informs its decision about whether
181 // it is safe to be targeted. 181 // it is safe to be targeted.
182 deps := g.DownEdges(v) 182 deps := g.DownEdges(v)
183
183 depsTargeted := deps.Intersection(targetedNodes) 184 depsTargeted := deps.Intersection(targetedNodes)
184 depsUntargeted := deps.Difference(depsTargeted) 185 depsUntargeted := deps.Difference(depsTargeted)
185 186
@@ -193,7 +194,50 @@ func (t *TargetsTransformer) selectTargetedNodes(
193 } 194 }
194 } 195 }
195 196
196 return targetedNodes, nil 197 return targetedNodes.Filter(func(dv interface{}) bool {
198 return filterPartialOutputs(dv, targetedNodes, g)
199 }), nil
200}
201
202// Outputs may have been included transitively, but if any of their
203// dependencies have been pruned they won't be resolvable.
204// If nothing depends on the output, and the output is missing any
205// dependencies, remove it from the graph.
206// This essentially maintains the previous behavior where interpolation in
207// outputs would fail silently, but can now surface errors where the output
208// is required.
209func filterPartialOutputs(v interface{}, targetedNodes *dag.Set, g *Graph) bool {
210 // should this just be done with TargetDownstream?
211 if _, ok := v.(*NodeApplyableOutput); !ok {
212 return true
213 }
214
215 dependers := g.UpEdges(v)
216 for _, d := range dependers.List() {
217 if _, ok := d.(*NodeCountBoundary); ok {
218 continue
219 }
220
221 if !targetedNodes.Include(d) {
222 // this one is going to be removed, so it doesn't count
223 continue
224 }
225
226 // as soon as we see a real dependency, we mark this as
227 // non-removable
228 return true
229 }
230
231 depends := g.DownEdges(v)
232
233 for _, d := range depends.List() {
234 if !targetedNodes.Include(d) {
235 log.Printf("[WARN] %s missing targeted dependency %s, removing from the graph",
236 dag.VertexName(v), dag.VertexName(d))
237 return false
238 }
239 }
240 return true
197} 241}
198 242
199func (t *TargetsTransformer) nodeIsTarget( 243func (t *TargetsTransformer) nodeIsTarget(
diff --git a/vendor/github.com/hashicorp/terraform/terraform/ui_output_mock.go b/vendor/github.com/hashicorp/terraform/terraform/ui_output_mock.go
index 7852bc4..d828c92 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/ui_output_mock.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/ui_output_mock.go
@@ -1,13 +1,18 @@
1package terraform 1package terraform
2 2
3import "sync"
4
3// MockUIOutput is an implementation of UIOutput that can be used for tests. 5// MockUIOutput is an implementation of UIOutput that can be used for tests.
4type MockUIOutput struct { 6type MockUIOutput struct {
7 sync.Mutex
5 OutputCalled bool 8 OutputCalled bool
6 OutputMessage string 9 OutputMessage string
7 OutputFn func(string) 10 OutputFn func(string)
8} 11}
9 12
10func (o *MockUIOutput) Output(v string) { 13func (o *MockUIOutput) Output(v string) {
14 o.Lock()
15 defer o.Unlock()
11 o.OutputCalled = true 16 o.OutputCalled = true
12 o.OutputMessage = v 17 o.OutputMessage = v
13 if o.OutputFn != nil { 18 if o.OutputFn != nil {
diff --git a/vendor/github.com/hashicorp/terraform/terraform/user_agent.go b/vendor/github.com/hashicorp/terraform/terraform/user_agent.go
index 700be2a..a42613e 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/user_agent.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/user_agent.go
@@ -1,14 +1,13 @@
1package terraform 1package terraform
2 2
3import ( 3import (
4 "fmt" 4 "github.com/hashicorp/terraform/httpclient"
5 "runtime"
6) 5)
7 6
8// The standard Terraform User-Agent format
9const UserAgent = "Terraform %s (%s)"
10
11// Generate a UserAgent string 7// Generate a UserAgent string
8//
9// Deprecated: Use httpclient.UserAgentString if you are setting your
10// own User-Agent header.
12func UserAgentString() string { 11func UserAgentString() string {
13 return fmt.Sprintf(UserAgent, VersionString(), runtime.Version()) 12 return httpclient.UserAgentString()
14} 13}
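
A short sketch of the replacement call that the deprecation note points at; only httpclient.UserAgentString itself is taken from this diff, and the surrounding request construction is illustrative:

package main

import (
	"fmt"
	"net/http"

	"github.com/hashicorp/terraform/httpclient"
)

func main() {
	req, err := http.NewRequest("GET", "https://registry.terraform.io/", nil)
	if err != nil {
		panic(err)
	}
	// Callers that set their own User-Agent header should call the
	// httpclient package directly instead of the deprecated
	// terraform.UserAgentString.
	req.Header.Set("User-Agent", httpclient.UserAgentString())
	fmt.Println(req.Header.Get("User-Agent"))
}
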
diff --git a/vendor/github.com/hashicorp/terraform/terraform/version.go b/vendor/github.com/hashicorp/terraform/terraform/version.go
index d61b11e..ac73015 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/version.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/version.go
@@ -1,31 +1,10 @@
1package terraform 1package terraform
2 2
3import ( 3import (
4 "fmt" 4 "github.com/hashicorp/terraform/version"
5
6 "github.com/hashicorp/go-version"
7) 5)
8 6
9// The main version number that is being run at the moment. 7// TODO: update providers to use the version package directly
10const Version = "0.10.0"
11
12// A pre-release marker for the version. If this is "" (empty string)
13// then it means that it is a final release. Otherwise, this is a pre-release
14// such as "dev" (in development), "beta", "rc1", etc.
15var VersionPrerelease = "dev"
16
17// SemVersion is an instance of version.Version. This has the secondary
18// benefit of verifying during tests and init time that our version is a
19// proper semantic version, which should always be the case.
20var SemVersion = version.Must(version.NewVersion(Version))
21
22// VersionHeader is the header name used to send the current terraform version
23// in http requests.
24const VersionHeader = "Terraform-Version"
25
26func VersionString() string { 8func VersionString() string {
27 if VersionPrerelease != "" { 9 return version.String()
28 return fmt.Sprintf("%s-%s", Version, VersionPrerelease)
29 }
30 return Version
31} 10}
diff --git a/vendor/github.com/hashicorp/terraform/terraform/version_required.go b/vendor/github.com/hashicorp/terraform/terraform/version_required.go
index 3cbbf56..1f43045 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/version_required.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/version_required.go
@@ -6,19 +6,21 @@ import (
6 "github.com/hashicorp/go-version" 6 "github.com/hashicorp/go-version"
7 "github.com/hashicorp/terraform/config" 7 "github.com/hashicorp/terraform/config"
8 "github.com/hashicorp/terraform/config/module" 8 "github.com/hashicorp/terraform/config/module"
9
10 tfversion "github.com/hashicorp/terraform/version"
9) 11)
10 12
11// checkRequiredVersion verifies that any version requirements specified by 13// CheckRequiredVersion verifies that any version requirements specified by
12// the configuration are met. 14// the configuration are met.
13// 15//
14// This checks the root module as well as any additional version requirements 16// This checks the root module as well as any additional version requirements
15// from child modules. 17// from child modules.
16// 18//
17// This is tested in context_test.go. 19// This is tested in context_test.go.
18func checkRequiredVersion(m *module.Tree) error { 20func CheckRequiredVersion(m *module.Tree) error {
19 // Check any children 21 // Check any children
20 for _, c := range m.Children() { 22 for _, c := range m.Children() {
21 if err := checkRequiredVersion(c); err != nil { 23 if err := CheckRequiredVersion(c); err != nil {
22 return err 24 return err
23 } 25 }
24 } 26 }
@@ -49,7 +51,7 @@ func checkRequiredVersion(m *module.Tree) error {
49 tf.RequiredVersion, err) 51 tf.RequiredVersion, err)
50 } 52 }
51 53
52 if !cs.Check(SemVersion) { 54 if !cs.Check(tfversion.SemVer) {
53 return fmt.Errorf( 55 return fmt.Errorf(
54 "The currently running version of Terraform doesn't meet the\n"+ 56 "The currently running version of Terraform doesn't meet the\n"+
55 "version requirements explicitly specified by the configuration.\n"+ 57 "version requirements explicitly specified by the configuration.\n"+
@@ -62,7 +64,7 @@ func checkRequiredVersion(m *module.Tree) error {
62 " Current version: %s", 64 " Current version: %s",
63 module, 65 module,
64 tf.RequiredVersion, 66 tf.RequiredVersion,
65 SemVersion) 67 tfversion.SemVer)
66 } 68 }
67 69
68 return nil 70 return nil
diff --git a/vendor/github.com/hashicorp/terraform/terraform/walkoperation_string.go b/vendor/github.com/hashicorp/terraform/terraform/walkoperation_string.go
index cbd78dd..4cfc528 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/walkoperation_string.go
+++ b/vendor/github.com/hashicorp/terraform/terraform/walkoperation_string.go
@@ -2,7 +2,7 @@
2 2
3package terraform 3package terraform
4 4
5import "fmt" 5import "strconv"
6 6
7const _walkOperation_name = "walkInvalidwalkInputwalkApplywalkPlanwalkPlanDestroywalkRefreshwalkValidatewalkDestroywalkImport" 7const _walkOperation_name = "walkInvalidwalkInputwalkApplywalkPlanwalkPlanDestroywalkRefreshwalkValidatewalkDestroywalkImport"
8 8
@@ -10,7 +10,7 @@ var _walkOperation_index = [...]uint8{0, 11, 20, 29, 37, 52, 63, 75, 86, 96}
10 10
11func (i walkOperation) String() string { 11func (i walkOperation) String() string {
12 if i >= walkOperation(len(_walkOperation_index)-1) { 12 if i >= walkOperation(len(_walkOperation_index)-1) {
13 return fmt.Sprintf("walkOperation(%d)", i) 13 return "walkOperation(" + strconv.FormatInt(int64(i), 10) + ")"
14 } 14 }
15 return _walkOperation_name[_walkOperation_index[i]:_walkOperation_index[i+1]] 15 return _walkOperation_name[_walkOperation_index[i]:_walkOperation_index[i+1]]
16} 16}
diff --git a/vendor/github.com/hashicorp/terraform/tfdiags/diagnostic.go b/vendor/github.com/hashicorp/terraform/tfdiags/diagnostic.go
new file mode 100644
index 0000000..2c23f76
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/tfdiags/diagnostic.go
@@ -0,0 +1,26 @@
1package tfdiags
2
3type Diagnostic interface {
4 Severity() Severity
5 Description() Description
6 Source() Source
7}
8
9type Severity rune
10
11//go:generate stringer -type=Severity
12
13const (
14 Error Severity = 'E'
15 Warning Severity = 'W'
16)
17
18type Description struct {
19 Summary string
20 Detail string
21}
22
23type Source struct {
24 Subject *SourceRange
25 Context *SourceRange
26}
diff --git a/vendor/github.com/hashicorp/terraform/tfdiags/diagnostics.go b/vendor/github.com/hashicorp/terraform/tfdiags/diagnostics.go
new file mode 100644
index 0000000..667ba80
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/tfdiags/diagnostics.go
@@ -0,0 +1,181 @@
1package tfdiags
2
3import (
4 "bytes"
5 "fmt"
6
7 "github.com/hashicorp/errwrap"
8 multierror "github.com/hashicorp/go-multierror"
9 "github.com/hashicorp/hcl2/hcl"
10)
11
12// Diagnostics is a list of diagnostics. Diagnostics is intended to be used
13// where a Go "error" might normally be used, allowing richer information
14// to be conveyed (more context, support for warnings).
15//
16// A nil Diagnostics is a valid, empty diagnostics list, thus allowing
17// heap allocation to be avoided in the common case where there are no
18// diagnostics to report at all.
19type Diagnostics []Diagnostic
20
21// Append is the main interface for constructing Diagnostics lists, taking
22// an existing list (which may be nil) and appending the new objects to it
23// after normalizing them to be implementations of Diagnostic.
24//
25// The usual pattern for a function that natively "speaks" diagnostics is:
26//
27// // Create a nil Diagnostics at the start of the function
28// var diags tfdiags.Diagnostics
29//
30// // At later points, build on it if errors / warnings occur:
31// foo, err := DoSomethingRisky()
32// if err != nil {
33// diags = diags.Append(err)
34// }
35//
36// // Eventually return the result and diagnostics in place of error
37// return result, diags
38//
39// Append accepts a variety of different diagnostic-like types, including
40// native Go errors and HCL diagnostics. It also knows how to unwrap
41// a multierror.Error into separate error diagnostics. It can be passed
42// another Diagnostics to concatenate the two lists. If given something
43// it cannot handle, this function will panic.
44func (diags Diagnostics) Append(new ...interface{}) Diagnostics {
45 for _, item := range new {
46 if item == nil {
47 continue
48 }
49
50 switch ti := item.(type) {
51 case Diagnostic:
52 diags = append(diags, ti)
53 case Diagnostics:
54 diags = append(diags, ti...) // flatten
55 case diagnosticsAsError:
56 diags = diags.Append(ti.Diagnostics) // unwrap
57 case hcl.Diagnostics:
58 for _, hclDiag := range ti {
59 diags = append(diags, hclDiagnostic{hclDiag})
60 }
61 case *hcl.Diagnostic:
62 diags = append(diags, hclDiagnostic{ti})
63 case *multierror.Error:
64 for _, err := range ti.Errors {
65 diags = append(diags, nativeError{err})
66 }
67 case error:
68 switch {
69 case errwrap.ContainsType(ti, Diagnostics(nil)):
70 // If we have an errwrap wrapper with a Diagnostics hiding
71 // inside then we'll unpick it here to get access to the
72 // individual diagnostics.
73 diags = diags.Append(errwrap.GetType(ti, Diagnostics(nil)))
74 case errwrap.ContainsType(ti, hcl.Diagnostics(nil)):
75 // Likewise, if we have HCL diagnostics we'll unpick that too.
76 diags = diags.Append(errwrap.GetType(ti, hcl.Diagnostics(nil)))
77 default:
78 diags = append(diags, nativeError{ti})
79 }
80 default:
81 panic(fmt.Errorf("can't construct diagnostic(s) from %T", item))
82 }
83 }
84
85 // Given the above, we should never end up with a non-nil empty slice
86 // here, but we'll make sure of that so callers can rely on empty == nil
87 if len(diags) == 0 {
88 return nil
89 }
90
91 return diags
92}
93
94// HasErrors returns true if any of the diagnostics in the list have
95// a severity of Error.
96func (diags Diagnostics) HasErrors() bool {
97 for _, diag := range diags {
98 if diag.Severity() == Error {
99 return true
100 }
101 }
102 return false
103}
104
105// ForRPC returns a version of the receiver that has been simplified so that
106// it is friendly to RPC protocols.
107//
108// Currently this means that it can be serialized with encoding/gob and
109// subsequently re-inflated. It may later grow to include other serialization
110// formats.
111//
112// Note that this loses information about the original objects used to
113// construct the diagnostics, so e.g. the errwrap API will not work as
114// expected on an error-wrapped Diagnostics that came from ForRPC.
115func (diags Diagnostics) ForRPC() Diagnostics {
116 ret := make(Diagnostics, len(diags))
117 for i := range diags {
118 ret[i] = makeRPCFriendlyDiag(diags[i])
119 }
120 return ret
121}
122
123// Err flattens a diagnostics list into a single Go error, or to nil
124// if the diagnostics list does not include any error-level diagnostics.
125//
126// This can be used to smuggle diagnostics through an API that deals in
127// native errors, but unfortunately it will lose naked warnings (warnings
128// that aren't accompanied by at least one error) since such APIs have no
129// mechanism through which to report these.
130//
131// return result, diags.Err()
132func (diags Diagnostics) Err() error {
133 if !diags.HasErrors() {
134 return nil
135 }
136 return diagnosticsAsError{diags}
137}
138
139type diagnosticsAsError struct {
140 Diagnostics
141}
142
143func (dae diagnosticsAsError) Error() string {
144 diags := dae.Diagnostics
145 switch {
146 case len(diags) == 0:
147 // should never happen, since we don't create this wrapper if
148 // there are no diagnostics in the list.
149 return "no errors"
150 case len(diags) == 1:
151 desc := diags[0].Description()
152 if desc.Detail == "" {
153 return desc.Summary
154 }
155 return fmt.Sprintf("%s: %s", desc.Summary, desc.Detail)
156 default:
157 var ret bytes.Buffer
158 fmt.Fprintf(&ret, "%d problems:\n", len(diags))
159 for _, diag := range dae.Diagnostics {
160 desc := diag.Description()
161 if desc.Detail == "" {
162 fmt.Fprintf(&ret, "\n- %s", desc.Summary)
163 } else {
164 fmt.Fprintf(&ret, "\n- %s: %s", desc.Summary, desc.Detail)
165 }
166 }
167 return ret.String()
168 }
169}
170
171// WrappedErrors is an implementation of errwrap.Wrapper so that an error-wrapped
172// diagnostics object can be picked apart by errwrap-aware code.
173func (dae diagnosticsAsError) WrappedErrors() []error {
174 var errs []error
175 for _, diag := range dae.Diagnostics {
176 if wrapper, isErr := diag.(nativeError); isErr {
177 errs = append(errs, wrapper.err)
178 }
179 }
180 return errs
181}
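
The Append/HasErrors/Err pattern described in the doc comments above can be exercised roughly as follows; this is a minimal sketch rather than part of the vendored code, the main package and the doSomethingRisky helper are placeholders, and SimpleWarning comes from simple_warning.go added later in this diff:

package main

import (
	"errors"
	"fmt"

	"github.com/hashicorp/terraform/tfdiags"
)

// doSomethingRisky stands in for any operation that returns a native Go error.
func doSomethingRisky() (string, error) {
	return "", errors.New("something went wrong")
}

func main() {
	// A nil Diagnostics is a valid, empty list.
	var diags tfdiags.Diagnostics

	result, err := doSomethingRisky()
	if err != nil {
		// Append normalizes the native error into a Diagnostic.
		diags = diags.Append(err)
	}

	// Warnings can be carried in the same list alongside errors.
	diags = diags.Append(tfdiags.SimpleWarning("deprecated setting in use"))

	if diags.HasErrors() {
		// Err flattens the list into a single error; warnings that are
		// not accompanied by an error would be lost at this point.
		fmt.Println(diags.Err())
		return
	}
	fmt.Println(result)
}
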
diff --git a/vendor/github.com/hashicorp/terraform/tfdiags/doc.go b/vendor/github.com/hashicorp/terraform/tfdiags/doc.go
new file mode 100644
index 0000000..c427879
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/tfdiags/doc.go
@@ -0,0 +1,16 @@
1// Package tfdiags is a utility package for representing errors and
2// warnings in a manner that allows us to produce good messages for the
3// user.
4//
5// "diag" is short for "diagnostics", and is meant as a general word for
6// feedback to a user about potential or actual problems.
7//
8// A design goal for this package is for it to be able to provide rich
9// messaging where possible but to also be pragmatic about dealing with
10// generic errors produced by system components that _can't_ provide
11// such rich messaging. As a consequence, the main types in this package --
12// Diagnostics and Diagnostic -- are designed so that they can be "smuggled"
13// over an error channel and then be unpacked at the other end, so that
14// error diagnostics (at least) can transit through APIs that are not
15// aware of this package.
16package tfdiags
diff --git a/vendor/github.com/hashicorp/terraform/tfdiags/error.go b/vendor/github.com/hashicorp/terraform/tfdiags/error.go
new file mode 100644
index 0000000..35edc30
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/tfdiags/error.go
@@ -0,0 +1,23 @@
1package tfdiags
2
3// nativeError is a Diagnostic implementation that wraps a normal Go error
4type nativeError struct {
5 err error
6}
7
8var _ Diagnostic = nativeError{}
9
10func (e nativeError) Severity() Severity {
11 return Error
12}
13
14func (e nativeError) Description() Description {
15 return Description{
16 Summary: e.err.Error(),
17 }
18}
19
20func (e nativeError) Source() Source {
21 // No source information available for a native error
22 return Source{}
23}
diff --git a/vendor/github.com/hashicorp/terraform/tfdiags/hcl.go b/vendor/github.com/hashicorp/terraform/tfdiags/hcl.go
new file mode 100644
index 0000000..24851f4
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/tfdiags/hcl.go
@@ -0,0 +1,77 @@
1package tfdiags
2
3import (
4 "github.com/hashicorp/hcl2/hcl"
5)
6
7// hclDiagnostic is a Diagnostic implementation that wraps a HCL Diagnostic
8type hclDiagnostic struct {
9 diag *hcl.Diagnostic
10}
11
12var _ Diagnostic = hclDiagnostic{}
13
14func (d hclDiagnostic) Severity() Severity {
15 switch d.diag.Severity {
16 case hcl.DiagWarning:
17 return Warning
18 default:
19 return Error
20 }
21}
22
23func (d hclDiagnostic) Description() Description {
24 return Description{
25 Summary: d.diag.Summary,
26 Detail: d.diag.Detail,
27 }
28}
29
30func (d hclDiagnostic) Source() Source {
31 var ret Source
32 if d.diag.Subject != nil {
33 rng := SourceRangeFromHCL(*d.diag.Subject)
34 ret.Subject = &rng
35 }
36 if d.diag.Context != nil {
37 rng := SourceRangeFromHCL(*d.diag.Context)
38 ret.Context = &rng
39 }
40 return ret
41}
42
43// SourceRangeFromHCL constructs a SourceRange from the corresponding range
44// type within the HCL package.
45func SourceRangeFromHCL(hclRange hcl.Range) SourceRange {
46 return SourceRange{
47 Filename: hclRange.Filename,
48 Start: SourcePos{
49 Line: hclRange.Start.Line,
50 Column: hclRange.Start.Column,
51 Byte: hclRange.Start.Byte,
52 },
53 End: SourcePos{
54 Line: hclRange.End.Line,
55 Column: hclRange.End.Column,
56 Byte: hclRange.End.Byte,
57 },
58 }
59}
60
61// ToHCL constructs a HCL Range from the receiving SourceRange. This is the
62// opposite of SourceRangeFromHCL.
63func (r SourceRange) ToHCL() hcl.Range {
64 return hcl.Range{
65 Filename: r.Filename,
66 Start: hcl.Pos{
67 Line: r.Start.Line,
68 Column: r.Start.Column,
69 Byte: r.Start.Byte,
70 },
71 End: hcl.Pos{
72 Line: r.End.Line,
73 Column: r.End.Column,
74 Byte: r.End.Byte,
75 },
76 }
77}
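
SourceRangeFromHCL and ToHCL are intended to round-trip a range losslessly; a small sketch, assuming a hand-built hcl.Range and a throwaway main package (StartString is defined in source_range.go later in this diff):

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/terraform/tfdiags"
)

func main() {
	// A hand-built range standing in for one reported by the HCL parser.
	hclRng := hcl.Range{
		Filename: "main.tf",
		Start:    hcl.Pos{Line: 3, Column: 1, Byte: 20},
		End:      hcl.Pos{Line: 3, Column: 12, Byte: 31},
	}

	rng := tfdiags.SourceRangeFromHCL(hclRng)
	back := rng.ToHCL()

	fmt.Println(rng.StartString()) // main.tf:3,1
	fmt.Println(back == hclRng)    // true: all fields survive the round trip
}
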
diff --git a/vendor/github.com/hashicorp/terraform/tfdiags/rpc_friendly.go b/vendor/github.com/hashicorp/terraform/tfdiags/rpc_friendly.go
new file mode 100644
index 0000000..6cc95cc
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/tfdiags/rpc_friendly.go
@@ -0,0 +1,53 @@
1package tfdiags
2
3import (
4 "encoding/gob"
5)
6
7type rpcFriendlyDiag struct {
8 Severity_ Severity
9 Summary_ string
10 Detail_ string
11 Subject_ *SourceRange
12 Context_ *SourceRange
13}
14
15// makeRPCFriendlyDiag transforms a given diagnostic so that it is more friendly to
16// RPC.
17//
18// In particular, it currently returns an object that can be serialized and
19// later re-inflated using gob. This definition may grow to include other
20// serializations later.
21func makeRPCFriendlyDiag(diag Diagnostic) Diagnostic {
22 desc := diag.Description()
23 source := diag.Source()
24 return &rpcFriendlyDiag{
25 Severity_: diag.Severity(),
26 Summary_: desc.Summary,
27 Detail_: desc.Detail,
28 Subject_: source.Subject,
29 Context_: source.Context,
30 }
31}
32
33func (d *rpcFriendlyDiag) Severity() Severity {
34 return d.Severity_
35}
36
37func (d *rpcFriendlyDiag) Description() Description {
38 return Description{
39 Summary: d.Summary_,
40 Detail: d.Detail_,
41 }
42}
43
44func (d *rpcFriendlyDiag) Source() Source {
45 return Source{
46 Subject: d.Subject_,
47 Context: d.Context_,
48 }
49}
50
51func init() {
52 gob.Register((*rpcFriendlyDiag)(nil))
53}
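
The gob registration above is what makes Diagnostics.ForRPC (in diagnostics.go) usable across a process boundary. A rough sketch, under the assumption that a plain gob round trip is representative of the RPC transport:

package main

import (
	"bytes"
	"encoding/gob"
	"errors"
	"fmt"

	"github.com/hashicorp/terraform/tfdiags"
)

func main() {
	var diags tfdiags.Diagnostics
	diags = diags.Append(errors.New("boom"))

	// ForRPC swaps each diagnostic for its gob-serializable form.
	var buf bytes.Buffer
	if err := gob.NewEncoder(&buf).Encode(diags.ForRPC()); err != nil {
		panic(err)
	}

	var decoded tfdiags.Diagnostics
	if err := gob.NewDecoder(&buf).Decode(&decoded); err != nil {
		panic(err)
	}

	// The description survives the round trip; the original error value does not.
	fmt.Println(decoded[0].Description().Summary)
}
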
diff --git a/vendor/github.com/hashicorp/terraform/tfdiags/severity_string.go b/vendor/github.com/hashicorp/terraform/tfdiags/severity_string.go
new file mode 100644
index 0000000..0b1249b
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/tfdiags/severity_string.go
@@ -0,0 +1,21 @@
1// Code generated by "stringer -type=Severity"; DO NOT EDIT.
2
3package tfdiags
4
5import "strconv"
6
7const (
8 _Severity_name_0 = "Error"
9 _Severity_name_1 = "Warning"
10)
11
12func (i Severity) String() string {
13 switch {
14 case i == 69:
15 return _Severity_name_0
16 case i == 87:
17 return _Severity_name_1
18 default:
19 return "Severity(" + strconv.FormatInt(int64(i), 10) + ")"
20 }
21}
diff --git a/vendor/github.com/hashicorp/terraform/tfdiags/simple_warning.go b/vendor/github.com/hashicorp/terraform/tfdiags/simple_warning.go
new file mode 100644
index 0000000..fb3ac98
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/tfdiags/simple_warning.go
@@ -0,0 +1,25 @@
1package tfdiags
2
3type simpleWarning string
4
5var _ Diagnostic = simpleWarning("")
6
7// SimpleWarning constructs a simple (summary-only) warning diagnostic.
8func SimpleWarning(msg string) Diagnostic {
9 return simpleWarning(msg)
10}
11
12func (e simpleWarning) Severity() Severity {
13 return Warning
14}
15
16func (e simpleWarning) Description() Description {
17 return Description{
18 Summary: string(e),
19 }
20}
21
22func (e simpleWarning) Source() Source {
23 // No source information available for a simple warning
24 return Source{}
25}
diff --git a/vendor/github.com/hashicorp/terraform/tfdiags/source_range.go b/vendor/github.com/hashicorp/terraform/tfdiags/source_range.go
new file mode 100644
index 0000000..3031168
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/tfdiags/source_range.go
@@ -0,0 +1,35 @@
1package tfdiags
2
3import (
4 "fmt"
5 "os"
6 "path/filepath"
7)
8
9type SourceRange struct {
10 Filename string
11 Start, End SourcePos
12}
13
14type SourcePos struct {
15 Line, Column, Byte int
16}
17
18// StartString returns a string representation of the start of the range,
19// including the filename and the line and column numbers.
20func (r SourceRange) StartString() string {
21 filename := r.Filename
22
23 // We'll try to relative-ize our filename here so it's less verbose
24 // in the common case of being in the current working directory. If not,
25 // we'll just show the full path.
26 wd, err := os.Getwd()
27 if err == nil {
28 relFn, err := filepath.Rel(wd, filename)
29 if err == nil {
30 filename = relFn
31 }
32 }
33
34 return fmt.Sprintf("%s:%d,%d", filename, r.Start.Line, r.Start.Column)
35}
diff --git a/vendor/github.com/hashicorp/terraform/version/version.go b/vendor/github.com/hashicorp/terraform/version/version.go
new file mode 100644
index 0000000..b21b297
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/version/version.go
@@ -0,0 +1,36 @@
1// The version package provides a location to set the release versions for all
2// packages to consume, without creating import cycles.
3//
4// This package should not import any other terraform packages.
5package version
6
7import (
8 "fmt"
9
10 version "github.com/hashicorp/go-version"
11)
12
13// The main version number that is being run at the moment.
14var Version = "0.11.12"
15
16// A pre-release marker for the version. If this is "" (empty string)
17// then it means that it is a final release. Otherwise, this is a pre-release
18// such as "dev" (in development), "beta", "rc1", etc.
19var Prerelease = "dev"
20
21// SemVer is an instance of version.Version. This has the secondary
22// benefit of verifying during tests and init time that our version is a
23// proper semantic version, which should always be the case.
24var SemVer = version.Must(version.NewVersion(Version))
25
26// Header is the header name used to send the current terraform version
27// in http requests.
28const Header = "Terraform-Version"
29
30// String returns the complete version string, including prerelease
31func String() string {
32 if Prerelease != "" {
33 return fmt.Sprintf("%s-%s", Version, Prerelease)
34 }
35 return Version
36}
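
Callers are expected to consume this package the way version_required.go above now does with tfversion.SemVer; a brief sketch, with an illustrative constraint string and main package:

package main

import (
	"fmt"

	goversion "github.com/hashicorp/go-version"
	tfversion "github.com/hashicorp/terraform/version"
)

func main() {
	// Full version string, e.g. "0.11.12-dev" while Prerelease is "dev".
	fmt.Println(tfversion.String())

	// SemVer can be checked against a constraint, as CheckRequiredVersion
	// does with a module's required_version setting.
	cs, err := goversion.NewConstraint(">= 0.11.0")
	if err != nil {
		panic(err)
	}
	fmt.Println(cs.Check(tfversion.SemVer))
}
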