author    Jake Champlin <jake.champlin.27@gmail.com>  2017-06-06 12:40:07 -0400
committer Jake Champlin <jake.champlin.27@gmail.com>  2017-06-06 12:40:07 -0400
commit    bae9f6d2fd5eb5bc80929bd393932b23f14d7c93 (patch)
tree      ca9ab12a7d78b1fc27a8f734729081357ce6d252 /vendor/github.com/hashicorp/terraform/config
parent    254c495b6bebab3fb72a243c4bce858d79e6ee99 (diff)
download  terraform-provider-statuscake-bae9f6d2fd5eb5bc80929bd393932b23f14d7c93.tar.gz
          terraform-provider-statuscake-bae9f6d2fd5eb5bc80929bd393932b23f14d7c93.tar.zst
          terraform-provider-statuscake-bae9f6d2fd5eb5bc80929bd393932b23f14d7c93.zip
Initial transfer of provider code
Diffstat (limited to 'vendor/github.com/hashicorp/terraform/config')
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/append.go                          86
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/config.go                        1096
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/config_string.go                  338
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/config_terraform.go               117
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/config_tree.go                     43
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/import_tree.go                    113
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/interpolate.go                    386
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/interpolate_funcs.go             1390
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/interpolate_walk.go               283
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/lang.go                            11
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/loader.go                         224
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/loader_hcl.go                    1130
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/merge.go                          193
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/module/copy_dir.go                114
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/module/get.go                      71
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/module/inode.go                    21
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/module/inode_freebsd.go            21
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/module/inode_windows.go             8
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/module/module.go                    7
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/module/testing.go                  38
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/module/tree.go                    428
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/module/tree_gob.go                 57
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/module/validate_provider_alias.go 118
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/provisioner_enums.go               40
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/raw_config.go                     335
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/resource_mode.go                    9
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/resource_mode_string.go             16
-rw-r--r--  vendor/github.com/hashicorp/terraform/config/testing.go                          15
28 files changed, 6708 insertions, 0 deletions
diff --git a/vendor/github.com/hashicorp/terraform/config/append.go b/vendor/github.com/hashicorp/terraform/config/append.go
new file mode 100644
index 0000000..5f4e89e
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/append.go
@@ -0,0 +1,86 @@
1package config
2
3// Append appends one configuration to another.
4//
5// Append assumes that both configurations will not have
6// conflicting variables, resources, etc. If they do, the
7// problems will be caught in the validation phase.
8//
9// It is possible that c1, c2 on their own are not valid. For
10// example, a resource in c2 may reference a variable in c1. But
11// together, they would be valid.
12func Append(c1, c2 *Config) (*Config, error) {
13 c := new(Config)
14
15 // Append unknown keys, but keep them unique since it is a set
16 unknowns := make(map[string]struct{})
17 for _, k := range c1.unknownKeys {
18 _, present := unknowns[k]
19 if !present {
20 unknowns[k] = struct{}{}
21 c.unknownKeys = append(c.unknownKeys, k)
22 }
23 }
24
25 for _, k := range c2.unknownKeys {
26 _, present := unknowns[k]
27 if !present {
28 unknowns[k] = struct{}{}
29 c.unknownKeys = append(c.unknownKeys, k)
30 }
31 }
32
33 c.Atlas = c1.Atlas
34 if c2.Atlas != nil {
35 c.Atlas = c2.Atlas
36 }
37
38 // merge Terraform blocks
39 if c1.Terraform != nil {
40 c.Terraform = c1.Terraform
41 if c2.Terraform != nil {
42 c.Terraform.Merge(c2.Terraform)
43 }
44 } else {
45 c.Terraform = c2.Terraform
46 }
47
48 if len(c1.Modules) > 0 || len(c2.Modules) > 0 {
49 c.Modules = make(
50 []*Module, 0, len(c1.Modules)+len(c2.Modules))
51 c.Modules = append(c.Modules, c1.Modules...)
52 c.Modules = append(c.Modules, c2.Modules...)
53 }
54
55 if len(c1.Outputs) > 0 || len(c2.Outputs) > 0 {
56 c.Outputs = make(
57 []*Output, 0, len(c1.Outputs)+len(c2.Outputs))
58 c.Outputs = append(c.Outputs, c1.Outputs...)
59 c.Outputs = append(c.Outputs, c2.Outputs...)
60 }
61
62 if len(c1.ProviderConfigs) > 0 || len(c2.ProviderConfigs) > 0 {
63 c.ProviderConfigs = make(
64 []*ProviderConfig,
65 0, len(c1.ProviderConfigs)+len(c2.ProviderConfigs))
66 c.ProviderConfigs = append(c.ProviderConfigs, c1.ProviderConfigs...)
67 c.ProviderConfigs = append(c.ProviderConfigs, c2.ProviderConfigs...)
68 }
69
70 if len(c1.Resources) > 0 || len(c2.Resources) > 0 {
71 c.Resources = make(
72 []*Resource,
73 0, len(c1.Resources)+len(c2.Resources))
74 c.Resources = append(c.Resources, c1.Resources...)
75 c.Resources = append(c.Resources, c2.Resources...)
76 }
77
78 if len(c1.Variables) > 0 || len(c2.Variables) > 0 {
79 c.Variables = make(
80 []*Variable, 0, len(c1.Variables)+len(c2.Variables))
81 c.Variables = append(c.Variables, c1.Variables...)
82 c.Variables = append(c.Variables, c2.Variables...)
83 }
84
85 return c, nil
86}
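Usage sketch (not part of the diff): Append concatenates two already-loaded configurations and defers conflict detection to Validate. Assuming the LoadFile helper from loader.go (included in the diffstat above) and hypothetical file paths, a caller could combine and validate two files like this:

package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/terraform/config"
)

func main() {
	// Hypothetical paths; LoadFile parses a single .tf or .tf.json file.
	c1, err := config.LoadFile("main.tf")
	if err != nil {
		log.Fatal(err)
	}
	c2, err := config.LoadFile("extra.tf")
	if err != nil {
		log.Fatal(err)
	}

	// Append never reports conflicts itself; Validate catches them afterwards.
	merged, err := config.Append(c1, c2)
	if err != nil {
		log.Fatal(err)
	}
	if err := merged.Validate(); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("merged config: %d resources, %d variables\n",
		len(merged.Resources), len(merged.Variables))
}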
diff --git a/vendor/github.com/hashicorp/terraform/config/config.go b/vendor/github.com/hashicorp/terraform/config/config.go
new file mode 100644
index 0000000..9a764ac
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/config.go
@@ -0,0 +1,1096 @@
1// The config package is responsible for loading and validating the
2// configuration.
3package config
4
5import (
6 "fmt"
7 "regexp"
8 "strconv"
9 "strings"
10
11 "github.com/hashicorp/go-multierror"
12 "github.com/hashicorp/hil"
13 "github.com/hashicorp/hil/ast"
14 "github.com/hashicorp/terraform/helper/hilmapstructure"
15 "github.com/mitchellh/reflectwalk"
16)
17
18// NameRegexp is the regular expression that all names (modules, providers,
19// resources, etc.) must follow.
20var NameRegexp = regexp.MustCompile(`(?i)\A[A-Z0-9_][A-Z0-9\-\_]*\z`)
21
22// Config is the configuration that comes from loading a collection
23// of Terraform templates.
24type Config struct {
25 // Dir is the path to the directory where this configuration was
26 // loaded from. If it is blank, this configuration wasn't loaded from
27 // any meaningful directory.
28 Dir string
29
30 Terraform *Terraform
31 Atlas *AtlasConfig
32 Modules []*Module
33 ProviderConfigs []*ProviderConfig
34 Resources []*Resource
35 Variables []*Variable
36 Outputs []*Output
37
38 // The fields below can be filled in by loaders for validation
39 // purposes.
40 unknownKeys []string
41}
42
43// AtlasConfig is the configuration for building in HashiCorp's Atlas.
44type AtlasConfig struct {
45 Name string
46 Include []string
47 Exclude []string
48}
49
50// Module is a module used within a configuration.
51//
52// This does not represent a module itself, this represents a module
53// call-site within an existing configuration.
54type Module struct {
55 Name string
56 Source string
57 RawConfig *RawConfig
58}
59
60// ProviderConfig is the configuration for a resource provider.
61//
62// For example, Terraform needs to set the AWS access keys for the AWS
63// resource provider.
64type ProviderConfig struct {
65 Name string
66 Alias string
67 RawConfig *RawConfig
68}
69
70// A resource represents a single Terraform resource in the configuration.
71// A Terraform resource is something that supports some or all of the
72// usual "create, read, update, delete" operations, depending on
73// the given Mode.
74type Resource struct {
75 Mode ResourceMode // which operations the resource supports
76 Name string
77 Type string
78 RawCount *RawConfig
79 RawConfig *RawConfig
80 Provisioners []*Provisioner
81 Provider string
82 DependsOn []string
83 Lifecycle ResourceLifecycle
84}
85
86// Copy returns a copy of this Resource. Helpful for avoiding shared
87// config pointers across multiple pieces of the graph that need to do
88// interpolation.
89func (r *Resource) Copy() *Resource {
90 n := &Resource{
91 Mode: r.Mode,
92 Name: r.Name,
93 Type: r.Type,
94 RawCount: r.RawCount.Copy(),
95 RawConfig: r.RawConfig.Copy(),
96 Provisioners: make([]*Provisioner, 0, len(r.Provisioners)),
97 Provider: r.Provider,
98 DependsOn: make([]string, len(r.DependsOn)),
99 Lifecycle: *r.Lifecycle.Copy(),
100 }
101 for _, p := range r.Provisioners {
102 n.Provisioners = append(n.Provisioners, p.Copy())
103 }
104 copy(n.DependsOn, r.DependsOn)
105 return n
106}
107
108// ResourceLifecycle is used to store the lifecycle tuning parameters
109// to allow customized behavior
110type ResourceLifecycle struct {
111 CreateBeforeDestroy bool `mapstructure:"create_before_destroy"`
112 PreventDestroy bool `mapstructure:"prevent_destroy"`
113 IgnoreChanges []string `mapstructure:"ignore_changes"`
114}
115
116// Copy returns a copy of this ResourceLifecycle
117func (r *ResourceLifecycle) Copy() *ResourceLifecycle {
118 n := &ResourceLifecycle{
119 CreateBeforeDestroy: r.CreateBeforeDestroy,
120 PreventDestroy: r.PreventDestroy,
121 IgnoreChanges: make([]string, len(r.IgnoreChanges)),
122 }
123 copy(n.IgnoreChanges, r.IgnoreChanges)
124 return n
125}
126
127// Provisioner is a configured provisioner step on a resource.
128type Provisioner struct {
129 Type string
130 RawConfig *RawConfig
131 ConnInfo *RawConfig
132
133 When ProvisionerWhen
134 OnFailure ProvisionerOnFailure
135}
136
137// Copy returns a copy of this Provisioner
138func (p *Provisioner) Copy() *Provisioner {
139 return &Provisioner{
140 Type: p.Type,
141 RawConfig: p.RawConfig.Copy(),
142 ConnInfo: p.ConnInfo.Copy(),
143 When: p.When,
144 OnFailure: p.OnFailure,
145 }
146}
147
148// Variable is a variable defined within the configuration.
149type Variable struct {
150 Name string
151 DeclaredType string `mapstructure:"type"`
152 Default interface{}
153 Description string
154}
155
156// Output is an output defined within the configuration. An output is
157// resulting data that is highlighted by Terraform when finished. An
158// output marked Sensitive will be output in a masked form following
159// application, but will still be available in state.
160type Output struct {
161 Name string
162 DependsOn []string
163 Description string
164 Sensitive bool
165 RawConfig *RawConfig
166}
167
168// VariableType is the type of value a variable is holding, and returned
169// by the Type() function on variables.
170type VariableType byte
171
172const (
173 VariableTypeUnknown VariableType = iota
174 VariableTypeString
175 VariableTypeList
176 VariableTypeMap
177)
178
179func (v VariableType) Printable() string {
180 switch v {
181 case VariableTypeString:
182 return "string"
183 case VariableTypeMap:
184 return "map"
185 case VariableTypeList:
186 return "list"
187 default:
188 return "unknown"
189 }
190}
191
192// ProviderConfigName returns the name of the provider configuration in
193// the given mapping that maps to the proper provider configuration
194// for this resource.
195func ProviderConfigName(t string, pcs []*ProviderConfig) string {
196 lk := ""
197 for _, v := range pcs {
198 k := v.Name
199 if strings.HasPrefix(t, k) && len(k) > len(lk) {
200 lk = k
201 }
202 }
203
204 return lk
205}
206
207// A unique identifier for this module.
208func (r *Module) Id() string {
209 return fmt.Sprintf("%s", r.Name)
210}
211
212// Count returns the count of this resource.
213func (r *Resource) Count() (int, error) {
214 raw := r.RawCount.Value()
215 count, ok := r.RawCount.Value().(string)
216 if !ok {
217 return 0, fmt.Errorf(
218 "expected count to be a string or int, got %T", raw)
219 }
220
221 v, err := strconv.ParseInt(count, 0, 0)
222 if err != nil {
223 return 0, err
224 }
225
226 return int(v), nil
227}
228
229// A unique identifier for this resource.
230func (r *Resource) Id() string {
231 switch r.Mode {
232 case ManagedResourceMode:
233 return fmt.Sprintf("%s.%s", r.Type, r.Name)
234 case DataResourceMode:
235 return fmt.Sprintf("data.%s.%s", r.Type, r.Name)
236 default:
237 panic(fmt.Errorf("unknown resource mode %s", r.Mode))
238 }
239}
240
241// Validate does some basic semantic checking of the configuration.
242func (c *Config) Validate() error {
243 if c == nil {
244 return nil
245 }
246
247 var errs []error
248
249 for _, k := range c.unknownKeys {
250 errs = append(errs, fmt.Errorf(
251 "Unknown root level key: %s", k))
252 }
253
254 // Validate the Terraform config
255 if tf := c.Terraform; tf != nil {
256 errs = append(errs, c.Terraform.Validate()...)
257 }
258
259 vars := c.InterpolatedVariables()
260 varMap := make(map[string]*Variable)
261 for _, v := range c.Variables {
262 if _, ok := varMap[v.Name]; ok {
263 errs = append(errs, fmt.Errorf(
264 "Variable '%s': duplicate found. Variable names must be unique.",
265 v.Name))
266 }
267
268 varMap[v.Name] = v
269 }
270
271 for k, _ := range varMap {
272 if !NameRegexp.MatchString(k) {
273 errs = append(errs, fmt.Errorf(
274 "variable %q: variable name must match regular expression %s",
275 k, NameRegexp))
276 }
277 }
278
279 for _, v := range c.Variables {
280 if v.Type() == VariableTypeUnknown {
281 errs = append(errs, fmt.Errorf(
282 "Variable '%s': must be a string or a map",
283 v.Name))
284 continue
285 }
286
287 interp := false
288 fn := func(n ast.Node) (interface{}, error) {
289 // LiteralNode is a literal string (outside of a ${ ... } sequence).
290 // interpolationWalker skips most of these, but in particular it
291 // visits those that have escaped sequences (like $${foo}) as a
292 // signal that *some* processing is required on this string. For
293 // our purposes here though, this is fine and not an interpolation.
294 if _, ok := n.(*ast.LiteralNode); !ok {
295 interp = true
296 }
297 return "", nil
298 }
299
300 w := &interpolationWalker{F: fn}
301 if v.Default != nil {
302 if err := reflectwalk.Walk(v.Default, w); err == nil {
303 if interp {
304 errs = append(errs, fmt.Errorf(
305 "Variable '%s': cannot contain interpolations",
306 v.Name))
307 }
308 }
309 }
310 }
311
312 // Check for references to user variables that do not actually
313 // exist and record those errors.
314 for source, vs := range vars {
315 for _, v := range vs {
316 uv, ok := v.(*UserVariable)
317 if !ok {
318 continue
319 }
320
321 if _, ok := varMap[uv.Name]; !ok {
322 errs = append(errs, fmt.Errorf(
323 "%s: unknown variable referenced: '%s'. define it with 'variable' blocks",
324 source,
325 uv.Name))
326 }
327 }
328 }
329
330 // Check that all count variables are valid.
331 for source, vs := range vars {
332 for _, rawV := range vs {
333 switch v := rawV.(type) {
334 case *CountVariable:
335 if v.Type == CountValueInvalid {
336 errs = append(errs, fmt.Errorf(
337 "%s: invalid count variable: %s",
338 source,
339 v.FullKey()))
340 }
341 case *PathVariable:
342 if v.Type == PathValueInvalid {
343 errs = append(errs, fmt.Errorf(
344 "%s: invalid path variable: %s",
345 source,
346 v.FullKey()))
347 }
348 }
349 }
350 }
351
352 // Check that providers aren't declared multiple times.
353 providerSet := make(map[string]struct{})
354 for _, p := range c.ProviderConfigs {
355 name := p.FullName()
356 if _, ok := providerSet[name]; ok {
357 errs = append(errs, fmt.Errorf(
358 "provider.%s: declared multiple times, you can only declare a provider once",
359 name))
360 continue
361 }
362
363 providerSet[name] = struct{}{}
364 }
365
366 // Check that all references to modules are valid
367 modules := make(map[string]*Module)
368 dupped := make(map[string]struct{})
369 for _, m := range c.Modules {
370 // Check for duplicates
371 if _, ok := modules[m.Id()]; ok {
372 if _, ok := dupped[m.Id()]; !ok {
373 dupped[m.Id()] = struct{}{}
374
375 errs = append(errs, fmt.Errorf(
376 "%s: module repeated multiple times",
377 m.Id()))
378 }
379
380 // Already seen this module, just skip it
381 continue
382 }
383
384 modules[m.Id()] = m
385
386 // Check that the source has no interpolations
387 rc, err := NewRawConfig(map[string]interface{}{
388 "root": m.Source,
389 })
390 if err != nil {
391 errs = append(errs, fmt.Errorf(
392 "%s: module source error: %s",
393 m.Id(), err))
394 } else if len(rc.Interpolations) > 0 {
395 errs = append(errs, fmt.Errorf(
396 "%s: module source cannot contain interpolations",
397 m.Id()))
398 }
399
400 // Check that the name matches our regexp
401 if !NameRegexp.Match([]byte(m.Name)) {
402 errs = append(errs, fmt.Errorf(
403 "%s: module name can only contain letters, numbers, "+
404 "dashes, and underscores",
405 m.Id()))
406 }
407
408 // Check that the configuration can all be strings, lists or maps
409 raw := make(map[string]interface{})
410 for k, v := range m.RawConfig.Raw {
411 var strVal string
412 if err := hilmapstructure.WeakDecode(v, &strVal); err == nil {
413 raw[k] = strVal
414 continue
415 }
416
417 var mapVal map[string]interface{}
418 if err := hilmapstructure.WeakDecode(v, &mapVal); err == nil {
419 raw[k] = mapVal
420 continue
421 }
422
423 var sliceVal []interface{}
424 if err := hilmapstructure.WeakDecode(v, &sliceVal); err == nil {
425 raw[k] = sliceVal
426 continue
427 }
428
429 errs = append(errs, fmt.Errorf(
430 "%s: variable %s must be a string, list or map value",
431 m.Id(), k))
432 }
433
434 // Check for invalid count variables
435 for _, v := range m.RawConfig.Variables {
436 switch v.(type) {
437 case *CountVariable:
438 errs = append(errs, fmt.Errorf(
439 "%s: count variables are only valid within resources", m.Name))
440 case *SelfVariable:
441 errs = append(errs, fmt.Errorf(
442 "%s: self variables are only valid within resources", m.Name))
443 }
444 }
445
446 // Update the raw configuration to only contain the string values
447 m.RawConfig, err = NewRawConfig(raw)
448 if err != nil {
449 errs = append(errs, fmt.Errorf(
450 "%s: can't initialize configuration: %s",
451 m.Id(), err))
452 }
453 }
454 dupped = nil
455
456 // Check that all variables for modules reference modules that
457 // exist.
458 for source, vs := range vars {
459 for _, v := range vs {
460 mv, ok := v.(*ModuleVariable)
461 if !ok {
462 continue
463 }
464
465 if _, ok := modules[mv.Name]; !ok {
466 errs = append(errs, fmt.Errorf(
467 "%s: unknown module referenced: %s",
468 source,
469 mv.Name))
470 }
471 }
472 }
473
474 // Check that all references to resources are valid
475 resources := make(map[string]*Resource)
476 dupped = make(map[string]struct{})
477 for _, r := range c.Resources {
478 if _, ok := resources[r.Id()]; ok {
479 if _, ok := dupped[r.Id()]; !ok {
480 dupped[r.Id()] = struct{}{}
481
482 errs = append(errs, fmt.Errorf(
483 "%s: resource repeated multiple times",
484 r.Id()))
485 }
486 }
487
488 resources[r.Id()] = r
489 }
490 dupped = nil
491
492 // Validate resources
493 for n, r := range resources {
494 // Verify count variables
495 for _, v := range r.RawCount.Variables {
496 switch v.(type) {
497 case *CountVariable:
498 errs = append(errs, fmt.Errorf(
499 "%s: resource count can't reference count variable: %s",
500 n,
501 v.FullKey()))
502 case *SimpleVariable:
503 errs = append(errs, fmt.Errorf(
504 "%s: resource count can't reference variable: %s",
505 n,
506 v.FullKey()))
507
508 // Good
509 case *ModuleVariable:
510 case *ResourceVariable:
511 case *TerraformVariable:
512 case *UserVariable:
513
514 default:
515 errs = append(errs, fmt.Errorf(
516 "Internal error. Unknown type in count var in %s: %T",
517 n, v))
518 }
519 }
520
521 // Interpolate with a fixed number to verify that it's a number.
522 r.RawCount.interpolate(func(root ast.Node) (interface{}, error) {
523 // Execute the node but transform the AST so that it returns
524 // a fixed value of "5" for all interpolations.
525 result, err := hil.Eval(
526 hil.FixedValueTransform(
527 root, &ast.LiteralNode{Value: "5", Typex: ast.TypeString}),
528 nil)
529 if err != nil {
530 return "", err
531 }
532
533 return result.Value, nil
534 })
535 _, err := strconv.ParseInt(r.RawCount.Value().(string), 0, 0)
536 if err != nil {
537 errs = append(errs, fmt.Errorf(
538 "%s: resource count must be an integer",
539 n))
540 }
541 r.RawCount.init()
542
543 // Validate DependsOn
544 errs = append(errs, c.validateDependsOn(n, r.DependsOn, resources, modules)...)
545
546 // Verify provisioners
547 for _, p := range r.Provisioners {
548 // This validation checks that there are no splat variables
549 // referencing ourselves. This currently is not allowed.
550
551 for _, v := range p.ConnInfo.Variables {
552 rv, ok := v.(*ResourceVariable)
553 if !ok {
554 continue
555 }
556
557 if rv.Multi && rv.Index == -1 && rv.Type == r.Type && rv.Name == r.Name {
558 errs = append(errs, fmt.Errorf(
559 "%s: connection info cannot contain splat variable "+
560 "referencing itself", n))
561 break
562 }
563 }
564
565 for _, v := range p.RawConfig.Variables {
566 rv, ok := v.(*ResourceVariable)
567 if !ok {
568 continue
569 }
570
571 if rv.Multi && rv.Index == -1 && rv.Type == r.Type && rv.Name == r.Name {
572 errs = append(errs, fmt.Errorf(
573 "%s: connection info cannot contain splat variable "+
574 "referencing itself", n))
575 break
576 }
577 }
578
579 // Check for invalid when/onFailure values, though this should be
580 // picked up by the loader, we check here just in case.
581 if p.When == ProvisionerWhenInvalid {
582 errs = append(errs, fmt.Errorf(
583 "%s: provisioner 'when' value is invalid", n))
584 }
585 if p.OnFailure == ProvisionerOnFailureInvalid {
586 errs = append(errs, fmt.Errorf(
587 "%s: provisioner 'on_failure' value is invalid", n))
588 }
589 }
590
591 // Verify ignore_changes contains valid entries
592 for _, v := range r.Lifecycle.IgnoreChanges {
593 if strings.Contains(v, "*") && v != "*" {
594 errs = append(errs, fmt.Errorf(
595 "%s: ignore_changes does not support using a partial string "+
596 "together with a wildcard: %s", n, v))
597 }
598 }
599
600 // Verify ignore_changes has no interpolations
601 rc, err := NewRawConfig(map[string]interface{}{
602 "root": r.Lifecycle.IgnoreChanges,
603 })
604 if err != nil {
605 errs = append(errs, fmt.Errorf(
606 "%s: lifecycle ignore_changes error: %s",
607 n, err))
608 } else if len(rc.Interpolations) > 0 {
609 errs = append(errs, fmt.Errorf(
610 "%s: lifecycle ignore_changes cannot contain interpolations",
611 n))
612 }
613
614 // If it is a data source then it can't have provisioners
615 if r.Mode == DataResourceMode {
616 if _, ok := r.RawConfig.Raw["provisioner"]; ok {
617 errs = append(errs, fmt.Errorf(
618 "%s: data sources cannot have provisioners",
619 n))
620 }
621 }
622 }
623
624 for source, vs := range vars {
625 for _, v := range vs {
626 rv, ok := v.(*ResourceVariable)
627 if !ok {
628 continue
629 }
630
631 id := rv.ResourceId()
632 if _, ok := resources[id]; !ok {
633 errs = append(errs, fmt.Errorf(
634 "%s: unknown resource '%s' referenced in variable %s",
635 source,
636 id,
637 rv.FullKey()))
638 continue
639 }
640 }
641 }
642
643 // Check that all outputs are valid
644 {
645 found := make(map[string]struct{})
646 for _, o := range c.Outputs {
647 // Verify the output is new
648 if _, ok := found[o.Name]; ok {
649 errs = append(errs, fmt.Errorf(
650 "%s: duplicate output. output names must be unique.",
651 o.Name))
652 continue
653 }
654 found[o.Name] = struct{}{}
655
656 var invalidKeys []string
657 valueKeyFound := false
658 for k := range o.RawConfig.Raw {
659 if k == "value" {
660 valueKeyFound = true
661 continue
662 }
663 if k == "sensitive" {
664 if sensitive, ok := o.RawConfig.config[k].(bool); ok {
665 if sensitive {
666 o.Sensitive = true
667 }
668 continue
669 }
670
671 errs = append(errs, fmt.Errorf(
672 "%s: value for 'sensitive' must be boolean",
673 o.Name))
674 continue
675 }
676 if k == "description" {
677 if desc, ok := o.RawConfig.config[k].(string); ok {
678 o.Description = desc
679 continue
680 }
681
682 errs = append(errs, fmt.Errorf(
683 "%s: value for 'description' must be string",
684 o.Name))
685 continue
686 }
687 invalidKeys = append(invalidKeys, k)
688 }
689 if len(invalidKeys) > 0 {
690 errs = append(errs, fmt.Errorf(
691 "%s: output has invalid keys: %s",
692 o.Name, strings.Join(invalidKeys, ", ")))
693 }
694 if !valueKeyFound {
695 errs = append(errs, fmt.Errorf(
696 "%s: output is missing required 'value' key", o.Name))
697 }
698
699 for _, v := range o.RawConfig.Variables {
700 if _, ok := v.(*CountVariable); ok {
701 errs = append(errs, fmt.Errorf(
702 "%s: count variables are only valid within resources", o.Name))
703 }
704 }
705 }
706 }
707
708 // Check that all variables are in the proper context
709 for source, rc := range c.rawConfigs() {
710 walker := &interpolationWalker{
711 ContextF: c.validateVarContextFn(source, &errs),
712 }
713 if err := reflectwalk.Walk(rc.Raw, walker); err != nil {
714 errs = append(errs, fmt.Errorf(
715 "%s: error reading config: %s", source, err))
716 }
717 }
718
719 // Validate the self variable
720 for source, rc := range c.rawConfigs() {
721 // Ignore provisioners. This is a pretty brittle way to do this,
722 // but better than also repeating all the resources.
723 if strings.Contains(source, "provision") {
724 continue
725 }
726
727 for _, v := range rc.Variables {
728 if _, ok := v.(*SelfVariable); ok {
729 errs = append(errs, fmt.Errorf(
730 "%s: cannot contain self-reference %s", source, v.FullKey()))
731 }
732 }
733 }
734
735 if len(errs) > 0 {
736 return &multierror.Error{Errors: errs}
737 }
738
739 return nil
740}
741
742// InterpolatedVariables is a helper that returns a mapping of all the interpolated
743// variables within the configuration. This is used to verify references
744// are valid in the Validate step.
745func (c *Config) InterpolatedVariables() map[string][]InterpolatedVariable {
746 result := make(map[string][]InterpolatedVariable)
747 for source, rc := range c.rawConfigs() {
748 for _, v := range rc.Variables {
749 result[source] = append(result[source], v)
750 }
751 }
752 return result
753}
754
755// rawConfigs returns all of the RawConfigs that are available keyed by
756// a human-friendly source.
757func (c *Config) rawConfigs() map[string]*RawConfig {
758 result := make(map[string]*RawConfig)
759 for _, m := range c.Modules {
760 source := fmt.Sprintf("module '%s'", m.Name)
761 result[source] = m.RawConfig
762 }
763
764 for _, pc := range c.ProviderConfigs {
765 source := fmt.Sprintf("provider config '%s'", pc.Name)
766 result[source] = pc.RawConfig
767 }
768
769 for _, rc := range c.Resources {
770 source := fmt.Sprintf("resource '%s'", rc.Id())
771 result[source+" count"] = rc.RawCount
772 result[source+" config"] = rc.RawConfig
773
774 for i, p := range rc.Provisioners {
775 subsource := fmt.Sprintf(
776 "%s provisioner %s (#%d)",
777 source, p.Type, i+1)
778 result[subsource] = p.RawConfig
779 }
780 }
781
782 for _, o := range c.Outputs {
783 source := fmt.Sprintf("output '%s'", o.Name)
784 result[source] = o.RawConfig
785 }
786
787 return result
788}
789
790func (c *Config) validateVarContextFn(
791 source string, errs *[]error) interpolationWalkerContextFunc {
792 return func(loc reflectwalk.Location, node ast.Node) {
793 // If we're in a slice element, then it's fine, since you can do
794 // anything in there.
795 if loc == reflectwalk.SliceElem {
796 return
797 }
798
799 // Otherwise, let's check if there is a splat resource variable
800 // at the top level in here. We do this by doing a transform that
801 // replaces everything with a noop node unless it's a variable
802 // access or concat. This should turn the AST into a flat tree
803 // of Concat(Noop, ...). If there are any variables left that are
804 // multi-access, then it's still broken.
805 node = node.Accept(func(n ast.Node) ast.Node {
806 // If it is a concat or variable access, we allow it.
807 switch n.(type) {
808 case *ast.Output:
809 return n
810 case *ast.VariableAccess:
811 return n
812 }
813
814 // Otherwise, noop
815 return &noopNode{}
816 })
817
818 vars, err := DetectVariables(node)
819 if err != nil {
820 // Ignore it since this will be caught during parse. This
821 // actually probably should never happen by the time this
822 // is called, but it's okay.
823 return
824 }
825
826 for _, v := range vars {
827 rv, ok := v.(*ResourceVariable)
828 if !ok {
829 return
830 }
831
832 if rv.Multi && rv.Index == -1 {
833 *errs = append(*errs, fmt.Errorf(
834 "%s: use of the splat ('*') operator must be wrapped in a list declaration",
835 source))
836 }
837 }
838 }
839}
840
841func (c *Config) validateDependsOn(
842 n string,
843 v []string,
844 resources map[string]*Resource,
845 modules map[string]*Module) []error {
846 // Verify depends on points to resources that all exist
847 var errs []error
848 for _, d := range v {
849 // Check if we contain interpolations
850 rc, err := NewRawConfig(map[string]interface{}{
851 "value": d,
852 })
853 if err == nil && len(rc.Variables) > 0 {
854 errs = append(errs, fmt.Errorf(
855 "%s: depends on value cannot contain interpolations: %s",
856 n, d))
857 continue
858 }
859
860 // If it is a module, verify it is a module
861 if strings.HasPrefix(d, "module.") {
862 name := d[len("module."):]
863 if _, ok := modules[name]; !ok {
864 errs = append(errs, fmt.Errorf(
865 "%s: resource depends on non-existent module '%s'",
866 n, name))
867 }
868
869 continue
870 }
871
872 // Check resources
873 if _, ok := resources[d]; !ok {
874 errs = append(errs, fmt.Errorf(
875 "%s: resource depends on non-existent resource '%s'",
876 n, d))
877 }
878 }
879
880 return errs
881}
882
883func (m *Module) mergerName() string {
884 return m.Id()
885}
886
887func (m *Module) mergerMerge(other merger) merger {
888 m2 := other.(*Module)
889
890 result := *m
891 result.Name = m2.Name
892 result.RawConfig = result.RawConfig.merge(m2.RawConfig)
893
894 if m2.Source != "" {
895 result.Source = m2.Source
896 }
897
898 return &result
899}
900
901func (o *Output) mergerName() string {
902 return o.Name
903}
904
905func (o *Output) mergerMerge(m merger) merger {
906 o2 := m.(*Output)
907
908 result := *o
909 result.Name = o2.Name
910 result.Description = o2.Description
911 result.RawConfig = result.RawConfig.merge(o2.RawConfig)
912 result.Sensitive = o2.Sensitive
913 result.DependsOn = o2.DependsOn
914
915 return &result
916}
917
918func (c *ProviderConfig) GoString() string {
919 return fmt.Sprintf("*%#v", *c)
920}
921
922func (c *ProviderConfig) FullName() string {
923 if c.Alias == "" {
924 return c.Name
925 }
926
927 return fmt.Sprintf("%s.%s", c.Name, c.Alias)
928}
929
930func (c *ProviderConfig) mergerName() string {
931 return c.Name
932}
933
934func (c *ProviderConfig) mergerMerge(m merger) merger {
935 c2 := m.(*ProviderConfig)
936
937 result := *c
938 result.Name = c2.Name
939 result.RawConfig = result.RawConfig.merge(c2.RawConfig)
940
941 if c2.Alias != "" {
942 result.Alias = c2.Alias
943 }
944
945 return &result
946}
947
948func (r *Resource) mergerName() string {
949 return r.Id()
950}
951
952func (r *Resource) mergerMerge(m merger) merger {
953 r2 := m.(*Resource)
954
955 result := *r
956 result.Mode = r2.Mode
957 result.Name = r2.Name
958 result.Type = r2.Type
959 result.RawConfig = result.RawConfig.merge(r2.RawConfig)
960
961 if r2.RawCount.Value() != "1" {
962 result.RawCount = r2.RawCount
963 }
964
965 if len(r2.Provisioners) > 0 {
966 result.Provisioners = r2.Provisioners
967 }
968
969 return &result
970}
971
972// Merge merges two variables to create a new third variable.
973func (v *Variable) Merge(v2 *Variable) *Variable {
974 // Shallow copy the variable
975 result := *v
976
977 // The names should be the same, but the second name always wins.
978 result.Name = v2.Name
979
980 if v2.DeclaredType != "" {
981 result.DeclaredType = v2.DeclaredType
982 }
983 if v2.Default != nil {
984 result.Default = v2.Default
985 }
986 if v2.Description != "" {
987 result.Description = v2.Description
988 }
989
990 return &result
991}
992
993var typeStringMap = map[string]VariableType{
994 "string": VariableTypeString,
995 "map": VariableTypeMap,
996 "list": VariableTypeList,
997}
998
999// Type returns the type of variable this is.
1000func (v *Variable) Type() VariableType {
1001 if v.DeclaredType != "" {
1002 declaredType, ok := typeStringMap[v.DeclaredType]
1003 if !ok {
1004 return VariableTypeUnknown
1005 }
1006
1007 return declaredType
1008 }
1009
1010 return v.inferTypeFromDefault()
1011}
1012
1013// ValidateTypeAndDefault ensures that default variable value is compatible
1014// with the declared type (if one exists), and that the type is one which is
1015// known to Terraform
1016func (v *Variable) ValidateTypeAndDefault() error {
1017 // If an explicit type is declared, ensure it is valid
1018 if v.DeclaredType != "" {
1019 if _, ok := typeStringMap[v.DeclaredType]; !ok {
1020 validTypes := []string{}
1021 for k := range typeStringMap {
1022 validTypes = append(validTypes, k)
1023 }
1024 return fmt.Errorf(
1025 "Variable '%s' type must be one of [%s] - '%s' is not a valid type",
1026 v.Name,
1027 strings.Join(validTypes, ", "),
1028 v.DeclaredType,
1029 )
1030 }
1031 }
1032
1033 if v.DeclaredType == "" || v.Default == nil {
1034 return nil
1035 }
1036
1037 if v.inferTypeFromDefault() != v.Type() {
1038 return fmt.Errorf("'%s' has a default value which is not of type '%s' (got '%s')",
1039 v.Name, v.DeclaredType, v.inferTypeFromDefault().Printable())
1040 }
1041
1042 return nil
1043}
1044
1045func (v *Variable) mergerName() string {
1046 return v.Name
1047}
1048
1049func (v *Variable) mergerMerge(m merger) merger {
1050 return v.Merge(m.(*Variable))
1051}
1052
1053// Required tests whether a variable is required or not.
1054func (v *Variable) Required() bool {
1055 return v.Default == nil
1056}
1057
1058// inferTypeFromDefault contains the logic for the old method of inferring
1059// variable types - we can also use this for validating that the declared
1060// type matches the type of the default value
1061func (v *Variable) inferTypeFromDefault() VariableType {
1062 if v.Default == nil {
1063 return VariableTypeString
1064 }
1065
1066 var s string
1067 if err := hilmapstructure.WeakDecode(v.Default, &s); err == nil {
1068 v.Default = s
1069 return VariableTypeString
1070 }
1071
1072 var m map[string]interface{}
1073 if err := hilmapstructure.WeakDecode(v.Default, &m); err == nil {
1074 v.Default = m
1075 return VariableTypeMap
1076 }
1077
1078 var l []interface{}
1079 if err := hilmapstructure.WeakDecode(v.Default, &l); err == nil {
1080 v.Default = l
1081 return VariableTypeList
1082 }
1083
1084 return VariableTypeUnknown
1085}
1086
1087func (m ResourceMode) Taintable() bool {
1088 switch m {
1089 case ManagedResourceMode:
1090 return true
1091 case DataResourceMode:
1092 return false
1093 default:
1094 panic(fmt.Errorf("unsupported ResourceMode value %s", m))
1095 }
1096}
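Illustrative sketch (not part of the diff): the Variable type above infers a type from its default when no type is declared, and ValidateTypeAndDefault cross-checks the declared type against the default. A minimal example using only the exported API shown in config.go:

package main

import (
	"fmt"

	"github.com/hashicorp/terraform/config"
)

func main() {
	// No declared type: Type() infers from the default, so a string
	// default yields VariableTypeString.
	region := &config.Variable{Name: "region", Default: "us-east-1"}
	fmt.Println(region.Type().Printable()) // "string"
	fmt.Println(region.Required())         // false: a default is present

	// Declared type with no default: only the type name is checked.
	tags := &config.Variable{Name: "tags", DeclaredType: "map"}
	fmt.Println(tags.Type().Printable())              // "map"
	fmt.Println(tags.ValidateTypeAndDefault() == nil) // true

	// Declared type that conflicts with the default is rejected.
	bad := &config.Variable{Name: "size", DeclaredType: "list", Default: "3"}
	fmt.Println(bad.ValidateTypeAndDefault() != nil) // true
}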
diff --git a/vendor/github.com/hashicorp/terraform/config/config_string.go b/vendor/github.com/hashicorp/terraform/config/config_string.go
new file mode 100644
index 0000000..0b3abbc
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/config_string.go
@@ -0,0 +1,338 @@
1package config
2
3import (
4 "bytes"
5 "fmt"
6 "sort"
7 "strings"
8)
9
10// TestString is a Stringer-like function that outputs a string that can
11// be used to easily compare multiple Config structures in unit tests.
12//
13// This function has no practical use outside of unit tests and debugging.
14func (c *Config) TestString() string {
15 if c == nil {
16 return "<nil config>"
17 }
18
19 var buf bytes.Buffer
20 if len(c.Modules) > 0 {
21 buf.WriteString("Modules:\n\n")
22 buf.WriteString(modulesStr(c.Modules))
23 buf.WriteString("\n\n")
24 }
25
26 if len(c.Variables) > 0 {
27 buf.WriteString("Variables:\n\n")
28 buf.WriteString(variablesStr(c.Variables))
29 buf.WriteString("\n\n")
30 }
31
32 if len(c.ProviderConfigs) > 0 {
33 buf.WriteString("Provider Configs:\n\n")
34 buf.WriteString(providerConfigsStr(c.ProviderConfigs))
35 buf.WriteString("\n\n")
36 }
37
38 if len(c.Resources) > 0 {
39 buf.WriteString("Resources:\n\n")
40 buf.WriteString(resourcesStr(c.Resources))
41 buf.WriteString("\n\n")
42 }
43
44 if len(c.Outputs) > 0 {
45 buf.WriteString("Outputs:\n\n")
46 buf.WriteString(outputsStr(c.Outputs))
47 buf.WriteString("\n")
48 }
49
50 return strings.TrimSpace(buf.String())
51}
52
53func terraformStr(t *Terraform) string {
54 result := ""
55
56 if b := t.Backend; b != nil {
57 result += fmt.Sprintf("backend (%s)\n", b.Type)
58
59 keys := make([]string, 0, len(b.RawConfig.Raw))
60 for k, _ := range b.RawConfig.Raw {
61 keys = append(keys, k)
62 }
63 sort.Strings(keys)
64
65 for _, k := range keys {
66 result += fmt.Sprintf(" %s\n", k)
67 }
68 }
69
70 return strings.TrimSpace(result)
71}
72
73func modulesStr(ms []*Module) string {
74 result := ""
75 order := make([]int, 0, len(ms))
76 ks := make([]string, 0, len(ms))
77 mapping := make(map[string]int)
78 for i, m := range ms {
79 k := m.Id()
80 ks = append(ks, k)
81 mapping[k] = i
82 }
83 sort.Strings(ks)
84 for _, k := range ks {
85 order = append(order, mapping[k])
86 }
87
88 for _, i := range order {
89 m := ms[i]
90 result += fmt.Sprintf("%s\n", m.Id())
91
92 ks := make([]string, 0, len(m.RawConfig.Raw))
93 for k, _ := range m.RawConfig.Raw {
94 ks = append(ks, k)
95 }
96 sort.Strings(ks)
97
98 result += fmt.Sprintf(" source = %s\n", m.Source)
99
100 for _, k := range ks {
101 result += fmt.Sprintf(" %s\n", k)
102 }
103 }
104
105 return strings.TrimSpace(result)
106}
107
108func outputsStr(os []*Output) string {
109 ns := make([]string, 0, len(os))
110 m := make(map[string]*Output)
111 for _, o := range os {
112 ns = append(ns, o.Name)
113 m[o.Name] = o
114 }
115 sort.Strings(ns)
116
117 result := ""
118 for _, n := range ns {
119 o := m[n]
120
121 result += fmt.Sprintf("%s\n", n)
122
123 if len(o.DependsOn) > 0 {
124 result += fmt.Sprintf(" dependsOn\n")
125 for _, d := range o.DependsOn {
126 result += fmt.Sprintf(" %s\n", d)
127 }
128 }
129
130 if len(o.RawConfig.Variables) > 0 {
131 result += fmt.Sprintf(" vars\n")
132 for _, rawV := range o.RawConfig.Variables {
133 kind := "unknown"
134 str := rawV.FullKey()
135
136 switch rawV.(type) {
137 case *ResourceVariable:
138 kind = "resource"
139 case *UserVariable:
140 kind = "user"
141 }
142
143 result += fmt.Sprintf(" %s: %s\n", kind, str)
144 }
145 }
146 }
147
148 return strings.TrimSpace(result)
149}
150
151// This helper turns a provider configs field into a deterministic
152// string value for comparison in tests.
153func providerConfigsStr(pcs []*ProviderConfig) string {
154 result := ""
155
156 ns := make([]string, 0, len(pcs))
157 m := make(map[string]*ProviderConfig)
158 for _, n := range pcs {
159 ns = append(ns, n.Name)
160 m[n.Name] = n
161 }
162 sort.Strings(ns)
163
164 for _, n := range ns {
165 pc := m[n]
166
167 result += fmt.Sprintf("%s\n", n)
168
169 keys := make([]string, 0, len(pc.RawConfig.Raw))
170 for k, _ := range pc.RawConfig.Raw {
171 keys = append(keys, k)
172 }
173 sort.Strings(keys)
174
175 for _, k := range keys {
176 result += fmt.Sprintf(" %s\n", k)
177 }
178
179 if len(pc.RawConfig.Variables) > 0 {
180 result += fmt.Sprintf(" vars\n")
181 for _, rawV := range pc.RawConfig.Variables {
182 kind := "unknown"
183 str := rawV.FullKey()
184
185 switch rawV.(type) {
186 case *ResourceVariable:
187 kind = "resource"
188 case *UserVariable:
189 kind = "user"
190 }
191
192 result += fmt.Sprintf(" %s: %s\n", kind, str)
193 }
194 }
195 }
196
197 return strings.TrimSpace(result)
198}
199
200// This helper turns a resources field into a deterministic
201// string value for comparison in tests.
202func resourcesStr(rs []*Resource) string {
203 result := ""
204 order := make([]int, 0, len(rs))
205 ks := make([]string, 0, len(rs))
206 mapping := make(map[string]int)
207 for i, r := range rs {
208 k := r.Id()
209 ks = append(ks, k)
210 mapping[k] = i
211 }
212 sort.Strings(ks)
213 for _, k := range ks {
214 order = append(order, mapping[k])
215 }
216
217 for _, i := range order {
218 r := rs[i]
219 result += fmt.Sprintf(
220 "%s (x%s)\n",
221 r.Id(),
222 r.RawCount.Value())
223
224 ks := make([]string, 0, len(r.RawConfig.Raw))
225 for k, _ := range r.RawConfig.Raw {
226 ks = append(ks, k)
227 }
228 sort.Strings(ks)
229
230 for _, k := range ks {
231 result += fmt.Sprintf(" %s\n", k)
232 }
233
234 if len(r.Provisioners) > 0 {
235 result += fmt.Sprintf(" provisioners\n")
236 for _, p := range r.Provisioners {
237 when := ""
238 if p.When != ProvisionerWhenCreate {
239 when = fmt.Sprintf(" (%s)", p.When.String())
240 }
241
242 result += fmt.Sprintf(" %s%s\n", p.Type, when)
243
244 if p.OnFailure != ProvisionerOnFailureFail {
245 result += fmt.Sprintf(" on_failure = %s\n", p.OnFailure.String())
246 }
247
248 ks := make([]string, 0, len(p.RawConfig.Raw))
249 for k, _ := range p.RawConfig.Raw {
250 ks = append(ks, k)
251 }
252 sort.Strings(ks)
253
254 for _, k := range ks {
255 result += fmt.Sprintf(" %s\n", k)
256 }
257 }
258 }
259
260 if len(r.DependsOn) > 0 {
261 result += fmt.Sprintf(" dependsOn\n")
262 for _, d := range r.DependsOn {
263 result += fmt.Sprintf(" %s\n", d)
264 }
265 }
266
267 if len(r.RawConfig.Variables) > 0 {
268 result += fmt.Sprintf(" vars\n")
269
270 ks := make([]string, 0, len(r.RawConfig.Variables))
271 for k, _ := range r.RawConfig.Variables {
272 ks = append(ks, k)
273 }
274 sort.Strings(ks)
275
276 for _, k := range ks {
277 rawV := r.RawConfig.Variables[k]
278 kind := "unknown"
279 str := rawV.FullKey()
280
281 switch rawV.(type) {
282 case *ResourceVariable:
283 kind = "resource"
284 case *UserVariable:
285 kind = "user"
286 }
287
288 result += fmt.Sprintf(" %s: %s\n", kind, str)
289 }
290 }
291 }
292
293 return strings.TrimSpace(result)
294}
295
296// This helper turns a variables field into a deterministic
297// string value for comparison in tests.
298func variablesStr(vs []*Variable) string {
299 result := ""
300 ks := make([]string, 0, len(vs))
301 m := make(map[string]*Variable)
302 for _, v := range vs {
303 ks = append(ks, v.Name)
304 m[v.Name] = v
305 }
306 sort.Strings(ks)
307
308 for _, k := range ks {
309 v := m[k]
310
311 required := ""
312 if v.Required() {
313 required = " (required)"
314 }
315
316 declaredType := ""
317 if v.DeclaredType != "" {
318 declaredType = fmt.Sprintf(" (%s)", v.DeclaredType)
319 }
320
321 if v.Default == nil || v.Default == "" {
322 v.Default = "<>"
323 }
324 if v.Description == "" {
325 v.Description = "<>"
326 }
327
328 result += fmt.Sprintf(
329 "%s%s%s\n %v\n %s\n",
330 k,
331 required,
332 declaredType,
333 v.Default,
334 v.Description)
335 }
336
337 return strings.TrimSpace(result)
338}
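Sketch of the intended use (not part of the diff): TestString gives tests a deterministic, name-sorted rendering of a Config so two configurations can be compared as plain strings. A hypothetical test using it might read:

package config_test

import (
	"testing"

	"github.com/hashicorp/terraform/config"
)

func TestConfigsRenderTheSame(t *testing.T) {
	// Hypothetical fixture paths; any two files expected to be equivalent work.
	a, err := config.LoadFile("testdata/a.tf")
	if err != nil {
		t.Fatal(err)
	}
	b, err := config.LoadFile("testdata/b.tf")
	if err != nil {
		t.Fatal(err)
	}

	// Output is sorted by name/id, so ordering differences in the source
	// files do not affect the comparison.
	if a.TestString() != b.TestString() {
		t.Fatalf("configs differ:\n%s\n---\n%s", a.TestString(), b.TestString())
	}
}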
diff --git a/vendor/github.com/hashicorp/terraform/config/config_terraform.go b/vendor/github.com/hashicorp/terraform/config/config_terraform.go
new file mode 100644
index 0000000..8535c96
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/config_terraform.go
@@ -0,0 +1,117 @@
1package config
2
3import (
4 "fmt"
5 "strings"
6
7 "github.com/hashicorp/go-version"
8 "github.com/mitchellh/hashstructure"
9)
10
11// Terraform is the Terraform meta-configuration that can be present
12// in configuration files for configuring Terraform itself.
13type Terraform struct {
14 RequiredVersion string `hcl:"required_version"` // Required Terraform version (constraint)
15 Backend *Backend // See Backend struct docs
16}
17
18// Validate performs the validation for just the Terraform configuration.
19func (t *Terraform) Validate() []error {
20 var errs []error
21
22 if raw := t.RequiredVersion; raw != "" {
23 // Check that the value has no interpolations
24 rc, err := NewRawConfig(map[string]interface{}{
25 "root": raw,
26 })
27 if err != nil {
28 errs = append(errs, fmt.Errorf(
29 "terraform.required_version: %s", err))
30 } else if len(rc.Interpolations) > 0 {
31 errs = append(errs, fmt.Errorf(
32 "terraform.required_version: cannot contain interpolations"))
33 } else {
34 // Check it is valid
35 _, err := version.NewConstraint(raw)
36 if err != nil {
37 errs = append(errs, fmt.Errorf(
38 "terraform.required_version: invalid syntax: %s", err))
39 }
40 }
41 }
42
43 if t.Backend != nil {
44 errs = append(errs, t.Backend.Validate()...)
45 }
46
47 return errs
48}
49
50// Merge t with t2.
51// Any conflicting fields are overwritten by t2.
52func (t *Terraform) Merge(t2 *Terraform) {
53 if t2.RequiredVersion != "" {
54 t.RequiredVersion = t2.RequiredVersion
55 }
56
57 if t2.Backend != nil {
58 t.Backend = t2.Backend
59 }
60}
61
62// Backend is the configuration for the "backend" to use with Terraform.
63// A backend is responsible for all major behavior of Terraform's core.
64// The abstraction layer above the core (the "backend") allows for behavior
65// such as remote operation.
66type Backend struct {
67 Type string
68 RawConfig *RawConfig
69
70 // Hash is a unique hash code representing the original configuration
71 // of the backend. This won't be recomputed unless Rehash is called.
72 Hash uint64
73}
74
75// Rehash returns a unique content hash for this backend's configuration
76// as a uint64 value.
77func (b *Backend) Rehash() uint64 {
78 // If we have no backend, the value is zero
79 if b == nil {
80 return 0
81 }
82
83 // Use hashstructure to hash only our type with the config.
84 code, err := hashstructure.Hash(map[string]interface{}{
85 "type": b.Type,
86 "config": b.RawConfig.Raw,
87 }, nil)
88
89 // This should never happen since we have just some basic primitives
90 // so panic if there is an error.
91 if err != nil {
92 panic(err)
93 }
94
95 return code
96}
97
98func (b *Backend) Validate() []error {
99 if len(b.RawConfig.Interpolations) > 0 {
100 return []error{fmt.Errorf(strings.TrimSpace(errBackendInterpolations))}
101 }
102
103 return nil
104}
105
106const errBackendInterpolations = `
107terraform.backend: configuration cannot contain interpolations
108
109The backend configuration is loaded by Terraform extremely early, before
110the core of Terraform can be initialized. This is necessary because the backend
111dictates the behavior of that core. The core is what handles interpolation
112processing. Because of this, interpolations cannot be used in backend
113configuration.
114
115If you'd like to parameterize backend configuration, we recommend using
116partial configuration with the "-backend-config" flag to "terraform init".
117`
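Quick sketch (not part of the diff): the Terraform block's Validate rejects interpolated or malformed required_version constraints, and Backend.Rehash fingerprints a backend configuration by hashing its type plus raw config. Assuming NewRawConfig from raw_config.go (listed in the diffstat), this might look like:

package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/terraform/config"
)

func main() {
	// A well-formed version constraint passes; an interpolated or
	// unparseable one would be reported as an error.
	tf := &config.Terraform{RequiredVersion: ">= 0.9.0"}
	if errs := tf.Validate(); len(errs) > 0 {
		log.Fatal(errs)
	}

	// Backend configuration is hashed over its type plus raw keys/values.
	raw, err := config.NewRawConfig(map[string]interface{}{"bucket": "my-state"})
	if err != nil {
		log.Fatal(err)
	}
	b := &config.Backend{Type: "s3", RawConfig: raw}
	b.Hash = b.Rehash()
	fmt.Println(b.Hash)
}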
diff --git a/vendor/github.com/hashicorp/terraform/config/config_tree.go b/vendor/github.com/hashicorp/terraform/config/config_tree.go
new file mode 100644
index 0000000..08dc0fe
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/config_tree.go
@@ -0,0 +1,43 @@
1package config
2
3// configTree represents a tree of configurations where the root is the
4// first file and its children are the configurations it has imported.
5type configTree struct {
6 Path string
7 Config *Config
8 Children []*configTree
9}
10
11// Flatten flattens the entire tree down to a single merged Config
12// structure.
13func (t *configTree) Flatten() (*Config, error) {
14 // No children is easy: we're already merged!
15 if len(t.Children) == 0 {
16 return t.Config, nil
17 }
18
19 // Depth-first, merge all the children first.
20 childConfigs := make([]*Config, len(t.Children))
21 for i, ct := range t.Children {
22 c, err := ct.Flatten()
23 if err != nil {
24 return nil, err
25 }
26
27 childConfigs[i] = c
28 }
29
30 // Merge all the children in order
31 config := childConfigs[0]
32 childConfigs = childConfigs[1:]
33 for _, config2 := range childConfigs {
34 var err error
35 config, err = Merge(config, config2)
36 if err != nil {
37 return nil, err
38 }
39 }
40
41 // Merge the final merged child config with our own
42 return Merge(config, t.Config)
43}
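Because configTree is unexported, it can only be exercised from inside the package; a hypothetical in-package test sketch of Flatten (Merge comes from merge.go in this diffstat) could be:

package config

import "testing"

func TestConfigTreeFlatten(t *testing.T) {
	// A root with one child: Flatten merges depth-first, children first,
	// then merges that result with the root's own Config.
	child := &configTree{Path: "child.tf", Config: &Config{}}
	root := &configTree{
		Path:     "root.tf",
		Config:   &Config{},
		Children: []*configTree{child},
	}

	merged, err := root.Flatten()
	if err != nil {
		t.Fatal(err)
	}
	if merged == nil {
		t.Fatal("expected a merged config")
	}
}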
diff --git a/vendor/github.com/hashicorp/terraform/config/import_tree.go b/vendor/github.com/hashicorp/terraform/config/import_tree.go
new file mode 100644
index 0000000..37ec11a
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/import_tree.go
@@ -0,0 +1,113 @@
1package config
2
3import (
4 "fmt"
5 "io"
6)
7
8// configurable is an interface that must be implemented by any configuration
9// formats of Terraform in order to return a *Config.
10type configurable interface {
11 Config() (*Config, error)
12}
13
14// importTree is the result of the first-pass load of the configuration
15// files. It is a tree of raw configurables and then any children (their
16// imports).
17//
18// An importTree can be turned into a configTree.
19type importTree struct {
20 Path string
21 Raw configurable
22 Children []*importTree
23}
24
25// This is the function type that must be implemented by the configuration
26// file loader to turn a single file into a configurable and any additional
27// imports.
28type fileLoaderFunc func(path string) (configurable, []string, error)
29
30// loadTree takes a single file and loads the entire importTree for that
31 // file. This function detects what kind of configuration file it is and
32// executes the proper fileLoaderFunc.
33func loadTree(root string) (*importTree, error) {
34 var f fileLoaderFunc
35 switch ext(root) {
36 case ".tf", ".tf.json":
37 f = loadFileHcl
38 default:
39 }
40
41 if f == nil {
42 return nil, fmt.Errorf(
43 "%s: unknown configuration format. Use '.tf' or '.tf.json' extension",
44 root)
45 }
46
47 c, imps, err := f(root)
48 if err != nil {
49 return nil, err
50 }
51
52 children := make([]*importTree, len(imps))
53 for i, imp := range imps {
54 t, err := loadTree(imp)
55 if err != nil {
56 return nil, err
57 }
58
59 children[i] = t
60 }
61
62 return &importTree{
63 Path: root,
64 Raw: c,
65 Children: children,
66 }, nil
67}
68
69// Close releases any resources we might be holding open for the importTree.
70//
71// This can safely be called even while ConfigTree results are alive. The
72// importTree is not bound to these.
73func (t *importTree) Close() error {
74 if c, ok := t.Raw.(io.Closer); ok {
75 c.Close()
76 }
77 for _, ct := range t.Children {
78 ct.Close()
79 }
80
81 return nil
82}
83
84// ConfigTree traverses the importTree and turns each node into a *Config
85// object, ultimately returning a *configTree.
86func (t *importTree) ConfigTree() (*configTree, error) {
87 config, err := t.Raw.Config()
88 if err != nil {
89 return nil, fmt.Errorf(
90 "Error loading %s: %s",
91 t.Path,
92 err)
93 }
94
95 // Build our result
96 result := &configTree{
97 Path: t.Path,
98 Config: config,
99 }
100
101 // Build the config trees for the children
102 result.Children = make([]*configTree, len(t.Children))
103 for i, ct := range t.Children {
104 t, err := ct.ConfigTree()
105 if err != nil {
106 return nil, err
107 }
108
109 result.Children[i] = t
110 }
111
112 return result, nil
113}
diff --git a/vendor/github.com/hashicorp/terraform/config/interpolate.go b/vendor/github.com/hashicorp/terraform/config/interpolate.go
new file mode 100644
index 0000000..bbb3555
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/interpolate.go
@@ -0,0 +1,386 @@
1package config
2
3import (
4 "fmt"
5 "strconv"
6 "strings"
7
8 "github.com/hashicorp/hil/ast"
9)
10
11// An InterpolatedVariable is a variable reference within an interpolation.
12//
13 // Implementations of this interface represent various sources where
14// variables can come from: user variables, resources, etc.
15type InterpolatedVariable interface {
16 FullKey() string
17}
18
19// CountVariable is a variable for referencing information about
20// the count.
21type CountVariable struct {
22 Type CountValueType
23 key string
24}
25
26// CountValueType is the type of the count variable that is referenced.
27type CountValueType byte
28
29const (
30 CountValueInvalid CountValueType = iota
31 CountValueIndex
32)
33
34// A ModuleVariable is a variable that is referencing the output
35// of a module, such as "${module.foo.bar}"
36type ModuleVariable struct {
37 Name string
38 Field string
39 key string
40}
41
42// A PathVariable is a variable that references path information about the
43// module.
44type PathVariable struct {
45 Type PathValueType
46 key string
47}
48
49type PathValueType byte
50
51const (
52 PathValueInvalid PathValueType = iota
53 PathValueCwd
54 PathValueModule
55 PathValueRoot
56)
57
58// A ResourceVariable is a variable that is referencing the field
59// of a resource, such as "${aws_instance.foo.ami}"
60type ResourceVariable struct {
61 Mode ResourceMode
62 Type string // Resource type, i.e. "aws_instance"
63 Name string // Resource name
64 Field string // Resource field
65
66 Multi bool // True if multi-variable: aws_instance.foo.*.id
67 Index int // Index for multi-variable: aws_instance.foo.1.id == 1
68
69 key string
70}
71
72// SelfVariable is a variable that is referencing the same resource
73// it is running on: "${self.address}"
74type SelfVariable struct {
75 Field string
76
77 key string
78}
79
80// SimpleVariable is an unprefixed variable, which can show up when users have
81// strings they are passing down to resources that use interpolation
82// internally. The template_file resource is an example of this.
83type SimpleVariable struct {
84 Key string
85}
86
87// TerraformVariable is a "terraform."-prefixed variable used to access
88// metadata about the Terraform run.
89type TerraformVariable struct {
90 Field string
91 key string
92}
93
94// A UserVariable is a variable that is referencing a user variable
95// that is inputted from outside the configuration. This looks like
96// "${var.foo}"
97type UserVariable struct {
98 Name string
99 Elem string
100
101 key string
102}
103
104func NewInterpolatedVariable(v string) (InterpolatedVariable, error) {
105 if strings.HasPrefix(v, "count.") {
106 return NewCountVariable(v)
107 } else if strings.HasPrefix(v, "path.") {
108 return NewPathVariable(v)
109 } else if strings.HasPrefix(v, "self.") {
110 return NewSelfVariable(v)
111 } else if strings.HasPrefix(v, "terraform.") {
112 return NewTerraformVariable(v)
113 } else if strings.HasPrefix(v, "var.") {
114 return NewUserVariable(v)
115 } else if strings.HasPrefix(v, "module.") {
116 return NewModuleVariable(v)
117 } else if !strings.ContainsRune(v, '.') {
118 return NewSimpleVariable(v)
119 } else {
120 return NewResourceVariable(v)
121 }
122}
123
124func NewCountVariable(key string) (*CountVariable, error) {
125 var fieldType CountValueType
126 parts := strings.SplitN(key, ".", 2)
127 switch parts[1] {
128 case "index":
129 fieldType = CountValueIndex
130 }
131
132 return &CountVariable{
133 Type: fieldType,
134 key: key,
135 }, nil
136}
137
138func (c *CountVariable) FullKey() string {
139 return c.key
140}
141
142func NewModuleVariable(key string) (*ModuleVariable, error) {
143 parts := strings.SplitN(key, ".", 3)
144 if len(parts) < 3 {
145 return nil, fmt.Errorf(
146 "%s: module variables must be three parts: module.name.attr",
147 key)
148 }
149
150 return &ModuleVariable{
151 Name: parts[1],
152 Field: parts[2],
153 key: key,
154 }, nil
155}
156
157func (v *ModuleVariable) FullKey() string {
158 return v.key
159}
160
161func (v *ModuleVariable) GoString() string {
162 return fmt.Sprintf("*%#v", *v)
163}
164
165func NewPathVariable(key string) (*PathVariable, error) {
166 var fieldType PathValueType
167 parts := strings.SplitN(key, ".", 2)
168 switch parts[1] {
169 case "cwd":
170 fieldType = PathValueCwd
171 case "module":
172 fieldType = PathValueModule
173 case "root":
174 fieldType = PathValueRoot
175 }
176
177 return &PathVariable{
178 Type: fieldType,
179 key: key,
180 }, nil
181}
182
183func (v *PathVariable) FullKey() string {
184 return v.key
185}
186
187func NewResourceVariable(key string) (*ResourceVariable, error) {
188 var mode ResourceMode
189 var parts []string
190 if strings.HasPrefix(key, "data.") {
191 mode = DataResourceMode
192 parts = strings.SplitN(key, ".", 4)
193 if len(parts) < 4 {
194 return nil, fmt.Errorf(
195 "%s: data variables must be four parts: data.TYPE.NAME.ATTR",
196 key)
197 }
198
199 // Don't actually need the "data." prefix for parsing, since it's
200 // always constant.
201 parts = parts[1:]
202 } else {
203 mode = ManagedResourceMode
204 parts = strings.SplitN(key, ".", 3)
205 if len(parts) < 3 {
206 return nil, fmt.Errorf(
207 "%s: resource variables must be three parts: TYPE.NAME.ATTR",
208 key)
209 }
210 }
211
212 field := parts[2]
213 multi := false
214 var index int
215
216 if idx := strings.Index(field, "."); idx != -1 {
217 indexStr := field[:idx]
218 multi = indexStr == "*"
219 index = -1
220
221 if !multi {
222 indexInt, err := strconv.ParseInt(indexStr, 0, 0)
223 if err == nil {
224 multi = true
225 index = int(indexInt)
226 }
227 }
228
229 if multi {
230 field = field[idx+1:]
231 }
232 }
233
234 return &ResourceVariable{
235 Mode: mode,
236 Type: parts[0],
237 Name: parts[1],
238 Field: field,
239 Multi: multi,
240 Index: index,
241 key: key,
242 }, nil
243}
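// A sketch of how resource keys parse (illustrative values only):
//
//	"aws_instance.web.id"   -> Type "aws_instance", Name "web", Field "id"
//	"aws_instance.web.0.id" -> same, plus Multi true, Index 0
//	"aws_instance.web.*.id" -> same, plus Multi true, Index -1 (splat)
//	"data.aws_ami.base.id"  -> Mode DataResourceMode, Type "aws_ami", Name "base", Field "id"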
244
245func (v *ResourceVariable) ResourceId() string {
246 switch v.Mode {
247 case ManagedResourceMode:
248 return fmt.Sprintf("%s.%s", v.Type, v.Name)
249 case DataResourceMode:
250 return fmt.Sprintf("data.%s.%s", v.Type, v.Name)
251 default:
252 panic(fmt.Errorf("unknown resource mode %s", v.Mode))
253 }
254}
255
256func (v *ResourceVariable) FullKey() string {
257 return v.key
258}
259
260func NewSelfVariable(key string) (*SelfVariable, error) {
261 field := key[len("self."):]
262
263 return &SelfVariable{
264 Field: field,
265
266 key: key,
267 }, nil
268}
269
270func (v *SelfVariable) FullKey() string {
271 return v.key
272}
273
274func (v *SelfVariable) GoString() string {
275 return fmt.Sprintf("*%#v", *v)
276}
277
278func NewSimpleVariable(key string) (*SimpleVariable, error) {
279 return &SimpleVariable{key}, nil
280}
281
282func (v *SimpleVariable) FullKey() string {
283 return v.Key
284}
285
286func (v *SimpleVariable) GoString() string {
287 return fmt.Sprintf("*%#v", *v)
288}
289
290func NewTerraformVariable(key string) (*TerraformVariable, error) {
291 field := key[len("terraform."):]
292 return &TerraformVariable{
293 Field: field,
294 key: key,
295 }, nil
296}
297
298func (v *TerraformVariable) FullKey() string {
299 return v.key
300}
301
302func (v *TerraformVariable) GoString() string {
303 return fmt.Sprintf("*%#v", *v)
304}
305
306func NewUserVariable(key string) (*UserVariable, error) {
307 name := key[len("var."):]
308 elem := ""
309 if idx := strings.Index(name, "."); idx > -1 {
310 elem = name[idx+1:]
311 name = name[:idx]
312 }
313
314 if len(elem) > 0 {
315 return nil, fmt.Errorf("Invalid dot index found: 'var.%s.%s'. Values in maps and lists can be referenced using square bracket indexing, like: 'var.mymap[\"key\"]' or 'var.mylist[1]'.", name, elem)
316 }
317
318 return &UserVariable{
319 key: key,
320
321 Name: name,
322 Elem: elem,
323 }, nil
324}
325
326func (v *UserVariable) FullKey() string {
327 return v.key
328}
329
330func (v *UserVariable) GoString() string {
331 return fmt.Sprintf("*%#v", *v)
332}
333
334// DetectVariables takes an AST root and returns all the interpolated
335// variables that are detected in the AST tree.
336func DetectVariables(root ast.Node) ([]InterpolatedVariable, error) {
337 var result []InterpolatedVariable
338 var resultErr error
339
340 // Visitor callback
341 fn := func(n ast.Node) ast.Node {
342 if resultErr != nil {
343 return n
344 }
345
346 switch vn := n.(type) {
347 case *ast.VariableAccess:
348 v, err := NewInterpolatedVariable(vn.Name)
349 if err != nil {
350 resultErr = err
351 return n
352 }
353 result = append(result, v)
354 case *ast.Index:
355 if va, ok := vn.Target.(*ast.VariableAccess); ok {
356 v, err := NewInterpolatedVariable(va.Name)
357 if err != nil {
358 resultErr = err
359 return n
360 }
361 result = append(result, v)
362 }
363 if va, ok := vn.Key.(*ast.VariableAccess); ok {
364 v, err := NewInterpolatedVariable(va.Name)
365 if err != nil {
366 resultErr = err
367 return n
368 }
369 result = append(result, v)
370 }
371 default:
372 return n
373 }
374
375 return n
376 }
377
378 // Visitor pattern
379 root.Accept(fn)
380
381 if resultErr != nil {
382 return nil, resultErr
383 }
384
385 return result, nil
386}
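// A minimal usage sketch (assumes the caller first parses the string with
// github.com/hashicorp/hil to obtain the AST root):
//
//	root, err := hil.Parse(`${var.region}-${count.index}`)
//	if err != nil {
//		return err
//	}
//	vars, err := DetectVariables(root)
//	// vars now contains a *UserVariable ("var.region") and a *CountVariable ("count.index")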
diff --git a/vendor/github.com/hashicorp/terraform/config/interpolate_funcs.go b/vendor/github.com/hashicorp/terraform/config/interpolate_funcs.go
new file mode 100644
index 0000000..f1f97b0
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/interpolate_funcs.go
@@ -0,0 +1,1390 @@
1package config
2
3import (
4 "crypto/md5"
5 "crypto/sha1"
6 "crypto/sha256"
7 "crypto/sha512"
8 "encoding/base64"
9 "encoding/hex"
10 "encoding/json"
11 "fmt"
12 "io/ioutil"
13 "math"
14 "net"
15 "path/filepath"
16 "regexp"
17 "sort"
18 "strconv"
19 "strings"
20 "time"
21
22 "github.com/apparentlymart/go-cidr/cidr"
23 "github.com/hashicorp/go-uuid"
24 "github.com/hashicorp/hil"
25 "github.com/hashicorp/hil/ast"
26 "github.com/mitchellh/go-homedir"
27)
28
29// stringSliceToVariableValue converts a string slice into the value
30// required to be returned from interpolation functions which return
31// TypeList.
32func stringSliceToVariableValue(values []string) []ast.Variable {
33 output := make([]ast.Variable, len(values))
34 for index, value := range values {
35 output[index] = ast.Variable{
36 Type: ast.TypeString,
37 Value: value,
38 }
39 }
40 return output
41}
42
43func listVariableValueToStringSlice(values []ast.Variable) ([]string, error) {
44 output := make([]string, len(values))
45 for index, value := range values {
46 if value.Type != ast.TypeString {
47 return []string{}, fmt.Errorf("list has non-string element (%s)", value.Type.String())
48 }
49 output[index] = value.Value.(string)
50 }
51 return output, nil
52}
53
54// Funcs is the mapping of built-in functions for configuration.
55func Funcs() map[string]ast.Function {
56 return map[string]ast.Function{
57 "basename": interpolationFuncBasename(),
58 "base64decode": interpolationFuncBase64Decode(),
59 "base64encode": interpolationFuncBase64Encode(),
60 "base64sha256": interpolationFuncBase64Sha256(),
61 "base64sha512": interpolationFuncBase64Sha512(),
62 "ceil": interpolationFuncCeil(),
63 "chomp": interpolationFuncChomp(),
64 "cidrhost": interpolationFuncCidrHost(),
65 "cidrnetmask": interpolationFuncCidrNetmask(),
66 "cidrsubnet": interpolationFuncCidrSubnet(),
67 "coalesce": interpolationFuncCoalesce(),
68 "coalescelist": interpolationFuncCoalesceList(),
69 "compact": interpolationFuncCompact(),
70 "concat": interpolationFuncConcat(),
71 "dirname": interpolationFuncDirname(),
72 "distinct": interpolationFuncDistinct(),
73 "element": interpolationFuncElement(),
74 "file": interpolationFuncFile(),
75 "matchkeys": interpolationFuncMatchKeys(),
76 "floor": interpolationFuncFloor(),
77 "format": interpolationFuncFormat(),
78 "formatlist": interpolationFuncFormatList(),
79 "index": interpolationFuncIndex(),
80 "join": interpolationFuncJoin(),
81 "jsonencode": interpolationFuncJSONEncode(),
82 "length": interpolationFuncLength(),
83 "list": interpolationFuncList(),
84 "log": interpolationFuncLog(),
85 "lower": interpolationFuncLower(),
86 "map": interpolationFuncMap(),
87 "max": interpolationFuncMax(),
88 "md5": interpolationFuncMd5(),
89 "merge": interpolationFuncMerge(),
90 "min": interpolationFuncMin(),
91 "pathexpand": interpolationFuncPathExpand(),
92 "uuid": interpolationFuncUUID(),
93 "replace": interpolationFuncReplace(),
94 "sha1": interpolationFuncSha1(),
95 "sha256": interpolationFuncSha256(),
96 "sha512": interpolationFuncSha512(),
97 "signum": interpolationFuncSignum(),
98 "slice": interpolationFuncSlice(),
99 "sort": interpolationFuncSort(),
100 "split": interpolationFuncSplit(),
101 "substr": interpolationFuncSubstr(),
102 "timestamp": interpolationFuncTimestamp(),
103 "title": interpolationFuncTitle(),
104 "trimspace": interpolationFuncTrimSpace(),
105 "upper": interpolationFuncUpper(),
106 "zipmap": interpolationFuncZipMap(),
107 }
108}
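// A rough sketch of wiring these functions into a hil evaluation scope
// (hedged: the hil API shown is the one vendored alongside this package):
//
//	root, _ := hil.Parse(`${upper("terraform")}`)
//	result, err := hil.Eval(root, &hil.EvalConfig{
//		GlobalScope: &ast.BasicScope{FuncMap: Funcs()},
//	})
//	// on success, result.Value is "TERRAFORM"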
109
110// interpolationFuncList creates a list from the parameters passed
111// to it.
112func interpolationFuncList() ast.Function {
113 return ast.Function{
114 ArgTypes: []ast.Type{},
115 ReturnType: ast.TypeList,
116 Variadic: true,
117 VariadicType: ast.TypeAny,
118 Callback: func(args []interface{}) (interface{}, error) {
119 var outputList []ast.Variable
120
121 for i, val := range args {
122 switch v := val.(type) {
123 case string:
124 outputList = append(outputList, ast.Variable{Type: ast.TypeString, Value: v})
125 case []ast.Variable:
126 outputList = append(outputList, ast.Variable{Type: ast.TypeList, Value: v})
127 case map[string]ast.Variable:
128 outputList = append(outputList, ast.Variable{Type: ast.TypeMap, Value: v})
129 default:
130 return nil, fmt.Errorf("unexpected type %T for argument %d in list", v, i)
131 }
132 }
133
134 // we don't support heterogeneous types, so make sure all types match the first
135 if len(outputList) > 0 {
136 firstType := outputList[0].Type
137 for i, v := range outputList[1:] {
138 if v.Type != firstType {
139 return nil, fmt.Errorf("unexpected type %s for argument %d in list", v.Type, i+1)
140 }
141 }
142 }
143
144 return outputList, nil
145 },
146 }
147}
148
149// interpolationFuncMap creates a map from the parameters passed
150// to it.
151func interpolationFuncMap() ast.Function {
152 return ast.Function{
153 ArgTypes: []ast.Type{},
154 ReturnType: ast.TypeMap,
155 Variadic: true,
156 VariadicType: ast.TypeAny,
157 Callback: func(args []interface{}) (interface{}, error) {
158 outputMap := make(map[string]ast.Variable)
159
160 if len(args)%2 != 0 {
161 return nil, fmt.Errorf("requires an even number of arguments, got %d", len(args))
162 }
163
164 var firstType *ast.Type
165 for i := 0; i < len(args); i += 2 {
166 key, ok := args[i].(string)
167 if !ok {
168 return nil, fmt.Errorf("argument %d represents a key, so it must be a string", i+1)
169 }
170 val := args[i+1]
171 variable, err := hil.InterfaceToVariable(val)
172 if err != nil {
173 return nil, err
174 }
175 // Enforce map type homogeneity
176 if firstType == nil {
177 firstType = &variable.Type
178 } else if variable.Type != *firstType {
179 return nil, fmt.Errorf("all map values must have the same type, got %s then %s", firstType.Printable(), variable.Type.Printable())
180 }
181 // Check for duplicate keys
182 if _, ok := outputMap[key]; ok {
183 return nil, fmt.Errorf("argument %d is a duplicate key: %q", i+1, key)
184 }
185 outputMap[key] = variable
186 }
187
188 return outputMap, nil
189 },
190 }
191}
192
193// interpolationFuncCompact strips a list of multi-variable values
194// (e.g. as returned by "split") of any empty strings.
195func interpolationFuncCompact() ast.Function {
196 return ast.Function{
197 ArgTypes: []ast.Type{ast.TypeList},
198 ReturnType: ast.TypeList,
199 Variadic: false,
200 Callback: func(args []interface{}) (interface{}, error) {
201 inputList := args[0].([]ast.Variable)
202
203 var outputList []string
204 for _, val := range inputList {
205 strVal, ok := val.Value.(string)
206 if !ok {
207 return nil, fmt.Errorf(
208 "compact() may only be used with flat lists, this list contains elements of %s",
209 val.Type.Printable())
210 }
211 if strVal == "" {
212 continue
213 }
214
215 outputList = append(outputList, strVal)
216 }
217 return stringSliceToVariableValue(outputList), nil
218 },
219 }
220}
221
222// interpolationFuncCidrHost implements the "cidrhost" function that
223// fills in the host part of a CIDR range address to create a single
224// host address
225func interpolationFuncCidrHost() ast.Function {
226 return ast.Function{
227 ArgTypes: []ast.Type{
228 ast.TypeString, // starting CIDR mask
229 ast.TypeInt, // host number to insert
230 },
231 ReturnType: ast.TypeString,
232 Variadic: false,
233 Callback: func(args []interface{}) (interface{}, error) {
234 hostNum := args[1].(int)
235 _, network, err := net.ParseCIDR(args[0].(string))
236 if err != nil {
237 return nil, fmt.Errorf("invalid CIDR expression: %s", err)
238 }
239
240 ip, err := cidr.Host(network, hostNum)
241 if err != nil {
242 return nil, err
243 }
244
245 return ip.String(), nil
246 },
247 }
248}
249
250// interpolationFuncCidrNetmask implements the "cidrnetmask" function
251// that returns the subnet mask in IP address notation.
252func interpolationFuncCidrNetmask() ast.Function {
253 return ast.Function{
254 ArgTypes: []ast.Type{
255 ast.TypeString, // CIDR mask
256 },
257 ReturnType: ast.TypeString,
258 Variadic: false,
259 Callback: func(args []interface{}) (interface{}, error) {
260 _, network, err := net.ParseCIDR(args[0].(string))
261 if err != nil {
262 return nil, fmt.Errorf("invalid CIDR expression: %s", err)
263 }
264
265 return net.IP(network.Mask).String(), nil
266 },
267 }
268}
269
270// interpolationFuncCidrSubnet implements the "cidrsubnet" function that
271// adds an additional subnet of the given length onto an existing
272// IP block expressed in CIDR notation.
273func interpolationFuncCidrSubnet() ast.Function {
274 return ast.Function{
275 ArgTypes: []ast.Type{
276 ast.TypeString, // starting CIDR mask
277 ast.TypeInt, // number of bits to extend the prefix
278 ast.TypeInt, // network number to append to the prefix
279 },
280 ReturnType: ast.TypeString,
281 Variadic: false,
282 Callback: func(args []interface{}) (interface{}, error) {
283 extraBits := args[1].(int)
284 subnetNum := args[2].(int)
285 _, network, err := net.ParseCIDR(args[0].(string))
286 if err != nil {
287 return nil, fmt.Errorf("invalid CIDR expression: %s", err)
288 }
289
290 // For portability with 32-bit systems where the subnet number
291 // will be a 32-bit int, we only allow extension of 32 bits in
292 // one call even if we're running on a 64-bit machine.
293 // (Of course, this is significant only for IPv6.)
294 if extraBits > 32 {
295 return nil, fmt.Errorf("may not extend prefix by more than 32 bits")
296 }
297
298 newNetwork, err := cidr.Subnet(network, extraBits, subnetNum)
299 if err != nil {
300 return nil, err
301 }
302
303 return newNetwork.String(), nil
304 },
305 }
306}
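// For example (illustrative of the cidr package's behavior):
//
//	cidrsubnet("10.0.0.0/8", 8, 2) => "10.2.0.0/16"
//	cidrhost("10.12.112.0/20", 16) => "10.12.112.16"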
307
308// interpolationFuncCoalesce implements the "coalesce" function that
309// returns the first non null / empty string from the provided input
310func interpolationFuncCoalesce() ast.Function {
311 return ast.Function{
312 ArgTypes: []ast.Type{ast.TypeString},
313 ReturnType: ast.TypeString,
314 Variadic: true,
315 VariadicType: ast.TypeString,
316 Callback: func(args []interface{}) (interface{}, error) {
317 if len(args) < 2 {
318 return nil, fmt.Errorf("must provide at least two arguments")
319 }
320 for _, arg := range args {
321 argument := arg.(string)
322
323 if argument != "" {
324 return argument, nil
325 }
326 }
327 return "", nil
328 },
329 }
330}
331
332// interpolationFuncCoalesceList implements the "coalescelist" function that
333// returns the first non empty list from the provided input
334func interpolationFuncCoalesceList() ast.Function {
335 return ast.Function{
336 ArgTypes: []ast.Type{ast.TypeList},
337 ReturnType: ast.TypeList,
338 Variadic: true,
339 VariadicType: ast.TypeList,
340 Callback: func(args []interface{}) (interface{}, error) {
341 if len(args) < 2 {
342 return nil, fmt.Errorf("must provide at least two arguments")
343 }
344 for _, arg := range args {
345 argument := arg.([]ast.Variable)
346
347 if len(argument) > 0 {
348 return argument, nil
349 }
350 }
351 return make([]ast.Variable, 0), nil
352 },
353 }
354}
355
356// interpolationFuncConcat implements the "concat" function that concatenates
357// multiple lists.
358func interpolationFuncConcat() ast.Function {
359 return ast.Function{
360 ArgTypes: []ast.Type{ast.TypeList},
361 ReturnType: ast.TypeList,
362 Variadic: true,
363 VariadicType: ast.TypeList,
364 Callback: func(args []interface{}) (interface{}, error) {
365 var outputList []ast.Variable
366
367 for _, arg := range args {
368 for _, v := range arg.([]ast.Variable) {
369 switch v.Type {
370 case ast.TypeString:
371 outputList = append(outputList, v)
372 case ast.TypeList:
373 outputList = append(outputList, v)
374 case ast.TypeMap:
375 outputList = append(outputList, v)
376 default:
377 return nil, fmt.Errorf("concat() does not support lists of %s", v.Type.Printable())
378 }
379 }
380 }
381
382 // we don't support heterogeneous types, so make sure all types match the first
383 if len(outputList) > 0 {
384 firstType := outputList[0].Type
385 for _, v := range outputList[1:] {
386 if v.Type != firstType {
387 return nil, fmt.Errorf("unexpected %s in list of %s", v.Type.Printable(), firstType.Printable())
388 }
389 }
390 }
391
392 return outputList, nil
393 },
394 }
395}
396
397// interpolationFuncFile implements the "file" function that allows
398// loading contents from a file.
399func interpolationFuncFile() ast.Function {
400 return ast.Function{
401 ArgTypes: []ast.Type{ast.TypeString},
402 ReturnType: ast.TypeString,
403 Callback: func(args []interface{}) (interface{}, error) {
404 path, err := homedir.Expand(args[0].(string))
405 if err != nil {
406 return "", err
407 }
408 data, err := ioutil.ReadFile(path)
409 if err != nil {
410 return "", err
411 }
412
413 return string(data), nil
414 },
415 }
416}
417
418// interpolationFuncFormat implements the "format" function that does
419// string formatting.
420func interpolationFuncFormat() ast.Function {
421 return ast.Function{
422 ArgTypes: []ast.Type{ast.TypeString},
423 Variadic: true,
424 VariadicType: ast.TypeAny,
425 ReturnType: ast.TypeString,
426 Callback: func(args []interface{}) (interface{}, error) {
427 format := args[0].(string)
428 return fmt.Sprintf(format, args[1:]...), nil
429 },
430 }
431}
432
433// interpolationFuncMax returns the maximum of the numeric arguments
434func interpolationFuncMax() ast.Function {
435 return ast.Function{
436 ArgTypes: []ast.Type{ast.TypeFloat},
437 ReturnType: ast.TypeFloat,
438 Variadic: true,
439 VariadicType: ast.TypeFloat,
440 Callback: func(args []interface{}) (interface{}, error) {
441 max := args[0].(float64)
442
443 for i := 1; i < len(args); i++ {
444 max = math.Max(max, args[i].(float64))
445 }
446
447 return max, nil
448 },
449 }
450}
451
452// interpolationFuncMin returns the minimum of the numeric arguments
453func interpolationFuncMin() ast.Function {
454 return ast.Function{
455 ArgTypes: []ast.Type{ast.TypeFloat},
456 ReturnType: ast.TypeFloat,
457 Variadic: true,
458 VariadicType: ast.TypeFloat,
459 Callback: func(args []interface{}) (interface{}, error) {
460 min := args[0].(float64)
461
462 for i := 1; i < len(args); i++ {
463 min = math.Min(min, args[i].(float64))
464 }
465
466 return min, nil
467 },
468 }
469}
470
471// interpolationFuncPathExpand expands a leading `~` in the given path to the current user's home directory
472func interpolationFuncPathExpand() ast.Function {
473 return ast.Function{
474 ArgTypes: []ast.Type{ast.TypeString},
475 ReturnType: ast.TypeString,
476 Callback: func(args []interface{}) (interface{}, error) {
477 return homedir.Expand(args[0].(string))
478 },
479 }
480}
481
482// interpolationFuncCeil returns the least integer value greater than or equal to the argument
483func interpolationFuncCeil() ast.Function {
484 return ast.Function{
485 ArgTypes: []ast.Type{ast.TypeFloat},
486 ReturnType: ast.TypeInt,
487 Callback: func(args []interface{}) (interface{}, error) {
488 return int(math.Ceil(args[0].(float64))), nil
489 },
490 }
491}
492
493// interpolationFuncLog returns the logarithm of the first argument in the base given by the second argument.
494func interpolationFuncLog() ast.Function {
495 return ast.Function{
496 ArgTypes: []ast.Type{ast.TypeFloat, ast.TypeFloat},
497 ReturnType: ast.TypeFloat,
498 Callback: func(args []interface{}) (interface{}, error) {
499 return math.Log(args[0].(float64)) / math.Log(args[1].(float64)), nil
500 },
501 }
502}
503
504// interpolationFuncChomp removes trailing newlines from the given string
505func interpolationFuncChomp() ast.Function {
506 newlines := regexp.MustCompile(`(?:\r\n?|\n)*\z`)
507 return ast.Function{
508 ArgTypes: []ast.Type{ast.TypeString},
509 ReturnType: ast.TypeString,
510 Callback: func(args []interface{}) (interface{}, error) {
511 return newlines.ReplaceAllString(args[0].(string), ""), nil
512 },
513 }
514}
515
516// interpolationFuncFloor returns the greatest integer value less than or equal to the argument
517func interpolationFuncFloor() ast.Function {
518 return ast.Function{
519 ArgTypes: []ast.Type{ast.TypeFloat},
520 ReturnType: ast.TypeInt,
521 Callback: func(args []interface{}) (interface{}, error) {
522 return int(math.Floor(args[0].(float64))), nil
523 },
524 }
525}
526
527func interpolationFuncZipMap() ast.Function {
528 return ast.Function{
529 ArgTypes: []ast.Type{
530 ast.TypeList, // Keys
531 ast.TypeList, // Values
532 },
533 ReturnType: ast.TypeMap,
534 Callback: func(args []interface{}) (interface{}, error) {
535 keys := args[0].([]ast.Variable)
536 values := args[1].([]ast.Variable)
537
538 if len(keys) != len(values) {
539 return nil, fmt.Errorf("count of keys (%d) does not match count of values (%d)",
540 len(keys), len(values))
541 }
542
543 for i, val := range keys {
544 if val.Type != ast.TypeString {
545 return nil, fmt.Errorf("keys must be strings. value at position %d is %s",
546 i, val.Type.Printable())
547 }
548 }
549
550 result := map[string]ast.Variable{}
551 for i := 0; i < len(keys); i++ {
552 result[keys[i].Value.(string)] = values[i]
553 }
554
555 return result, nil
556 },
557 }
558}
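// For example (illustrative):
//
//	zipmap(list("a", "b"), list("1", "2")) => { "a" = "1", "b" = "2" }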
559
560// interpolationFuncFormatList implements the "formatlist" function that does
561// string formatting on lists.
562func interpolationFuncFormatList() ast.Function {
563 return ast.Function{
564 ArgTypes: []ast.Type{ast.TypeAny},
565 Variadic: true,
566 VariadicType: ast.TypeAny,
567 ReturnType: ast.TypeList,
568 Callback: func(args []interface{}) (interface{}, error) {
569 // Make a copy of the variadic part of args
570 // to avoid modifying the original.
571 varargs := make([]interface{}, len(args)-1)
572 copy(varargs, args[1:])
573
574 // Verify we have some arguments
575 if len(varargs) == 0 {
576 return nil, fmt.Errorf("no arguments to formatlist")
577 }
578
579 // Convert arguments that are lists into slices.
580 // Confirm along the way that all lists have the same length (n).
581 var n int
582 listSeen := false
583 for i := 1; i < len(args); i++ {
584 s, ok := args[i].([]ast.Variable)
585 if !ok {
586 continue
587 }
588
589 // Mark that we've seen at least one list
590 listSeen = true
591
592 // Convert the ast.Variable to a slice of strings
593 parts, err := listVariableValueToStringSlice(s)
594 if err != nil {
595 return nil, err
596 }
597
598 // otherwise the list is sent down to be indexed
599 varargs[i-1] = parts
600
601 // Check length
602 if n == 0 {
603 // first list we've seen
604 n = len(parts)
605 continue
606 }
607 if n != len(parts) {
608 return nil, fmt.Errorf("format: mismatched list lengths: %d != %d", n, len(parts))
609 }
610 }
611
612 // If we didn't see a list this is an error because we
613 // can't determine the return value length.
614 if !listSeen {
615 return nil, fmt.Errorf(
616 "formatlist requires at least one list argument")
617 }
618
619 // Do the formatting.
620 format := args[0].(string)
621
622 // Generate a list of formatted strings.
623 list := make([]string, n)
624 fmtargs := make([]interface{}, len(varargs))
625 for i := 0; i < n; i++ {
626 for j, arg := range varargs {
627 switch arg := arg.(type) {
628 default:
629 fmtargs[j] = arg
630 case []string:
631 fmtargs[j] = arg[i]
632 }
633 }
634 list[i] = fmt.Sprintf(format, fmtargs...)
635 }
636 return stringSliceToVariableValue(list), nil
637 },
638 }
639}
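// For example (illustrative; non-list arguments are repeated for every element):
//
//	formatlist("%s:%s", list("a", "b"), "80") => ["a:80", "b:80"]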
640
641// interpolationFuncIndex implements the "index" function that allows one to
642// find the index of a specific element in a list
643func interpolationFuncIndex() ast.Function {
644 return ast.Function{
645 ArgTypes: []ast.Type{ast.TypeList, ast.TypeString},
646 ReturnType: ast.TypeInt,
647 Callback: func(args []interface{}) (interface{}, error) {
648 haystack := args[0].([]ast.Variable)
649 needle := args[1].(string)
650 for index, element := range haystack {
651 if needle == element.Value {
652 return index, nil
653 }
654 }
655 return nil, fmt.Errorf("Could not find '%s' in '%s'", needle, haystack)
656 },
657 }
658}
659
660// interpolationFuncDirname implements the "dirname" function.
661func interpolationFuncDirname() ast.Function {
662 return ast.Function{
663 ArgTypes: []ast.Type{ast.TypeString},
664 ReturnType: ast.TypeString,
665 Callback: func(args []interface{}) (interface{}, error) {
666 return filepath.Dir(args[0].(string)), nil
667 },
668 }
669}
670
671// interpolationFuncDistinct implements the "distinct" function that
672// removes duplicate elements from a list.
673func interpolationFuncDistinct() ast.Function {
674 return ast.Function{
675 ArgTypes: []ast.Type{ast.TypeList},
676 ReturnType: ast.TypeList,
677 Variadic: true,
678 VariadicType: ast.TypeList,
679 Callback: func(args []interface{}) (interface{}, error) {
680 var list []string
681
682 if len(args) != 1 {
683 return nil, fmt.Errorf("accepts only one argument.")
684 }
685
686 if argument, ok := args[0].([]ast.Variable); ok {
687 for _, element := range argument {
688 if element.Type != ast.TypeString {
689 return nil, fmt.Errorf(
690 "only works for flat lists, this list contains elements of %s",
691 element.Type.Printable())
692 }
693 list = appendIfMissing(list, element.Value.(string))
694 }
695 }
696
697 return stringSliceToVariableValue(list), nil
698 },
699 }
700}
701
702// appendIfMissing is a helper that appends an element to a list only if it does not already exist
703func appendIfMissing(slice []string, element string) []string {
704 for _, ele := range slice {
705 if ele == element {
706 return slice
707 }
708 }
709 return append(slice, element)
710}
711
712// for two lists `keys` and `values` of equal length, returns all elements
713// from `values` where the corresponding element from `keys` is in `searchset`.
714func interpolationFuncMatchKeys() ast.Function {
715 return ast.Function{
716 ArgTypes: []ast.Type{ast.TypeList, ast.TypeList, ast.TypeList},
717 ReturnType: ast.TypeList,
718 Callback: func(args []interface{}) (interface{}, error) {
719 output := make([]ast.Variable, 0)
720
721 values, _ := args[0].([]ast.Variable)
722 keys, _ := args[1].([]ast.Variable)
723 searchset, _ := args[2].([]ast.Variable)
724
725 if len(keys) != len(values) {
726 return nil, fmt.Errorf("length of keys and values should be equal")
727 }
728
729 for i, key := range keys {
730 for _, search := range searchset {
731 if res, err := compareSimpleVariables(key, search); err != nil {
732 return nil, err
733 } else if res {
734 output = append(output, values[i])
735 break
736 }
737 }
738 }
739 // if searchset is empty, then output is an empty list as well.
740 // if we haven't matched any key, then output is an empty list.
741 return output, nil
742 },
743 }
744}
745
746// compareSimpleVariables compares two variables of the same simple type; complex types such as TypeList or TypeMap are not supported
747func compareSimpleVariables(a, b ast.Variable) (bool, error) {
748 if a.Type != b.Type {
749 return false, fmt.Errorf(
750 "won't compare items of different types %s and %s",
751 a.Type.Printable(), b.Type.Printable())
752 }
753 switch a.Type {
754 case ast.TypeString:
755 return a.Value.(string) == b.Value.(string), nil
756 default:
757 return false, fmt.Errorf(
758 "can't compare items of type %s",
759 a.Type.Printable())
760 }
761}
762
763// interpolationFuncJoin implements the "join" function that allows
764// multi-variable values to be joined by some character.
765func interpolationFuncJoin() ast.Function {
766 return ast.Function{
767 ArgTypes: []ast.Type{ast.TypeString},
768 Variadic: true,
769 VariadicType: ast.TypeList,
770 ReturnType: ast.TypeString,
771 Callback: func(args []interface{}) (interface{}, error) {
772 var list []string
773
774 if len(args) < 2 {
775 return nil, fmt.Errorf("not enough arguments to join()")
776 }
777
778 for _, arg := range args[1:] {
779 for _, part := range arg.([]ast.Variable) {
780 if part.Type != ast.TypeString {
781 return nil, fmt.Errorf(
782 "only works on flat lists, this list contains elements of %s",
783 part.Type.Printable())
784 }
785 list = append(list, part.Value.(string))
786 }
787 }
788
789 return strings.Join(list, args[0].(string)), nil
790 },
791 }
792}
793
794// interpolationFuncJSONEncode implements the "jsonencode" function that encodes
795// a string, list, or map as its JSON representation. For now, values in the
796// list or map may only be strings.
797func interpolationFuncJSONEncode() ast.Function {
798 return ast.Function{
799 ArgTypes: []ast.Type{ast.TypeAny},
800 ReturnType: ast.TypeString,
801 Callback: func(args []interface{}) (interface{}, error) {
802 var toEncode interface{}
803
804 switch typedArg := args[0].(type) {
805 case string:
806 toEncode = typedArg
807
808 case []ast.Variable:
809 // We preallocate the list here. Note that it's important that in
810 // the length 0 case, we have an empty list rather than nil, as
811 // they encode differently.
812 // XXX It would be nice to support arbitrarily nested data here. Is
813 // there an inverse of hil.InterfaceToVariable?
814 strings := make([]string, len(typedArg))
815
816 for i, v := range typedArg {
817 if v.Type != ast.TypeString {
818 return "", fmt.Errorf("list elements must be strings")
819 }
820 strings[i] = v.Value.(string)
821 }
822 toEncode = strings
823
824 case map[string]ast.Variable:
825 // XXX It would be nice to support arbitrarily nested data here. Is
826 // there an inverse of hil.InterfaceToVariable?
827 stringMap := make(map[string]string)
828 for k, v := range typedArg {
829 if v.Type != ast.TypeString {
830 return "", fmt.Errorf("map values must be strings")
831 }
832 stringMap[k] = v.Value.(string)
833 }
834 toEncode = stringMap
835
836 default:
837 return "", fmt.Errorf("unknown type for JSON encoding: %T", args[0])
838 }
839
840 jEnc, err := json.Marshal(toEncode)
841 if err != nil {
842 return "", fmt.Errorf("failed to encode JSON data '%s'", toEncode)
843 }
844 return string(jEnc), nil
845 },
846 }
847}
848
849// interpolationFuncReplace implements the "replace" function that does
850// string replacement.
851func interpolationFuncReplace() ast.Function {
852 return ast.Function{
853 ArgTypes: []ast.Type{ast.TypeString, ast.TypeString, ast.TypeString},
854 ReturnType: ast.TypeString,
855 Callback: func(args []interface{}) (interface{}, error) {
856 s := args[0].(string)
857 search := args[1].(string)
858 replace := args[2].(string)
859
860 // We search/replace using a regexp if the string is surrounded
861 // in forward slashes.
862 if len(search) > 1 && search[0] == '/' && search[len(search)-1] == '/' {
863 re, err := regexp.Compile(search[1 : len(search)-1])
864 if err != nil {
865 return nil, err
866 }
867
868 return re.ReplaceAllString(s, replace), nil
869 }
870
871 return strings.Replace(s, search, replace, -1), nil
872 },
873 }
874}
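// For example (illustrative; a /.../ pattern switches to regexp replacement):
//
//	replace("hello world", "world", "terraform")  => "hello terraform"
//	replace("hello world", "/w.*d/", "terraform") => "hello terraform"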
875
876func interpolationFuncLength() ast.Function {
877 return ast.Function{
878 ArgTypes: []ast.Type{ast.TypeAny},
879 ReturnType: ast.TypeInt,
880 Variadic: false,
881 Callback: func(args []interface{}) (interface{}, error) {
882 subject := args[0]
883
884 switch typedSubject := subject.(type) {
885 case string:
886 return len(typedSubject), nil
887 case []ast.Variable:
888 return len(typedSubject), nil
889 case map[string]ast.Variable:
890 return len(typedSubject), nil
891 }
892
893 return 0, fmt.Errorf("arguments to length() must be a string, list, or map")
894 },
895 }
896}
897
898func interpolationFuncSignum() ast.Function {
899 return ast.Function{
900 ArgTypes: []ast.Type{ast.TypeInt},
901 ReturnType: ast.TypeInt,
902 Variadic: false,
903 Callback: func(args []interface{}) (interface{}, error) {
904 num := args[0].(int)
905 switch {
906 case num < 0:
907 return -1, nil
908 case num > 0:
909 return +1, nil
910 default:
911 return 0, nil
912 }
913 },
914 }
915}
916
917// interpolationFuncSlice returns a portion of the input list from the "from" index (inclusive) to the "to" index (exclusive).
918func interpolationFuncSlice() ast.Function {
919 return ast.Function{
920 ArgTypes: []ast.Type{
921 ast.TypeList, // inputList
922 ast.TypeInt, // from
923 ast.TypeInt, // to
924 },
925 ReturnType: ast.TypeList,
926 Variadic: false,
927 Callback: func(args []interface{}) (interface{}, error) {
928 inputList := args[0].([]ast.Variable)
929 from := args[1].(int)
930 to := args[2].(int)
931
932 if from < 0 {
933 return nil, fmt.Errorf("from index must be >= 0")
934 }
935 if to > len(inputList) {
936 return nil, fmt.Errorf("to index must be <= length of the input list")
937 }
938 if from > to {
939 return nil, fmt.Errorf("from index must be <= to index")
940 }
941
942 var outputList []ast.Variable
943 for i, val := range inputList {
944 if i >= from && i < to {
945 outputList = append(outputList, val)
946 }
947 }
948 return outputList, nil
949 },
950 }
951}
952
953// interpolationFuncSort sorts a list of strings lexicographically
954func interpolationFuncSort() ast.Function {
955 return ast.Function{
956 ArgTypes: []ast.Type{ast.TypeList},
957 ReturnType: ast.TypeList,
958 Variadic: false,
959 Callback: func(args []interface{}) (interface{}, error) {
960 inputList := args[0].([]ast.Variable)
961
962 // Ensure that all the list members are strings and
963 // create a string slice from them
964 members := make([]string, len(inputList))
965 for i, val := range inputList {
966 if val.Type != ast.TypeString {
967 return nil, fmt.Errorf(
968 "sort() may only be used with lists of strings - %s at index %d",
969 val.Type.String(), i)
970 }
971
972 members[i] = val.Value.(string)
973 }
974
975 sort.Strings(members)
976 return stringSliceToVariableValue(members), nil
977 },
978 }
979}
980
981// interpolationFuncSplit implements the "split" function that allows
982// strings to be split into multi-variable values
983func interpolationFuncSplit() ast.Function {
984 return ast.Function{
985 ArgTypes: []ast.Type{ast.TypeString, ast.TypeString},
986 ReturnType: ast.TypeList,
987 Callback: func(args []interface{}) (interface{}, error) {
988 sep := args[0].(string)
989 s := args[1].(string)
990 elements := strings.Split(s, sep)
991 return stringSliceToVariableValue(elements), nil
992 },
993 }
994}
995
996// interpolationFuncLookup implements the "lookup" function that allows
997// dynamic lookups of map types within a Terraform configuration.
998func interpolationFuncLookup(vs map[string]ast.Variable) ast.Function {
999 return ast.Function{
1000 ArgTypes: []ast.Type{ast.TypeMap, ast.TypeString},
1001 ReturnType: ast.TypeString,
1002 Variadic: true,
1003 VariadicType: ast.TypeString,
1004 Callback: func(args []interface{}) (interface{}, error) {
1005 defaultValue := ""
1006 defaultValueSet := false
1007 if len(args) > 2 {
1008 defaultValue = args[2].(string)
1009 defaultValueSet = true
1010 }
1011 if len(args) > 3 {
1012 return "", fmt.Errorf("lookup() takes no more than three arguments")
1013 }
1014 index := args[1].(string)
1015 mapVar := args[0].(map[string]ast.Variable)
1016
1017 v, ok := mapVar[index]
1018 if !ok {
1019 if defaultValueSet {
1020 return defaultValue, nil
1021 } else {
1022 return "", fmt.Errorf(
1023 "lookup failed to find '%s'",
1024 args[1].(string))
1025 }
1026 }
1027 if v.Type != ast.TypeString {
1028 return nil, fmt.Errorf(
1029 "lookup() may only be used with flat maps, this map contains elements of %s",
1030 v.Type.Printable())
1031 }
1032
1033 return v.Value.(string), nil
1034 },
1035 }
1036}
1037
1038// interpolationFuncElement implements the "element" function that allows
1039// a specific index to be looked up in a multi-variable value. Note that this will
1040// wrap if the index is larger than the number of elements in the multi-variable value.
1041func interpolationFuncElement() ast.Function {
1042 return ast.Function{
1043 ArgTypes: []ast.Type{ast.TypeList, ast.TypeString},
1044 ReturnType: ast.TypeString,
1045 Callback: func(args []interface{}) (interface{}, error) {
1046 list := args[0].([]ast.Variable)
1047 if len(list) == 0 {
1048 return nil, fmt.Errorf("element() may not be used with an empty list")
1049 }
1050
1051 index, err := strconv.Atoi(args[1].(string))
1052 if err != nil || index < 0 {
1053 return "", fmt.Errorf(
1054 "invalid number for index, got %s", args[1])
1055 }
1056
1057 resolvedIndex := index % len(list)
1058
1059 v := list[resolvedIndex]
1060 if v.Type != ast.TypeString {
1061 return nil, fmt.Errorf(
1062 "element() may only be used with flat lists, this list contains elements of %s",
1063 v.Type.Printable())
1064 }
1065 return v.Value, nil
1066 },
1067 }
1068}
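// For example (illustrative of the wrapping behavior):
//
//	element(list("a", "b", "c"), 1) => "b"
//	element(list("a", "b", "c"), 4) => "b"   (4 % 3 == 1)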
1069
1070// interpolationFuncKeys implements the "keys" function that yields a list of
1071// keys of map types within a Terraform configuration.
1072func interpolationFuncKeys(vs map[string]ast.Variable) ast.Function {
1073 return ast.Function{
1074 ArgTypes: []ast.Type{ast.TypeMap},
1075 ReturnType: ast.TypeList,
1076 Callback: func(args []interface{}) (interface{}, error) {
1077 mapVar := args[0].(map[string]ast.Variable)
1078 keys := make([]string, 0)
1079
1080 for k, _ := range mapVar {
1081 keys = append(keys, k)
1082 }
1083
1084 sort.Strings(keys)
1085
1086 // Keys are guaranteed to be strings
1087 return stringSliceToVariableValue(keys), nil
1088 },
1089 }
1090}
1091
1092// interpolationFuncValues implements the "values" function that yields a list of
1093// values of map types within a Terraform configuration, ordered by their keys.
1094func interpolationFuncValues(vs map[string]ast.Variable) ast.Function {
1095 return ast.Function{
1096 ArgTypes: []ast.Type{ast.TypeMap},
1097 ReturnType: ast.TypeList,
1098 Callback: func(args []interface{}) (interface{}, error) {
1099 mapVar := args[0].(map[string]ast.Variable)
1100 keys := make([]string, 0)
1101
1102 for k, _ := range mapVar {
1103 keys = append(keys, k)
1104 }
1105
1106 sort.Strings(keys)
1107
1108 values := make([]string, len(keys))
1109 for index, key := range keys {
1110 if value, ok := mapVar[key].Value.(string); ok {
1111 values[index] = value
1112 } else {
1113 return "", fmt.Errorf("values(): %q has element with bad type %s",
1114 key, mapVar[key].Type)
1115 }
1116 }
1117
1118 variable, err := hil.InterfaceToVariable(values)
1119 if err != nil {
1120 return nil, err
1121 }
1122
1123 return variable.Value, nil
1124 },
1125 }
1126}
1127
1128// interpolationFuncBasename implements the "basename" function.
1129func interpolationFuncBasename() ast.Function {
1130 return ast.Function{
1131 ArgTypes: []ast.Type{ast.TypeString},
1132 ReturnType: ast.TypeString,
1133 Callback: func(args []interface{}) (interface{}, error) {
1134 return filepath.Base(args[0].(string)), nil
1135 },
1136 }
1137}
1138
1139// interpolationFuncBase64Encode implements the "base64encode" function that
1140// allows Base64 encoding.
1141func interpolationFuncBase64Encode() ast.Function {
1142 return ast.Function{
1143 ArgTypes: []ast.Type{ast.TypeString},
1144 ReturnType: ast.TypeString,
1145 Callback: func(args []interface{}) (interface{}, error) {
1146 s := args[0].(string)
1147 return base64.StdEncoding.EncodeToString([]byte(s)), nil
1148 },
1149 }
1150}
1151
1152// interpolationFuncBase64Decode implements the "base64decode" function that
1153// allows Base64 decoding.
1154func interpolationFuncBase64Decode() ast.Function {
1155 return ast.Function{
1156 ArgTypes: []ast.Type{ast.TypeString},
1157 ReturnType: ast.TypeString,
1158 Callback: func(args []interface{}) (interface{}, error) {
1159 s := args[0].(string)
1160 sDec, err := base64.StdEncoding.DecodeString(s)
1161 if err != nil {
1162 return "", fmt.Errorf("failed to decode base64 data '%s'", s)
1163 }
1164 return string(sDec), nil
1165 },
1166 }
1167}
1168
1169// interpolationFuncLower implements the "lower" function that does
1170// string lower casing.
1171func interpolationFuncLower() ast.Function {
1172 return ast.Function{
1173 ArgTypes: []ast.Type{ast.TypeString},
1174 ReturnType: ast.TypeString,
1175 Callback: func(args []interface{}) (interface{}, error) {
1176 toLower := args[0].(string)
1177 return strings.ToLower(toLower), nil
1178 },
1179 }
1180}
1181
1182func interpolationFuncMd5() ast.Function {
1183 return ast.Function{
1184 ArgTypes: []ast.Type{ast.TypeString},
1185 ReturnType: ast.TypeString,
1186 Callback: func(args []interface{}) (interface{}, error) {
1187 s := args[0].(string)
1188 h := md5.New()
1189 h.Write([]byte(s))
1190 hash := hex.EncodeToString(h.Sum(nil))
1191 return hash, nil
1192 },
1193 }
1194}
1195
1196func interpolationFuncMerge() ast.Function {
1197 return ast.Function{
1198 ArgTypes: []ast.Type{ast.TypeMap},
1199 ReturnType: ast.TypeMap,
1200 Variadic: true,
1201 VariadicType: ast.TypeMap,
1202 Callback: func(args []interface{}) (interface{}, error) {
1203 outputMap := make(map[string]ast.Variable)
1204
1205 for _, arg := range args {
1206 for k, v := range arg.(map[string]ast.Variable) {
1207 outputMap[k] = v
1208 }
1209 }
1210
1211 return outputMap, nil
1212 },
1213 }
1214}
1215
1216// interpolationFuncUpper implements the "upper" function that does
1217// string upper casing.
1218func interpolationFuncUpper() ast.Function {
1219 return ast.Function{
1220 ArgTypes: []ast.Type{ast.TypeString},
1221 ReturnType: ast.TypeString,
1222 Callback: func(args []interface{}) (interface{}, error) {
1223 toUpper := args[0].(string)
1224 return strings.ToUpper(toUpper), nil
1225 },
1226 }
1227}
1228
1229func interpolationFuncSha1() ast.Function {
1230 return ast.Function{
1231 ArgTypes: []ast.Type{ast.TypeString},
1232 ReturnType: ast.TypeString,
1233 Callback: func(args []interface{}) (interface{}, error) {
1234 s := args[0].(string)
1235 h := sha1.New()
1236 h.Write([]byte(s))
1237 hash := hex.EncodeToString(h.Sum(nil))
1238 return hash, nil
1239 },
1240 }
1241}
1242
1243// hexadecimal representation of sha256 sum
1244func interpolationFuncSha256() ast.Function {
1245 return ast.Function{
1246 ArgTypes: []ast.Type{ast.TypeString},
1247 ReturnType: ast.TypeString,
1248 Callback: func(args []interface{}) (interface{}, error) {
1249 s := args[0].(string)
1250 h := sha256.New()
1251 h.Write([]byte(s))
1252 hash := hex.EncodeToString(h.Sum(nil))
1253 return hash, nil
1254 },
1255 }
1256}
1257
1258func interpolationFuncSha512() ast.Function {
1259 return ast.Function{
1260 ArgTypes: []ast.Type{ast.TypeString},
1261 ReturnType: ast.TypeString,
1262 Callback: func(args []interface{}) (interface{}, error) {
1263 s := args[0].(string)
1264 h := sha512.New()
1265 h.Write([]byte(s))
1266 hash := hex.EncodeToString(h.Sum(nil))
1267 return hash, nil
1268 },
1269 }
1270}
1271
1272func interpolationFuncTrimSpace() ast.Function {
1273 return ast.Function{
1274 ArgTypes: []ast.Type{ast.TypeString},
1275 ReturnType: ast.TypeString,
1276 Callback: func(args []interface{}) (interface{}, error) {
1277 trimSpace := args[0].(string)
1278 return strings.TrimSpace(trimSpace), nil
1279 },
1280 }
1281}
1282
1283func interpolationFuncBase64Sha256() ast.Function {
1284 return ast.Function{
1285 ArgTypes: []ast.Type{ast.TypeString},
1286 ReturnType: ast.TypeString,
1287 Callback: func(args []interface{}) (interface{}, error) {
1288 s := args[0].(string)
1289 h := sha256.New()
1290 h.Write([]byte(s))
1291 shaSum := h.Sum(nil)
1292 encoded := base64.StdEncoding.EncodeToString(shaSum[:])
1293 return encoded, nil
1294 },
1295 }
1296}
1297
1298func interpolationFuncBase64Sha512() ast.Function {
1299 return ast.Function{
1300 ArgTypes: []ast.Type{ast.TypeString},
1301 ReturnType: ast.TypeString,
1302 Callback: func(args []interface{}) (interface{}, error) {
1303 s := args[0].(string)
1304 h := sha512.New()
1305 h.Write([]byte(s))
1306 shaSum := h.Sum(nil)
1307 encoded := base64.StdEncoding.EncodeToString(shaSum[:])
1308 return encoded, nil
1309 },
1310 }
1311}
1312
1313func interpolationFuncUUID() ast.Function {
1314 return ast.Function{
1315 ArgTypes: []ast.Type{},
1316 ReturnType: ast.TypeString,
1317 Callback: func(args []interface{}) (interface{}, error) {
1318 return uuid.GenerateUUID()
1319 },
1320 }
1321}
1322
1323// interpolationFuncTimestamp implements the "timestamp" function that returns the current UTC time in RFC 3339 format.
1324func interpolationFuncTimestamp() ast.Function {
1325 return ast.Function{
1326 ArgTypes: []ast.Type{},
1327 ReturnType: ast.TypeString,
1328 Callback: func(args []interface{}) (interface{}, error) {
1329 return time.Now().UTC().Format(time.RFC3339), nil
1330 },
1331 }
1332}
1333
1334// interpolationFuncTitle implements the "title" function that returns a copy of the
1335// string in which the first character of each word is capitalized.
1336func interpolationFuncTitle() ast.Function {
1337 return ast.Function{
1338 ArgTypes: []ast.Type{ast.TypeString},
1339 ReturnType: ast.TypeString,
1340 Callback: func(args []interface{}) (interface{}, error) {
1341 toTitle := args[0].(string)
1342 return strings.Title(toTitle), nil
1343 },
1344 }
1345}
1346
1347// interpolationFuncSubstr implements the "substr" function that allows strings
1348// to be truncated.
1349func interpolationFuncSubstr() ast.Function {
1350 return ast.Function{
1351 ArgTypes: []ast.Type{
1352 ast.TypeString, // input string
1353 ast.TypeInt, // offset
1354 ast.TypeInt, // length
1355 },
1356 ReturnType: ast.TypeString,
1357 Callback: func(args []interface{}) (interface{}, error) {
1358 str := args[0].(string)
1359 offset := args[1].(int)
1360 length := args[2].(int)
1361
1362 // Interpret a negative offset as being equivalent to a positive
1363 // offset taken from the end of the string.
1364 if offset < 0 {
1365 offset += len(str)
1366 }
1367
1368 // Interpret a length of `-1` as indicating that the substring
1369 // should start at `offset` and continue until the end of the
1370 // string. Any other negative length (other than `-1`) is invalid.
1371 if length == -1 {
1372 length = len(str)
1373 } else if length >= 0 {
1374 length += offset
1375 } else {
1376 return nil, fmt.Errorf("length should be a non-negative integer")
1377 }
1378
1379 if offset > len(str) {
1380 return nil, fmt.Errorf("offset cannot be larger than the length of the string")
1381 }
1382
1383 if length > len(str) {
1384 return nil, fmt.Errorf("'offset + length' cannot be larger than the length of the string")
1385 }
1386
1387 return str[offset:length], nil
1388 },
1389 }
1390}
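// For example (illustrative):
//
//	substr("hello world", 1, 4)   => "ello"
//	substr("hello world", -5, -1) => "world"  (negative offset counts from the end)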
diff --git a/vendor/github.com/hashicorp/terraform/config/interpolate_walk.go b/vendor/github.com/hashicorp/terraform/config/interpolate_walk.go
new file mode 100644
index 0000000..ead3d10
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/interpolate_walk.go
@@ -0,0 +1,283 @@
1package config
2
3import (
4 "fmt"
5 "reflect"
6 "strings"
7
8 "github.com/hashicorp/hil"
9 "github.com/hashicorp/hil/ast"
10 "github.com/mitchellh/reflectwalk"
11)
12
13// interpolationWalker implements interfaces for the reflectwalk package
14// (github.com/mitchellh/reflectwalk) that can be used to automatically
15// execute a callback for an interpolation.
16type interpolationWalker struct {
17 // F is the function to call for every interpolation. It can be nil.
18 //
19 // If Replace is true, then the return value of F will be used to
20 // replace the interpolation.
21 F interpolationWalkerFunc
22 Replace bool
23
24 // ContextF is an advanced version of F that also receives the
25 // location of where it is in the structure. This lets you do
26 // context-aware validation.
27 ContextF interpolationWalkerContextFunc
28
29 key []string
30 lastValue reflect.Value
31 loc reflectwalk.Location
32 cs []reflect.Value
33 csKey []reflect.Value
34 csData interface{}
35 sliceIndex []int
36 unknownKeys []string
37}
38
39// interpolationWalkerFunc is the callback called by interpolationWalk.
40// It is called with any interpolation found. It should return a value
41// to replace the interpolation with, along with any errors.
42//
43// If Replace is set to false in interpolationWalker, then the replace
44// value can be anything as it will have no effect.
45type interpolationWalkerFunc func(ast.Node) (interface{}, error)
46
47// interpolationWalkerContextFunc is called by interpolationWalk if
48// ContextF is set. This receives both the interpolation and the location
49// where the interpolation is.
50//
51// This callback can be used to validate the location of the interpolation
52// within the configuration.
53type interpolationWalkerContextFunc func(reflectwalk.Location, ast.Node)
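// A minimal usage sketch (the walker is driven by reflectwalk.Walk; rawValue
// here is a placeholder for the structure being interpolated):
//
//	w := &interpolationWalker{
//		Replace: true,
//		F: func(n ast.Node) (interface{}, error) {
//			// inspect or rewrite the interpolation here
//			return UnknownVariableValue, nil
//		},
//	}
//	err := reflectwalk.Walk(rawValue, w)
//	// after walking, w.unknownKeys lists keys whose interpolated values were unknown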
54
55func (w *interpolationWalker) Enter(loc reflectwalk.Location) error {
56 w.loc = loc
57 return nil
58}
59
60func (w *interpolationWalker) Exit(loc reflectwalk.Location) error {
61 w.loc = reflectwalk.None
62
63 switch loc {
64 case reflectwalk.Map:
65 w.cs = w.cs[:len(w.cs)-1]
66 case reflectwalk.MapValue:
67 w.key = w.key[:len(w.key)-1]
68 w.csKey = w.csKey[:len(w.csKey)-1]
69 case reflectwalk.Slice:
70 // Split any values that need to be split
71 w.splitSlice()
72 w.cs = w.cs[:len(w.cs)-1]
73 case reflectwalk.SliceElem:
74 w.csKey = w.csKey[:len(w.csKey)-1]
75 w.sliceIndex = w.sliceIndex[:len(w.sliceIndex)-1]
76 }
77
78 return nil
79}
80
81func (w *interpolationWalker) Map(m reflect.Value) error {
82 w.cs = append(w.cs, m)
83 return nil
84}
85
86func (w *interpolationWalker) MapElem(m, k, v reflect.Value) error {
87 w.csData = k
88 w.csKey = append(w.csKey, k)
89
90 if l := len(w.sliceIndex); l > 0 {
91 w.key = append(w.key, fmt.Sprintf("%d.%s", w.sliceIndex[l-1], k.String()))
92 } else {
93 w.key = append(w.key, k.String())
94 }
95
96 w.lastValue = v
97 return nil
98}
99
100func (w *interpolationWalker) Slice(s reflect.Value) error {
101 w.cs = append(w.cs, s)
102 return nil
103}
104
105func (w *interpolationWalker) SliceElem(i int, elem reflect.Value) error {
106 w.csKey = append(w.csKey, reflect.ValueOf(i))
107 w.sliceIndex = append(w.sliceIndex, i)
108 return nil
109}
110
111func (w *interpolationWalker) Primitive(v reflect.Value) error {
112 setV := v
113
114 // We only care about strings
115 if v.Kind() == reflect.Interface {
116 setV = v
117 v = v.Elem()
118 }
119 if v.Kind() != reflect.String {
120 return nil
121 }
122
123 astRoot, err := hil.Parse(v.String())
124 if err != nil {
125 return err
126 }
127
128 // If the AST we got is just a literal string value with the same
129// value then we ignore it. We have to check if it's the same value
130 // because it is possible to input a string, get out a string, and
131 // have it be different. For example: "foo-$${bar}" turns into
132 // "foo-${bar}"
133 if n, ok := astRoot.(*ast.LiteralNode); ok {
134 if s, ok := n.Value.(string); ok && s == v.String() {
135 return nil
136 }
137 }
138
139 if w.ContextF != nil {
140 w.ContextF(w.loc, astRoot)
141 }
142
143 if w.F == nil {
144 return nil
145 }
146
147 replaceVal, err := w.F(astRoot)
148 if err != nil {
149 return fmt.Errorf(
150 "%s in:\n\n%s",
151 err, v.String())
152 }
153
154 if w.Replace {
155 // We need to determine if we need to remove this element
156 // if the result contains any "UnknownVariableValue" which is
157 // set if it is computed. This behavior is different if we're
158 // splitting (in a SliceElem) or not.
159 remove := false
160 if w.loc == reflectwalk.SliceElem {
161 switch typedReplaceVal := replaceVal.(type) {
162 case string:
163 if typedReplaceVal == UnknownVariableValue {
164 remove = true
165 }
166 case []interface{}:
167 if hasUnknownValue(typedReplaceVal) {
168 remove = true
169 }
170 }
171 } else if replaceVal == UnknownVariableValue {
172 remove = true
173 }
174
175 if remove {
176 w.unknownKeys = append(w.unknownKeys, strings.Join(w.key, "."))
177 }
178
179 resultVal := reflect.ValueOf(replaceVal)
180 switch w.loc {
181 case reflectwalk.MapKey:
182 m := w.cs[len(w.cs)-1]
183
184 // Delete the old value
185 var zero reflect.Value
186 m.SetMapIndex(w.csData.(reflect.Value), zero)
187
188 // Set the new key with the existing value
189 m.SetMapIndex(resultVal, w.lastValue)
190
191 // Set the key to be the new key
192 w.csData = resultVal
193 case reflectwalk.MapValue:
194 // If we're in a map, then the only way to set a map value is
195 // to set it directly.
196 m := w.cs[len(w.cs)-1]
197 mk := w.csData.(reflect.Value)
198 m.SetMapIndex(mk, resultVal)
199 default:
200 // Otherwise, we should be addressable
201 setV.Set(resultVal)
202 }
203 }
204
205 return nil
206}
207
208func (w *interpolationWalker) replaceCurrent(v reflect.Value) {
209 // if we don't have at least 2 values, we're not going to find a map;
210 // indexing further back would panic.
211 if len(w.cs) < 2 {
212 return
213 }
214
215 c := w.cs[len(w.cs)-2]
216 switch c.Kind() {
217 case reflect.Map:
218 // Get the key and delete it
219 k := w.csKey[len(w.csKey)-1]
220 c.SetMapIndex(k, v)
221 }
222}
223
224func hasUnknownValue(variable []interface{}) bool {
225 for _, value := range variable {
226 if strVal, ok := value.(string); ok {
227 if strVal == UnknownVariableValue {
228 return true
229 }
230 }
231 }
232 return false
233}
234
235func (w *interpolationWalker) splitSlice() {
236 raw := w.cs[len(w.cs)-1]
237
238 var s []interface{}
239 switch v := raw.Interface().(type) {
240 case []interface{}:
241 s = v
242 case []map[string]interface{}:
243 return
244 }
245
246 split := false
247 for _, val := range s {
248 if varVal, ok := val.(ast.Variable); ok && varVal.Type == ast.TypeList {
249 split = true
250 }
251 if _, ok := val.([]interface{}); ok {
252 split = true
253 }
254 }
255
256 if !split {
257 return
258 }
259
260 result := make([]interface{}, 0)
261 for _, v := range s {
262 switch val := v.(type) {
263 case ast.Variable:
264 switch val.Type {
265 case ast.TypeList:
266 elements := val.Value.([]ast.Variable)
267 for _, element := range elements {
268 result = append(result, element.Value)
269 }
270 default:
271 result = append(result, val.Value)
272 }
273 case []interface{}:
274 for _, element := range val {
275 result = append(result, element)
276 }
277 default:
278 result = append(result, v)
279 }
280 }
281
282 w.replaceCurrent(reflect.ValueOf(result))
283}
diff --git a/vendor/github.com/hashicorp/terraform/config/lang.go b/vendor/github.com/hashicorp/terraform/config/lang.go
new file mode 100644
index 0000000..890d30b
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/lang.go
@@ -0,0 +1,11 @@
1package config
2
3import (
4 "github.com/hashicorp/hil/ast"
5)
6
7type noopNode struct{}
8
9func (n *noopNode) Accept(ast.Visitor) ast.Node { return n }
10func (n *noopNode) Pos() ast.Pos { return ast.Pos{} }
11func (n *noopNode) Type(ast.Scope) (ast.Type, error) { return ast.TypeString, nil }
diff --git a/vendor/github.com/hashicorp/terraform/config/loader.go b/vendor/github.com/hashicorp/terraform/config/loader.go
new file mode 100644
index 0000000..0bfa89c
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/loader.go
@@ -0,0 +1,224 @@
1package config
2
3import (
4 "encoding/json"
5 "fmt"
6 "io"
7 "os"
8 "path/filepath"
9 "sort"
10 "strings"
11
12 "github.com/hashicorp/hcl"
13)
14
15// ErrNoConfigsFound is the error returned by LoadDir if no
16// Terraform configuration files were found in the given directory.
17type ErrNoConfigsFound struct {
18 Dir string
19}
20
21func (e ErrNoConfigsFound) Error() string {
22 return fmt.Sprintf(
23 "No Terraform configuration files found in directory: %s",
24 e.Dir)
25}
26
27// LoadJSON loads a single Terraform configuration from a given JSON document.
28//
29// The document must be a complete Terraform configuration. This function will
30// NOT try to load any additional modules so only the given document is loaded.
31func LoadJSON(raw json.RawMessage) (*Config, error) {
32 obj, err := hcl.Parse(string(raw))
33 if err != nil {
34 return nil, fmt.Errorf(
35 "Error parsing JSON document as HCL: %s", err)
36 }
37
38 // Start building the result
39 hclConfig := &hclConfigurable{
40 Root: obj,
41 }
42
43 return hclConfig.Config()
44}
45
46// LoadFile loads the Terraform configuration from a given file.
47//
48// This file can be in any format that Terraform recognizes, and may import
49// any other file in a format that Terraform recognizes.
50func LoadFile(path string) (*Config, error) {
51 importTree, err := loadTree(path)
52 if err != nil {
53 return nil, err
54 }
55
56 configTree, err := importTree.ConfigTree()
57
58 // Close the importTree now so that we can clear resources as quickly
59 // as possible.
60 importTree.Close()
61
62 if err != nil {
63 return nil, err
64 }
65
66 return configTree.Flatten()
67}
68
69// LoadDir loads all the Terraform configuration files in a single
70// directory and appends them together.
71//
72// Special files known as "override files" can also be present, which
73// are merged into the loaded configuration. That is, the non-override
74// files are loaded first to create the configuration. Then, the overrides
75// are merged into the configuration to create the final configuration.
76//
77// Files are loaded in lexical order.
78func LoadDir(root string) (*Config, error) {
79 files, overrides, err := dirFiles(root)
80 if err != nil {
81 return nil, err
82 }
83 if len(files) == 0 {
84 return nil, &ErrNoConfigsFound{Dir: root}
85 }
86
87 // Determine the absolute path to the directory.
88 rootAbs, err := filepath.Abs(root)
89 if err != nil {
90 return nil, err
91 }
92
93 var result *Config
94
95 // Sort the files and overrides so we have a deterministic order
96 sort.Strings(files)
97 sort.Strings(overrides)
98
99 // Load all the regular files, append them to each other.
100 for _, f := range files {
101 c, err := LoadFile(f)
102 if err != nil {
103 return nil, err
104 }
105
106 if result != nil {
107 result, err = Append(result, c)
108 if err != nil {
109 return nil, err
110 }
111 } else {
112 result = c
113 }
114 }
115
116 // Load all the overrides, and merge them into the config
117 for _, f := range overrides {
118 c, err := LoadFile(f)
119 if err != nil {
120 return nil, err
121 }
122
123 result, err = Merge(result, c)
124 if err != nil {
125 return nil, err
126 }
127 }
128
129 // Mark the directory
130 result.Dir = rootAbs
131
132 return result, nil
133}
134
135// IsEmptyDir returns true if the directory given has no Terraform
136// configuration files.
137func IsEmptyDir(root string) (bool, error) {
138 if _, err := os.Stat(root); err != nil && os.IsNotExist(err) {
139 return true, nil
140 }
141
142 fs, os, err := dirFiles(root)
143 if err != nil {
144 return false, err
145 }
146
147 return len(fs) == 0 && len(os) == 0, nil
148}
149
150 // ext returns the Terraform configuration extension of the given
151 // path, or a blank string if the path has no recognized extension.
152func ext(path string) string {
153 if strings.HasSuffix(path, ".tf") {
154 return ".tf"
155 } else if strings.HasSuffix(path, ".tf.json") {
156 return ".tf.json"
157 } else {
158 return ""
159 }
160}
161
162func dirFiles(dir string) ([]string, []string, error) {
163 f, err := os.Open(dir)
164 if err != nil {
165 return nil, nil, err
166 }
167 defer f.Close()
168
169 fi, err := f.Stat()
170 if err != nil {
171 return nil, nil, err
172 }
173 if !fi.IsDir() {
174 return nil, nil, fmt.Errorf(
175 "configuration path must be a directory: %s",
176 dir)
177 }
178
179 var files, overrides []string
180 err = nil
181 for err != io.EOF {
182 var fis []os.FileInfo
183 fis, err = f.Readdir(128)
184 if err != nil && err != io.EOF {
185 return nil, nil, err
186 }
187
188 for _, fi := range fis {
189 // Ignore directories
190 if fi.IsDir() {
191 continue
192 }
193
194 // Only care about files that are valid to load
195 name := fi.Name()
196 extValue := ext(name)
197 if extValue == "" || isIgnoredFile(name) {
198 continue
199 }
200
201 // Determine if we're dealing with an override
202 nameNoExt := name[:len(name)-len(extValue)]
203 override := nameNoExt == "override" ||
204 strings.HasSuffix(nameNoExt, "_override")
205
206 path := filepath.Join(dir, name)
207 if override {
208 overrides = append(overrides, path)
209 } else {
210 files = append(files, path)
211 }
212 }
213 }
214
215 return files, overrides, nil
216}
217
218 // isIgnoredFile returns true if the provided file name is a file that
219 // should be ignored when loading configuration.
220func isIgnoredFile(name string) bool {
221 return strings.HasPrefix(name, ".") || // Unix-like hidden files
222 strings.HasSuffix(name, "~") || // vim
223 strings.HasPrefix(name, "#") && strings.HasSuffix(name, "#") // emacs
224}
diff --git a/vendor/github.com/hashicorp/terraform/config/loader_hcl.go b/vendor/github.com/hashicorp/terraform/config/loader_hcl.go
new file mode 100644
index 0000000..9abb196
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/loader_hcl.go
@@ -0,0 +1,1130 @@
1package config
2
3import (
4 "fmt"
5 "io/ioutil"
6
7 "github.com/hashicorp/go-multierror"
8 "github.com/hashicorp/hcl"
9 "github.com/hashicorp/hcl/hcl/ast"
10 "github.com/mitchellh/mapstructure"
11)
12
13// hclConfigurable is an implementation of configurable that knows
14// how to turn HCL configuration into a *Config object.
15type hclConfigurable struct {
16 File string
17 Root *ast.File
18}
19
20func (t *hclConfigurable) Config() (*Config, error) {
21 validKeys := map[string]struct{}{
22 "atlas": struct{}{},
23 "data": struct{}{},
24 "module": struct{}{},
25 "output": struct{}{},
26 "provider": struct{}{},
27 "resource": struct{}{},
28 "terraform": struct{}{},
29 "variable": struct{}{},
30 }
31
32 // Top-level item should be the object list
33 list, ok := t.Root.Node.(*ast.ObjectList)
34 if !ok {
35 return nil, fmt.Errorf("error parsing: file doesn't contain a root object")
36 }
37
38 // Start building up the actual configuration.
39 config := new(Config)
40
41 // Terraform config
42 if o := list.Filter("terraform"); len(o.Items) > 0 {
43 var err error
44 config.Terraform, err = loadTerraformHcl(o)
45 if err != nil {
46 return nil, err
47 }
48 }
49
50 // Build the variables
51 if vars := list.Filter("variable"); len(vars.Items) > 0 {
52 var err error
53 config.Variables, err = loadVariablesHcl(vars)
54 if err != nil {
55 return nil, err
56 }
57 }
58
59 // Get Atlas configuration
60 if atlas := list.Filter("atlas"); len(atlas.Items) > 0 {
61 var err error
62 config.Atlas, err = loadAtlasHcl(atlas)
63 if err != nil {
64 return nil, err
65 }
66 }
67
68 // Build the modules
69 if modules := list.Filter("module"); len(modules.Items) > 0 {
70 var err error
71 config.Modules, err = loadModulesHcl(modules)
72 if err != nil {
73 return nil, err
74 }
75 }
76
77 // Build the provider configs
78 if providers := list.Filter("provider"); len(providers.Items) > 0 {
79 var err error
80 config.ProviderConfigs, err = loadProvidersHcl(providers)
81 if err != nil {
82 return nil, err
83 }
84 }
85
86 // Build the resources
87 {
88 var err error
89 managedResourceConfigs := list.Filter("resource")
90 dataResourceConfigs := list.Filter("data")
91
92 config.Resources = make(
93 []*Resource, 0,
94 len(managedResourceConfigs.Items)+len(dataResourceConfigs.Items),
95 )
96
97 managedResources, err := loadManagedResourcesHcl(managedResourceConfigs)
98 if err != nil {
99 return nil, err
100 }
101 dataResources, err := loadDataResourcesHcl(dataResourceConfigs)
102 if err != nil {
103 return nil, err
104 }
105
106 config.Resources = append(config.Resources, dataResources...)
107 config.Resources = append(config.Resources, managedResources...)
108 }
109
110 // Build the outputs
111 if outputs := list.Filter("output"); len(outputs.Items) > 0 {
112 var err error
113 config.Outputs, err = loadOutputsHcl(outputs)
114 if err != nil {
115 return nil, err
116 }
117 }
118
119 // Check for invalid keys
120 for _, item := range list.Items {
121 if len(item.Keys) == 0 {
122 // Not sure how this would happen, but let's avoid a panic
123 continue
124 }
125
126 k := item.Keys[0].Token.Value().(string)
127 if _, ok := validKeys[k]; ok {
128 continue
129 }
130
131 config.unknownKeys = append(config.unknownKeys, k)
132 }
133
134 return config, nil
135}
136
137// loadFileHcl is a fileLoaderFunc that knows how to read HCL
138// files and turn them into hclConfigurables.
139func loadFileHcl(root string) (configurable, []string, error) {
140 // Read the HCL file and prepare for parsing
141 d, err := ioutil.ReadFile(root)
142 if err != nil {
143 return nil, nil, fmt.Errorf(
144 "Error reading %s: %s", root, err)
145 }
146
147 // Parse it
148 hclRoot, err := hcl.Parse(string(d))
149 if err != nil {
150 return nil, nil, fmt.Errorf(
151 "Error parsing %s: %s", root, err)
152 }
153
154 // Start building the result
155 result := &hclConfigurable{
156 File: root,
157 Root: hclRoot,
158 }
159
160 // Dive in, find the imports. This is disabled for now since
161 // imports were removed prior to Terraform 0.1. The code remains here,
162 // commented out, for historical purposes.
163 /*
164 imports := obj.Get("import")
165 if imports == nil {
166 result.Object.Ref()
167 return result, nil, nil
168 }
169
170 if imports.Type() != libucl.ObjectTypeString {
171 imports.Close()
172
173 return nil, nil, fmt.Errorf(
174 "Error in %s: all 'import' declarations should be in the format\n"+
175 "`import \"foo\"` (Got type %s)",
176 root,
177 imports.Type())
178 }
179
180 // Gather all the import paths
181 importPaths := make([]string, 0, imports.Len())
182 iter := imports.Iterate(false)
183 for imp := iter.Next(); imp != nil; imp = iter.Next() {
184 path := imp.ToString()
185 if !filepath.IsAbs(path) {
186 // Relative paths are relative to the Terraform file itself
187 dir := filepath.Dir(root)
188 path = filepath.Join(dir, path)
189 }
190
191 importPaths = append(importPaths, path)
192 imp.Close()
193 }
194 iter.Close()
195 imports.Close()
196
197 result.Object.Ref()
198 */
199
200 return result, nil, nil
201}
202
203// Given a handle to a HCL object, this transforms it into the Terraform config
204func loadTerraformHcl(list *ast.ObjectList) (*Terraform, error) {
205 if len(list.Items) > 1 {
206 return nil, fmt.Errorf("only one 'terraform' block allowed per module")
207 }
208
209 // Get our one item
210 item := list.Items[0]
211
212 // This block should have an empty top level ObjectItem. If there are keys
213 // here, it's likely because we have a flattened JSON object, and we can
214 // lift this into a nested ObjectList to decode properly.
215 if len(item.Keys) > 0 {
216 item = &ast.ObjectItem{
217 Val: &ast.ObjectType{
218 List: &ast.ObjectList{
219 Items: []*ast.ObjectItem{item},
220 },
221 },
222 }
223 }
224
225 // We need the item value as an ObjectList
226 var listVal *ast.ObjectList
227 if ot, ok := item.Val.(*ast.ObjectType); ok {
228 listVal = ot.List
229 } else {
230 return nil, fmt.Errorf("terraform block: should be an object")
231 }
232
233 // NOTE: We purposely don't validate unknown HCL keys here so that
234 // we can potentially read _future_ Terraform version config (to
235 // still be able to validate the required version).
236 //
237 // We should still keep track of unknown keys to validate later, but
238 // HCL doesn't currently support that.
239
240 var config Terraform
241 if err := hcl.DecodeObject(&config, item.Val); err != nil {
242 return nil, fmt.Errorf(
243 "Error reading terraform config: %s",
244 err)
245 }
246
247 // If we have a backend block, then parse it out
248 if os := listVal.Filter("backend"); len(os.Items) > 0 {
249 var err error
250 config.Backend, err = loadTerraformBackendHcl(os)
251 if err != nil {
252 return nil, fmt.Errorf(
253 "Error reading backend config for terraform block: %s",
254 err)
255 }
256 }
257
258 return &config, nil
259}
260
261// Loads the Backend configuration from an object list.
262func loadTerraformBackendHcl(list *ast.ObjectList) (*Backend, error) {
263 if len(list.Items) > 1 {
264 return nil, fmt.Errorf("only one 'backend' block allowed")
265 }
266
267 // Get our one item
268 item := list.Items[0]
269
270 // Verify the keys
271 if len(item.Keys) != 1 {
272 return nil, fmt.Errorf(
273 "position %s: 'backend' must be followed by exactly one string: a type",
274 item.Pos())
275 }
276
277 typ := item.Keys[0].Token.Value().(string)
278
279 // Decode the raw config
280 var config map[string]interface{}
281 if err := hcl.DecodeObject(&config, item.Val); err != nil {
282 return nil, fmt.Errorf(
283 "Error reading backend config: %s",
284 err)
285 }
286
287 rawConfig, err := NewRawConfig(config)
288 if err != nil {
289 return nil, fmt.Errorf(
290 "Error reading backend config: %s",
291 err)
292 }
293
294 b := &Backend{
295 Type: typ,
296 RawConfig: rawConfig,
297 }
298 b.Hash = b.Rehash()
299
300 return b, nil
301}
302
303// Given a handle to a HCL object, this transforms it into the Atlas
304// configuration.
305func loadAtlasHcl(list *ast.ObjectList) (*AtlasConfig, error) {
306 if len(list.Items) > 1 {
307 return nil, fmt.Errorf("only one 'atlas' block allowed")
308 }
309
310 // Get our one item
311 item := list.Items[0]
312
313 var config AtlasConfig
314 if err := hcl.DecodeObject(&config, item.Val); err != nil {
315 return nil, fmt.Errorf(
316 "Error reading atlas config: %s",
317 err)
318 }
319
320 return &config, nil
321}
322
323// Given a handle to a HCL object, this recurses into the structure
324// and pulls out a list of modules.
325//
326// The resulting modules may not be unique, but each module
327// represents exactly one module definition in the HCL configuration.
328// We leave it up to another pass to merge them together.
329func loadModulesHcl(list *ast.ObjectList) ([]*Module, error) {
330 if err := assertAllBlocksHaveNames("module", list); err != nil {
331 return nil, err
332 }
333
334 list = list.Children()
335 if len(list.Items) == 0 {
336 return nil, nil
337 }
338
339 // Where all the results will go
340 var result []*Module
341
342 // Now go over all the types and their children in order to get
343 // all of the actual resources.
344 for _, item := range list.Items {
345 k := item.Keys[0].Token.Value().(string)
346
347 var listVal *ast.ObjectList
348 if ot, ok := item.Val.(*ast.ObjectType); ok {
349 listVal = ot.List
350 } else {
351 return nil, fmt.Errorf("module '%s': should be an object", k)
352 }
353
354 var config map[string]interface{}
355 if err := hcl.DecodeObject(&config, item.Val); err != nil {
356 return nil, fmt.Errorf(
357 "Error reading config for %s: %s",
358 k,
359 err)
360 }
361
362 // Remove the fields we handle specially
363 delete(config, "source")
364
365 rawConfig, err := NewRawConfig(config)
366 if err != nil {
367 return nil, fmt.Errorf(
368 "Error reading config for %s: %s",
369 k,
370 err)
371 }
372
374 // If we have a source, then parse it out
374 var source string
375 if o := listVal.Filter("source"); len(o.Items) > 0 {
376 err = hcl.DecodeObject(&source, o.Items[0].Val)
377 if err != nil {
378 return nil, fmt.Errorf(
379 "Error parsing source for %s: %s",
380 k,
381 err)
382 }
383 }
384
385 result = append(result, &Module{
386 Name: k,
387 Source: source,
388 RawConfig: rawConfig,
389 })
390 }
391
392 return result, nil
393}
394
395 // loadOutputsHcl recurses into the given HCL object and turns
396 // it into a list of outputs.
397func loadOutputsHcl(list *ast.ObjectList) ([]*Output, error) {
398 if err := assertAllBlocksHaveNames("output", list); err != nil {
399 return nil, err
400 }
401
402 list = list.Children()
403
404 // Go through each object and turn it into an actual result.
405 result := make([]*Output, 0, len(list.Items))
406 for _, item := range list.Items {
407 n := item.Keys[0].Token.Value().(string)
408
409 var listVal *ast.ObjectList
410 if ot, ok := item.Val.(*ast.ObjectType); ok {
411 listVal = ot.List
412 } else {
413 return nil, fmt.Errorf("output '%s': should be an object", n)
414 }
415
416 var config map[string]interface{}
417 if err := hcl.DecodeObject(&config, item.Val); err != nil {
418 return nil, err
419 }
420
421 // Delete special keys
422 delete(config, "depends_on")
423
424 rawConfig, err := NewRawConfig(config)
425 if err != nil {
426 return nil, fmt.Errorf(
427 "Error reading config for output %s: %s",
428 n,
429 err)
430 }
431
432 // If we have depends fields, then add those in
433 var dependsOn []string
434 if o := listVal.Filter("depends_on"); len(o.Items) > 0 {
435 err := hcl.DecodeObject(&dependsOn, o.Items[0].Val)
436 if err != nil {
437 return nil, fmt.Errorf(
438 "Error reading depends_on for output %q: %s",
439 n,
440 err)
441 }
442 }
443
444 result = append(result, &Output{
445 Name: n,
446 RawConfig: rawConfig,
447 DependsOn: dependsOn,
448 })
449 }
450
451 return result, nil
452}
453
454 // loadVariablesHcl recurses into the given HCL object and turns
455 // it into a list of variables.
456func loadVariablesHcl(list *ast.ObjectList) ([]*Variable, error) {
457 if err := assertAllBlocksHaveNames("variable", list); err != nil {
458 return nil, err
459 }
460
461 list = list.Children()
462
463 // hclVariable is the structure each variable is decoded into
464 type hclVariable struct {
465 DeclaredType string `hcl:"type"`
466 Default interface{}
467 Description string
468 Fields []string `hcl:",decodedFields"`
469 }
470
471 // Go through each object and turn it into an actual result.
472 result := make([]*Variable, 0, len(list.Items))
473 for _, item := range list.Items {
474 // Clean up items from JSON
475 unwrapHCLObjectKeysFromJSON(item, 1)
476
477 // Verify the keys
478 if len(item.Keys) != 1 {
479 return nil, fmt.Errorf(
480 "position %s: 'variable' must be followed by exactly one strings: a name",
481 item.Pos())
482 }
483
484 n := item.Keys[0].Token.Value().(string)
485 if !NameRegexp.MatchString(n) {
486 return nil, fmt.Errorf(
487 "position %s: 'variable' name must match regular expression: %s",
488 item.Pos(), NameRegexp)
489 }
490
491 // Check for invalid keys
492 valid := []string{"type", "default", "description"}
493 if err := checkHCLKeys(item.Val, valid); err != nil {
494 return nil, multierror.Prefix(err, fmt.Sprintf(
495 "variable[%s]:", n))
496 }
497
498 // Decode into hclVariable to get typed values
499 var hclVar hclVariable
500 if err := hcl.DecodeObject(&hclVar, item.Val); err != nil {
501 return nil, err
502 }
503
504 // Defaults turn into a slice of map[string]interface{} and
505 // we need to make sure to convert that down into the
506 // proper type for Config.
507 if ms, ok := hclVar.Default.([]map[string]interface{}); ok {
508 def := make(map[string]interface{})
509 for _, m := range ms {
510 for k, v := range m {
511 def[k] = v
512 }
513 }
514
515 hclVar.Default = def
516 }
517
518 // Build the new variable and do some basic validation
519 newVar := &Variable{
520 Name: n,
521 DeclaredType: hclVar.DeclaredType,
522 Default: hclVar.Default,
523 Description: hclVar.Description,
524 }
525 if err := newVar.ValidateTypeAndDefault(); err != nil {
526 return nil, err
527 }
528
529 result = append(result, newVar)
530 }
531
532 return result, nil
533}
534
535 // loadProvidersHcl recurses into the given HCL object and turns
536 // it into a list of provider configs.
537func loadProvidersHcl(list *ast.ObjectList) ([]*ProviderConfig, error) {
538 if err := assertAllBlocksHaveNames("provider", list); err != nil {
539 return nil, err
540 }
541
542 list = list.Children()
543 if len(list.Items) == 0 {
544 return nil, nil
545 }
546
547 // Go through each object and turn it into an actual result.
548 result := make([]*ProviderConfig, 0, len(list.Items))
549 for _, item := range list.Items {
550 n := item.Keys[0].Token.Value().(string)
551
552 var listVal *ast.ObjectList
553 if ot, ok := item.Val.(*ast.ObjectType); ok {
554 listVal = ot.List
555 } else {
556 return nil, fmt.Errorf("module '%s': should be an object", n)
557 }
558
559 var config map[string]interface{}
560 if err := hcl.DecodeObject(&config, item.Val); err != nil {
561 return nil, err
562 }
563
564 delete(config, "alias")
565
566 rawConfig, err := NewRawConfig(config)
567 if err != nil {
568 return nil, fmt.Errorf(
569 "Error reading config for provider config %s: %s",
570 n,
571 err)
572 }
573
574 // If we have an alias field, then add those in
575 var alias string
576 if a := listVal.Filter("alias"); len(a.Items) > 0 {
577 err := hcl.DecodeObject(&alias, a.Items[0].Val)
578 if err != nil {
579 return nil, fmt.Errorf(
580 "Error reading alias for provider[%s]: %s",
581 n,
582 err)
583 }
584 }
585
586 result = append(result, &ProviderConfig{
587 Name: n,
588 Alias: alias,
589 RawConfig: rawConfig,
590 })
591 }
592
593 return result, nil
594}
595
596// Given a handle to a HCL object, this recurses into the structure
597// and pulls out a list of data sources.
598//
599// The resulting data sources may not be unique, but each one
600// represents exactly one data definition in the HCL configuration.
601// We leave it up to another pass to merge them together.
602func loadDataResourcesHcl(list *ast.ObjectList) ([]*Resource, error) {
603 if err := assertAllBlocksHaveNames("data", list); err != nil {
604 return nil, err
605 }
606
607 list = list.Children()
608 if len(list.Items) == 0 {
609 return nil, nil
610 }
611
612 // Where all the results will go
613 var result []*Resource
614
615 // Now go over all the types and their children in order to get
616 // all of the actual resources.
617 for _, item := range list.Items {
618 if len(item.Keys) != 2 {
619 return nil, fmt.Errorf(
620 "position %s: 'data' must be followed by exactly two strings: a type and a name",
621 item.Pos())
622 }
623
624 t := item.Keys[0].Token.Value().(string)
625 k := item.Keys[1].Token.Value().(string)
626
627 var listVal *ast.ObjectList
628 if ot, ok := item.Val.(*ast.ObjectType); ok {
629 listVal = ot.List
630 } else {
631 return nil, fmt.Errorf("data sources %s[%s]: should be an object", t, k)
632 }
633
634 var config map[string]interface{}
635 if err := hcl.DecodeObject(&config, item.Val); err != nil {
636 return nil, fmt.Errorf(
637 "Error reading config for %s[%s]: %s",
638 t,
639 k,
640 err)
641 }
642
643 // Remove the fields we handle specially
644 delete(config, "depends_on")
645 delete(config, "provider")
646 delete(config, "count")
647
648 rawConfig, err := NewRawConfig(config)
649 if err != nil {
650 return nil, fmt.Errorf(
651 "Error reading config for %s[%s]: %s",
652 t,
653 k,
654 err)
655 }
656
657 // If we have a count, then figure it out
658 var count string = "1"
659 if o := listVal.Filter("count"); len(o.Items) > 0 {
660 err = hcl.DecodeObject(&count, o.Items[0].Val)
661 if err != nil {
662 return nil, fmt.Errorf(
663 "Error parsing count for %s[%s]: %s",
664 t,
665 k,
666 err)
667 }
668 }
669 countConfig, err := NewRawConfig(map[string]interface{}{
670 "count": count,
671 })
672 if err != nil {
673 return nil, err
674 }
675 countConfig.Key = "count"
676
677 // If we have depends fields, then add those in
678 var dependsOn []string
679 if o := listVal.Filter("depends_on"); len(o.Items) > 0 {
680 err := hcl.DecodeObject(&dependsOn, o.Items[0].Val)
681 if err != nil {
682 return nil, fmt.Errorf(
683 "Error reading depends_on for %s[%s]: %s",
684 t,
685 k,
686 err)
687 }
688 }
689
690 // If we have a provider, then parse it out
691 var provider string
692 if o := listVal.Filter("provider"); len(o.Items) > 0 {
693 err := hcl.DecodeObject(&provider, o.Items[0].Val)
694 if err != nil {
695 return nil, fmt.Errorf(
696 "Error reading provider for %s[%s]: %s",
697 t,
698 k,
699 err)
700 }
701 }
702
703 result = append(result, &Resource{
704 Mode: DataResourceMode,
705 Name: k,
706 Type: t,
707 RawCount: countConfig,
708 RawConfig: rawConfig,
709 Provider: provider,
710 Provisioners: []*Provisioner{},
711 DependsOn: dependsOn,
712 Lifecycle: ResourceLifecycle{},
713 })
714 }
715
716 return result, nil
717}
718
719// Given a handle to a HCL object, this recurses into the structure
720// and pulls out a list of managed resources.
721//
722// The resulting resources may not be unique, but each resource
723// represents exactly one "resource" block in the HCL configuration.
724// We leave it up to another pass to merge them together.
725func loadManagedResourcesHcl(list *ast.ObjectList) ([]*Resource, error) {
726 list = list.Children()
727 if len(list.Items) == 0 {
728 return nil, nil
729 }
730
731 // Where all the results will go
732 var result []*Resource
733
734 // Now go over all the types and their children in order to get
735 // all of the actual resources.
736 for _, item := range list.Items {
737 // GH-4385: We detect a pure provisioner resource and give the user
738 // an error about how to do it cleanly.
739 if len(item.Keys) == 4 && item.Keys[2].Token.Value().(string) == "provisioner" {
740 return nil, fmt.Errorf(
741 "position %s: provisioners in a resource should be wrapped in a list\n\n"+
742 "Example: \"provisioner\": [ { \"local-exec\": ... } ]",
743 item.Pos())
744 }
745
746 // Fix up JSON input
747 unwrapHCLObjectKeysFromJSON(item, 2)
748
749 if len(item.Keys) != 2 {
750 return nil, fmt.Errorf(
751 "position %s: resource must be followed by exactly two strings, a type and a name",
752 item.Pos())
753 }
754
755 t := item.Keys[0].Token.Value().(string)
756 k := item.Keys[1].Token.Value().(string)
757
758 var listVal *ast.ObjectList
759 if ot, ok := item.Val.(*ast.ObjectType); ok {
760 listVal = ot.List
761 } else {
762 return nil, fmt.Errorf("resources %s[%s]: should be an object", t, k)
763 }
764
765 var config map[string]interface{}
766 if err := hcl.DecodeObject(&config, item.Val); err != nil {
767 return nil, fmt.Errorf(
768 "Error reading config for %s[%s]: %s",
769 t,
770 k,
771 err)
772 }
773
774 // Remove the fields we handle specially
775 delete(config, "connection")
776 delete(config, "count")
777 delete(config, "depends_on")
778 delete(config, "provisioner")
779 delete(config, "provider")
780 delete(config, "lifecycle")
781
782 rawConfig, err := NewRawConfig(config)
783 if err != nil {
784 return nil, fmt.Errorf(
785 "Error reading config for %s[%s]: %s",
786 t,
787 k,
788 err)
789 }
790
791 // If we have a count, then figure it out
792 var count string = "1"
793 if o := listVal.Filter("count"); len(o.Items) > 0 {
794 err = hcl.DecodeObject(&count, o.Items[0].Val)
795 if err != nil {
796 return nil, fmt.Errorf(
797 "Error parsing count for %s[%s]: %s",
798 t,
799 k,
800 err)
801 }
802 }
803 countConfig, err := NewRawConfig(map[string]interface{}{
804 "count": count,
805 })
806 if err != nil {
807 return nil, err
808 }
809 countConfig.Key = "count"
810
811 // If we have depends fields, then add those in
812 var dependsOn []string
813 if o := listVal.Filter("depends_on"); len(o.Items) > 0 {
814 err := hcl.DecodeObject(&dependsOn, o.Items[0].Val)
815 if err != nil {
816 return nil, fmt.Errorf(
817 "Error reading depends_on for %s[%s]: %s",
818 t,
819 k,
820 err)
821 }
822 }
823
824 // If we have connection info, then parse those out
825 var connInfo map[string]interface{}
826 if o := listVal.Filter("connection"); len(o.Items) > 0 {
827 err := hcl.DecodeObject(&connInfo, o.Items[0].Val)
828 if err != nil {
829 return nil, fmt.Errorf(
830 "Error reading connection info for %s[%s]: %s",
831 t,
832 k,
833 err)
834 }
835 }
836
837 // If we have provisioners, then parse those out
838 var provisioners []*Provisioner
839 if os := listVal.Filter("provisioner"); len(os.Items) > 0 {
840 var err error
841 provisioners, err = loadProvisionersHcl(os, connInfo)
842 if err != nil {
843 return nil, fmt.Errorf(
844 "Error reading provisioners for %s[%s]: %s",
845 t,
846 k,
847 err)
848 }
849 }
850
851 // If we have a provider, then parse it out
852 var provider string
853 if o := listVal.Filter("provider"); len(o.Items) > 0 {
854 err := hcl.DecodeObject(&provider, o.Items[0].Val)
855 if err != nil {
856 return nil, fmt.Errorf(
857 "Error reading provider for %s[%s]: %s",
858 t,
859 k,
860 err)
861 }
862 }
863
864 // Check if the resource should be re-created before
865 // destroying the existing instance
866 var lifecycle ResourceLifecycle
867 if o := listVal.Filter("lifecycle"); len(o.Items) > 0 {
868 if len(o.Items) > 1 {
869 return nil, fmt.Errorf(
870 "%s[%s]: Multiple lifecycle blocks found, expected one",
871 t, k)
872 }
873
874 // Check for invalid keys
875 valid := []string{"create_before_destroy", "ignore_changes", "prevent_destroy"}
876 if err := checkHCLKeys(o.Items[0].Val, valid); err != nil {
877 return nil, multierror.Prefix(err, fmt.Sprintf(
878 "%s[%s]:", t, k))
879 }
880
881 var raw map[string]interface{}
882 if err = hcl.DecodeObject(&raw, o.Items[0].Val); err != nil {
883 return nil, fmt.Errorf(
884 "Error parsing lifecycle for %s[%s]: %s",
885 t,
886 k,
887 err)
888 }
889
890 if err := mapstructure.WeakDecode(raw, &lifecycle); err != nil {
891 return nil, fmt.Errorf(
892 "Error parsing lifecycle for %s[%s]: %s",
893 t,
894 k,
895 err)
896 }
897 }
898
899 result = append(result, &Resource{
900 Mode: ManagedResourceMode,
901 Name: k,
902 Type: t,
903 RawCount: countConfig,
904 RawConfig: rawConfig,
905 Provisioners: provisioners,
906 Provider: provider,
907 DependsOn: dependsOn,
908 Lifecycle: lifecycle,
909 })
910 }
911
912 return result, nil
913}
914
915func loadProvisionersHcl(list *ast.ObjectList, connInfo map[string]interface{}) ([]*Provisioner, error) {
916 if err := assertAllBlocksHaveNames("provisioner", list); err != nil {
917 return nil, err
918 }
919
920 list = list.Children()
921 if len(list.Items) == 0 {
922 return nil, nil
923 }
924
925 // Go through each object and turn it into an actual result.
926 result := make([]*Provisioner, 0, len(list.Items))
927 for _, item := range list.Items {
928 n := item.Keys[0].Token.Value().(string)
929
930 var listVal *ast.ObjectList
931 if ot, ok := item.Val.(*ast.ObjectType); ok {
932 listVal = ot.List
933 } else {
934 return nil, fmt.Errorf("provisioner '%s': should be an object", n)
935 }
936
937 var config map[string]interface{}
938 if err := hcl.DecodeObject(&config, item.Val); err != nil {
939 return nil, err
940 }
941
942 // Parse the "when" value
943 when := ProvisionerWhenCreate
944 if v, ok := config["when"]; ok {
945 switch v {
946 case "create":
947 when = ProvisionerWhenCreate
948 case "destroy":
949 when = ProvisionerWhenDestroy
950 default:
951 return nil, fmt.Errorf(
952 "position %s: 'provisioner' when must be 'create' or 'destroy'",
953 item.Pos())
954 }
955 }
956
957 // Parse the "on_failure" value
958 onFailure := ProvisionerOnFailureFail
959 if v, ok := config["on_failure"]; ok {
960 switch v {
961 case "continue":
962 onFailure = ProvisionerOnFailureContinue
963 case "fail":
964 onFailure = ProvisionerOnFailureFail
965 default:
966 return nil, fmt.Errorf(
967 "position %s: 'provisioner' on_failure must be 'continue' or 'fail'",
968 item.Pos())
969 }
970 }
971
972 // Delete fields we special case
973 delete(config, "connection")
974 delete(config, "when")
975 delete(config, "on_failure")
976
977 rawConfig, err := NewRawConfig(config)
978 if err != nil {
979 return nil, err
980 }
981
982 // Check if we have a provisioner-level connection
983 // block that overrides the resource-level
984 var subConnInfo map[string]interface{}
985 if o := listVal.Filter("connection"); len(o.Items) > 0 {
986 err := hcl.DecodeObject(&subConnInfo, o.Items[0].Val)
987 if err != nil {
988 return nil, err
989 }
990 }
991
992 // Inherit from the resource connInfo any keys
993 // that are not explicitly overridden.
994 if connInfo != nil && subConnInfo != nil {
995 for k, v := range connInfo {
996 if _, ok := subConnInfo[k]; !ok {
997 subConnInfo[k] = v
998 }
999 }
1000 } else if subConnInfo == nil {
1001 subConnInfo = connInfo
1002 }
1003
1004 // Parse the connInfo
1005 connRaw, err := NewRawConfig(subConnInfo)
1006 if err != nil {
1007 return nil, err
1008 }
1009
1010 result = append(result, &Provisioner{
1011 Type: n,
1012 RawConfig: rawConfig,
1013 ConnInfo: connRaw,
1014 When: when,
1015 OnFailure: onFailure,
1016 })
1017 }
1018
1019 return result, nil
1020}
1021
1022/*
1023func hclObjectMap(os *hclobj.Object) map[string]ast.ListNode {
1024 objects := make(map[string][]*hclobj.Object)
1025
1026 for _, o := range os.Elem(false) {
1027 for _, elem := range o.Elem(true) {
1028 val, ok := objects[elem.Key]
1029 if !ok {
1030 val = make([]*hclobj.Object, 0, 1)
1031 }
1032
1033 val = append(val, elem)
1034 objects[elem.Key] = val
1035 }
1036 }
1037
1038 return objects
1039}
1040*/
1041
1042// assertAllBlocksHaveNames returns an error if any of the items in
1043// the given object list are blocks without keys (like "module {}")
1044// or simple assignments (like "module = 1"). It returns nil if
1045// neither of these things are true.
1046//
1047// The given name is used in any generated error messages, and should
1048// be the name of the block we're dealing with. The given list should
1049// be the result of calling .Filter on an object list with that same
1050// name.
1051func assertAllBlocksHaveNames(name string, list *ast.ObjectList) error {
1052 if elem := list.Elem(); len(elem.Items) != 0 {
1053 switch et := elem.Items[0].Val.(type) {
1054 case *ast.ObjectType:
1055 pos := et.Lbrace
1056 return fmt.Errorf("%s: %q must be followed by a name", pos, name)
1057 default:
1058 pos := elem.Items[0].Val.Pos()
1059 return fmt.Errorf("%s: %q must be a configuration block", pos, name)
1060 }
1061 }
1062 return nil
1063}
1064
1065func checkHCLKeys(node ast.Node, valid []string) error {
1066 var list *ast.ObjectList
1067 switch n := node.(type) {
1068 case *ast.ObjectList:
1069 list = n
1070 case *ast.ObjectType:
1071 list = n.List
1072 default:
1073 return fmt.Errorf("cannot check HCL keys of type %T", n)
1074 }
1075
1076 validMap := make(map[string]struct{}, len(valid))
1077 for _, v := range valid {
1078 validMap[v] = struct{}{}
1079 }
1080
1081 var result error
1082 for _, item := range list.Items {
1083 key := item.Keys[0].Token.Value().(string)
1084 if _, ok := validMap[key]; !ok {
1085 result = multierror.Append(result, fmt.Errorf(
1086 "invalid key: %s", key))
1087 }
1088 }
1089
1090 return result
1091}
1092
1093// unwrapHCLObjectKeysFromJSON cleans up an edge case that can occur when
1094// parsing JSON as input: if we're parsing JSON then directly nested
1095// items will show up as additional "keys".
1096//
1097// For objects that expect a fixed number of keys, this breaks the
1098// decoding process. This function unwraps the object into what it would've
1099// looked like if it came directly from HCL by specifying the number of keys
1100// you expect.
1101//
1102// Example:
1103//
1104// { "foo": { "baz": {} } }
1105//
1106// Will show up with Keys being: []string{"foo", "baz"}
1107// when we really just want the first two. This function will fix this.
1108func unwrapHCLObjectKeysFromJSON(item *ast.ObjectItem, depth int) {
1109 if len(item.Keys) > depth && item.Keys[0].Token.JSON {
1110 for len(item.Keys) > depth {
1111 // Pop off the last key
1112 n := len(item.Keys)
1113 key := item.Keys[n-1]
1114 item.Keys[n-1] = nil
1115 item.Keys = item.Keys[:n-1]
1116
1117 // Wrap our value in a list
1118 item.Val = &ast.ObjectType{
1119 List: &ast.ObjectList{
1120 Items: []*ast.ObjectItem{
1121 &ast.ObjectItem{
1122 Keys: []*ast.ObjectKey{key},
1123 Val: item.Val,
1124 },
1125 },
1126 },
1127 }
1128 }
1129 }
1130}
diff --git a/vendor/github.com/hashicorp/terraform/config/merge.go b/vendor/github.com/hashicorp/terraform/config/merge.go
new file mode 100644
index 0000000..db214be
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/merge.go
@@ -0,0 +1,193 @@
1package config
2
3// Merge merges two configurations into a single configuration.
4//
5// Merge allows for the two configurations to have duplicate resources,
6// because the resources will be merged. This differs from a single
7// Config which must only have unique resources.
8func Merge(c1, c2 *Config) (*Config, error) {
9 c := new(Config)
10
11 // Merge unknown keys
12 unknowns := make(map[string]struct{})
13 for _, k := range c1.unknownKeys {
14 _, present := unknowns[k]
15 if !present {
16 unknowns[k] = struct{}{}
17 c.unknownKeys = append(c.unknownKeys, k)
18 }
19 }
20 for _, k := range c2.unknownKeys {
21 _, present := unknowns[k]
22 if !present {
23 unknowns[k] = struct{}{}
24 c.unknownKeys = append(c.unknownKeys, k)
25 }
26 }
27
28 // Merge Atlas configuration. This is a simple "one overrides the
29 // other" sort of merge.
30 c.Atlas = c1.Atlas
31 if c2.Atlas != nil {
32 c.Atlas = c2.Atlas
33 }
34
35 // Merge the Terraform configuration
36 if c1.Terraform != nil {
37 c.Terraform = c1.Terraform
38 if c2.Terraform != nil {
39 c.Terraform.Merge(c2.Terraform)
40 }
41 } else {
42 c.Terraform = c2.Terraform
43 }
44
45 // NOTE: Everything below is pretty gross. Due to the lack of generics
46 // in Go, there is some hoop-jumping involved to make this merging a
47 // little more test-friendly and less repetitive. Ironically, making it
48 // less repetitive involves being a little repetitive, but I prefer to
49 // be repetitive with things that are less error prone than things that
50 // are more error prone (more logic). Type conversions to an interface
51 // are pretty low-error.
52
53 var m1, m2, mresult []merger
54
55 // Modules
56 m1 = make([]merger, 0, len(c1.Modules))
57 m2 = make([]merger, 0, len(c2.Modules))
58 for _, v := range c1.Modules {
59 m1 = append(m1, v)
60 }
61 for _, v := range c2.Modules {
62 m2 = append(m2, v)
63 }
64 mresult = mergeSlice(m1, m2)
65 if len(mresult) > 0 {
66 c.Modules = make([]*Module, len(mresult))
67 for i, v := range mresult {
68 c.Modules[i] = v.(*Module)
69 }
70 }
71
72 // Outputs
73 m1 = make([]merger, 0, len(c1.Outputs))
74 m2 = make([]merger, 0, len(c2.Outputs))
75 for _, v := range c1.Outputs {
76 m1 = append(m1, v)
77 }
78 for _, v := range c2.Outputs {
79 m2 = append(m2, v)
80 }
81 mresult = mergeSlice(m1, m2)
82 if len(mresult) > 0 {
83 c.Outputs = make([]*Output, len(mresult))
84 for i, v := range mresult {
85 c.Outputs[i] = v.(*Output)
86 }
87 }
88
89 // Provider Configs
90 m1 = make([]merger, 0, len(c1.ProviderConfigs))
91 m2 = make([]merger, 0, len(c2.ProviderConfigs))
92 for _, v := range c1.ProviderConfigs {
93 m1 = append(m1, v)
94 }
95 for _, v := range c2.ProviderConfigs {
96 m2 = append(m2, v)
97 }
98 mresult = mergeSlice(m1, m2)
99 if len(mresult) > 0 {
100 c.ProviderConfigs = make([]*ProviderConfig, len(mresult))
101 for i, v := range mresult {
102 c.ProviderConfigs[i] = v.(*ProviderConfig)
103 }
104 }
105
106 // Resources
107 m1 = make([]merger, 0, len(c1.Resources))
108 m2 = make([]merger, 0, len(c2.Resources))
109 for _, v := range c1.Resources {
110 m1 = append(m1, v)
111 }
112 for _, v := range c2.Resources {
113 m2 = append(m2, v)
114 }
115 mresult = mergeSlice(m1, m2)
116 if len(mresult) > 0 {
117 c.Resources = make([]*Resource, len(mresult))
118 for i, v := range mresult {
119 c.Resources[i] = v.(*Resource)
120 }
121 }
122
123 // Variables
124 m1 = make([]merger, 0, len(c1.Variables))
125 m2 = make([]merger, 0, len(c2.Variables))
126 for _, v := range c1.Variables {
127 m1 = append(m1, v)
128 }
129 for _, v := range c2.Variables {
130 m2 = append(m2, v)
131 }
132 mresult = mergeSlice(m1, m2)
133 if len(mresult) > 0 {
134 c.Variables = make([]*Variable, len(mresult))
135 for i, v := range mresult {
136 c.Variables[i] = v.(*Variable)
137 }
138 }
139
140 return c, nil
141}
142
143// merger is an interface that must be implemented by types that are
144// merge-able. This simplifies the implementation of Merge for the various
145// components of a Config.
146type merger interface {
147 mergerName() string
148 mergerMerge(merger) merger
149}
150
151// mergeSlice merges a slice of mergers.
152func mergeSlice(m1, m2 []merger) []merger {
153 r := make([]merger, len(m1), len(m1)+len(m2))
154 copy(r, m1)
155
156 m := map[string]struct{}{}
157 for _, v2 := range m2 {
158 // If we already saw it, just append it because it's a
159 // duplicate and invalid...
160 name := v2.mergerName()
161 if _, ok := m[name]; ok {
162 r = append(r, v2)
163 continue
164 }
165 m[name] = struct{}{}
166
167 // Find an original to override
168 var original merger
169 originalIndex := -1
170 for i, v := range m1 {
171 if v.mergerName() == name {
172 originalIndex = i
173 original = v
174 break
175 }
176 }
177
178 var v merger
179 if original == nil {
180 v = v2
181 } else {
182 v = original.mergerMerge(v2)
183 }
184
185 if originalIndex == -1 {
186 r = append(r, v)
187 } else {
188 r[originalIndex] = v
189 }
190 }
191
192 return r
193}
diff --git a/vendor/github.com/hashicorp/terraform/config/module/copy_dir.go b/vendor/github.com/hashicorp/terraform/config/module/copy_dir.go
new file mode 100644
index 0000000..095f61d
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/module/copy_dir.go
@@ -0,0 +1,114 @@
1package module
2
3import (
4 "io"
5 "os"
6 "path/filepath"
7 "strings"
8)
9
10// copyDir copies the src directory contents into dst. Both directories
11// should already exist.
12func copyDir(dst, src string) error {
13 src, err := filepath.EvalSymlinks(src)
14 if err != nil {
15 return err
16 }
17
18 walkFn := func(path string, info os.FileInfo, err error) error {
19 if err != nil {
20 return err
21 }
22
23 if path == src {
24 return nil
25 }
26
27 if strings.HasPrefix(filepath.Base(path), ".") {
28 // Skip any dot files
29 if info.IsDir() {
30 return filepath.SkipDir
31 } else {
32 return nil
33 }
34 }
35
36 // The "path" has the src prefixed to it. We need to join our
37 // destination with the path without the src on it.
38 dstPath := filepath.Join(dst, path[len(src):])
39
40 // We don't want to try to copy the same file over itself.
41 if eq, err := sameFile(path, dstPath); eq {
42 return nil
43 } else if err != nil {
44 return err
45 }
46
47 // If we have a directory, make that subdirectory, then continue
48 // the walk.
49 if info.IsDir() {
50 if path == filepath.Join(src, dst) {
51 // dst is in src; don't walk it.
52 return nil
53 }
54
55 if err := os.MkdirAll(dstPath, 0755); err != nil {
56 return err
57 }
58
59 return nil
60 }
61
62 // If we have a file, copy the contents.
63 srcF, err := os.Open(path)
64 if err != nil {
65 return err
66 }
67 defer srcF.Close()
68
69 dstF, err := os.Create(dstPath)
70 if err != nil {
71 return err
72 }
73 defer dstF.Close()
74
75 if _, err := io.Copy(dstF, srcF); err != nil {
76 return err
77 }
78
79 // Chmod it
80 return os.Chmod(dstPath, info.Mode())
81 }
82
83 return filepath.Walk(src, walkFn)
84}
85
86 // sameFile tries to determine if two paths refer to the same file.
87 // If the paths don't match, we look up the inode on supported systems.
88func sameFile(a, b string) (bool, error) {
89 if a == b {
90 return true, nil
91 }
92
93 aIno, err := inode(a)
94 if err != nil {
95 if os.IsNotExist(err) {
96 return false, nil
97 }
98 return false, err
99 }
100
101 bIno, err := inode(b)
102 if err != nil {
103 if os.IsNotExist(err) {
104 return false, nil
105 }
106 return false, err
107 }
108
109 if aIno > 0 && aIno == bIno {
110 return true, nil
111 }
112
113 return false, nil
114}
diff --git a/vendor/github.com/hashicorp/terraform/config/module/get.go b/vendor/github.com/hashicorp/terraform/config/module/get.go
new file mode 100644
index 0000000..96b4a63
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/module/get.go
@@ -0,0 +1,71 @@
1package module
2
3import (
4 "io/ioutil"
5 "os"
6
7 "github.com/hashicorp/go-getter"
8)
9
10// GetMode is an enum that describes how modules are loaded.
11//
12 // GetModeNone says that modules will not be downloaded or updated; they will
13 // only be loaded from storage.
14//
15// GetModeGet says that modules can be initially downloaded if they don't
16// exist, but otherwise to just load from the current version in storage.
17//
18// GetModeUpdate says that modules should be checked for updates and
19// downloaded prior to loading. If there are no updates, we load the version
20// from disk, otherwise we download first and then load.
21type GetMode byte
22
23const (
24 GetModeNone GetMode = iota
25 GetModeGet
26 GetModeUpdate
27)
28
29// GetCopy is the same as Get except that it downloads a copy of the
30// module represented by source.
31//
32 // This copy will omit any dot-prefixed files (such as .git/, .hg/) and
33// can't be updated on its own.
34func GetCopy(dst, src string) error {
35 // Create the temporary directory to do the real Get to
36 tmpDir, err := ioutil.TempDir("", "tf")
37 if err != nil {
38 return err
39 }
40 // FIXME: This isn't completely safe. Creating and removing our temp path
41 // exposes a window in which an attacker could race us and inject files.
42 if err := os.RemoveAll(tmpDir); err != nil {
43 return err
44 }
45 defer os.RemoveAll(tmpDir)
46
47 // Get to that temporary dir
48 if err := getter.Get(tmpDir, src); err != nil {
49 return err
50 }
51
52 // Make sure the destination exists
53 if err := os.MkdirAll(dst, 0755); err != nil {
54 return err
55 }
56
57 // Copy to the final location
58 return copyDir(dst, tmpDir)
59}
60
61func getStorage(s getter.Storage, key string, src string, mode GetMode) (string, bool, error) {
62 // Get the module with the level specified if we were told to.
63 if mode > GetModeNone {
64 if err := s.Get(key, src, mode == GetModeUpdate); err != nil {
65 return "", false, err
66 }
67 }
68
69 // Get the directory where the module is.
70 return s.Dir(key)
71}
diff --git a/vendor/github.com/hashicorp/terraform/config/module/inode.go b/vendor/github.com/hashicorp/terraform/config/module/inode.go
new file mode 100644
index 0000000..8603ee2
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/module/inode.go
@@ -0,0 +1,21 @@
1// +build linux darwin openbsd netbsd solaris
2
3package module
4
5import (
6 "fmt"
7 "os"
8 "syscall"
9)
10
11// lookup the inode of a file on posix systems
12func inode(path string) (uint64, error) {
13 stat, err := os.Stat(path)
14 if err != nil {
15 return 0, err
16 }
17 if st, ok := stat.Sys().(*syscall.Stat_t); ok {
18 return st.Ino, nil
19 }
20 return 0, fmt.Errorf("could not determine file inode")
21}
diff --git a/vendor/github.com/hashicorp/terraform/config/module/inode_freebsd.go b/vendor/github.com/hashicorp/terraform/config/module/inode_freebsd.go
new file mode 100644
index 0000000..0d95730
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/module/inode_freebsd.go
@@ -0,0 +1,21 @@
1// +build freebsd
2
3package module
4
5import (
6 "fmt"
7 "os"
8 "syscall"
9)
10
11// lookup the inode of a file on posix systems
12func inode(path string) (uint64, error) {
13 stat, err := os.Stat(path)
14 if err != nil {
15 return 0, err
16 }
17 if st, ok := stat.Sys().(*syscall.Stat_t); ok {
18 return uint64(st.Ino), nil
19 }
20 return 0, fmt.Errorf("could not determine file inode")
21}
diff --git a/vendor/github.com/hashicorp/terraform/config/module/inode_windows.go b/vendor/github.com/hashicorp/terraform/config/module/inode_windows.go
new file mode 100644
index 0000000..c0cf455
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/module/inode_windows.go
@@ -0,0 +1,8 @@
1// +build windows
2
3package module
4
5// no syscall.Stat_t on windows, return 0 for inodes
6func inode(path string) (uint64, error) {
7 return 0, nil
8}
diff --git a/vendor/github.com/hashicorp/terraform/config/module/module.go b/vendor/github.com/hashicorp/terraform/config/module/module.go
new file mode 100644
index 0000000..f8649f6
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/module/module.go
@@ -0,0 +1,7 @@
1package module
2
3// Module represents the metadata for a single module.
4type Module struct {
5 Name string
6 Source string
7}
diff --git a/vendor/github.com/hashicorp/terraform/config/module/testing.go b/vendor/github.com/hashicorp/terraform/config/module/testing.go
new file mode 100644
index 0000000..fc9e733
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/module/testing.go
@@ -0,0 +1,38 @@
1package module
2
3import (
4 "io/ioutil"
5 "os"
6 "testing"
7
8 "github.com/hashicorp/go-getter"
9)
10
11// TestTree loads a module at the given path and returns the tree as well
12// as a function that should be deferred to clean up resources.
13func TestTree(t *testing.T, path string) (*Tree, func()) {
14 // Create a temporary directory for module storage
15 dir, err := ioutil.TempDir("", "tf")
16 if err != nil {
17 t.Fatalf("err: %s", err)
18 return nil, nil
19 }
20
21 // Load the module
22 mod, err := NewTreeModule("", path)
23 if err != nil {
24 t.Fatalf("err: %s", err)
25 return nil, nil
26 }
27
28 // Get the child modules
29 s := &getter.FolderStorage{StorageDir: dir}
30 if err := mod.Load(s, GetModeGet); err != nil {
31 t.Fatalf("err: %s", err)
32 return nil, nil
33 }
34
35 return mod, func() {
36 os.RemoveAll(dir)
37 }
38}
diff --git a/vendor/github.com/hashicorp/terraform/config/module/tree.go b/vendor/github.com/hashicorp/terraform/config/module/tree.go
new file mode 100644
index 0000000..b6f90fd
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/module/tree.go
@@ -0,0 +1,428 @@
1package module
2
3import (
4 "bufio"
5 "bytes"
6 "fmt"
7 "path/filepath"
8 "strings"
9 "sync"
10
11 "github.com/hashicorp/go-getter"
12 "github.com/hashicorp/terraform/config"
13)
14
15// RootName is the name of the root tree.
16const RootName = "root"
17
18// Tree represents the module import tree of configurations.
19//
20// This Tree structure can be used to get (download) new modules, load
21// all the modules without getting, flatten the tree into something
22// Terraform can use, etc.
23type Tree struct {
24 name string
25 config *config.Config
26 children map[string]*Tree
27 path []string
28 lock sync.RWMutex
29}
30
31// NewTree returns a new Tree for the given config structure.
32func NewTree(name string, c *config.Config) *Tree {
33 return &Tree{config: c, name: name}
34}
35
36// NewEmptyTree returns a new tree that is empty (contains no configuration).
37func NewEmptyTree() *Tree {
38 t := &Tree{config: &config.Config{}}
39
40 // We do this dummy load so that the tree is marked as "loaded". It
41 // should never fail because this is essentially a no-op. If it does fail,
42 // we panic so we know it's a bug.
43 if err := t.Load(nil, GetModeGet); err != nil {
44 panic(err)
45 }
46
47 return t
48}
49
50// NewTreeModule is like NewTree except it parses the configuration in
51// the directory and gives it a specific name. Use a blank name "" to specify
52// the root module.
53func NewTreeModule(name, dir string) (*Tree, error) {
54 c, err := config.LoadDir(dir)
55 if err != nil {
56 return nil, err
57 }
58
59 return NewTree(name, c), nil
60}
61
62// Config returns the configuration for this module.
63func (t *Tree) Config() *config.Config {
64 return t.config
65}
66
67// Child returns the child with the given path (by name).
68func (t *Tree) Child(path []string) *Tree {
69 if t == nil {
70 return nil
71 }
72
73 if len(path) == 0 {
74 return t
75 }
76
77 c := t.Children()[path[0]]
78 if c == nil {
79 return nil
80 }
81
82 return c.Child(path[1:])
83}
84
85// Children returns the children of this tree (the modules that are
86// imported by this root).
87//
88// This will only return a non-nil value after Load is called.
89func (t *Tree) Children() map[string]*Tree {
90 t.lock.RLock()
91 defer t.lock.RUnlock()
92 return t.children
93}
94
95 // Loaded reports whether this tree has been loaded yet.
96func (t *Tree) Loaded() bool {
97 t.lock.RLock()
98 defer t.lock.RUnlock()
99 return t.children != nil
100}
101
102// Modules returns the list of modules that this tree imports.
103//
104// This is only the imports of _this_ level of the tree. To retrieve the
105// full nested imports, you'll have to traverse the tree.
106func (t *Tree) Modules() []*Module {
107 result := make([]*Module, len(t.config.Modules))
108 for i, m := range t.config.Modules {
109 result[i] = &Module{
110 Name: m.Name,
111 Source: m.Source,
112 }
113 }
114
115 return result
116}
117
118 // Name returns the name of the tree. This will be "root" for the root
119 // tree and the module name given for any children.
120func (t *Tree) Name() string {
121 if t.name == "" {
122 return RootName
123 }
124
125 return t.name
126}
127
128// Load loads the configuration of the entire tree.
129//
130// The parameters are used to tell the tree where to find modules and
131// whether it can download/update modules along the way.
132//
133// Calling this multiple times will reload the tree.
134//
135// Various semantic-like checks are made along the way of loading since
136// module trees inherently require the configuration to be in a reasonably
137// sane state: no circular dependencies, proper module sources, etc. A full
138// suite of validations can be done by running Validate (after loading).
139func (t *Tree) Load(s getter.Storage, mode GetMode) error {
140 t.lock.Lock()
141 defer t.lock.Unlock()
142
143 // Reset the children if we have any
144 t.children = nil
145
146 modules := t.Modules()
147 children := make(map[string]*Tree)
148
149 // Go through all the modules and get the directory for them.
150 for _, m := range modules {
151 if _, ok := children[m.Name]; ok {
152 return fmt.Errorf(
153 "module %s: duplicated. module names must be unique", m.Name)
154 }
155
156 // Determine the path to this child
157 path := make([]string, len(t.path), len(t.path)+1)
158 copy(path, t.path)
159 path = append(path, m.Name)
160
161 // Split out the subdir if we have one
162 source, subDir := getter.SourceDirSubdir(m.Source)
163
164 source, err := getter.Detect(source, t.config.Dir, getter.Detectors)
165 if err != nil {
166 return fmt.Errorf("module %s: %s", m.Name, err)
167 }
168
169 // Check if the detector introduced something new.
170 source, subDir2 := getter.SourceDirSubdir(source)
171 if subDir2 != "" {
172 subDir = filepath.Join(subDir2, subDir)
173 }
174
175 // Get the directory where this module is so we can load it
176 key := strings.Join(path, ".")
177 key = fmt.Sprintf("root.%s-%s", key, m.Source)
178 dir, ok, err := getStorage(s, key, source, mode)
179 if err != nil {
180 return err
181 }
182 if !ok {
183 return fmt.Errorf(
184 "module %s: not found, may need to be downloaded using 'terraform get'", m.Name)
185 }
186
187 // If we have a subdirectory, then merge that in
188 if subDir != "" {
189 dir = filepath.Join(dir, subDir)
190 }
191
192 // Load the configurations.Dir(source)
193 children[m.Name], err = NewTreeModule(m.Name, dir)
194 if err != nil {
195 return fmt.Errorf(
196 "module %s: %s", m.Name, err)
197 }
198
199 // Set the path of this child
200 children[m.Name].path = path
201 }
202
203 // Go through all the children and load them.
204 for _, c := range children {
205 if err := c.Load(s, mode); err != nil {
206 return err
207 }
208 }
209
210 // Set our tree up
211 t.children = children
212
213 return nil
214}
215
216// Path is the full path to this tree.
217func (t *Tree) Path() []string {
218 return t.path
219}
220
221// String gives a nice output to describe the tree.
222func (t *Tree) String() string {
223 var result bytes.Buffer
224 path := strings.Join(t.path, ", ")
225 if path != "" {
226 path = fmt.Sprintf(" (path: %s)", path)
227 }
228 result.WriteString(t.Name() + path + "\n")
229
230 cs := t.Children()
231 if cs == nil {
232 result.WriteString(" not loaded")
233 } else {
234 // Go through each child and get its string value, then indent it
235 // by two.
236 for _, c := range cs {
237 r := strings.NewReader(c.String())
238 scanner := bufio.NewScanner(r)
239 for scanner.Scan() {
240 result.WriteString(" ")
241 result.WriteString(scanner.Text())
242 result.WriteString("\n")
243 }
244 }
245 }
246
247 return result.String()
248}
249
250// Validate does semantic checks on the entire tree of configurations.
251//
252// This will call the respective config.Config.Validate() functions as well
253// as verifying things such as parameters/outputs between the various modules.
254//
255// Load must be called prior to calling Validate or an error will be returned.
256func (t *Tree) Validate() error {
257 if !t.Loaded() {
258 return fmt.Errorf("tree must be loaded before calling Validate")
259 }
260
261 // If something goes wrong, here is our error template
262 newErr := &treeError{Name: []string{t.Name()}}
263
264 // Terraform core does not handle root module children named "root".
265 // We plan to fix this in the future but this bug was brought up in
266 // the middle of a release and we don't want to introduce wide-sweeping
267 // changes at that time.
268 if len(t.path) == 1 && t.name == "root" {
269 return fmt.Errorf("root module cannot contain module named 'root'")
270 }
271
272 // Validate our configuration first.
273 if err := t.config.Validate(); err != nil {
274 newErr.Add(err)
275 }
276
277 // If we're the root, we do extra validation. This validation usually
278 // requires the entire tree (since children don't have parent pointers).
279 if len(t.path) == 0 {
280 if err := t.validateProviderAlias(); err != nil {
281 newErr.Add(err)
282 }
283 }
284
285 // Get the child trees
286 children := t.Children()
287
288 // Validate all our children
289 for _, c := range children {
290 err := c.Validate()
291 if err == nil {
292 continue
293 }
294
295 verr, ok := err.(*treeError)
296 if !ok {
297 // Unknown error, just return...
298 return err
299 }
300
301 // Append ourselves to the error and then return
302 verr.Name = append(verr.Name, t.Name())
303 newErr.AddChild(verr)
304 }
305
306 // Go over all the modules and verify that any parameters are valid
307 // variables into the module in question.
308 for _, m := range t.config.Modules {
309 tree, ok := children[m.Name]
310 if !ok {
311 // This should never happen because Load builds a child for every module
312 panic("module not found in children: " + m.Name)
313 }
314
315 // Build the variables that the module defines
316 requiredMap := make(map[string]struct{})
317 varMap := make(map[string]struct{})
318 for _, v := range tree.config.Variables {
319 varMap[v.Name] = struct{}{}
320
321 if v.Required() {
322 requiredMap[v.Name] = struct{}{}
323 }
324 }
325
326 // Compare to the keys in our raw config for the module
327 for k, _ := range m.RawConfig.Raw {
328 if _, ok := varMap[k]; !ok {
329 newErr.Add(fmt.Errorf(
330 "module %s: %s is not a valid parameter",
331 m.Name, k))
332 }
333
334 // Remove the required
335 delete(requiredMap, k)
336 }
337
338 // If we have any required left over, they aren't set.
339 for k, _ := range requiredMap {
340 newErr.Add(fmt.Errorf(
341 "module %s: required variable %q not set",
342 m.Name, k))
343 }
344 }
345
346 // Go over all the variables used and make sure that any module
347 // variables represent outputs properly.
348 for source, vs := range t.config.InterpolatedVariables() {
349 for _, v := range vs {
350 mv, ok := v.(*config.ModuleVariable)
351 if !ok {
352 continue
353 }
354
355 tree, ok := children[mv.Name]
356 if !ok {
357 newErr.Add(fmt.Errorf(
358 "%s: undefined module referenced %s",
359 source, mv.Name))
360 continue
361 }
362
363 found := false
364 for _, o := range tree.config.Outputs {
365 if o.Name == mv.Field {
366 found = true
367 break
368 }
369 }
370 if !found {
371 newErr.Add(fmt.Errorf(
372 "%s: %s is not a valid output for module %s",
373 source, mv.Field, mv.Name))
374 }
375 }
376 }
377
378 return newErr.ErrOrNil()
379}
380
381// treeError is an error used by Tree.Validate to accumulate all
382// validation errors.
383type treeError struct {
384 Name []string
385 Errs []error
386 Children []*treeError
387}
388
389func (e *treeError) Add(err error) {
390 e.Errs = append(e.Errs, err)
391}
392
393func (e *treeError) AddChild(err *treeError) {
394 e.Children = append(e.Children, err)
395}
396
397func (e *treeError) ErrOrNil() error {
398 if len(e.Errs) > 0 || len(e.Children) > 0 {
399 return e
400 }
401 return nil
402}
403
404func (e *treeError) Error() string {
405 name := strings.Join(e.Name, ".")
406 var out bytes.Buffer
407 fmt.Fprintf(&out, "module %s: ", name)
408
409 if len(e.Errs) == 1 {
410	// single-line error
411 out.WriteString(e.Errs[0].Error())
412 } else {
413 // multi-line error
414 for _, err := range e.Errs {
415 fmt.Fprintf(&out, "\n %s", err)
416 }
417 }
418
419 if len(e.Children) > 0 {
420 // start the next error on a new line
421 out.WriteString("\n ")
422 }
423 for _, child := range e.Children {
424 out.WriteString(child.Error())
425 }
426
427 return out.String()
428}
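
The treeError type above is how Validate reports problems from nested modules: each module records its own errors, and a child's treeError is attached to the parent via AddChild with the parent's name appended to its Name path. A minimal sketch of that flow, written as hypothetical package-internal code (it is not part of the vendored file, only an illustration of the types shown above):

package module

import (
	"errors"
	"fmt"
)

// demoTreeError shows how Validate-style aggregation behaves: an error
// recorded on a child bubbles up through AddChild, and ErrOrNil returns nil
// only when nothing was recorded anywhere in the tree.
func demoTreeError() {
	root := &treeError{Name: []string{"root"}}

	child := &treeError{Name: []string{"vpc", "root"}}
	child.Add(errors.New(`required variable "cidr" not set`))
	root.AddChild(child)

	// Prints roughly:
	//   module root:
	//     module vpc.root: required variable "cidr" not set
	if err := root.ErrOrNil(); err != nil {
		fmt.Println(err)
	}
}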
diff --git a/vendor/github.com/hashicorp/terraform/config/module/tree_gob.go b/vendor/github.com/hashicorp/terraform/config/module/tree_gob.go
new file mode 100644
index 0000000..fcd37f4
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/module/tree_gob.go
@@ -0,0 +1,57 @@
1package module
2
3import (
4 "bytes"
5 "encoding/gob"
6
7 "github.com/hashicorp/terraform/config"
8)
9
10func (t *Tree) GobDecode(bs []byte) error {
11 t.lock.Lock()
12 defer t.lock.Unlock()
13
14 // Decode the gob data
15 var data treeGob
16 dec := gob.NewDecoder(bytes.NewReader(bs))
17 if err := dec.Decode(&data); err != nil {
18 return err
19 }
20
21 // Set the fields
22 t.name = data.Name
23 t.config = data.Config
24 t.children = data.Children
25 t.path = data.Path
26
27 return nil
28}
29
30func (t *Tree) GobEncode() ([]byte, error) {
31 data := &treeGob{
32 Config: t.config,
33 Children: t.children,
34 Name: t.name,
35 Path: t.path,
36 }
37
38 var buf bytes.Buffer
39 enc := gob.NewEncoder(&buf)
40 if err := enc.Encode(data); err != nil {
41 return nil, err
42 }
43
44 return buf.Bytes(), nil
45}
46
47// treeGob is used as a structure to Gob encode a tree.
48//
49// This structure is private so it can't be referenced from outside the
50// package, but its fields are public so Gob can encode it. When we decode
51// it, we can turn it back into a Tree.
52type treeGob struct {
53 Config *config.Config
54 Children map[string]*Tree
55 Name string
56 Path []string
57}
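
Because Tree implements GobEncoder and GobDecoder, a tree can be round-tripped with the standard encoding/gob package. A minimal sketch, assuming the exported NewTree constructor and Name method from the rest of tree.go (not shown in this hunk):

package main

import (
	"bytes"
	"encoding/gob"
	"fmt"

	"github.com/hashicorp/terraform/config"
	"github.com/hashicorp/terraform/config/module"
)

func main() {
	t := module.NewTree("example", &config.Config{})

	// Encoding goes through Tree.GobEncode, which persists only the
	// name, config, children, and path.
	var buf bytes.Buffer
	if err := gob.NewEncoder(&buf).Encode(t); err != nil {
		panic(err)
	}

	// Decoding goes through Tree.GobDecode and restores those fields.
	var decoded module.Tree
	if err := gob.NewDecoder(&buf).Decode(&decoded); err != nil {
		panic(err)
	}
	fmt.Println(decoded.Name()) // "example"
}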
diff --git a/vendor/github.com/hashicorp/terraform/config/module/validate_provider_alias.go b/vendor/github.com/hashicorp/terraform/config/module/validate_provider_alias.go
new file mode 100644
index 0000000..090d4f7
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/module/validate_provider_alias.go
@@ -0,0 +1,118 @@
1package module
2
3import (
4 "fmt"
5 "strings"
6
7 "github.com/hashicorp/go-multierror"
8 "github.com/hashicorp/terraform/dag"
9)
10
11// validateProviderAlias validates that all provider alias references are
12// defined at some point in the parent tree. This improves UX by catching
13// alias typos at the slight cost of requiring a declaration of usage. This
14// is usually a good tradeoff since not many aliases are used.
15func (t *Tree) validateProviderAlias() error {
16 // If we're not the root, don't perform this validation. We must be the
17	// root since we require full tree visibility.
18 if len(t.path) != 0 {
19 return nil
20 }
21
22 // We'll use a graph to keep track of defined aliases at each level.
23 // As long as a parent defines an alias, it is okay.
24 var g dag.AcyclicGraph
25 t.buildProviderAliasGraph(&g, nil)
26
27 // Go through the graph and check that the usage is all good.
28 var err error
29 for _, v := range g.Vertices() {
30 pv, ok := v.(*providerAliasVertex)
31 if !ok {
32 // This shouldn't happen, just ignore it.
33 continue
34 }
35
36 // If we're not using any aliases, fast track and just continue
37 if len(pv.Used) == 0 {
38 continue
39 }
40
41 // Grab the ancestors since we're going to have to check if our
42 // parents define any of our aliases.
43 var parents []*providerAliasVertex
44 ancestors, _ := g.Ancestors(v)
45 for _, raw := range ancestors.List() {
46 if pv, ok := raw.(*providerAliasVertex); ok {
47 parents = append(parents, pv)
48 }
49 }
50		for k := range pv.Used {
51 // Check if we define this
52 if _, ok := pv.Defined[k]; ok {
53 continue
54 }
55
56 // Check for a parent
57 found := false
58 for _, parent := range parents {
59 _, found = parent.Defined[k]
60 if found {
61 break
62 }
63 }
64 if found {
65 continue
66 }
67
68 // We didn't find the alias, error!
69 err = multierror.Append(err, fmt.Errorf(
70 "module %s: provider alias must be defined by the module or a parent: %s",
71 strings.Join(pv.Path, "."), k))
72 }
73 }
74
75 return err
76}
77
78func (t *Tree) buildProviderAliasGraph(g *dag.AcyclicGraph, parent dag.Vertex) {
79 // Add all our defined aliases
80 defined := make(map[string]struct{})
81 for _, p := range t.config.ProviderConfigs {
82 defined[p.FullName()] = struct{}{}
83 }
84
85 // Add all our used aliases
86 used := make(map[string]struct{})
87 for _, r := range t.config.Resources {
88 if r.Provider != "" {
89 used[r.Provider] = struct{}{}
90 }
91 }
92
93 // Add it to the graph
94 vertex := &providerAliasVertex{
95 Path: t.Path(),
96 Defined: defined,
97 Used: used,
98 }
99 g.Add(vertex)
100
101 // Connect to our parent if we have one
102 if parent != nil {
103 g.Connect(dag.BasicEdge(vertex, parent))
104 }
105
106 // Build all our children
107 for _, c := range t.Children() {
108 c.buildProviderAliasGraph(g, vertex)
109 }
110}
111
112// providerAliasVertex is the vertex for the graph that keeps track of
113// defined provider aliases.
114type providerAliasVertex struct {
115 Path []string
116 Defined map[string]struct{}
117 Used map[string]struct{}
118}
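
The rule enforced above is: a provider alias referenced by a resource is acceptable if the same module or any ancestor module declares it. The real implementation walks a dag.AcyclicGraph; the following standalone sketch is hypothetical (not part of the vendored code) and restates the same ancestor lookup over a simple root-first slice:

package main

import "fmt"

// moduleAliases mirrors providerAliasVertex: which provider aliases a module
// defines and which it uses.
type moduleAliases struct {
	path    string
	defined map[string]struct{}
	used    map[string]struct{}
}

// checkAliases flags any used alias that neither the module itself nor an
// ancestor (an earlier entry in the root-first chain) defines.
func checkAliases(chain []moduleAliases) []error {
	var errs []error
	for i, m := range chain {
		for alias := range m.used {
			if _, ok := m.defined[alias]; ok {
				continue
			}
			found := false
			for j := 0; j < i; j++ {
				if _, ok := chain[j].defined[alias]; ok {
					found = true
					break
				}
			}
			if !found {
				errs = append(errs, fmt.Errorf(
					"module %s: provider alias must be defined by the module or a parent: %s",
					m.path, alias))
			}
		}
	}
	return errs
}

func main() {
	chain := []moduleAliases{
		{path: "root", defined: map[string]struct{}{"aws.west": {}}},
		{path: "root.child", used: map[string]struct{}{"aws.west": {}, "aws.east": {}}},
	}
	for _, err := range checkAliases(chain) {
		fmt.Println(err) // only aws.east is reported
	}
}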
diff --git a/vendor/github.com/hashicorp/terraform/config/provisioner_enums.go b/vendor/github.com/hashicorp/terraform/config/provisioner_enums.go
new file mode 100644
index 0000000..00fd43f
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/provisioner_enums.go
@@ -0,0 +1,40 @@
1package config
2
3// ProvisionerWhen is an enum for valid values for when to run provisioners.
4type ProvisionerWhen int
5
6const (
7 ProvisionerWhenInvalid ProvisionerWhen = iota
8 ProvisionerWhenCreate
9 ProvisionerWhenDestroy
10)
11
12var provisionerWhenStrs = map[ProvisionerWhen]string{
13 ProvisionerWhenInvalid: "invalid",
14 ProvisionerWhenCreate: "create",
15 ProvisionerWhenDestroy: "destroy",
16}
17
18func (v ProvisionerWhen) String() string {
19 return provisionerWhenStrs[v]
20}
21
22// ProvisionerOnFailure is an enum for valid values for on_failure options
23// for provisioners.
24type ProvisionerOnFailure int
25
26const (
27 ProvisionerOnFailureInvalid ProvisionerOnFailure = iota
28 ProvisionerOnFailureContinue
29 ProvisionerOnFailureFail
30)
31
32var provisionerOnFailureStrs = map[ProvisionerOnFailure]string{
33 ProvisionerOnFailureInvalid: "invalid",
34 ProvisionerOnFailureContinue: "continue",
35 ProvisionerOnFailureFail: "fail",
36}
37
38func (v ProvisionerOnFailure) String() string {
39 return provisionerOnFailureStrs[v]
40}
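
The maps above only go one way (enum to string); elsewhere in this package the loader turns user-facing `when`/`on_failure` strings back into these enums. A minimal inverse lookup could look like the following; the helper is a hypothetical sketch, not part of the vendored file:

package config

// provisionerWhenFromString is a hypothetical inverse of
// ProvisionerWhen.String, shown only to illustrate how the enum/string maps
// above can be used.
func provisionerWhenFromString(s string) (ProvisionerWhen, bool) {
	for v, str := range provisionerWhenStrs {
		if str == s && v != ProvisionerWhenInvalid {
			return v, true
		}
	}
	return ProvisionerWhenInvalid, false
}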
diff --git a/vendor/github.com/hashicorp/terraform/config/raw_config.go b/vendor/github.com/hashicorp/terraform/config/raw_config.go
new file mode 100644
index 0000000..f8498d8
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/raw_config.go
@@ -0,0 +1,335 @@
1package config
2
3import (
4 "bytes"
5 "encoding/gob"
6 "sync"
7
8 "github.com/hashicorp/hil"
9 "github.com/hashicorp/hil/ast"
10 "github.com/mitchellh/copystructure"
11 "github.com/mitchellh/reflectwalk"
12)
13
14// UnknownVariableValue is a sentinel value that can be used
15// to denote that the value of a variable is unknown at this time.
16// RawConfig uses this information to build up data about
17// unknown keys.
18const UnknownVariableValue = "74D93920-ED26-11E3-AC10-0800200C9A66"
19
20// RawConfig is a structure that holds a piece of configuration
21// where the overall structure is unknown since it will be used
22// to configure a plugin or some other similar external component.
23//
24// RawConfigs can be interpolated with variables that come from
25// other resources, user variables, etc.
26//
27// RawConfig supports a query-like interface to request
28// information from deep within the structure.
29type RawConfig struct {
30 Key string
31 Raw map[string]interface{}
32 Interpolations []ast.Node
33 Variables map[string]InterpolatedVariable
34
35 lock sync.Mutex
36 config map[string]interface{}
37 unknownKeys []string
38}
39
40// NewRawConfig creates a new RawConfig structure and populates the
41// publicly readable struct fields.
42func NewRawConfig(raw map[string]interface{}) (*RawConfig, error) {
43 result := &RawConfig{Raw: raw}
44 if err := result.init(); err != nil {
45 return nil, err
46 }
47
48 return result, nil
49}
50
51// RawMap returns a copy of the RawConfig.Raw map.
52func (r *RawConfig) RawMap() map[string]interface{} {
53 r.lock.Lock()
54 defer r.lock.Unlock()
55
56 m := make(map[string]interface{})
57 for k, v := range r.Raw {
58 m[k] = v
59 }
60 return m
61}
62
63// Copy returns a copy of this RawConfig, uninterpolated.
64func (r *RawConfig) Copy() *RawConfig {
65 if r == nil {
66 return nil
67 }
68
69 r.lock.Lock()
70 defer r.lock.Unlock()
71
72 newRaw := make(map[string]interface{})
73 for k, v := range r.Raw {
74 newRaw[k] = v
75 }
76
77 result, err := NewRawConfig(newRaw)
78 if err != nil {
79 panic("copy failed: " + err.Error())
80 }
81
82 result.Key = r.Key
83 return result
84}
85
86// Value returns the value of the configuration if this configuration
87// has a Key set. If this does not have a Key set, nil will be returned.
88func (r *RawConfig) Value() interface{} {
89 if c := r.Config(); c != nil {
90 if v, ok := c[r.Key]; ok {
91 return v
92 }
93 }
94
95 r.lock.Lock()
96 defer r.lock.Unlock()
97 return r.Raw[r.Key]
98}
99
100// Config returns the entire configuration with the variables
101// interpolated from any call to Interpolate.
102//
103// If any interpolated variables are unknown (value set to
104// UnknownVariableValue), the first non-container (map, slice, etc.) element
105// will be removed from the config. The keys of unknown variables
106// can be found using the UnknownKeys function.
107//
108// By pruning out unknown keys from the configuration, the raw
109// structure will always successfully decode into its ultimate
110// structure using something like mapstructure.
111func (r *RawConfig) Config() map[string]interface{} {
112 r.lock.Lock()
113 defer r.lock.Unlock()
114 return r.config
115}
116
117// Interpolate uses the given mapping of variable values and uses
118// those as the values to replace any variables in this raw
119// configuration.
120//
121// Any prior calls to Interpolate are replaced with this one.
122//
123// If a variable key is missing, this will panic.
124func (r *RawConfig) Interpolate(vs map[string]ast.Variable) error {
125 r.lock.Lock()
126 defer r.lock.Unlock()
127
128 config := langEvalConfig(vs)
129 return r.interpolate(func(root ast.Node) (interface{}, error) {
130 // None of the variables we need are computed, meaning we should
131 // be able to properly evaluate.
132 result, err := hil.Eval(root, config)
133 if err != nil {
134 return "", err
135 }
136
137 return result.Value, nil
138 })
139}
140
141// Merge merges another RawConfig into this one (overriding any conflicting
142// values in this config) and returns a new config. The original config
143// is not modified.
144func (r *RawConfig) Merge(other *RawConfig) *RawConfig {
145 r.lock.Lock()
146 defer r.lock.Unlock()
147
148 // Merge the raw configurations
149 raw := make(map[string]interface{})
150 for k, v := range r.Raw {
151 raw[k] = v
152 }
153 for k, v := range other.Raw {
154 raw[k] = v
155 }
156
157 // Create the result
158 result, err := NewRawConfig(raw)
159 if err != nil {
160 panic(err)
161 }
162
163 // Merge the interpolated results
164 result.config = make(map[string]interface{})
165 for k, v := range r.config {
166 result.config[k] = v
167 }
168 for k, v := range other.config {
169 result.config[k] = v
170 }
171
172 // Build the unknown keys
173 if len(r.unknownKeys) > 0 || len(other.unknownKeys) > 0 {
174 unknownKeys := make(map[string]struct{})
175 for _, k := range r.unknownKeys {
176 unknownKeys[k] = struct{}{}
177 }
178 for _, k := range other.unknownKeys {
179 unknownKeys[k] = struct{}{}
180 }
181
182 result.unknownKeys = make([]string, 0, len(unknownKeys))
183		for k := range unknownKeys {
184 result.unknownKeys = append(result.unknownKeys, k)
185 }
186 }
187
188 return result
189}
190
191func (r *RawConfig) init() error {
192 r.lock.Lock()
193 defer r.lock.Unlock()
194
195 r.config = r.Raw
196 r.Interpolations = nil
197 r.Variables = nil
198
199 fn := func(node ast.Node) (interface{}, error) {
200 r.Interpolations = append(r.Interpolations, node)
201 vars, err := DetectVariables(node)
202 if err != nil {
203 return "", err
204 }
205
206 for _, v := range vars {
207 if r.Variables == nil {
208 r.Variables = make(map[string]InterpolatedVariable)
209 }
210
211 r.Variables[v.FullKey()] = v
212 }
213
214 return "", nil
215 }
216
217 walker := &interpolationWalker{F: fn}
218 if err := reflectwalk.Walk(r.Raw, walker); err != nil {
219 return err
220 }
221
222 return nil
223}
224
225func (r *RawConfig) interpolate(fn interpolationWalkerFunc) error {
226 config, err := copystructure.Copy(r.Raw)
227 if err != nil {
228 return err
229 }
230 r.config = config.(map[string]interface{})
231
232 w := &interpolationWalker{F: fn, Replace: true}
233 err = reflectwalk.Walk(r.config, w)
234 if err != nil {
235 return err
236 }
237
238 r.unknownKeys = w.unknownKeys
239 return nil
240}
241
242func (r *RawConfig) merge(r2 *RawConfig) *RawConfig {
243 if r == nil && r2 == nil {
244 return nil
245 }
246
247 if r == nil {
248 r = &RawConfig{}
249 }
250
251 rawRaw, err := copystructure.Copy(r.Raw)
252 if err != nil {
253 panic(err)
254 }
255
256 raw := rawRaw.(map[string]interface{})
257 if r2 != nil {
258 for k, v := range r2.Raw {
259 raw[k] = v
260 }
261 }
262
263 result, err := NewRawConfig(raw)
264 if err != nil {
265 panic(err)
266 }
267
268 return result
269}
270
271// UnknownKeys returns the keys of the configuration that are unknown
272// because they had interpolated variables that must be computed.
273func (r *RawConfig) UnknownKeys() []string {
274 r.lock.Lock()
275 defer r.lock.Unlock()
276 return r.unknownKeys
277}
278
279// See GobEncode
280func (r *RawConfig) GobDecode(b []byte) error {
281 var data gobRawConfig
282 err := gob.NewDecoder(bytes.NewReader(b)).Decode(&data)
283 if err != nil {
284 return err
285 }
286
287 r.Key = data.Key
288 r.Raw = data.Raw
289
290 return r.init()
291}
292
293// GobEncode is a custom Gob encoder used so that we only include the
294// raw configuration. Interpolated variables and such are lost and the
295// tree of interpolated variables is recomputed on decode, since it is
296// referentially transparent.
297func (r *RawConfig) GobEncode() ([]byte, error) {
298 r.lock.Lock()
299 defer r.lock.Unlock()
300
301 data := gobRawConfig{
302 Key: r.Key,
303 Raw: r.Raw,
304 }
305
306 var buf bytes.Buffer
307 if err := gob.NewEncoder(&buf).Encode(data); err != nil {
308 return nil, err
309 }
310
311 return buf.Bytes(), nil
312}
313
314type gobRawConfig struct {
315 Key string
316 Raw map[string]interface{}
317}
318
319// langEvalConfig returns the evaluation configuration we use to execute.
320func langEvalConfig(vs map[string]ast.Variable) *hil.EvalConfig {
321 funcMap := make(map[string]ast.Function)
322 for k, v := range Funcs() {
323 funcMap[k] = v
324 }
325 funcMap["lookup"] = interpolationFuncLookup(vs)
326 funcMap["keys"] = interpolationFuncKeys(vs)
327 funcMap["values"] = interpolationFuncValues(vs)
328
329 return &hil.EvalConfig{
330 GlobalScope: &ast.BasicScope{
331 VarMap: vs,
332 FuncMap: funcMap,
333 },
334 }
335}
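
Putting the pieces above together: NewRawConfig detects the interpolations in a raw map, Interpolate evaluates them against HIL variables, and Config/UnknownKeys expose the result. A minimal sketch; the keys and values below are made up for illustration:

package main

import (
	"fmt"

	"github.com/hashicorp/hil/ast"
	"github.com/hashicorp/terraform/config"
)

func main() {
	raw, err := config.NewRawConfig(map[string]interface{}{
		"ami":   "${var.ami}",
		"count": 2,
	})
	if err != nil {
		panic(err)
	}

	// Keys must match what DetectVariables found (see raw.Variables).
	vars := map[string]ast.Variable{
		"var.ami": {Type: ast.TypeString, Value: "ami-123456"},
	}
	if err := raw.Interpolate(vars); err != nil {
		panic(err)
	}

	fmt.Println(raw.Config()["ami"]) // "ami-123456"
	fmt.Println(raw.UnknownKeys())   // empty: nothing evaluated to UnknownVariableValue
}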
diff --git a/vendor/github.com/hashicorp/terraform/config/resource_mode.go b/vendor/github.com/hashicorp/terraform/config/resource_mode.go
new file mode 100644
index 0000000..877c6e8
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/resource_mode.go
@@ -0,0 +1,9 @@
1package config
2
3//go:generate stringer -type=ResourceMode -output=resource_mode_string.go resource_mode.go
4type ResourceMode int
5
6const (
7 ManagedResourceMode ResourceMode = iota
8 DataResourceMode
9)
diff --git a/vendor/github.com/hashicorp/terraform/config/resource_mode_string.go b/vendor/github.com/hashicorp/terraform/config/resource_mode_string.go
new file mode 100644
index 0000000..ea68b4f
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/resource_mode_string.go
@@ -0,0 +1,16 @@
1// Code generated by "stringer -type=ResourceMode -output=resource_mode_string.go resource_mode.go"; DO NOT EDIT.
2
3package config
4
5import "fmt"
6
7const _ResourceMode_name = "ManagedResourceModeDataResourceMode"
8
9var _ResourceMode_index = [...]uint8{0, 19, 35}
10
11func (i ResourceMode) String() string {
12 if i < 0 || i >= ResourceMode(len(_ResourceMode_index)-1) {
13 return fmt.Sprintf("ResourceMode(%d)", i)
14 }
15 return _ResourceMode_name[_ResourceMode_index[i]:_ResourceMode_index[i+1]]
16}
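
The generated String method above slices the packed name string for each ResourceMode constant defined in resource_mode.go and falls back to a numeric form for out-of-range values. For example (hypothetical snippet, not part of the vendored code):

package main

import (
	"fmt"

	"github.com/hashicorp/terraform/config"
)

func main() {
	fmt.Println(config.ManagedResourceMode) // "ManagedResourceMode"
	fmt.Println(config.DataResourceMode)    // "DataResourceMode"
	fmt.Println(config.ResourceMode(7))     // "ResourceMode(7)"
}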
diff --git a/vendor/github.com/hashicorp/terraform/config/testing.go b/vendor/github.com/hashicorp/terraform/config/testing.go
new file mode 100644
index 0000000..f7bfadd
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/testing.go
@@ -0,0 +1,15 @@
1package config
2
3import (
4 "testing"
5)
6
7// TestRawConfig is used to create a RawConfig for testing.
8func TestRawConfig(t *testing.T, c map[string]interface{}) *RawConfig {
9 cfg, err := NewRawConfig(c)
10 if err != nil {
11 t.Fatalf("err: %s", err)
12 }
13
14 return cfg
15}
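
TestRawConfig wraps NewRawConfig and fails the calling test on a parse error, so test code can use the result directly. A hypothetical caller (not part of the vendored file):

package config

import "testing"

func TestDetectsUserVariable(t *testing.T) {
	rc := TestRawConfig(t, map[string]interface{}{
		"foo": "${var.bar}",
	})

	if _, ok := rc.Variables["var.bar"]; !ok {
		t.Fatalf("expected var.bar to be detected, got: %#v", rc.Variables)
	}
}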