package schema
import (
+ "context"
"fmt"
"os"
"reflect"
"sort"
"strconv"
"strings"
+ "sync"
+ "github.com/hashicorp/terraform/config"
"github.com/hashicorp/terraform/terraform"
"github.com/mitchellh/copystructure"
"github.com/mitchellh/mapstructure"
// contextKey is the string type used for keys this package stores in a
// context.Context, preventing collisions with keys from other packages.
type contextKey string
+// protoVersionMu guards protoVersion5; the flag may be read (isProto5) and
+// written (SetProto5) from different goroutines.
+var (
+ protoVersionMu sync.Mutex
+ protoVersion5 = false
+)
+
+// isProto5 reports whether the plugin-protocol-version-5 feature flag has
+// been enabled via SetProto5. Safe for concurrent use.
+func isProto5() bool {
+	protoVersionMu.Lock()
+	defer protoVersionMu.Unlock()
+	return protoVersion5
+}
+
+// SetProto5 enables a feature flag for any internal changes required
+// to work with the new plugin protocol. This should not be called by
+// providers.
+func SetProto5() {
+	protoVersionMu.Lock()
+	defer protoVersionMu.Unlock()
+	protoVersion5 = true
+}
+
// Schema is used to describe the structure of a value.
//
// Read the documentation of the struct elements for important details.
//
Type ValueType
+ // ConfigMode allows for overriding the default behaviors for mapping
+ // schema entries onto configuration constructs.
+ //
+ // By default, the Elem field is used to choose whether a particular
+ // schema is represented in configuration as an attribute or as a nested
+ // block; if Elem is a *schema.Resource then it's a block and it's an
+ // attribute otherwise.
+ //
+ // If Elem is *schema.Resource then setting ConfigMode to
+ // SchemaConfigModeAttr will force it to be represented in configuration
+ // as an attribute, which means that the Computed flag can be used to
+ // provide default elements when the argument isn't set at all, while still
+ // allowing the user to force zero elements by explicitly assigning an
+ // empty list.
+ //
+ // When Computed is set without Optional, the attribute is not settable
+ // in configuration at all and so SchemaConfigModeAttr is the automatic
+ // behavior, and SchemaConfigModeBlock is not permitted.
+ ConfigMode SchemaConfigMode
+
// If one of these is set, then this item can come from the configuration.
// Both cannot be set. If Optional is set, the value is optional. If
// Required is set, the value is required.
// The following fields are only set for a TypeList, TypeSet, or TypeMap.
//
// Elem represents the element type. For a TypeMap, it must be a *Schema
- // with a Type of TypeString, otherwise it may be either a *Schema or a
+ // with a Type that is one of the primitives: TypeString, TypeBool,
+ // TypeInt, or TypeFloat. Otherwise it may be either a *Schema or a
// *Resource. If it is *Schema, the element type is just a simple value.
// If it is *Resource, the element type is a complex structure,
// potentially with its own lifecycle.
// used to wrap a complex structure, however less than one instance would
// cause instability.
//
- // PromoteSingle, if true, will allow single elements to be standalone
- // and promote them to a list. For example "foo" would be promoted to
- // ["foo"] automatically. This is primarily for legacy reasons and the
- // ambiguity is not recommended for new usage. Promotion is only allowed
- // for primitive element types.
- MaxItems int
- MinItems int
+ // If the field Optional is set to true then MinItems is ignored and thus
+ // effectively zero.
+ MaxItems int
+ MinItems int
+
+ // PromoteSingle originally allowed for a single element to be assigned
+ // where a primitive list was expected, but this no longer works from
+ // Terraform v0.12 onwards (Terraform Core will require a list to be set
+ // regardless of what this is set to) and so only applies to Terraform v0.11
+ // and earlier, and so should be used only to retain this functionality
+ // for those still using v0.11 with a provider that formerly used this.
PromoteSingle bool
// The following fields are only valid for a TypeSet type.
// guaranteed to be of the proper Schema type, and it can yield warnings or
// errors based on inspection of that value.
//
- // ValidateFunc currently only works for primitive types.
+ // ValidateFunc is honored only when the schema's Type is set to TypeInt,
+ // TypeFloat, TypeString, TypeBool, or TypeMap. It is ignored for all other types.
ValidateFunc SchemaValidateFunc
// Sensitive ensures that the attribute's value does not get displayed in
Sensitive bool
}
+// SchemaConfigMode is used to influence how a schema item is mapped into a
+// corresponding configuration construct, using the ConfigMode field of
+// Schema.
+type SchemaConfigMode int
+
+const (
+	// SchemaConfigModeAuto selects attribute vs. block representation
+	// automatically based on the Elem type (see the ConfigMode field docs
+	// on Schema).
+	SchemaConfigModeAuto SchemaConfigMode = iota
+	// SchemaConfigModeAttr forces the item to be represented in
+	// configuration as an attribute.
+	SchemaConfigModeAttr
+	// SchemaConfigModeBlock forces the item to be represented as a nested
+	// block; only valid when Elem is a *schema.Resource and the schema is
+	// not computed-only (enforced by internalValidate).
+	SchemaConfigModeBlock
+)
+
// SchemaDiffSuppressFunc is a function which can be used to determine
// whether a detected diff on a schema element is "valid" or not, and
// suppress it from the plan if necessary.
return d
}
+// InternalMap is used to aid in the transition to the new schema types and
+// protocol. The name is not meant to convey any usefulness, as this is not to
+// be used directly by any providers.
+//
+// Note this is a type alias (not a defined type), so it shares schemaMap's
+// method set.
+type InternalMap = schemaMap
+
// schemaMap is a wrapper that adds nice functions on top of schemas.
type schemaMap map[string]*Schema
s *terraform.InstanceState,
c *terraform.ResourceConfig,
customizeDiff CustomizeDiffFunc,
- meta interface{}) (*terraform.InstanceDiff, error) {
+ meta interface{},
+ handleRequiresNew bool) (*terraform.InstanceDiff, error) {
result := new(terraform.InstanceDiff)
result.Attributes = make(map[string]*terraform.ResourceAttrDiff)
}
}
- // If the diff requires a new resource, then we recompute the diff
- // so we have the complete new resource diff, and preserve the
- // RequiresNew fields where necessary so the user knows exactly what
- // caused that.
- if result.RequiresNew() {
- // Create the new diff
- result2 := new(terraform.InstanceDiff)
- result2.Attributes = make(map[string]*terraform.ResourceAttrDiff)
-
- // Preserve the DestroyTainted flag
- result2.DestroyTainted = result.DestroyTainted
+ if handleRequiresNew {
+ // If the diff requires a new resource, then we recompute the diff
+ // so we have the complete new resource diff, and preserve the
+ // RequiresNew fields where necessary so the user knows exactly what
+ // caused that.
+ if result.RequiresNew() {
+ // Create the new diff
+ result2 := new(terraform.InstanceDiff)
+ result2.Attributes = make(map[string]*terraform.ResourceAttrDiff)
- // Reset the data to not contain state. We have to call init()
- // again in order to reset the FieldReaders.
- d.state = nil
- d.init()
+ // Preserve the DestroyTainted flag
+ result2.DestroyTainted = result.DestroyTainted
- // Perform the diff again
- for k, schema := range m {
- err := m.diff(k, schema, result2, d, false)
- if err != nil {
- return nil, err
- }
- }
+ // Reset the data to not contain state. We have to call init()
+ // again in order to reset the FieldReaders.
+ d.state = nil
+ d.init()
- // Re-run customization
- if !result2.DestroyTainted && customizeDiff != nil {
- mc := m.DeepCopy()
- rd := newResourceDiff(mc, c, d.state, result2)
- if err := customizeDiff(rd, meta); err != nil {
- return nil, err
- }
- for _, k := range rd.UpdatedKeys() {
- err := m.diff(k, mc[k], result2, rd, false)
+ // Perform the diff again
+ for k, schema := range m {
+ err := m.diff(k, schema, result2, d, false)
if err != nil {
return nil, err
}
}
- }
- // Force all the fields to not force a new since we know what we
- // want to force new.
- for k, attr := range result2.Attributes {
- if attr == nil {
- continue
+ // Re-run customization
+ if !result2.DestroyTainted && customizeDiff != nil {
+ mc := m.DeepCopy()
+ rd := newResourceDiff(mc, c, d.state, result2)
+ if err := customizeDiff(rd, meta); err != nil {
+ return nil, err
+ }
+ for _, k := range rd.UpdatedKeys() {
+ err := m.diff(k, mc[k], result2, rd, false)
+ if err != nil {
+ return nil, err
+ }
+ }
}
- if attr.RequiresNew {
- attr.RequiresNew = false
- }
+ // Force all the fields to not force a new since we know what we
+ // want to force new.
+ for k, attr := range result2.Attributes {
+ if attr == nil {
+ continue
+ }
- if s != nil {
- attr.Old = s.Attributes[k]
- }
- }
+ if attr.RequiresNew {
+ attr.RequiresNew = false
+ }
- // Now copy in all the requires new diffs...
- for k, attr := range result.Attributes {
- if attr == nil {
- continue
+ if s != nil {
+ attr.Old = s.Attributes[k]
+ }
}
- newAttr, ok := result2.Attributes[k]
- if !ok {
- newAttr = attr
- }
+ // Now copy in all the requires new diffs...
+ for k, attr := range result.Attributes {
+ if attr == nil {
+ continue
+ }
- if attr.RequiresNew {
- newAttr.RequiresNew = true
+ newAttr, ok := result2.Attributes[k]
+ if !ok {
+ newAttr = attr
+ }
+
+ if attr.RequiresNew {
+ newAttr.RequiresNew = true
+ }
+
+ result2.Attributes[k] = newAttr
}
- result2.Attributes[k] = newAttr
+ // And set the diff!
+ result = result2
}
- // And set the diff!
- result = result2
}
// Go through and detect all of the ComputedWhens now that we've
// from a unit test (and not in user-path code) to verify that a schema
// is properly built.
func (m schemaMap) InternalValidate(topSchemaMap schemaMap) error {
+	// attrsOnly=false: at the top level, schemas may still be represented
+	// as nested blocks (see internalValidate's ConfigMode checks).
+	return m.internalValidate(topSchemaMap, false)
+}
+
+func (m schemaMap) internalValidate(topSchemaMap schemaMap, attrsOnly bool) error {
if topSchemaMap == nil {
topSchemaMap = m
}
return fmt.Errorf("%s: One of optional, required, or computed must be set", k)
}
+ computedOnly := v.Computed && !v.Optional
+
+ switch v.ConfigMode {
+ case SchemaConfigModeBlock:
+ if _, ok := v.Elem.(*Resource); !ok {
+ return fmt.Errorf("%s: ConfigMode of block is allowed only when Elem is *schema.Resource", k)
+ }
+ if attrsOnly {
+ return fmt.Errorf("%s: ConfigMode of block cannot be used in child of schema with ConfigMode of attribute", k)
+ }
+ if computedOnly {
+ return fmt.Errorf("%s: ConfigMode of block cannot be used for computed schema", k)
+ }
+ case SchemaConfigModeAttr:
+ // anything goes
+ case SchemaConfigModeAuto:
+ // Since "Auto" for Elem: *Resource would create a nested block,
+ // and that's impossible inside an attribute, we require it to be
+ // explicitly overridden as mode "Attr" for clarity.
+ if _, ok := v.Elem.(*Resource); ok {
+ if attrsOnly {
+ return fmt.Errorf("%s: in *schema.Resource with ConfigMode of attribute, so must also have ConfigMode of attribute", k)
+ }
+ }
+ default:
+ return fmt.Errorf("%s: invalid ConfigMode value", k)
+ }
+
if v.Computed && v.Default != nil {
return fmt.Errorf("%s: Default must be nil if computed", k)
}
switch t := v.Elem.(type) {
case *Resource:
- if err := t.InternalValidate(topSchemaMap, true); err != nil {
+ attrsOnly := attrsOnly || v.ConfigMode == SchemaConfigModeAttr
+
+ if err := schemaMap(t.Schema).internalValidate(topSchemaMap, attrsOnly); err != nil {
return err
}
case *Schema:
for attrK, attrV := range unsupressedDiff.Attributes {
switch rd := d.(type) {
case *ResourceData:
- if schema.DiffSuppressFunc != nil &&
- attrV != nil &&
+ if schema.DiffSuppressFunc != nil && attrV != nil &&
schema.DiffSuppressFunc(attrK, attrV.Old, attrV.New, rd) {
- continue
+ // If this attr diff is suppressed, we may still need it in the
+ // overall diff if it's contained within a set. Rather than
+ // dropping the diff, make it a NOOP.
+ if !all {
+ continue
+ }
+
+ attrV = &terraform.ResourceAttrDiff{
+ Old: attrV.Old,
+ New: attrV.Old,
+ }
}
}
diff.Attributes[attrK] = attrV
return fmt.Errorf("%s: %s", k, err)
}
- if os == ns && !all {
+ if os == ns && !all && !computed {
		// They're the same value. If the old value is not blank or we
// have an ID, then return right away since we're already setup.
if os != "" || d.Id() != "" {
}
// Otherwise, only continue if we're computed
- if !schema.Computed && !computed {
+ if !schema.Computed {
return nil
}
}
input terraform.UIInput,
k string,
schema *Schema) (interface{}, error) {
- result, err := input.Input(&terraform.InputOpts{
+ result, err := input.Input(context.Background(), &terraform.InputOpts{
Id: k,
Query: k,
Description: schema.Description,
"%q: this field cannot be set", k)}
}
+ if raw == config.UnknownVariableValue {
+ // If the value is unknown then we can't validate it yet.
+ // In particular, this avoids spurious type errors where downstream
+ // validation code sees UnknownVariableValue as being just a string.
+ return nil, nil
+ }
+
err := m.validateConflictingAttributes(k, schema, c)
if err != nil {
return nil, []error{err}
return nil
}
- for _, conflicting_key := range schema.ConflictsWith {
- if _, ok := c.Get(conflicting_key); ok {
+ for _, conflictingKey := range schema.ConflictsWith {
+ if raw, ok := c.Get(conflictingKey); ok {
+ if raw == config.UnknownVariableValue {
+ // An unknown value might become unset (null) once known, so
+ // we must defer validation until it's known.
+ continue
+ }
return fmt.Errorf(
- "%q: conflicts with %s", k, conflicting_key)
+ "%q: conflicts with %s", k, conflictingKey)
}
}
raw interface{},
schema *Schema,
c *terraform.ResourceConfig) ([]string, []error) {
+ // first check if the list is wholly unknown
+ if s, ok := raw.(string); ok {
+ if s == config.UnknownVariableValue {
+ return nil, nil
+ }
+ }
+
// We use reflection to verify the slice because you can't
	// cast to []interface{} unless the slice is exactly that type.
rawV := reflect.ValueOf(raw)
raw interface{},
schema *Schema,
c *terraform.ResourceConfig) ([]string, []error) {
+ // first check if the list is wholly unknown
+ if s, ok := raw.(string); ok {
+ if s == config.UnknownVariableValue {
+ return nil, nil
+ }
+ }
+
// We use reflection to verify the slice because you can't
	// cast to []interface{} unless the slice is exactly that type.
rawV := reflect.ValueOf(raw)
}
decoded = n
case TypeInt:
- // Verify that we can parse this as an int
- var n int
- if err := mapstructure.WeakDecode(raw, &n); err != nil {
- return nil, []error{fmt.Errorf("%s: %s", k, err)}
+ switch {
+ case isProto5():
+ // We need to verify the type precisely, because WeakDecode will
+ // decode a float as an integer.
+
+ // the config shims only use int for integral number values
+ if v, ok := raw.(int); ok {
+ decoded = v
+ } else {
+ return nil, []error{fmt.Errorf("%s: must be a whole number, got %v", k, raw)}
+ }
+ default:
+ // Verify that we can parse this as an int
+ var n int
+ if err := mapstructure.WeakDecode(raw, &n); err != nil {
+ return nil, []error{fmt.Errorf("%s: %s", k, err)}
+ }
+ decoded = n
}
- decoded = n
case TypeFloat:
		// Verify that we can parse this as a float
var n float64