path: root/vendor/github.com/hashicorp/terraform/plans/objchange/normalize_obj.go
package objchange

import (
	"github.com/hashicorp/terraform/configs/configschema"
	"github.com/zclconf/go-cty/cty"
)

// NormalizeObjectFromLegacySDK takes an object that may have been generated
// by the legacy Terraform SDK (i.e. returned from a provider with the
// LegacyTypeSystem opt-out set) and does its best to normalize it for the
// assumptions we would normally enforce if the provider had not opted out.
//
// In particular, this function guarantees that a value representing a nested
// block will never itself be unknown or null, instead representing that as
// a non-null value that may contain null/unknown values.
//
// The input value must still conform to the implied type of the given schema,
// or else this function may produce garbage results or panic. This is usually
// okay because type consistency is enforced when deserializing the value
// returned from the provider over the RPC wire protocol anyway.
func NormalizeObjectFromLegacySDK(val cty.Value, schema *configschema.Block) cty.Value {
	if val == cty.NilVal || val.IsNull() {
		// This should never happen in reasonable use, but we'll allow it
		// and normalize to a null of the expected type rather than panicking
		// below.
		return cty.NullVal(schema.ImpliedType())
	}

	vals := make(map[string]cty.Value)
	for name := range schema.Attributes {
		// No normalization for attributes, since their being type-conformant
		// is all that we require.
		vals[name] = val.GetAttr(name)
	}
	for name, blockS := range schema.BlockTypes {
		lv := val.GetAttr(name)

		// The legacy SDK never generates dynamically-typed attributes, so our
		// normalization code doesn't deal with them; we still pass them
		// through untouched so that we don't interfere with objects generated
		// by other SDKs.
		if ty := blockS.Block.ImpliedType(); ty.HasDynamicTypes() {
			vals[name] = lv
			continue
		}

		switch blockS.Nesting {
		case configschema.NestingSingle, configschema.NestingGroup:
			if lv.IsKnown() {
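				// NestingGroup blocks are always non-null elsewhere in
				// Terraform, so if the legacy SDK returned a null here we
				// substitute the block type's empty value instead.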
				if lv.IsNull() && blockS.Nesting == configschema.NestingGroup {
					vals[name] = blockS.EmptyValue()
				} else {
					vals[name] = NormalizeObjectFromLegacySDK(lv, &blockS.Block)
				}
			} else {
				vals[name] = unknownBlockStub(&blockS.Block)
			}
		case configschema.NestingList:
			switch {
			case !lv.IsKnown():
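				// A wholly-unknown block is represented as a known
				// single-element list whose leaf attributes are unknown,
				// following the convention described on unknownBlockStub.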
				vals[name] = cty.ListVal([]cty.Value{unknownBlockStub(&blockS.Block)})
			case lv.IsNull() || lv.LengthInt() == 0:
				vals[name] = cty.ListValEmpty(blockS.Block.ImpliedType())
			default:
				subVals := make([]cty.Value, 0, lv.LengthInt())
				for it := lv.ElementIterator(); it.Next(); {
					_, subVal := it.Element()
					subVals = append(subVals, NormalizeObjectFromLegacySDK(subVal, &blockS.Block))
				}
				vals[name] = cty.ListVal(subVals)
			}
		case configschema.NestingSet:
			switch {
			case !lv.IsKnown():
				vals[name] = cty.SetVal([]cty.Value{unknownBlockStub(&blockS.Block)})
			case lv.IsNull() || lv.LengthInt() == 0:
				vals[name] = cty.SetValEmpty(blockS.Block.ImpliedType())
			default:
				subVals := make([]cty.Value, 0, lv.LengthInt())
				for it := lv.ElementIterator(); it.Next(); {
					_, subVal := it.Element()
					subVals = append(subVals, NormalizeObjectFromLegacySDK(subVal, &blockS.Block))
				}
				vals[name] = cty.SetVal(subVals)
			}
		default:
			// The legacy SDK doesn't support NestingMap, so we just assume
			// maps are always okay. (If not, we would've detected and returned
			// an error to the user before we got here.)
			vals[name] = lv
		}
	}
	return cty.ObjectVal(vals)
}
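
// exampleNormalizeLegacyList is an illustrative sketch and not part of the
// upstream file: the schema, block type name "rule", and attribute name
// "name" are hypothetical. It shows how a wholly-unknown NestingList block
// returned by a legacy provider is normalized into a known single-element
// list whose leaf attributes are unknown.
func exampleNormalizeLegacyList() cty.Value {
	schema := &configschema.Block{
		BlockTypes: map[string]*configschema.NestedBlock{
			"rule": {
				Nesting: configschema.NestingList,
				Block: configschema.Block{
					Attributes: map[string]*configschema.Attribute{
						"name": {Type: cty.String, Optional: true},
					},
				},
			},
		},
	}

	// A legacy provider may report the whole nested block as unknown; this
	// still conforms to the schema's implied type, as required above.
	raw := cty.ObjectVal(map[string]cty.Value{
		"rule": cty.UnknownVal(cty.List(cty.Object(map[string]cty.Type{
			"name": cty.String,
		}))),
	})

	// The result's "rule" attribute is a known, single-element list whose
	// "name" attribute is cty.UnknownVal(cty.String).
	return NormalizeObjectFromLegacySDK(raw, schema)
}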

// unknownBlockStub constructs an object value that approximates an unknown
// block by producing a known block object with all of its leaf attribute
// values set to unknown.
//
// Blocks themselves cannot be unknown, so if the legacy SDK tries to return
// such a thing, we'll use this result instead. This convention mimics how
// the dynamic block feature deals with being asked to iterate over an unknown
// value, because our value-checking functions already accept this convention
// as a special case.
func unknownBlockStub(schema *configschema.Block) cty.Value {
	vals := make(map[string]cty.Value)
	for name, attrS := range schema.Attributes {
		vals[name] = cty.UnknownVal(attrS.Type)
	}
	for name, blockS := range schema.BlockTypes {
		switch blockS.Nesting {
		case configschema.NestingSingle, configschema.NestingGroup:
			vals[name] = unknownBlockStub(&blockS.Block)
		case configschema.NestingList:
			// In principle we may be expected to produce a tuple value here,
			// if there are any dynamically-typed attributes in our nested block,
			// but the legacy SDK doesn't support that, so we just assume it'll
			// never be necessary to normalize those. (Incorrect usage in any
			// other SDK would be caught and returned as an error before we
			// get here.)
			vals[name] = cty.ListVal([]cty.Value{unknownBlockStub(&blockS.Block)})
		case configschema.NestingSet:
			vals[name] = cty.SetVal([]cty.Value{unknownBlockStub(&blockS.Block)})
		case configschema.NestingMap:
			// A nesting map can never be unknown since we then wouldn't know
			// what the keys are. (Legacy SDK doesn't support NestingMap anyway,
			// so this should never arise.)
			vals[name] = cty.MapValEmpty(blockS.Block.ImpliedType())
		}
	}
	return cty.ObjectVal(vals)
}
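
// exampleUnknownBlockStub is an illustrative sketch and not part of the
// upstream file: the attribute name "cidr" is hypothetical. It shows that the
// stub for an "unknown block" is itself a known object whose leaf attributes
// are unknown, rather than an unknown value at the block level.
func exampleUnknownBlockStub() cty.Value {
	block := &configschema.Block{
		Attributes: map[string]*configschema.Attribute{
			"cidr": {Type: cty.String, Optional: true},
		},
	}

	// Equivalent to:
	//   cty.ObjectVal(map[string]cty.Value{
	//       "cidr": cty.UnknownVal(cty.String),
	//   })
	return unknownBlockStub(block)
}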