summaryrefslogtreecommitdiff
path: root/vendor/github.com/hashicorp/terraform/config
diff options
context:
space:
mode:
Diffstat (limited to 'vendor/github.com/hashicorp/terraform/config')
-rw-r--r--vendor/github.com/hashicorp/terraform/config/append.go86
-rw-r--r--vendor/github.com/hashicorp/terraform/config/config.go1096
-rw-r--r--vendor/github.com/hashicorp/terraform/config/config_string.go338
-rw-r--r--vendor/github.com/hashicorp/terraform/config/config_terraform.go117
-rw-r--r--vendor/github.com/hashicorp/terraform/config/config_tree.go43
-rw-r--r--vendor/github.com/hashicorp/terraform/config/import_tree.go113
-rw-r--r--vendor/github.com/hashicorp/terraform/config/interpolate.go386
-rw-r--r--vendor/github.com/hashicorp/terraform/config/interpolate_funcs.go1346
-rw-r--r--vendor/github.com/hashicorp/terraform/config/interpolate_walk.go283
-rw-r--r--vendor/github.com/hashicorp/terraform/config/lang.go11
-rw-r--r--vendor/github.com/hashicorp/terraform/config/loader.go224
-rw-r--r--vendor/github.com/hashicorp/terraform/config/loader_hcl.go1091
-rw-r--r--vendor/github.com/hashicorp/terraform/config/merge.go193
-rw-r--r--vendor/github.com/hashicorp/terraform/config/module/copy_dir.go114
-rw-r--r--vendor/github.com/hashicorp/terraform/config/module/get.go71
-rw-r--r--vendor/github.com/hashicorp/terraform/config/module/inode.go21
-rw-r--r--vendor/github.com/hashicorp/terraform/config/module/inode_freebsd.go21
-rw-r--r--vendor/github.com/hashicorp/terraform/config/module/inode_windows.go8
-rw-r--r--vendor/github.com/hashicorp/terraform/config/module/module.go7
-rw-r--r--vendor/github.com/hashicorp/terraform/config/module/testing.go38
-rw-r--r--vendor/github.com/hashicorp/terraform/config/module/tree.go428
-rw-r--r--vendor/github.com/hashicorp/terraform/config/module/tree_gob.go57
-rw-r--r--vendor/github.com/hashicorp/terraform/config/module/validate_provider_alias.go118
-rw-r--r--vendor/github.com/hashicorp/terraform/config/provisioner_enums.go40
-rw-r--r--vendor/github.com/hashicorp/terraform/config/raw_config.go335
-rw-r--r--vendor/github.com/hashicorp/terraform/config/resource_mode.go9
-rw-r--r--vendor/github.com/hashicorp/terraform/config/resource_mode_string.go16
-rw-r--r--vendor/github.com/hashicorp/terraform/config/testing.go15
28 files changed, 6625 insertions, 0 deletions
diff --git a/vendor/github.com/hashicorp/terraform/config/append.go b/vendor/github.com/hashicorp/terraform/config/append.go
new file mode 100644
index 00000000..5f4e89ee
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/append.go
@@ -0,0 +1,86 @@
+package config
+
+// Append appends one configuration to another.
+//
+// Append assumes that both configurations will not have
+// conflicting variables, resources, etc. If they do, the
+// problems will be caught in the validation phase.
+//
+// It is possible that c1, c2 on their own are not valid. For
+// example, a resource in c2 may reference a variable in c1. But
+// together, they would be valid.
+func Append(c1, c2 *Config) (*Config, error) {
+ c := new(Config)
+
+ // Append unknown keys, but keep them unique since it is a set
+ unknowns := make(map[string]struct{})
+ for _, k := range c1.unknownKeys {
+ _, present := unknowns[k]
+ if !present {
+ unknowns[k] = struct{}{}
+ c.unknownKeys = append(c.unknownKeys, k)
+ }
+ }
+
+ for _, k := range c2.unknownKeys {
+ _, present := unknowns[k]
+ if !present {
+ unknowns[k] = struct{}{}
+ c.unknownKeys = append(c.unknownKeys, k)
+ }
+ }
+
+ c.Atlas = c1.Atlas
+ if c2.Atlas != nil {
+ c.Atlas = c2.Atlas
+ }
+
+ // merge Terraform blocks
+ if c1.Terraform != nil {
+ c.Terraform = c1.Terraform
+ if c2.Terraform != nil {
+ c.Terraform.Merge(c2.Terraform)
+ }
+ } else {
+ c.Terraform = c2.Terraform
+ }
+
+ if len(c1.Modules) > 0 || len(c2.Modules) > 0 {
+ c.Modules = make(
+ []*Module, 0, len(c1.Modules)+len(c2.Modules))
+ c.Modules = append(c.Modules, c1.Modules...)
+ c.Modules = append(c.Modules, c2.Modules...)
+ }
+
+ if len(c1.Outputs) > 0 || len(c2.Outputs) > 0 {
+ c.Outputs = make(
+ []*Output, 0, len(c1.Outputs)+len(c2.Outputs))
+ c.Outputs = append(c.Outputs, c1.Outputs...)
+ c.Outputs = append(c.Outputs, c2.Outputs...)
+ }
+
+ if len(c1.ProviderConfigs) > 0 || len(c2.ProviderConfigs) > 0 {
+ c.ProviderConfigs = make(
+ []*ProviderConfig,
+ 0, len(c1.ProviderConfigs)+len(c2.ProviderConfigs))
+ c.ProviderConfigs = append(c.ProviderConfigs, c1.ProviderConfigs...)
+ c.ProviderConfigs = append(c.ProviderConfigs, c2.ProviderConfigs...)
+ }
+
+ if len(c1.Resources) > 0 || len(c2.Resources) > 0 {
+ c.Resources = make(
+ []*Resource,
+ 0, len(c1.Resources)+len(c2.Resources))
+ c.Resources = append(c.Resources, c1.Resources...)
+ c.Resources = append(c.Resources, c2.Resources...)
+ }
+
+ if len(c1.Variables) > 0 || len(c2.Variables) > 0 {
+ c.Variables = make(
+ []*Variable, 0, len(c1.Variables)+len(c2.Variables))
+ c.Variables = append(c.Variables, c1.Variables...)
+ c.Variables = append(c.Variables, c2.Variables...)
+ }
+
+ return c, nil
+}
diff --git a/vendor/github.com/hashicorp/terraform/config/config.go b/vendor/github.com/hashicorp/terraform/config/config.go
new file mode 100644
index 00000000..9a764ace
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/config.go
@@ -0,0 +1,1096 @@
+// The config package is responsible for loading and validating the
+// configuration.
+package config
+
+import (
+ "fmt"
+ "regexp"
+ "strconv"
+ "strings"
+
+ "github.com/hashicorp/go-multierror"
+ "github.com/hashicorp/hil"
+ "github.com/hashicorp/hil/ast"
+ "github.com/hashicorp/terraform/helper/hilmapstructure"
+ "github.com/mitchellh/reflectwalk"
+)
+
+// NameRegexp is the regular expression that all names (modules, providers,
+// resources, etc.) must follow.
+var NameRegexp = regexp.MustCompile(`(?i)\A[A-Z0-9_][A-Z0-9\-\_]*\z`)
+
+// Config is the configuration that comes from loading a collection
+// of Terraform templates.
+type Config struct {
+ // Dir is the path to the directory where this configuration was
+ // loaded from. If it is blank, this configuration wasn't loaded from
+ // any meaningful directory.
+ Dir string
+
+ Terraform *Terraform
+ Atlas *AtlasConfig
+ Modules []*Module
+ ProviderConfigs []*ProviderConfig
+ Resources []*Resource
+ Variables []*Variable
+ Outputs []*Output
+
+ // The fields below can be filled in by loaders for validation
+ // purposes.
+ unknownKeys []string
+}
+
+// AtlasConfig is the configuration for building in HashiCorp's Atlas.
+type AtlasConfig struct {
+ Name string
+ Include []string
+ Exclude []string
+}
+
+// Module is a module used within a configuration.
+//
+// This does not represent a module itself, this represents a module
+// call-site within an existing configuration.
+type Module struct {
+ Name string
+ Source string
+ RawConfig *RawConfig
+}
+
+// ProviderConfig is the configuration for a resource provider.
+//
+// For example, Terraform needs to set the AWS access keys for the AWS
+// resource provider.
+type ProviderConfig struct {
+ Name string
+ Alias string
+ RawConfig *RawConfig
+}
+
+// A resource represents a single Terraform resource in the configuration.
+// A Terraform resource is something that supports some or all of the
+// usual "create, read, update, delete" operations, depending on
+// the given Mode.
+type Resource struct {
+ Mode ResourceMode // which operations the resource supports
+ Name string
+ Type string
+ RawCount *RawConfig
+ RawConfig *RawConfig
+ Provisioners []*Provisioner
+ Provider string
+ DependsOn []string
+ Lifecycle ResourceLifecycle
+}
+
+// Copy returns a copy of this Resource. Helpful for avoiding shared
+// config pointers across multiple pieces of the graph that need to do
+// interpolation.
+func (r *Resource) Copy() *Resource {
+ n := &Resource{
+ Mode: r.Mode,
+ Name: r.Name,
+ Type: r.Type,
+ RawCount: r.RawCount.Copy(),
+ RawConfig: r.RawConfig.Copy(),
+ Provisioners: make([]*Provisioner, 0, len(r.Provisioners)),
+ Provider: r.Provider,
+ DependsOn: make([]string, len(r.DependsOn)),
+ Lifecycle: *r.Lifecycle.Copy(),
+ }
+ for _, p := range r.Provisioners {
+ n.Provisioners = append(n.Provisioners, p.Copy())
+ }
+ copy(n.DependsOn, r.DependsOn)
+ return n
+}
+
+// ResourceLifecycle is used to store the lifecycle tuning parameters
+// to allow customized behavior
+type ResourceLifecycle struct {
+ CreateBeforeDestroy bool `mapstructure:"create_before_destroy"`
+ PreventDestroy bool `mapstructure:"prevent_destroy"`
+ IgnoreChanges []string `mapstructure:"ignore_changes"`
+}
+
+// Copy returns a copy of this ResourceLifecycle
+func (r *ResourceLifecycle) Copy() *ResourceLifecycle {
+ n := &ResourceLifecycle{
+ CreateBeforeDestroy: r.CreateBeforeDestroy,
+ PreventDestroy: r.PreventDestroy,
+ IgnoreChanges: make([]string, len(r.IgnoreChanges)),
+ }
+ copy(n.IgnoreChanges, r.IgnoreChanges)
+ return n
+}
+
+// Provisioner is a configured provisioner step on a resource.
+type Provisioner struct {
+ Type string
+ RawConfig *RawConfig
+ ConnInfo *RawConfig
+
+ When ProvisionerWhen
+ OnFailure ProvisionerOnFailure
+}
+
+// Copy returns a copy of this Provisioner
+func (p *Provisioner) Copy() *Provisioner {
+ return &Provisioner{
+ Type: p.Type,
+ RawConfig: p.RawConfig.Copy(),
+ ConnInfo: p.ConnInfo.Copy(),
+ When: p.When,
+ OnFailure: p.OnFailure,
+ }
+}
+
+// Variable is a variable defined within the configuration.
+type Variable struct {
+ Name string
+ DeclaredType string `mapstructure:"type"`
+ Default interface{}
+ Description string
+}
+
+// Output is an output defined within the configuration. An output is
+// resulting data that is highlighted by Terraform when finished. An
+// output marked Sensitive will be output in a masked form following
+// application, but will still be available in state.
+type Output struct {
+ Name string
+ DependsOn []string
+ Description string
+ Sensitive bool
+ RawConfig *RawConfig
+}
+
+// VariableType is the type of value a variable is holding, and returned
+// by the Type() function on variables.
+type VariableType byte
+
+const (
+ VariableTypeUnknown VariableType = iota
+ VariableTypeString
+ VariableTypeList
+ VariableTypeMap
+)
+
+func (v VariableType) Printable() string {
+ switch v {
+ case VariableTypeString:
+ return "string"
+ case VariableTypeMap:
+ return "map"
+ case VariableTypeList:
+ return "list"
+ default:
+ return "unknown"
+ }
+}
+
+// ProviderConfigName returns the name of the provider configuration in
+// the given mapping that maps to the proper provider configuration
+// for this resource.
+func ProviderConfigName(t string, pcs []*ProviderConfig) string {
+ lk := ""
+ for _, v := range pcs {
+ k := v.Name
+ if strings.HasPrefix(t, k) && len(k) > len(lk) {
+ lk = k
+ }
+ }
+
+ return lk
+}
+
+// A unique identifier for this module.
+func (r *Module) Id() string {
+ return fmt.Sprintf("%s", r.Name)
+}
+
+// Count returns the count of this resource.
+func (r *Resource) Count() (int, error) {
+ raw := r.RawCount.Value()
+ count, ok := r.RawCount.Value().(string)
+ if !ok {
+ return 0, fmt.Errorf(
+ "expected count to be a string or int, got %T", raw)
+ }
+
+ v, err := strconv.ParseInt(count, 0, 0)
+ if err != nil {
+ return 0, err
+ }
+
+ return int(v), nil
+}
+
+// A unique identifier for this resource.
+func (r *Resource) Id() string {
+ switch r.Mode {
+ case ManagedResourceMode:
+ return fmt.Sprintf("%s.%s", r.Type, r.Name)
+ case DataResourceMode:
+ return fmt.Sprintf("data.%s.%s", r.Type, r.Name)
+ default:
+ panic(fmt.Errorf("unknown resource mode %s", r.Mode))
+ }
+}
+
+// Validate does some basic semantic checking of the configuration.
+func (c *Config) Validate() error {
+ if c == nil {
+ return nil
+ }
+
+ var errs []error
+
+ for _, k := range c.unknownKeys {
+ errs = append(errs, fmt.Errorf(
+ "Unknown root level key: %s", k))
+ }
+
+ // Validate the Terraform config
+ if tf := c.Terraform; tf != nil {
+ errs = append(errs, c.Terraform.Validate()...)
+ }
+
+ vars := c.InterpolatedVariables()
+ varMap := make(map[string]*Variable)
+ for _, v := range c.Variables {
+ if _, ok := varMap[v.Name]; ok {
+ errs = append(errs, fmt.Errorf(
+ "Variable '%s': duplicate found. Variable names must be unique.",
+ v.Name))
+ }
+
+ varMap[v.Name] = v
+ }
+
+ for k, _ := range varMap {
+ if !NameRegexp.MatchString(k) {
+ errs = append(errs, fmt.Errorf(
+ "variable %q: variable name must match regular expresion %s",
+ k, NameRegexp))
+ }
+ }
+
+ for _, v := range c.Variables {
+ if v.Type() == VariableTypeUnknown {
+ errs = append(errs, fmt.Errorf(
+ "Variable '%s': must be a string or a map",
+ v.Name))
+ continue
+ }
+
+ interp := false
+ fn := func(n ast.Node) (interface{}, error) {
+ // LiteralNode is a literal string (outside of a ${ ... } sequence).
+ // interpolationWalker skips most of these, but in particular it
+ // visits those that have escaped sequences (like $${foo}) as a
+ // signal that *some* processing is required on this string. For
+ // our purposes here though, this is fine and not an interpolation.
+ if _, ok := n.(*ast.LiteralNode); !ok {
+ interp = true
+ }
+ return "", nil
+ }
+
+ w := &interpolationWalker{F: fn}
+ if v.Default != nil {
+ if err := reflectwalk.Walk(v.Default, w); err == nil {
+ if interp {
+ errs = append(errs, fmt.Errorf(
+ "Variable '%s': cannot contain interpolations",
+ v.Name))
+ }
+ }
+ }
+ }
+
+ // Check for references to user variables that do not actually
+ // exist and record those errors.
+ for source, vs := range vars {
+ for _, v := range vs {
+ uv, ok := v.(*UserVariable)
+ if !ok {
+ continue
+ }
+
+ if _, ok := varMap[uv.Name]; !ok {
+ errs = append(errs, fmt.Errorf(
+ "%s: unknown variable referenced: '%s'. define it with 'variable' blocks",
+ source,
+ uv.Name))
+ }
+ }
+ }
+
+ // Check that all count variables are valid.
+ for source, vs := range vars {
+ for _, rawV := range vs {
+ switch v := rawV.(type) {
+ case *CountVariable:
+ if v.Type == CountValueInvalid {
+ errs = append(errs, fmt.Errorf(
+ "%s: invalid count variable: %s",
+ source,
+ v.FullKey()))
+ }
+ case *PathVariable:
+ if v.Type == PathValueInvalid {
+ errs = append(errs, fmt.Errorf(
+ "%s: invalid path variable: %s",
+ source,
+ v.FullKey()))
+ }
+ }
+ }
+ }
+
+ // Check that providers aren't declared multiple times.
+ providerSet := make(map[string]struct{})
+ for _, p := range c.ProviderConfigs {
+ name := p.FullName()
+ if _, ok := providerSet[name]; ok {
+ errs = append(errs, fmt.Errorf(
+ "provider.%s: declared multiple times, you can only declare a provider once",
+ name))
+ continue
+ }
+
+ providerSet[name] = struct{}{}
+ }
+
+ // Check that all references to modules are valid
+ modules := make(map[string]*Module)
+ dupped := make(map[string]struct{})
+ for _, m := range c.Modules {
+ // Check for duplicates
+ if _, ok := modules[m.Id()]; ok {
+ if _, ok := dupped[m.Id()]; !ok {
+ dupped[m.Id()] = struct{}{}
+
+ errs = append(errs, fmt.Errorf(
+ "%s: module repeated multiple times",
+ m.Id()))
+ }
+
+ // Already seen this module, just skip it
+ continue
+ }
+
+ modules[m.Id()] = m
+
+ // Check that the source has no interpolations
+ rc, err := NewRawConfig(map[string]interface{}{
+ "root": m.Source,
+ })
+ if err != nil {
+ errs = append(errs, fmt.Errorf(
+ "%s: module source error: %s",
+ m.Id(), err))
+ } else if len(rc.Interpolations) > 0 {
+ errs = append(errs, fmt.Errorf(
+ "%s: module source cannot contain interpolations",
+ m.Id()))
+ }
+
+ // Check that the name matches our regexp
+ if !NameRegexp.Match([]byte(m.Name)) {
+ errs = append(errs, fmt.Errorf(
+ "%s: module name can only contain letters, numbers, "+
+ "dashes, and underscores",
+ m.Id()))
+ }
+
+ // Check that the configuration can all be strings, lists or maps
+ raw := make(map[string]interface{})
+ for k, v := range m.RawConfig.Raw {
+ var strVal string
+ if err := hilmapstructure.WeakDecode(v, &strVal); err == nil {
+ raw[k] = strVal
+ continue
+ }
+
+ var mapVal map[string]interface{}
+ if err := hilmapstructure.WeakDecode(v, &mapVal); err == nil {
+ raw[k] = mapVal
+ continue
+ }
+
+ var sliceVal []interface{}
+ if err := hilmapstructure.WeakDecode(v, &sliceVal); err == nil {
+ raw[k] = sliceVal
+ continue
+ }
+
+ errs = append(errs, fmt.Errorf(
+ "%s: variable %s must be a string, list or map value",
+ m.Id(), k))
+ }
+
+ // Check for invalid count variables
+ for _, v := range m.RawConfig.Variables {
+ switch v.(type) {
+ case *CountVariable:
+ errs = append(errs, fmt.Errorf(
+ "%s: count variables are only valid within resources", m.Name))
+ case *SelfVariable:
+ errs = append(errs, fmt.Errorf(
+ "%s: self variables are only valid within resources", m.Name))
+ }
+ }
+
+ // Update the raw configuration to only contain the string values
+ m.RawConfig, err = NewRawConfig(raw)
+ if err != nil {
+ errs = append(errs, fmt.Errorf(
+ "%s: can't initialize configuration: %s",
+ m.Id(), err))
+ }
+ }
+ dupped = nil
+
+ // Check that all variables for modules reference modules that
+ // exist.
+ for source, vs := range vars {
+ for _, v := range vs {
+ mv, ok := v.(*ModuleVariable)
+ if !ok {
+ continue
+ }
+
+ if _, ok := modules[mv.Name]; !ok {
+ errs = append(errs, fmt.Errorf(
+ "%s: unknown module referenced: %s",
+ source,
+ mv.Name))
+ }
+ }
+ }
+
+ // Check that all references to resources are valid
+ resources := make(map[string]*Resource)
+ dupped = make(map[string]struct{})
+ for _, r := range c.Resources {
+ if _, ok := resources[r.Id()]; ok {
+ if _, ok := dupped[r.Id()]; !ok {
+ dupped[r.Id()] = struct{}{}
+
+ errs = append(errs, fmt.Errorf(
+ "%s: resource repeated multiple times",
+ r.Id()))
+ }
+ }
+
+ resources[r.Id()] = r
+ }
+ dupped = nil
+
+ // Validate resources
+ for n, r := range resources {
+ // Verify count variables
+ for _, v := range r.RawCount.Variables {
+ switch v.(type) {
+ case *CountVariable:
+ errs = append(errs, fmt.Errorf(
+ "%s: resource count can't reference count variable: %s",
+ n,
+ v.FullKey()))
+ case *SimpleVariable:
+ errs = append(errs, fmt.Errorf(
+ "%s: resource count can't reference variable: %s",
+ n,
+ v.FullKey()))
+
+ // Good
+ case *ModuleVariable:
+ case *ResourceVariable:
+ case *TerraformVariable:
+ case *UserVariable:
+
+ default:
+ errs = append(errs, fmt.Errorf(
+ "Internal error. Unknown type in count var in %s: %T",
+ n, v))
+ }
+ }
+
+ // Interpolate with a fixed number to verify that it's a number.
+ r.RawCount.interpolate(func(root ast.Node) (interface{}, error) {
+ // Execute the node but transform the AST so that it returns
+ // a fixed value of "5" for all interpolations.
+ result, err := hil.Eval(
+ hil.FixedValueTransform(
+ root, &ast.LiteralNode{Value: "5", Typex: ast.TypeString}),
+ nil)
+ if err != nil {
+ return "", err
+ }
+
+ return result.Value, nil
+ })
+ _, err := strconv.ParseInt(r.RawCount.Value().(string), 0, 0)
+ if err != nil {
+ errs = append(errs, fmt.Errorf(
+ "%s: resource count must be an integer",
+ n))
+ }
+ r.RawCount.init()
+
+ // Validate DependsOn
+ errs = append(errs, c.validateDependsOn(n, r.DependsOn, resources, modules)...)
+
+ // Verify provisioners
+ for _, p := range r.Provisioners {
+ // This validation checks that there are no splat variables
+ // referencing ourself. This currently is not allowed.
+
+ for _, v := range p.ConnInfo.Variables {
+ rv, ok := v.(*ResourceVariable)
+ if !ok {
+ continue
+ }
+
+ if rv.Multi && rv.Index == -1 && rv.Type == r.Type && rv.Name == r.Name {
+ errs = append(errs, fmt.Errorf(
+ "%s: connection info cannot contain splat variable "+
+ "referencing itself", n))
+ break
+ }
+ }
+
+ for _, v := range p.RawConfig.Variables {
+ rv, ok := v.(*ResourceVariable)
+ if !ok {
+ continue
+ }
+
+ if rv.Multi && rv.Index == -1 && rv.Type == r.Type && rv.Name == r.Name {
+ errs = append(errs, fmt.Errorf(
+ "%s: connection info cannot contain splat variable "+
+ "referencing itself", n))
+ break
+ }
+ }
+
+ // Check for invalid when/onFailure values, though this should be
+ // picked up by the loader we check here just in case.
+ if p.When == ProvisionerWhenInvalid {
+ errs = append(errs, fmt.Errorf(
+ "%s: provisioner 'when' value is invalid", n))
+ }
+ if p.OnFailure == ProvisionerOnFailureInvalid {
+ errs = append(errs, fmt.Errorf(
+ "%s: provisioner 'on_failure' value is invalid", n))
+ }
+ }
+
+ // Verify ignore_changes contains valid entries
+ for _, v := range r.Lifecycle.IgnoreChanges {
+ if strings.Contains(v, "*") && v != "*" {
+ errs = append(errs, fmt.Errorf(
+ "%s: ignore_changes does not support using a partial string "+
+ "together with a wildcard: %s", n, v))
+ }
+ }
+
+ // Verify ignore_changes has no interpolations
+ rc, err := NewRawConfig(map[string]interface{}{
+ "root": r.Lifecycle.IgnoreChanges,
+ })
+ if err != nil {
+ errs = append(errs, fmt.Errorf(
+ "%s: lifecycle ignore_changes error: %s",
+ n, err))
+ } else if len(rc.Interpolations) > 0 {
+ errs = append(errs, fmt.Errorf(
+ "%s: lifecycle ignore_changes cannot contain interpolations",
+ n))
+ }
+
+ // If it is a data source then it can't have provisioners
+ if r.Mode == DataResourceMode {
+ if _, ok := r.RawConfig.Raw["provisioner"]; ok {
+ errs = append(errs, fmt.Errorf(
+ "%s: data sources cannot have provisioners",
+ n))
+ }
+ }
+ }
+
+ for source, vs := range vars {
+ for _, v := range vs {
+ rv, ok := v.(*ResourceVariable)
+ if !ok {
+ continue
+ }
+
+ id := rv.ResourceId()
+ if _, ok := resources[id]; !ok {
+ errs = append(errs, fmt.Errorf(
+ "%s: unknown resource '%s' referenced in variable %s",
+ source,
+ id,
+ rv.FullKey()))
+ continue
+ }
+ }
+ }
+
+ // Check that all outputs are valid
+ {
+ found := make(map[string]struct{})
+ for _, o := range c.Outputs {
+ // Verify the output is new
+ if _, ok := found[o.Name]; ok {
+ errs = append(errs, fmt.Errorf(
+ "%s: duplicate output. output names must be unique.",
+ o.Name))
+ continue
+ }
+ found[o.Name] = struct{}{}
+
+ var invalidKeys []string
+ valueKeyFound := false
+ for k := range o.RawConfig.Raw {
+ if k == "value" {
+ valueKeyFound = true
+ continue
+ }
+ if k == "sensitive" {
+ if sensitive, ok := o.RawConfig.config[k].(bool); ok {
+ if sensitive {
+ o.Sensitive = true
+ }
+ continue
+ }
+
+ errs = append(errs, fmt.Errorf(
+ "%s: value for 'sensitive' must be boolean",
+ o.Name))
+ continue
+ }
+ if k == "description" {
+ if desc, ok := o.RawConfig.config[k].(string); ok {
+ o.Description = desc
+ continue
+ }
+
+ errs = append(errs, fmt.Errorf(
+ "%s: value for 'description' must be string",
+ o.Name))
+ continue
+ }
+ invalidKeys = append(invalidKeys, k)
+ }
+ if len(invalidKeys) > 0 {
+ errs = append(errs, fmt.Errorf(
+ "%s: output has invalid keys: %s",
+ o.Name, strings.Join(invalidKeys, ", ")))
+ }
+ if !valueKeyFound {
+ errs = append(errs, fmt.Errorf(
+ "%s: output is missing required 'value' key", o.Name))
+ }
+
+ for _, v := range o.RawConfig.Variables {
+ if _, ok := v.(*CountVariable); ok {
+ errs = append(errs, fmt.Errorf(
+ "%s: count variables are only valid within resources", o.Name))
+ }
+ }
+ }
+ }
+
+ // Check that all variables are in the proper context
+ for source, rc := range c.rawConfigs() {
+ walker := &interpolationWalker{
+ ContextF: c.validateVarContextFn(source, &errs),
+ }
+ if err := reflectwalk.Walk(rc.Raw, walker); err != nil {
+ errs = append(errs, fmt.Errorf(
+ "%s: error reading config: %s", source, err))
+ }
+ }
+
+ // Validate the self variable
+ for source, rc := range c.rawConfigs() {
+ // Ignore provisioners. This is a pretty brittle way to do this,
+ // but better than also repeating all the resources.
+ if strings.Contains(source, "provision") {
+ continue
+ }
+
+ for _, v := range rc.Variables {
+ if _, ok := v.(*SelfVariable); ok {
+ errs = append(errs, fmt.Errorf(
+ "%s: cannot contain self-reference %s", source, v.FullKey()))
+ }
+ }
+ }
+
+ if len(errs) > 0 {
+ return &multierror.Error{Errors: errs}
+ }
+
+ return nil
+}
+
+// InterpolatedVariables is a helper that returns a mapping of all the interpolated
+// variables within the configuration. This is used to verify references
+// are valid in the Validate step.
+func (c *Config) InterpolatedVariables() map[string][]InterpolatedVariable {
+ result := make(map[string][]InterpolatedVariable)
+ for source, rc := range c.rawConfigs() {
+ for _, v := range rc.Variables {
+ result[source] = append(result[source], v)
+ }
+ }
+ return result
+}
+
+// rawConfigs returns all of the RawConfigs that are available keyed by
+// a human-friendly source.
+func (c *Config) rawConfigs() map[string]*RawConfig {
+ result := make(map[string]*RawConfig)
+ for _, m := range c.Modules {
+ source := fmt.Sprintf("module '%s'", m.Name)
+ result[source] = m.RawConfig
+ }
+
+ for _, pc := range c.ProviderConfigs {
+ source := fmt.Sprintf("provider config '%s'", pc.Name)
+ result[source] = pc.RawConfig
+ }
+
+ for _, rc := range c.Resources {
+ source := fmt.Sprintf("resource '%s'", rc.Id())
+ result[source+" count"] = rc.RawCount
+ result[source+" config"] = rc.RawConfig
+
+ for i, p := range rc.Provisioners {
+ subsource := fmt.Sprintf(
+ "%s provisioner %s (#%d)",
+ source, p.Type, i+1)
+ result[subsource] = p.RawConfig
+ }
+ }
+
+ for _, o := range c.Outputs {
+ source := fmt.Sprintf("output '%s'", o.Name)
+ result[source] = o.RawConfig
+ }
+
+ return result
+}
+
+func (c *Config) validateVarContextFn(
+ source string, errs *[]error) interpolationWalkerContextFunc {
+ return func(loc reflectwalk.Location, node ast.Node) {
+ // If we're in a slice element, then its fine, since you can do
+ // anything in there.
+ if loc == reflectwalk.SliceElem {
+ return
+ }
+
+ // Otherwise, let's check if there is a splat resource variable
+ // at the top level in here. We do this by doing a transform that
+ // replaces everything with a noop node unless its a variable
+ // access or concat. This should turn the AST into a flat tree
+ // of Concat(Noop, ...). If there are any variables left that are
+ // multi-access, then it's still broken.
+ node = node.Accept(func(n ast.Node) ast.Node {
+ // If it is a concat or variable access, we allow it.
+ switch n.(type) {
+ case *ast.Output:
+ return n
+ case *ast.VariableAccess:
+ return n
+ }
+
+ // Otherwise, noop
+ return &noopNode{}
+ })
+
+ vars, err := DetectVariables(node)
+ if err != nil {
+ // Ignore it since this will be caught during parse. This
+ // actually probably should never happen by the time this
+ // is called, but it's okay.
+ return
+ }
+
+ for _, v := range vars {
+ rv, ok := v.(*ResourceVariable)
+ if !ok {
+ return
+ }
+
+ if rv.Multi && rv.Index == -1 {
+ *errs = append(*errs, fmt.Errorf(
+ "%s: use of the splat ('*') operator must be wrapped in a list declaration",
+ source))
+ }
+ }
+ }
+}
+
+func (c *Config) validateDependsOn(
+ n string,
+ v []string,
+ resources map[string]*Resource,
+ modules map[string]*Module) []error {
+ // Verify depends on points to resources that all exist
+ var errs []error
+ for _, d := range v {
+ // Check if we contain interpolations
+ rc, err := NewRawConfig(map[string]interface{}{
+ "value": d,
+ })
+ if err == nil && len(rc.Variables) > 0 {
+ errs = append(errs, fmt.Errorf(
+ "%s: depends on value cannot contain interpolations: %s",
+ n, d))
+ continue
+ }
+
+ // If it is a module, verify it is a module
+ if strings.HasPrefix(d, "module.") {
+ name := d[len("module."):]
+ if _, ok := modules[name]; !ok {
+ errs = append(errs, fmt.Errorf(
+ "%s: resource depends on non-existent module '%s'",
+ n, name))
+ }
+
+ continue
+ }
+
+ // Check resources
+ if _, ok := resources[d]; !ok {
+ errs = append(errs, fmt.Errorf(
+ "%s: resource depends on non-existent resource '%s'",
+ n, d))
+ }
+ }
+
+ return errs
+}
+
+func (m *Module) mergerName() string {
+ return m.Id()
+}
+
+func (m *Module) mergerMerge(other merger) merger {
+ m2 := other.(*Module)
+
+ result := *m
+ result.Name = m2.Name
+ result.RawConfig = result.RawConfig.merge(m2.RawConfig)
+
+ if m2.Source != "" {
+ result.Source = m2.Source
+ }
+
+ return &result
+}
+
+func (o *Output) mergerName() string {
+ return o.Name
+}
+
+func (o *Output) mergerMerge(m merger) merger {
+ o2 := m.(*Output)
+
+ result := *o
+ result.Name = o2.Name
+ result.Description = o2.Description
+ result.RawConfig = result.RawConfig.merge(o2.RawConfig)
+ result.Sensitive = o2.Sensitive
+ result.DependsOn = o2.DependsOn
+
+ return &result
+}
+
+func (c *ProviderConfig) GoString() string {
+ return fmt.Sprintf("*%#v", *c)
+}
+
+func (c *ProviderConfig) FullName() string {
+ if c.Alias == "" {
+ return c.Name
+ }
+
+ return fmt.Sprintf("%s.%s", c.Name, c.Alias)
+}
+
+func (c *ProviderConfig) mergerName() string {
+ return c.Name
+}
+
+func (c *ProviderConfig) mergerMerge(m merger) merger {
+ c2 := m.(*ProviderConfig)
+
+ result := *c
+ result.Name = c2.Name
+ result.RawConfig = result.RawConfig.merge(c2.RawConfig)
+
+ if c2.Alias != "" {
+ result.Alias = c2.Alias
+ }
+
+ return &result
+}
+
+func (r *Resource) mergerName() string {
+ return r.Id()
+}
+
+func (r *Resource) mergerMerge(m merger) merger {
+ r2 := m.(*Resource)
+
+ result := *r
+ result.Mode = r2.Mode
+ result.Name = r2.Name
+ result.Type = r2.Type
+ result.RawConfig = result.RawConfig.merge(r2.RawConfig)
+
+ if r2.RawCount.Value() != "1" {
+ result.RawCount = r2.RawCount
+ }
+
+ if len(r2.Provisioners) > 0 {
+ result.Provisioners = r2.Provisioners
+ }
+
+ return &result
+}
+
+// Merge merges two variables to create a new third variable.
+func (v *Variable) Merge(v2 *Variable) *Variable {
+ // Shallow copy the variable
+ result := *v
+
+ // The names should be the same, but the second name always wins.
+ result.Name = v2.Name
+
+ if v2.DeclaredType != "" {
+ result.DeclaredType = v2.DeclaredType
+ }
+ if v2.Default != nil {
+ result.Default = v2.Default
+ }
+ if v2.Description != "" {
+ result.Description = v2.Description
+ }
+
+ return &result
+}
+
+var typeStringMap = map[string]VariableType{
+ "string": VariableTypeString,
+ "map": VariableTypeMap,
+ "list": VariableTypeList,
+}
+
+// Type returns the type of variable this is.
+func (v *Variable) Type() VariableType {
+ if v.DeclaredType != "" {
+ declaredType, ok := typeStringMap[v.DeclaredType]
+ if !ok {
+ return VariableTypeUnknown
+ }
+
+ return declaredType
+ }
+
+ return v.inferTypeFromDefault()
+}
+
+// ValidateTypeAndDefault ensures that default variable value is compatible
+// with the declared type (if one exists), and that the type is one which is
+// known to Terraform
+func (v *Variable) ValidateTypeAndDefault() error {
+ // If an explicit type is declared, ensure it is valid
+ if v.DeclaredType != "" {
+ if _, ok := typeStringMap[v.DeclaredType]; !ok {
+ validTypes := []string{}
+ for k := range typeStringMap {
+ validTypes = append(validTypes, k)
+ }
+ return fmt.Errorf(
+ "Variable '%s' type must be one of [%s] - '%s' is not a valid type",
+ v.Name,
+ strings.Join(validTypes, ", "),
+ v.DeclaredType,
+ )
+ }
+ }
+
+ if v.DeclaredType == "" || v.Default == nil {
+ return nil
+ }
+
+ if v.inferTypeFromDefault() != v.Type() {
+ return fmt.Errorf("'%s' has a default value which is not of type '%s' (got '%s')",
+ v.Name, v.DeclaredType, v.inferTypeFromDefault().Printable())
+ }
+
+ return nil
+}
+
+func (v *Variable) mergerName() string {
+ return v.Name
+}
+
+func (v *Variable) mergerMerge(m merger) merger {
+ return v.Merge(m.(*Variable))
+}
+
+// Required tests whether a variable is required or not.
+func (v *Variable) Required() bool {
+ return v.Default == nil
+}
+
+// inferTypeFromDefault contains the logic for the old method of inferring
+// variable types - we can also use this for validating that the declared
+// type matches the type of the default value
+func (v *Variable) inferTypeFromDefault() VariableType {
+ if v.Default == nil {
+ return VariableTypeString
+ }
+
+ var s string
+ if err := hilmapstructure.WeakDecode(v.Default, &s); err == nil {
+ v.Default = s
+ return VariableTypeString
+ }
+
+ var m map[string]interface{}
+ if err := hilmapstructure.WeakDecode(v.Default, &m); err == nil {
+ v.Default = m
+ return VariableTypeMap
+ }
+
+ var l []interface{}
+ if err := hilmapstructure.WeakDecode(v.Default, &l); err == nil {
+ v.Default = l
+ return VariableTypeList
+ }
+
+ return VariableTypeUnknown
+}
+
+func (m ResourceMode) Taintable() bool {
+ switch m {
+ case ManagedResourceMode:
+ return true
+ case DataResourceMode:
+ return false
+ default:
+ panic(fmt.Errorf("unsupported ResourceMode value %s", m))
+ }
+}
diff --git a/vendor/github.com/hashicorp/terraform/config/config_string.go b/vendor/github.com/hashicorp/terraform/config/config_string.go
new file mode 100644
index 00000000..0b3abbcd
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/config_string.go
@@ -0,0 +1,338 @@
+package config
+
+import (
+ "bytes"
+ "fmt"
+ "sort"
+ "strings"
+)
+
+// TestString is a Stringer-like function that outputs a string that can
+// be used to easily compare multiple Config structures in unit tests.
+//
+// This function has no practical use outside of unit tests and debugging.
+func (c *Config) TestString() string {
+ if c == nil {
+ return "<nil config>"
+ }
+
+ var buf bytes.Buffer
+ if len(c.Modules) > 0 {
+ buf.WriteString("Modules:\n\n")
+ buf.WriteString(modulesStr(c.Modules))
+ buf.WriteString("\n\n")
+ }
+
+ if len(c.Variables) > 0 {
+ buf.WriteString("Variables:\n\n")
+ buf.WriteString(variablesStr(c.Variables))
+ buf.WriteString("\n\n")
+ }
+
+ if len(c.ProviderConfigs) > 0 {
+ buf.WriteString("Provider Configs:\n\n")
+ buf.WriteString(providerConfigsStr(c.ProviderConfigs))
+ buf.WriteString("\n\n")
+ }
+
+ if len(c.Resources) > 0 {
+ buf.WriteString("Resources:\n\n")
+ buf.WriteString(resourcesStr(c.Resources))
+ buf.WriteString("\n\n")
+ }
+
+ if len(c.Outputs) > 0 {
+ buf.WriteString("Outputs:\n\n")
+ buf.WriteString(outputsStr(c.Outputs))
+ buf.WriteString("\n")
+ }
+
+ return strings.TrimSpace(buf.String())
+}
+
+func terraformStr(t *Terraform) string {
+ result := ""
+
+ if b := t.Backend; b != nil {
+ result += fmt.Sprintf("backend (%s)\n", b.Type)
+
+ keys := make([]string, 0, len(b.RawConfig.Raw))
+ for k, _ := range b.RawConfig.Raw {
+ keys = append(keys, k)
+ }
+ sort.Strings(keys)
+
+ for _, k := range keys {
+ result += fmt.Sprintf(" %s\n", k)
+ }
+ }
+
+ return strings.TrimSpace(result)
+}
+
+func modulesStr(ms []*Module) string {
+ result := ""
+ order := make([]int, 0, len(ms))
+ ks := make([]string, 0, len(ms))
+ mapping := make(map[string]int)
+ for i, m := range ms {
+ k := m.Id()
+ ks = append(ks, k)
+ mapping[k] = i
+ }
+ sort.Strings(ks)
+ for _, k := range ks {
+ order = append(order, mapping[k])
+ }
+
+ for _, i := range order {
+ m := ms[i]
+ result += fmt.Sprintf("%s\n", m.Id())
+
+ ks := make([]string, 0, len(m.RawConfig.Raw))
+ for k, _ := range m.RawConfig.Raw {
+ ks = append(ks, k)
+ }
+ sort.Strings(ks)
+
+ result += fmt.Sprintf(" source = %s\n", m.Source)
+
+ for _, k := range ks {
+ result += fmt.Sprintf(" %s\n", k)
+ }
+ }
+
+ return strings.TrimSpace(result)
+}
+
+func outputsStr(os []*Output) string {
+ ns := make([]string, 0, len(os))
+ m := make(map[string]*Output)
+ for _, o := range os {
+ ns = append(ns, o.Name)
+ m[o.Name] = o
+ }
+ sort.Strings(ns)
+
+ result := ""
+ for _, n := range ns {
+ o := m[n]
+
+ result += fmt.Sprintf("%s\n", n)
+
+ if len(o.DependsOn) > 0 {
+ result += fmt.Sprintf(" dependsOn\n")
+ for _, d := range o.DependsOn {
+ result += fmt.Sprintf(" %s\n", d)
+ }
+ }
+
+ if len(o.RawConfig.Variables) > 0 {
+ result += fmt.Sprintf(" vars\n")
+ for _, rawV := range o.RawConfig.Variables {
+ kind := "unknown"
+ str := rawV.FullKey()
+
+ switch rawV.(type) {
+ case *ResourceVariable:
+ kind = "resource"
+ case *UserVariable:
+ kind = "user"
+ }
+
+ result += fmt.Sprintf(" %s: %s\n", kind, str)
+ }
+ }
+ }
+
+ return strings.TrimSpace(result)
+}
+
+// This helper turns a provider configs field into a deterministic
+// string value for comparison in tests.
+func providerConfigsStr(pcs []*ProviderConfig) string {
+ result := ""
+
+ ns := make([]string, 0, len(pcs))
+ m := make(map[string]*ProviderConfig)
+ for _, n := range pcs {
+ ns = append(ns, n.Name)
+ m[n.Name] = n
+ }
+ sort.Strings(ns)
+
+ for _, n := range ns {
+ pc := m[n]
+
+ result += fmt.Sprintf("%s\n", n)
+
+ keys := make([]string, 0, len(pc.RawConfig.Raw))
+ for k, _ := range pc.RawConfig.Raw {
+ keys = append(keys, k)
+ }
+ sort.Strings(keys)
+
+ for _, k := range keys {
+ result += fmt.Sprintf(" %s\n", k)
+ }
+
+ if len(pc.RawConfig.Variables) > 0 {
+ result += fmt.Sprintf(" vars\n")
+ for _, rawV := range pc.RawConfig.Variables {
+ kind := "unknown"
+ str := rawV.FullKey()
+
+ switch rawV.(type) {
+ case *ResourceVariable:
+ kind = "resource"
+ case *UserVariable:
+ kind = "user"
+ }
+
+ result += fmt.Sprintf(" %s: %s\n", kind, str)
+ }
+ }
+ }
+
+ return strings.TrimSpace(result)
+}
+
+// This helper turns a resources field into a deterministic
+// string value for comparison in tests.
+func resourcesStr(rs []*Resource) string {
+ result := ""
+ order := make([]int, 0, len(rs))
+ ks := make([]string, 0, len(rs))
+ mapping := make(map[string]int)
+ for i, r := range rs {
+ k := r.Id()
+ ks = append(ks, k)
+ mapping[k] = i
+ }
+ sort.Strings(ks)
+ for _, k := range ks {
+ order = append(order, mapping[k])
+ }
+
+ for _, i := range order {
+ r := rs[i]
+ result += fmt.Sprintf(
+ "%s (x%s)\n",
+ r.Id(),
+ r.RawCount.Value())
+
+ ks := make([]string, 0, len(r.RawConfig.Raw))
+ for k, _ := range r.RawConfig.Raw {
+ ks = append(ks, k)
+ }
+ sort.Strings(ks)
+
+ for _, k := range ks {
+ result += fmt.Sprintf(" %s\n", k)
+ }
+
+ if len(r.Provisioners) > 0 {
+ result += fmt.Sprintf(" provisioners\n")
+ for _, p := range r.Provisioners {
+ when := ""
+ if p.When != ProvisionerWhenCreate {
+ when = fmt.Sprintf(" (%s)", p.When.String())
+ }
+
+ result += fmt.Sprintf(" %s%s\n", p.Type, when)
+
+ if p.OnFailure != ProvisionerOnFailureFail {
+ result += fmt.Sprintf(" on_failure = %s\n", p.OnFailure.String())
+ }
+
+ ks := make([]string, 0, len(p.RawConfig.Raw))
+ for k, _ := range p.RawConfig.Raw {
+ ks = append(ks, k)
+ }
+ sort.Strings(ks)
+
+ for _, k := range ks {
+ result += fmt.Sprintf(" %s\n", k)
+ }
+ }
+ }
+
+ if len(r.DependsOn) > 0 {
+ result += fmt.Sprintf(" dependsOn\n")
+ for _, d := range r.DependsOn {
+ result += fmt.Sprintf(" %s\n", d)
+ }
+ }
+
+ if len(r.RawConfig.Variables) > 0 {
+ result += fmt.Sprintf(" vars\n")
+
+ ks := make([]string, 0, len(r.RawConfig.Variables))
+ for k, _ := range r.RawConfig.Variables {
+ ks = append(ks, k)
+ }
+ sort.Strings(ks)
+
+ for _, k := range ks {
+ rawV := r.RawConfig.Variables[k]
+ kind := "unknown"
+ str := rawV.FullKey()
+
+ switch rawV.(type) {
+ case *ResourceVariable:
+ kind = "resource"
+ case *UserVariable:
+ kind = "user"
+ }
+
+ result += fmt.Sprintf(" %s: %s\n", kind, str)
+ }
+ }
+ }
+
+ return strings.TrimSpace(result)
+}
+
+// This helper turns a variables field into a deterministic
+// string value for comparison in tests.
+func variablesStr(vs []*Variable) string {
+ result := ""
+ ks := make([]string, 0, len(vs))
+ m := make(map[string]*Variable)
+ for _, v := range vs {
+ ks = append(ks, v.Name)
+ m[v.Name] = v
+ }
+ sort.Strings(ks)
+
+ for _, k := range ks {
+ v := m[k]
+
+ required := ""
+ if v.Required() {
+ required = " (required)"
+ }
+
+ declaredType := ""
+ if v.DeclaredType != "" {
+ declaredType = fmt.Sprintf(" (%s)", v.DeclaredType)
+ }
+
+ if v.Default == nil || v.Default == "" {
+ v.Default = "<>"
+ }
+ if v.Description == "" {
+ v.Description = "<>"
+ }
+
+ result += fmt.Sprintf(
+ "%s%s%s\n %v\n %s\n",
+ k,
+ required,
+ declaredType,
+ v.Default,
+ v.Description)
+ }
+
+ return strings.TrimSpace(result)
+}
diff --git a/vendor/github.com/hashicorp/terraform/config/config_terraform.go b/vendor/github.com/hashicorp/terraform/config/config_terraform.go
new file mode 100644
index 00000000..8535c964
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/config_terraform.go
@@ -0,0 +1,117 @@
+package config
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/hashicorp/go-version"
+ "github.com/mitchellh/hashstructure"
+)
+
+// Terraform is the Terraform meta-configuration that can be present
+// in configuration files for configuring Terraform itself.
+type Terraform struct {
+ RequiredVersion string `hcl:"required_version"` // Required Terraform version (constraint)
+ Backend *Backend // See Backend struct docs
+}
+
+// Validate performs the validation for just the Terraform configuration.
+func (t *Terraform) Validate() []error {
+ var errs []error
+
+ if raw := t.RequiredVersion; raw != "" {
+ // Check that the value has no interpolations
+ rc, err := NewRawConfig(map[string]interface{}{
+ "root": raw,
+ })
+ if err != nil {
+ errs = append(errs, fmt.Errorf(
+ "terraform.required_version: %s", err))
+ } else if len(rc.Interpolations) > 0 {
+ errs = append(errs, fmt.Errorf(
+ "terraform.required_version: cannot contain interpolations"))
+ } else {
+ // Check it is valid
+ _, err := version.NewConstraint(raw)
+ if err != nil {
+ errs = append(errs, fmt.Errorf(
+ "terraform.required_version: invalid syntax: %s", err))
+ }
+ }
+ }
+
+ if t.Backend != nil {
+ errs = append(errs, t.Backend.Validate()...)
+ }
+
+ return errs
+}
+
+// Merge t with t2.
+// Any conflicting fields are overwritten by t2.
+func (t *Terraform) Merge(t2 *Terraform) {
+ if t2.RequiredVersion != "" {
+ t.RequiredVersion = t2.RequiredVersion
+ }
+
+ if t2.Backend != nil {
+ t.Backend = t2.Backend
+ }
+}
+
+// Backend is the configuration for the "backend" to use with Terraform.
+// A backend is responsible for all major behavior of Terraform's core.
+// The abstraction layer above the core (the "backend") allows for behavior
+// such as remote operation.
+type Backend struct {
+ Type string
+ RawConfig *RawConfig
+
+ // Hash is a unique hash code representing the original configuration
+ // of the backend. This won't be recomputed unless Rehash is called.
+ Hash uint64
+}
+
+// Rehash returns a unique content hash for this backend's configuration
+// as a uint64 value.
+func (b *Backend) Rehash() uint64 {
+ // If we have no backend, the value is zero
+ if b == nil {
+ return 0
+ }
+
+ // Use hashstructure to hash only our type with the config.
+ code, err := hashstructure.Hash(map[string]interface{}{
+ "type": b.Type,
+ "config": b.RawConfig.Raw,
+ }, nil)
+
+ // This should never happen since we have just some basic primitives
+ // so panic if there is an error.
+ if err != nil {
+ panic(err)
+ }
+
+ return code
+}
+
+func (b *Backend) Validate() []error {
+ if len(b.RawConfig.Interpolations) > 0 {
+ return []error{fmt.Errorf(strings.TrimSpace(errBackendInterpolations))}
+ }
+
+ return nil
+}
+
+const errBackendInterpolations = `
+terraform.backend: configuration cannot contain interpolations
+
+The backend configuration is loaded by Terraform extremely early, before
+the core of Terraform can be initialized. This is necessary because the backend
+dictates the behavior of that core. The core is what handles interpolation
+processing. Because of this, interpolations cannot be used in backend
+configuration.
+
+If you'd like to parameterize backend configuration, we recommend using
+partial configuration with the "-backend-config" flag to "terraform init".
+`
diff --git a/vendor/github.com/hashicorp/terraform/config/config_tree.go b/vendor/github.com/hashicorp/terraform/config/config_tree.go
new file mode 100644
index 00000000..08dc0fe9
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/config_tree.go
@@ -0,0 +1,43 @@
+package config
+
+// configTree represents a tree of configurations where the root is the
+// first file and its children are the configurations it has imported.
+type configTree struct {
+ Path string
+ Config *Config
+ Children []*configTree
+}
+
+// Flatten flattens the entire tree down to a single merged Config
+// structure.
+func (t *configTree) Flatten() (*Config, error) {
+ // No children is easy: we're already merged!
+ if len(t.Children) == 0 {
+ return t.Config, nil
+ }
+
+ // Depth-first, merge all the children first.
+ childConfigs := make([]*Config, len(t.Children))
+ for i, ct := range t.Children {
+ c, err := ct.Flatten()
+ if err != nil {
+ return nil, err
+ }
+
+ childConfigs[i] = c
+ }
+
+ // Merge all the children in order
+ config := childConfigs[0]
+ childConfigs = childConfigs[1:]
+ for _, config2 := range childConfigs {
+ var err error
+ config, err = Merge(config, config2)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ // Merge the final merged child config with our own
+ return Merge(config, t.Config)
+}
diff --git a/vendor/github.com/hashicorp/terraform/config/import_tree.go b/vendor/github.com/hashicorp/terraform/config/import_tree.go
new file mode 100644
index 00000000..37ec11a1
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/import_tree.go
@@ -0,0 +1,113 @@
+package config
+
+import (
+ "fmt"
+ "io"
+)
+
+// configurable is an interface that must be implemented by any configuration
+// formats of Terraform in order to return a *Config.
+type configurable interface {
+ Config() (*Config, error)
+}
+
+// importTree is the result of the first-pass load of the configuration
+// files. It is a tree of raw configurables and then any children (their
+// imports).
+//
+// An importTree can be turned into a configTree.
+type importTree struct {
+ Path string
+ Raw configurable
+ Children []*importTree
+}
+
+// This is the function type that must be implemented by the configuration
+// file loader to turn a single file into a configurable and any additional
+// imports.
+type fileLoaderFunc func(path string) (configurable, []string, error)
+
+// loadTree takes a single file and loads the entire importTree for that
+// file. This function detects what kind of configuration file it is an
+// executes the proper fileLoaderFunc.
+func loadTree(root string) (*importTree, error) {
+ var f fileLoaderFunc
+ switch ext(root) {
+ case ".tf", ".tf.json":
+ f = loadFileHcl
+ default:
+ }
+
+ if f == nil {
+ return nil, fmt.Errorf(
+ "%s: unknown configuration format. Use '.tf' or '.tf.json' extension",
+ root)
+ }
+
+ c, imps, err := f(root)
+ if err != nil {
+ return nil, err
+ }
+
+ children := make([]*importTree, len(imps))
+ for i, imp := range imps {
+ t, err := loadTree(imp)
+ if err != nil {
+ return nil, err
+ }
+
+ children[i] = t
+ }
+
+ return &importTree{
+ Path: root,
+ Raw: c,
+ Children: children,
+ }, nil
+}
+
+// Close releases any resources we might be holding open for the importTree.
+//
+// This can safely be called even while ConfigTree results are alive. The
+// importTree is not bound to these.
+func (t *importTree) Close() error {
+ if c, ok := t.Raw.(io.Closer); ok {
+ c.Close()
+ }
+ for _, ct := range t.Children {
+ ct.Close()
+ }
+
+ return nil
+}
+
+// ConfigTree traverses the importTree and turns each node into a *Config
+// object, ultimately returning a *configTree.
+func (t *importTree) ConfigTree() (*configTree, error) {
+ config, err := t.Raw.Config()
+ if err != nil {
+ return nil, fmt.Errorf(
+ "Error loading %s: %s",
+ t.Path,
+ err)
+ }
+
+ // Build our result
+ result := &configTree{
+ Path: t.Path,
+ Config: config,
+ }
+
+ // Build the config trees for the children
+ result.Children = make([]*configTree, len(t.Children))
+ for i, ct := range t.Children {
+ t, err := ct.ConfigTree()
+ if err != nil {
+ return nil, err
+ }
+
+ result.Children[i] = t
+ }
+
+ return result, nil
+}
diff --git a/vendor/github.com/hashicorp/terraform/config/interpolate.go b/vendor/github.com/hashicorp/terraform/config/interpolate.go
new file mode 100644
index 00000000..bbb35554
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/interpolate.go
@@ -0,0 +1,386 @@
+package config
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+
+ "github.com/hashicorp/hil/ast"
+)
+
+// An InterpolatedVariable is a variable reference within an interpolation.
+//
+// Implementations of this interface represents various sources where
+// variables can come from: user variables, resources, etc.
+type InterpolatedVariable interface {
+ FullKey() string
+}
+
+// CountVariable is a variable for referencing information about
+// the count.
+type CountVariable struct {
+ Type CountValueType
+ key string
+}
+
+// CountValueType is the type of the count variable that is referenced.
+type CountValueType byte
+
+const (
+ CountValueInvalid CountValueType = iota
+ CountValueIndex
+)
+
+// A ModuleVariable is a variable that is referencing the output
+// of a module, such as "${module.foo.bar}"
+type ModuleVariable struct {
+ Name string
+ Field string
+ key string
+}
+
+// A PathVariable is a variable that references path information about the
+// module.
+type PathVariable struct {
+ Type PathValueType
+ key string
+}
+
+type PathValueType byte
+
+const (
+ PathValueInvalid PathValueType = iota
+ PathValueCwd
+ PathValueModule
+ PathValueRoot
+)
+
+// A ResourceVariable is a variable that is referencing the field
+// of a resource, such as "${aws_instance.foo.ami}"
+type ResourceVariable struct {
+ Mode ResourceMode
+ Type string // Resource type, i.e. "aws_instance"
+ Name string // Resource name
+ Field string // Resource field
+
+ Multi bool // True if multi-variable: aws_instance.foo.*.id
+ Index int // Index for multi-variable: aws_instance.foo.1.id == 1
+
+ key string
+}
+
+// SelfVariable is a variable that is referencing the same resource
+// it is running on: "${self.address}"
+type SelfVariable struct {
+ Field string
+
+ key string
+}
+
+// SimpleVariable is an unprefixed variable, which can show up when users have
+// strings they are passing down to resources that use interpolation
+// internally. The template_file resource is an example of this.
+type SimpleVariable struct {
+ Key string
+}
+
+// TerraformVariable is a "terraform."-prefixed variable used to access
+// metadata about the Terraform run.
+type TerraformVariable struct {
+ Field string
+ key string
+}
+
+// A UserVariable is a variable that is referencing a user variable
+// that is inputted from outside the configuration. This looks like
+// "${var.foo}"
+type UserVariable struct {
+ Name string
+ Elem string
+
+ key string
+}
+
+func NewInterpolatedVariable(v string) (InterpolatedVariable, error) {
+ if strings.HasPrefix(v, "count.") {
+ return NewCountVariable(v)
+ } else if strings.HasPrefix(v, "path.") {
+ return NewPathVariable(v)
+ } else if strings.HasPrefix(v, "self.") {
+ return NewSelfVariable(v)
+ } else if strings.HasPrefix(v, "terraform.") {
+ return NewTerraformVariable(v)
+ } else if strings.HasPrefix(v, "var.") {
+ return NewUserVariable(v)
+ } else if strings.HasPrefix(v, "module.") {
+ return NewModuleVariable(v)
+ } else if !strings.ContainsRune(v, '.') {
+ return NewSimpleVariable(v)
+ } else {
+ return NewResourceVariable(v)
+ }
+}
+
+func NewCountVariable(key string) (*CountVariable, error) {
+ var fieldType CountValueType
+ parts := strings.SplitN(key, ".", 2)
+ switch parts[1] {
+ case "index":
+ fieldType = CountValueIndex
+ }
+
+ return &CountVariable{
+ Type: fieldType,
+ key: key,
+ }, nil
+}
+
+func (c *CountVariable) FullKey() string {
+ return c.key
+}
+
+func NewModuleVariable(key string) (*ModuleVariable, error) {
+ parts := strings.SplitN(key, ".", 3)
+ if len(parts) < 3 {
+ return nil, fmt.Errorf(
+ "%s: module variables must be three parts: module.name.attr",
+ key)
+ }
+
+ return &ModuleVariable{
+ Name: parts[1],
+ Field: parts[2],
+ key: key,
+ }, nil
+}
+
+func (v *ModuleVariable) FullKey() string {
+ return v.key
+}
+
+func (v *ModuleVariable) GoString() string {
+ return fmt.Sprintf("*%#v", *v)
+}
+
+func NewPathVariable(key string) (*PathVariable, error) {
+ var fieldType PathValueType
+ parts := strings.SplitN(key, ".", 2)
+ switch parts[1] {
+ case "cwd":
+ fieldType = PathValueCwd
+ case "module":
+ fieldType = PathValueModule
+ case "root":
+ fieldType = PathValueRoot
+ }
+
+ return &PathVariable{
+ Type: fieldType,
+ key: key,
+ }, nil
+}
+
+func (v *PathVariable) FullKey() string {
+ return v.key
+}
+
+func NewResourceVariable(key string) (*ResourceVariable, error) {
+ var mode ResourceMode
+ var parts []string
+ if strings.HasPrefix(key, "data.") {
+ mode = DataResourceMode
+ parts = strings.SplitN(key, ".", 4)
+ if len(parts) < 4 {
+ return nil, fmt.Errorf(
+ "%s: data variables must be four parts: data.TYPE.NAME.ATTR",
+ key)
+ }
+
+ // Don't actually need the "data." prefix for parsing, since it's
+ // always constant.
+ parts = parts[1:]
+ } else {
+ mode = ManagedResourceMode
+ parts = strings.SplitN(key, ".", 3)
+ if len(parts) < 3 {
+ return nil, fmt.Errorf(
+ "%s: resource variables must be three parts: TYPE.NAME.ATTR",
+ key)
+ }
+ }
+
+ field := parts[2]
+ multi := false
+ var index int
+
+ if idx := strings.Index(field, "."); idx != -1 {
+ indexStr := field[:idx]
+ multi = indexStr == "*"
+ index = -1
+
+ if !multi {
+ indexInt, err := strconv.ParseInt(indexStr, 0, 0)
+ if err == nil {
+ multi = true
+ index = int(indexInt)
+ }
+ }
+
+ if multi {
+ field = field[idx+1:]
+ }
+ }
+
+ return &ResourceVariable{
+ Mode: mode,
+ Type: parts[0],
+ Name: parts[1],
+ Field: field,
+ Multi: multi,
+ Index: index,
+ key: key,
+ }, nil
+}
+
+func (v *ResourceVariable) ResourceId() string {
+ switch v.Mode {
+ case ManagedResourceMode:
+ return fmt.Sprintf("%s.%s", v.Type, v.Name)
+ case DataResourceMode:
+ return fmt.Sprintf("data.%s.%s", v.Type, v.Name)
+ default:
+ panic(fmt.Errorf("unknown resource mode %s", v.Mode))
+ }
+}
+
+func (v *ResourceVariable) FullKey() string {
+ return v.key
+}
+
+func NewSelfVariable(key string) (*SelfVariable, error) {
+ field := key[len("self."):]
+
+ return &SelfVariable{
+ Field: field,
+
+ key: key,
+ }, nil
+}
+
+func (v *SelfVariable) FullKey() string {
+ return v.key
+}
+
+func (v *SelfVariable) GoString() string {
+ return fmt.Sprintf("*%#v", *v)
+}
+
+func NewSimpleVariable(key string) (*SimpleVariable, error) {
+ return &SimpleVariable{key}, nil
+}
+
+func (v *SimpleVariable) FullKey() string {
+ return v.Key
+}
+
+func (v *SimpleVariable) GoString() string {
+ return fmt.Sprintf("*%#v", *v)
+}
+
+func NewTerraformVariable(key string) (*TerraformVariable, error) {
+ field := key[len("terraform."):]
+ return &TerraformVariable{
+ Field: field,
+ key: key,
+ }, nil
+}
+
+func (v *TerraformVariable) FullKey() string {
+ return v.key
+}
+
+func (v *TerraformVariable) GoString() string {
+ return fmt.Sprintf("*%#v", *v)
+}
+
+func NewUserVariable(key string) (*UserVariable, error) {
+ name := key[len("var."):]
+ elem := ""
+ if idx := strings.Index(name, "."); idx > -1 {
+ elem = name[idx+1:]
+ name = name[:idx]
+ }
+
+ if len(elem) > 0 {
+ return nil, fmt.Errorf("Invalid dot index found: 'var.%s.%s'. Values in maps and lists can be referenced using square bracket indexing, like: 'var.mymap[\"key\"]' or 'var.mylist[1]'.", name, elem)
+ }
+
+ return &UserVariable{
+ key: key,
+
+ Name: name,
+ Elem: elem,
+ }, nil
+}
+
+func (v *UserVariable) FullKey() string {
+ return v.key
+}
+
+func (v *UserVariable) GoString() string {
+ return fmt.Sprintf("*%#v", *v)
+}
+
+// DetectVariables takes an AST root and returns all the interpolated
+// variables that are detected in the AST tree.
+func DetectVariables(root ast.Node) ([]InterpolatedVariable, error) {
+ var result []InterpolatedVariable
+ var resultErr error
+
+ // Visitor callback
+ fn := func(n ast.Node) ast.Node {
+ if resultErr != nil {
+ return n
+ }
+
+ switch vn := n.(type) {
+ case *ast.VariableAccess:
+ v, err := NewInterpolatedVariable(vn.Name)
+ if err != nil {
+ resultErr = err
+ return n
+ }
+ result = append(result, v)
+ case *ast.Index:
+ if va, ok := vn.Target.(*ast.VariableAccess); ok {
+ v, err := NewInterpolatedVariable(va.Name)
+ if err != nil {
+ resultErr = err
+ return n
+ }
+ result = append(result, v)
+ }
+ if va, ok := vn.Key.(*ast.VariableAccess); ok {
+ v, err := NewInterpolatedVariable(va.Name)
+ if err != nil {
+ resultErr = err
+ return n
+ }
+ result = append(result, v)
+ }
+ default:
+ return n
+ }
+
+ return n
+ }
+
+ // Visitor pattern
+ root.Accept(fn)
+
+ if resultErr != nil {
+ return nil, resultErr
+ }
+
+ return result, nil
+}
diff --git a/vendor/github.com/hashicorp/terraform/config/interpolate_funcs.go b/vendor/github.com/hashicorp/terraform/config/interpolate_funcs.go
new file mode 100644
index 00000000..b7933471
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/interpolate_funcs.go
@@ -0,0 +1,1346 @@
+package config
+
+import (
+ "crypto/md5"
+ "crypto/sha1"
+ "crypto/sha256"
+ "encoding/base64"
+ "encoding/hex"
+ "encoding/json"
+ "fmt"
+ "io/ioutil"
+ "math"
+ "net"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/apparentlymart/go-cidr/cidr"
+ "github.com/hashicorp/go-uuid"
+ "github.com/hashicorp/hil"
+ "github.com/hashicorp/hil/ast"
+ "github.com/mitchellh/go-homedir"
+)
+
+// stringSliceToVariableValue converts a string slice into the value
+// required to be returned from interpolation functions which return
+// TypeList.
+func stringSliceToVariableValue(values []string) []ast.Variable {
+ output := make([]ast.Variable, len(values))
+ for index, value := range values {
+ output[index] = ast.Variable{
+ Type: ast.TypeString,
+ Value: value,
+ }
+ }
+ return output
+}
+
+func listVariableValueToStringSlice(values []ast.Variable) ([]string, error) {
+ output := make([]string, len(values))
+ for index, value := range values {
+ if value.Type != ast.TypeString {
+ return []string{}, fmt.Errorf("list has non-string element (%T)", value.Type.String())
+ }
+ output[index] = value.Value.(string)
+ }
+ return output, nil
+}
+
+// Funcs is the mapping of built-in functions for configuration.
+func Funcs() map[string]ast.Function {
+ return map[string]ast.Function{
+ "basename": interpolationFuncBasename(),
+ "base64decode": interpolationFuncBase64Decode(),
+ "base64encode": interpolationFuncBase64Encode(),
+ "base64sha256": interpolationFuncBase64Sha256(),
+ "ceil": interpolationFuncCeil(),
+ "chomp": interpolationFuncChomp(),
+ "cidrhost": interpolationFuncCidrHost(),
+ "cidrnetmask": interpolationFuncCidrNetmask(),
+ "cidrsubnet": interpolationFuncCidrSubnet(),
+ "coalesce": interpolationFuncCoalesce(),
+ "coalescelist": interpolationFuncCoalesceList(),
+ "compact": interpolationFuncCompact(),
+ "concat": interpolationFuncConcat(),
+ "dirname": interpolationFuncDirname(),
+ "distinct": interpolationFuncDistinct(),
+ "element": interpolationFuncElement(),
+ "file": interpolationFuncFile(),
+ "matchkeys": interpolationFuncMatchKeys(),
+ "floor": interpolationFuncFloor(),
+ "format": interpolationFuncFormat(),
+ "formatlist": interpolationFuncFormatList(),
+ "index": interpolationFuncIndex(),
+ "join": interpolationFuncJoin(),
+ "jsonencode": interpolationFuncJSONEncode(),
+ "length": interpolationFuncLength(),
+ "list": interpolationFuncList(),
+ "lower": interpolationFuncLower(),
+ "map": interpolationFuncMap(),
+ "max": interpolationFuncMax(),
+ "md5": interpolationFuncMd5(),
+ "merge": interpolationFuncMerge(),
+ "min": interpolationFuncMin(),
+ "pathexpand": interpolationFuncPathExpand(),
+ "uuid": interpolationFuncUUID(),
+ "replace": interpolationFuncReplace(),
+ "sha1": interpolationFuncSha1(),
+ "sha256": interpolationFuncSha256(),
+ "signum": interpolationFuncSignum(),
+ "slice": interpolationFuncSlice(),
+ "sort": interpolationFuncSort(),
+ "split": interpolationFuncSplit(),
+ "substr": interpolationFuncSubstr(),
+ "timestamp": interpolationFuncTimestamp(),
+ "title": interpolationFuncTitle(),
+ "trimspace": interpolationFuncTrimSpace(),
+ "upper": interpolationFuncUpper(),
+ "zipmap": interpolationFuncZipMap(),
+ }
+}
+
+// interpolationFuncList creates a list from the parameters passed
+// to it.
+func interpolationFuncList() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{},
+ ReturnType: ast.TypeList,
+ Variadic: true,
+ VariadicType: ast.TypeAny,
+ Callback: func(args []interface{}) (interface{}, error) {
+ var outputList []ast.Variable
+
+ for i, val := range args {
+ switch v := val.(type) {
+ case string:
+ outputList = append(outputList, ast.Variable{Type: ast.TypeString, Value: v})
+ case []ast.Variable:
+ outputList = append(outputList, ast.Variable{Type: ast.TypeList, Value: v})
+ case map[string]ast.Variable:
+ outputList = append(outputList, ast.Variable{Type: ast.TypeMap, Value: v})
+ default:
+ return nil, fmt.Errorf("unexpected type %T for argument %d in list", v, i)
+ }
+ }
+
+ // we don't support heterogeneous types, so make sure all types match the first
+ if len(outputList) > 0 {
+ firstType := outputList[0].Type
+ for i, v := range outputList[1:] {
+ if v.Type != firstType {
+ return nil, fmt.Errorf("unexpected type %s for argument %d in list", v.Type, i+1)
+ }
+ }
+ }
+
+ return outputList, nil
+ },
+ }
+}
+
+// interpolationFuncMap creates a map from the parameters passed
+// to it.
+func interpolationFuncMap() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{},
+ ReturnType: ast.TypeMap,
+ Variadic: true,
+ VariadicType: ast.TypeAny,
+ Callback: func(args []interface{}) (interface{}, error) {
+ outputMap := make(map[string]ast.Variable)
+
+ if len(args)%2 != 0 {
+ return nil, fmt.Errorf("requires an even number of arguments, got %d", len(args))
+ }
+
+ var firstType *ast.Type
+ for i := 0; i < len(args); i += 2 {
+ key, ok := args[i].(string)
+ if !ok {
+ return nil, fmt.Errorf("argument %d represents a key, so it must be a string", i+1)
+ }
+ val := args[i+1]
+ variable, err := hil.InterfaceToVariable(val)
+ if err != nil {
+ return nil, err
+ }
+ // Enforce map type homogeneity
+ if firstType == nil {
+ firstType = &variable.Type
+ } else if variable.Type != *firstType {
+ return nil, fmt.Errorf("all map values must have the same type, got %s then %s", firstType.Printable(), variable.Type.Printable())
+ }
+ // Check for duplicate keys
+ if _, ok := outputMap[key]; ok {
+ return nil, fmt.Errorf("argument %d is a duplicate key: %q", i+1, key)
+ }
+ outputMap[key] = variable
+ }
+
+ return outputMap, nil
+ },
+ }
+}
+
+// interpolationFuncCompact strips a list of multi-variable values
+// (e.g. as returned by "split") of any empty strings.
+func interpolationFuncCompact() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeList},
+ ReturnType: ast.TypeList,
+ Variadic: false,
+ Callback: func(args []interface{}) (interface{}, error) {
+ inputList := args[0].([]ast.Variable)
+
+ var outputList []string
+ for _, val := range inputList {
+ strVal, ok := val.Value.(string)
+ if !ok {
+ return nil, fmt.Errorf(
+ "compact() may only be used with flat lists, this list contains elements of %s",
+ val.Type.Printable())
+ }
+ if strVal == "" {
+ continue
+ }
+
+ outputList = append(outputList, strVal)
+ }
+ return stringSliceToVariableValue(outputList), nil
+ },
+ }
+}
+
+// interpolationFuncCidrHost implements the "cidrhost" function that
+// fills in the host part of a CIDR range address to create a single
+// host address
+func interpolationFuncCidrHost() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{
+ ast.TypeString, // starting CIDR mask
+ ast.TypeInt, // host number to insert
+ },
+ ReturnType: ast.TypeString,
+ Variadic: false,
+ Callback: func(args []interface{}) (interface{}, error) {
+ hostNum := args[1].(int)
+ _, network, err := net.ParseCIDR(args[0].(string))
+ if err != nil {
+ return nil, fmt.Errorf("invalid CIDR expression: %s", err)
+ }
+
+ ip, err := cidr.Host(network, hostNum)
+ if err != nil {
+ return nil, err
+ }
+
+ return ip.String(), nil
+ },
+ }
+}
+
+// interpolationFuncCidrNetmask implements the "cidrnetmask" function
+// that returns the subnet mask in IP address notation.
+func interpolationFuncCidrNetmask() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{
+ ast.TypeString, // CIDR mask
+ },
+ ReturnType: ast.TypeString,
+ Variadic: false,
+ Callback: func(args []interface{}) (interface{}, error) {
+ _, network, err := net.ParseCIDR(args[0].(string))
+ if err != nil {
+ return nil, fmt.Errorf("invalid CIDR expression: %s", err)
+ }
+
+ return net.IP(network.Mask).String(), nil
+ },
+ }
+}
+
+// interpolationFuncCidrSubnet implements the "cidrsubnet" function that
+// adds an additional subnet of the given length onto an existing
+// IP block expressed in CIDR notation.
+func interpolationFuncCidrSubnet() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{
+ ast.TypeString, // starting CIDR mask
+ ast.TypeInt, // number of bits to extend the prefix
+ ast.TypeInt, // network number to append to the prefix
+ },
+ ReturnType: ast.TypeString,
+ Variadic: false,
+ Callback: func(args []interface{}) (interface{}, error) {
+ extraBits := args[1].(int)
+ subnetNum := args[2].(int)
+ _, network, err := net.ParseCIDR(args[0].(string))
+ if err != nil {
+ return nil, fmt.Errorf("invalid CIDR expression: %s", err)
+ }
+
+ // For portability with 32-bit systems where the subnet number
+ // will be a 32-bit int, we only allow extension of 32 bits in
+ // one call even if we're running on a 64-bit machine.
+ // (Of course, this is significant only for IPv6.)
+ if extraBits > 32 {
+ return nil, fmt.Errorf("may not extend prefix by more than 32 bits")
+ }
+
+ // cidr.Subnet validates that the extension fits in the
+ // remaining address bits and that subnetNum is in range.
+ newNetwork, err := cidr.Subnet(network, extraBits, subnetNum)
+ if err != nil {
+ return nil, err
+ }
+
+ return newNetwork.String(), nil
+ },
+ }
+}
+
+// interpolationFuncCoalesce implements the "coalesce" function that
+// returns the first non null / empty string from the provided input
+func interpolationFuncCoalesce() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeString},
+ ReturnType: ast.TypeString,
+ Variadic: true,
+ VariadicType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ // A single argument would make coalesce a no-op, so at
+ // least two are required.
+ if len(args) < 2 {
+ return nil, fmt.Errorf("must provide at least two arguments")
+ }
+ for _, arg := range args {
+ argument := arg.(string)
+
+ if argument != "" {
+ return argument, nil
+ }
+ }
+ // All arguments were empty strings.
+ return "", nil
+ },
+ }
+}
+
+// interpolationFuncCoalesceList implements the "coalescelist" function that
+// returns the first non empty list from the provided input
+func interpolationFuncCoalesceList() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeList},
+ ReturnType: ast.TypeList,
+ Variadic: true,
+ VariadicType: ast.TypeList,
+ Callback: func(args []interface{}) (interface{}, error) {
+ // As with coalesce, a single argument would be a no-op.
+ if len(args) < 2 {
+ return nil, fmt.Errorf("must provide at least two arguments")
+ }
+ for _, arg := range args {
+ argument := arg.([]ast.Variable)
+
+ if len(argument) > 0 {
+ return argument, nil
+ }
+ }
+ // All lists were empty; return an empty (non-nil) list.
+ return make([]ast.Variable, 0), nil
+ },
+ }
+}
+
+// interpolationFuncConcat implements the "concat" function that concatenates
+// multiple lists.
+func interpolationFuncConcat() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeList},
+ ReturnType: ast.TypeList,
+ Variadic: true,
+ VariadicType: ast.TypeList,
+ Callback: func(args []interface{}) (interface{}, error) {
+ var outputList []ast.Variable
+
+ // Flatten one level: append every element of every argument
+ // list, accepting only string, list, or map elements.
+ for _, arg := range args {
+ for _, v := range arg.([]ast.Variable) {
+ switch v.Type {
+ case ast.TypeString:
+ outputList = append(outputList, v)
+ case ast.TypeList:
+ outputList = append(outputList, v)
+ case ast.TypeMap:
+ outputList = append(outputList, v)
+ default:
+ return nil, fmt.Errorf("concat() does not support lists of %s", v.Type.Printable())
+ }
+ }
+ }
+
+ // we don't support heterogeneous types, so make sure all types match the first
+ if len(outputList) > 0 {
+ firstType := outputList[0].Type
+ for _, v := range outputList[1:] {
+ if v.Type != firstType {
+ return nil, fmt.Errorf("unexpected %s in list of %s", v.Type.Printable(), firstType.Printable())
+ }
+ }
+ }
+
+ return outputList, nil
+ },
+ }
+}
+
+// interpolationFuncFile implements the "file" function that allows
+// loading contents from a file.
+func interpolationFuncFile() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeString},
+ ReturnType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ // Expand a leading "~" to the user's home directory first.
+ path, err := homedir.Expand(args[0].(string))
+ if err != nil {
+ return "", err
+ }
+ data, err := ioutil.ReadFile(path)
+ if err != nil {
+ return "", err
+ }
+
+ return string(data), nil
+ },
+ }
+}
+
+// interpolationFuncFormat implements the "format" function that does
+// string formatting.
+func interpolationFuncFormat() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeString},
+ Variadic: true,
+ VariadicType: ast.TypeAny,
+ ReturnType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ // First argument is the fmt.Sprintf verb string; the rest
+ // are passed through as formatting operands.
+ format := args[0].(string)
+ return fmt.Sprintf(format, args[1:]...), nil
+ },
+ }
+}
+
+// interpolationFuncMax returns the maximum of the numeric arguments
+func interpolationFuncMax() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeFloat},
+ ReturnType: ast.TypeFloat,
+ Variadic: true,
+ VariadicType: ast.TypeFloat,
+ Callback: func(args []interface{}) (interface{}, error) {
+ // Seed with the first (required) argument, then fold in
+ // the variadic remainder.
+ max := args[0].(float64)
+
+ for i := 1; i < len(args); i++ {
+ max = math.Max(max, args[i].(float64))
+ }
+
+ return max, nil
+ },
+ }
+}
+
+// interpolationFuncMin returns the minimum of the numeric arguments
+func interpolationFuncMin() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeFloat},
+ ReturnType: ast.TypeFloat,
+ Variadic: true,
+ VariadicType: ast.TypeFloat,
+ Callback: func(args []interface{}) (interface{}, error) {
+ // Seed with the first (required) argument, then fold in
+ // the variadic remainder.
+ min := args[0].(float64)
+
+ for i := 1; i < len(args); i++ {
+ min = math.Min(min, args[i].(float64))
+ }
+
+ return min, nil
+ },
+ }
+}
+
+// interpolationFuncPathExpand will expand any `~`'s found with the full file path
+func interpolationFuncPathExpand() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeString},
+ ReturnType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ // homedir.Expand returns (string, error), matching the
+ // callback signature directly.
+ return homedir.Expand(args[0].(string))
+ },
+ }
+}
+
+// interpolationFuncCeil returns the least integer value greater than or equal to the argument
+func interpolationFuncCeil() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeFloat},
+ ReturnType: ast.TypeInt,
+ Callback: func(args []interface{}) (interface{}, error) {
+ return int(math.Ceil(args[0].(float64))), nil
+ },
+ }
+}
+
+// interpolationFuncChomp removes trailing newlines from the given string
+func interpolationFuncChomp() ast.Function {
+ // Matches any run of CRLF/CR/LF sequences anchored at end of string.
+ newlines := regexp.MustCompile(`(?:\r\n?|\n)*\z`)
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeString},
+ ReturnType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ return newlines.ReplaceAllString(args[0].(string), ""), nil
+ },
+ }
+}
+
+// interpolationFuncFloor returns the greatest integer value less than or equal to the argument
+func interpolationFuncFloor() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeFloat},
+ ReturnType: ast.TypeInt,
+ Callback: func(args []interface{}) (interface{}, error) {
+ return int(math.Floor(args[0].(float64))), nil
+ },
+ }
+}
+
+// interpolationFuncZipMap implements the "zipmap" function that builds
+// a map from a list of keys and an equal-length list of values.
+func interpolationFuncZipMap() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{
+ ast.TypeList, // Keys
+ ast.TypeList, // Values
+ },
+ ReturnType: ast.TypeMap,
+ Callback: func(args []interface{}) (interface{}, error) {
+ keys := args[0].([]ast.Variable)
+ values := args[1].([]ast.Variable)
+
+ if len(keys) != len(values) {
+ return nil, fmt.Errorf("count of keys (%d) does not match count of values (%d)",
+ len(keys), len(values))
+ }
+
+ // Map keys must be strings; validate before building.
+ for i, val := range keys {
+ if val.Type != ast.TypeString {
+ return nil, fmt.Errorf("keys must be strings. value at position %d is %s",
+ i, val.Type.Printable())
+ }
+ }
+
+ result := map[string]ast.Variable{}
+ for i := 0; i < len(keys); i++ {
+ result[keys[i].Value.(string)] = values[i]
+ }
+
+ return result, nil
+ },
+ }
+}
+
+// interpolationFuncFormatList implements the "formatlist" function that does
+// string formatting on lists.
+func interpolationFuncFormatList() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeAny},
+ Variadic: true,
+ VariadicType: ast.TypeAny,
+ ReturnType: ast.TypeList,
+ Callback: func(args []interface{}) (interface{}, error) {
+ // Make a copy of the variadic part of args
+ // to avoid modifying the original.
+ varargs := make([]interface{}, len(args)-1)
+ copy(varargs, args[1:])
+
+ // Verify we have some arguments
+ if len(varargs) == 0 {
+ return nil, fmt.Errorf("no arguments to formatlist")
+ }
+
+ // Convert arguments that are lists into slices.
+ // Confirm along the way that all lists have the same length (n).
+ var n int
+ listSeen := false
+ for i := 1; i < len(args); i++ {
+ s, ok := args[i].([]ast.Variable)
+ if !ok {
+ // Scalar argument: left as-is and repeated for
+ // every formatted output line below.
+ continue
+ }
+
+ // Mark that we've seen at least one list
+ listSeen = true
+
+ // Convert the ast.Variable to a slice of strings
+ parts, err := listVariableValueToStringSlice(s)
+ if err != nil {
+ return nil, err
+ }
+
+ // otherwise the list is sent down to be indexed
+ varargs[i-1] = parts
+
+ // Check length
+ if n == 0 {
+ // first list we've seen
+ n = len(parts)
+ continue
+ }
+ if n != len(parts) {
+ return nil, fmt.Errorf("format: mismatched list lengths: %d != %d", n, len(parts))
+ }
+ }
+
+ // If we didn't see a list this is an error because we
+ // can't determine the return value length.
+ if !listSeen {
+ return nil, fmt.Errorf(
+ "formatlist requires at least one list argument")
+ }
+
+ // Do the formatting.
+ format := args[0].(string)
+
+ // Generate a list of formatted strings.
+ list := make([]string, n)
+ fmtargs := make([]interface{}, len(varargs))
+ for i := 0; i < n; i++ {
+ for j, arg := range varargs {
+ switch arg := arg.(type) {
+ default:
+ // Scalar: reused verbatim on every row.
+ fmtargs[j] = arg
+ case []string:
+ // List: take this row's element.
+ fmtargs[j] = arg[i]
+ }
+ }
+ list[i] = fmt.Sprintf(format, fmtargs...)
+ }
+ return stringSliceToVariableValue(list), nil
+ },
+ }
+}
+
+// interpolationFuncIndex implements the "index" function that allows one to
+// find the index of a specific element in a list
+func interpolationFuncIndex() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeList, ast.TypeString},
+ ReturnType: ast.TypeInt,
+ Callback: func(args []interface{}) (interface{}, error) {
+ haystack := args[0].([]ast.Variable)
+ needle := args[1].(string)
+ // Linear scan; returns the first matching index.
+ for index, element := range haystack {
+ if needle == element.Value {
+ return index, nil
+ }
+ }
+ return nil, fmt.Errorf("Could not find '%s' in '%s'", needle, haystack)
+ },
+ }
+}
+
+// interpolationFuncDirname implements the "dirname" function.
+func interpolationFuncDirname() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeString},
+ ReturnType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ return filepath.Dir(args[0].(string)), nil
+ },
+ }
+}
+
+// interpolationFuncDistinct implements the "distinct" function that
+// removes duplicate elements from a list.
+func interpolationFuncDistinct() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeList},
+ ReturnType: ast.TypeList,
+ Variadic: true,
+ VariadicType: ast.TypeList,
+ Callback: func(args []interface{}) (interface{}, error) {
+ var list []string
+
+ // Declared variadic but only a single list is accepted.
+ if len(args) != 1 {
+ return nil, fmt.Errorf("accepts only one argument.")
+ }
+
+ if argument, ok := args[0].([]ast.Variable); ok {
+ for _, element := range argument {
+ if element.Type != ast.TypeString {
+ return nil, fmt.Errorf(
+ "only works for flat lists, this list contains elements of %s",
+ element.Type.Printable())
+ }
+ // appendIfMissing keeps first occurrence order.
+ list = appendIfMissing(list, element.Value.(string))
+ }
+ }
+
+ return stringSliceToVariableValue(list), nil
+ },
+ }
+}
+
+// helper function to add an element to a list, if it does not already exist
+func appendIfMissing(slice []string, element string) []string {
+ for _, ele := range slice {
+ if ele == element {
+ return slice
+ }
+ }
+ return append(slice, element)
+}
+
+// for two lists `keys` and `values` of equal length, returns all elements
+// from `values` where the corresponding element from `keys` is in `searchset`.
+func interpolationFuncMatchKeys() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeList, ast.TypeList, ast.TypeList},
+ ReturnType: ast.TypeList,
+ Callback: func(args []interface{}) (interface{}, error) {
+ output := make([]ast.Variable, 0)
+
+ // NOTE: argument order is values, keys, searchset.
+ values, _ := args[0].([]ast.Variable)
+ keys, _ := args[1].([]ast.Variable)
+ searchset, _ := args[2].([]ast.Variable)
+
+ if len(keys) != len(values) {
+ return nil, fmt.Errorf("length of keys and values should be equal")
+ }
+
+ for i, key := range keys {
+ for _, search := range searchset {
+ if res, err := compareSimpleVariables(key, search); err != nil {
+ return nil, err
+ } else if res == true {
+ output = append(output, values[i])
+ break
+ }
+ }
+ }
+ // if searchset is empty, then output is an empty list as well.
+ // if we haven't matched any key, then output is an empty list.
+ return output, nil
+ },
+ }
+}
+
+// compare two variables of the same type, i.e. non complex one, such as TypeList or TypeMap
+func compareSimpleVariables(a, b ast.Variable) (bool, error) {
+ if a.Type != b.Type {
+ return false, fmt.Errorf(
+ "won't compare items of different types %s and %s",
+ a.Type.Printable(), b.Type.Printable())
+ }
+ switch a.Type {
+ case ast.TypeString:
+ return a.Value.(string) == b.Value.(string), nil
+ default:
+ // Only flat string comparison is supported here.
+ return false, fmt.Errorf(
+ "can't compare items of type %s",
+ a.Type.Printable())
+ }
+}
+
+// interpolationFuncJoin implements the "join" function that allows
+// multi-variable values to be joined by some character.
+func interpolationFuncJoin() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeString},
+ Variadic: true,
+ VariadicType: ast.TypeList,
+ ReturnType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ var list []string
+
+ // Need the separator plus at least one list.
+ if len(args) < 2 {
+ return nil, fmt.Errorf("not enough arguments to join()")
+ }
+
+ // Flatten all variadic lists into one string slice.
+ for _, arg := range args[1:] {
+ for _, part := range arg.([]ast.Variable) {
+ if part.Type != ast.TypeString {
+ return nil, fmt.Errorf(
+ "only works on flat lists, this list contains elements of %s",
+ part.Type.Printable())
+ }
+ list = append(list, part.Value.(string))
+ }
+ }
+
+ // args[0] is the separator.
+ return strings.Join(list, args[0].(string)), nil
+ },
+ }
+}
+
+// interpolationFuncJSONEncode implements the "jsonencode" function that encodes
+// a string, list, or map as its JSON representation. For now, values in the
+// list or map may only be strings.
+func interpolationFuncJSONEncode() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeAny},
+ ReturnType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ var toEncode interface{}
+
+ switch typedArg := args[0].(type) {
+ case string:
+ toEncode = typedArg
+
+ case []ast.Variable:
+ // We preallocate the list here. Note that it's important that in
+ // the length 0 case, we have an empty list rather than nil, as
+ // they encode differently.
+ // XXX It would be nice to support arbitrarily nested data here. Is
+ // there an inverse of hil.InterfaceToVariable?
+ strings := make([]string, len(typedArg))
+
+ for i, v := range typedArg {
+ if v.Type != ast.TypeString {
+ return "", fmt.Errorf("list elements must be strings")
+ }
+ strings[i] = v.Value.(string)
+ }
+ toEncode = strings
+
+ case map[string]ast.Variable:
+ // XXX It would be nice to support arbitrarily nested data here. Is
+ // there an inverse of hil.InterfaceToVariable?
+ stringMap := make(map[string]string)
+ for k, v := range typedArg {
+ if v.Type != ast.TypeString {
+ return "", fmt.Errorf("map values must be strings")
+ }
+ stringMap[k] = v.Value.(string)
+ }
+ toEncode = stringMap
+
+ default:
+ return "", fmt.Errorf("unknown type for JSON encoding: %T", args[0])
+ }
+
+ jEnc, err := json.Marshal(toEncode)
+ if err != nil {
+ return "", fmt.Errorf("failed to encode JSON data '%s'", toEncode)
+ }
+ return string(jEnc), nil
+ },
+ }
+}
+
+// interpolationFuncReplace implements the "replace" function that does
+// string replacement.
+func interpolationFuncReplace() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeString, ast.TypeString, ast.TypeString},
+ ReturnType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ s := args[0].(string)
+ search := args[1].(string)
+ replace := args[2].(string)
+
+ // We search/replace using a regexp if the string is surrounded
+ // in forward slashes.
+ if len(search) > 1 && search[0] == '/' && search[len(search)-1] == '/' {
+ re, err := regexp.Compile(search[1 : len(search)-1])
+ if err != nil {
+ return nil, err
+ }
+
+ return re.ReplaceAllString(s, replace), nil
+ }
+
+ // Plain substring replacement of all occurrences.
+ return strings.Replace(s, search, replace, -1), nil
+ },
+ }
+}
+
+// interpolationFuncLength implements the "length" function that returns
+// the number of characters in a string, or elements in a list or map.
+func interpolationFuncLength() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeAny},
+ ReturnType: ast.TypeInt,
+ Variadic: false,
+ Callback: func(args []interface{}) (interface{}, error) {
+ subject := args[0]
+
+ switch typedSubject := subject.(type) {
+ case string:
+ // NOTE: this is byte length, not rune count.
+ return len(typedSubject), nil
+ case []ast.Variable:
+ return len(typedSubject), nil
+ case map[string]ast.Variable:
+ return len(typedSubject), nil
+ }
+
+ return 0, fmt.Errorf("arguments to length() must be a string, list, or map")
+ },
+ }
+}
+
+// interpolationFuncSignum implements the "signum" function that returns
+// -1, 0, or 1 depending on the sign of the integer argument.
+func interpolationFuncSignum() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeInt},
+ ReturnType: ast.TypeInt,
+ Variadic: false,
+ Callback: func(args []interface{}) (interface{}, error) {
+ num := args[0].(int)
+ switch {
+ case num < 0:
+ return -1, nil
+ case num > 0:
+ return +1, nil
+ default:
+ return 0, nil
+ }
+ },
+ }
+}
+
+// interpolationFuncSlice returns a portion of the input list between from, inclusive and to, exclusive.
+func interpolationFuncSlice() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{
+ ast.TypeList, // inputList
+ ast.TypeInt, // from
+ ast.TypeInt, // to
+ },
+ ReturnType: ast.TypeList,
+ Variadic: false,
+ Callback: func(args []interface{}) (interface{}, error) {
+ inputList := args[0].([]ast.Variable)
+ from := args[1].(int)
+ to := args[2].(int)
+
+ // Validate the half-open range [from, to).
+ if from < 0 {
+ return nil, fmt.Errorf("from index must be >= 0")
+ }
+ if to > len(inputList) {
+ return nil, fmt.Errorf("to index must be <= length of the input list")
+ }
+ if from > to {
+ return nil, fmt.Errorf("from index must be <= to index")
+ }
+
+ var outputList []ast.Variable
+ for i, val := range inputList {
+ if i >= from && i < to {
+ outputList = append(outputList, val)
+ }
+ }
+ return outputList, nil
+ },
+ }
+}
+
+// interpolationFuncSort sorts a list of strings lexicographically
+func interpolationFuncSort() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeList},
+ ReturnType: ast.TypeList,
+ Variadic: false,
+ Callback: func(args []interface{}) (interface{}, error) {
+ inputList := args[0].([]ast.Variable)
+
+ // Ensure that all the list members are strings and
+ // create a string slice from them
+ members := make([]string, len(inputList))
+ for i, val := range inputList {
+ if val.Type != ast.TypeString {
+ return nil, fmt.Errorf(
+ "sort() may only be used with lists of strings - %s at index %d",
+ val.Type.String(), i)
+ }
+
+ members[i] = val.Value.(string)
+ }
+
+ sort.Strings(members)
+ return stringSliceToVariableValue(members), nil
+ },
+ }
+}
+
+// interpolationFuncSplit implements the "split" function that allows
+// strings to split into multi-variable values
+func interpolationFuncSplit() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeString, ast.TypeString},
+ ReturnType: ast.TypeList,
+ Callback: func(args []interface{}) (interface{}, error) {
+ // args[0] is the separator, args[1] the subject string.
+ sep := args[0].(string)
+ s := args[1].(string)
+ elements := strings.Split(s, sep)
+ return stringSliceToVariableValue(elements), nil
+ },
+ }
+}
+
+// interpolationFuncLookup implements the "lookup" function that allows
+// dynamic lookups of map types within a Terraform configuration.
+func interpolationFuncLookup(vs map[string]ast.Variable) ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeMap, ast.TypeString},
+ ReturnType: ast.TypeString,
+ Variadic: true,
+ VariadicType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ // Optional third argument is a default for missing keys.
+ defaultValue := ""
+ defaultValueSet := false
+ if len(args) > 2 {
+ defaultValue = args[2].(string)
+ defaultValueSet = true
+ }
+ if len(args) > 3 {
+ return "", fmt.Errorf("lookup() takes no more than three arguments")
+ }
+ index := args[1].(string)
+ mapVar := args[0].(map[string]ast.Variable)
+
+ v, ok := mapVar[index]
+ if !ok {
+ if defaultValueSet {
+ return defaultValue, nil
+ } else {
+ return "", fmt.Errorf(
+ "lookup failed to find '%s'",
+ args[1].(string))
+ }
+ }
+ if v.Type != ast.TypeString {
+ return nil, fmt.Errorf(
+ "lookup() may only be used with flat maps, this map contains elements of %s",
+ v.Type.Printable())
+ }
+
+ return v.Value.(string), nil
+ },
+ }
+}
+
+// interpolationFuncElement implements the "element" function that allows
+// a specific index to be looked up in a multi-variable value. Note that this will
+// wrap if the index is larger than the number of elements in the multi-variable value.
+func interpolationFuncElement() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeList, ast.TypeString},
+ ReturnType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ list := args[0].([]ast.Variable)
+ if len(list) == 0 {
+ return nil, fmt.Errorf("element() may not be used with an empty list")
+ }
+
+ // The index arrives as a string and must be a non-negative integer.
+ index, err := strconv.Atoi(args[1].(string))
+ if err != nil || index < 0 {
+ return "", fmt.Errorf(
+ "invalid number for index, got %s", args[1])
+ }
+
+ // Wrap around via modulo rather than erroring on overflow.
+ resolvedIndex := index % len(list)
+
+ v := list[resolvedIndex]
+ if v.Type != ast.TypeString {
+ return nil, fmt.Errorf(
+ "element() may only be used with flat lists, this list contains elements of %s",
+ v.Type.Printable())
+ }
+ return v.Value, nil
+ },
+ }
+}
+
+// interpolationFuncKeys implements the "keys" function that yields a list of
+// keys of map types within a Terraform configuration.
+func interpolationFuncKeys(vs map[string]ast.Variable) ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeMap},
+ ReturnType: ast.TypeList,
+ Callback: func(args []interface{}) (interface{}, error) {
+ mapVar := args[0].(map[string]ast.Variable)
+ keys := make([]string, 0)
+
+ for k, _ := range mapVar {
+ keys = append(keys, k)
+ }
+
+ // Sort for deterministic output; Go map iteration order is random.
+ sort.Strings(keys)
+
+ // Keys are guaranteed to be strings
+ return stringSliceToVariableValue(keys), nil
+ },
+ }
+}
+
+// interpolationFuncValues implements the "values" function that yields a list of
+// values of map types within a Terraform configuration, ordered by sorted key.
+func interpolationFuncValues(vs map[string]ast.Variable) ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeMap},
+ ReturnType: ast.TypeList,
+ Callback: func(args []interface{}) (interface{}, error) {
+ mapVar := args[0].(map[string]ast.Variable)
+ keys := make([]string, 0)
+
+ for k, _ := range mapVar {
+ keys = append(keys, k)
+ }
+
+ // Sort keys so the value ordering is deterministic.
+ sort.Strings(keys)
+
+ values := make([]string, len(keys))
+ for index, key := range keys {
+ if value, ok := mapVar[key].Value.(string); ok {
+ values[index] = value
+ } else {
+ return "", fmt.Errorf("values(): %q has element with bad type %s",
+ key, mapVar[key].Type)
+ }
+ }
+
+ variable, err := hil.InterfaceToVariable(values)
+ if err != nil {
+ return nil, err
+ }
+
+ return variable.Value, nil
+ },
+ }
+}
+
+// interpolationFuncBasename implements the "basename" function.
+func interpolationFuncBasename() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeString},
+ ReturnType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ // filepath.Base returns the last element of the path.
+ return filepath.Base(args[0].(string)), nil
+ },
+ }
+}
+
+// interpolationFuncBase64Encode implements the "base64encode" function that
+// allows Base64 encoding.
+func interpolationFuncBase64Encode() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeString},
+ ReturnType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ s := args[0].(string)
+ // Standard (padded) Base64 alphabet.
+ return base64.StdEncoding.EncodeToString([]byte(s)), nil
+ },
+ }
+}
+
+// interpolationFuncBase64Decode implements the "base64decode" function that
+// allows Base64 decoding.
+func interpolationFuncBase64Decode() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeString},
+ ReturnType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ s := args[0].(string)
+ sDec, err := base64.StdEncoding.DecodeString(s)
+ if err != nil {
+ return "", fmt.Errorf("failed to decode base64 data '%s'", s)
+ }
+ return string(sDec), nil
+ },
+ }
+}
+
+// interpolationFuncLower implements the "lower" function that does
+// string lower casing.
+func interpolationFuncLower() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeString},
+ ReturnType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ toLower := args[0].(string)
+ return strings.ToLower(toLower), nil
+ },
+ }
+}
+
+// interpolationFuncMd5 implements the "md5" function that returns the
+// hexadecimal MD5 hash of the given string.
+func interpolationFuncMd5() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeString},
+ ReturnType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ s := args[0].(string)
+ h := md5.New()
+ h.Write([]byte(s))
+ hash := hex.EncodeToString(h.Sum(nil))
+ return hash, nil
+ },
+ }
+}
+
+// interpolationFuncMerge implements the "merge" function that merges
+// one or more maps; later arguments override earlier ones on key collision.
+func interpolationFuncMerge() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeMap},
+ ReturnType: ast.TypeMap,
+ Variadic: true,
+ VariadicType: ast.TypeMap,
+ Callback: func(args []interface{}) (interface{}, error) {
+ outputMap := make(map[string]ast.Variable)
+
+ for _, arg := range args {
+ for k, v := range arg.(map[string]ast.Variable) {
+ outputMap[k] = v
+ }
+ }
+
+ return outputMap, nil
+ },
+ }
+}
+
+// interpolationFuncUpper implements the "upper" function that does
+// string upper casing.
+func interpolationFuncUpper() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeString},
+ ReturnType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ toUpper := args[0].(string)
+ return strings.ToUpper(toUpper), nil
+ },
+ }
+}
+
+// interpolationFuncSha1 implements the "sha1" function that returns the
+// hexadecimal SHA-1 hash of the given string.
+func interpolationFuncSha1() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeString},
+ ReturnType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ s := args[0].(string)
+ h := sha1.New()
+ h.Write([]byte(s))
+ hash := hex.EncodeToString(h.Sum(nil))
+ return hash, nil
+ },
+ }
+}
+
+// interpolationFuncSha256 implements the "sha256" function that returns
+// the hexadecimal representation of the SHA-256 sum of the given string.
+func interpolationFuncSha256() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeString},
+ ReturnType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ s := args[0].(string)
+ h := sha256.New()
+ h.Write([]byte(s))
+ hash := hex.EncodeToString(h.Sum(nil))
+ return hash, nil
+ },
+ }
+}
+
+// interpolationFuncTrimSpace implements the "trimspace" function that
+// removes leading and trailing whitespace from the given string.
+func interpolationFuncTrimSpace() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeString},
+ ReturnType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ trimSpace := args[0].(string)
+ return strings.TrimSpace(trimSpace), nil
+ },
+ }
+}
+
+// interpolationFuncBase64Sha256 implements the "base64sha256" function
+// that returns the Base64-encoded SHA-256 sum of the given string.
+func interpolationFuncBase64Sha256() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeString},
+ ReturnType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ s := args[0].(string)
+ h := sha256.New()
+ h.Write([]byte(s))
+ shaSum := h.Sum(nil)
+ encoded := base64.StdEncoding.EncodeToString(shaSum[:])
+ return encoded, nil
+ },
+ }
+}
+
+// interpolationFuncUUID implements the "uuid" function that generates a
+// random UUID string on every evaluation.
+func interpolationFuncUUID() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{},
+ ReturnType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ return uuid.GenerateUUID()
+ },
+ }
+}
+
+// interpolationFuncTimestamp implements the "timestamp" function that
+// returns the current UTC time formatted as an RFC 3339 string.
+func interpolationFuncTimestamp() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{},
+ ReturnType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ return time.Now().UTC().Format(time.RFC3339), nil
+ },
+ }
+}
+
+// interpolationFuncTitle implements the "title" function that returns a copy of the
+// string in which first characters of all the words are capitalized.
+func interpolationFuncTitle() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{ast.TypeString},
+ ReturnType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ toTitle := args[0].(string)
+ return strings.Title(toTitle), nil
+ },
+ }
+}
+
+// interpolationFuncSubstr implements the "substr" function that allows strings
+// to be truncated.
+func interpolationFuncSubstr() ast.Function {
+ return ast.Function{
+ ArgTypes: []ast.Type{
+ ast.TypeString, // input string
+ ast.TypeInt, // offset
+ ast.TypeInt, // length
+ },
+ ReturnType: ast.TypeString,
+ Callback: func(args []interface{}) (interface{}, error) {
+ str := args[0].(string)
+ offset := args[1].(int)
+ length := args[2].(int)
+
+ // Interpret a negative offset as being equivalent to a positive
+ // offset taken from the end of the string.
+ if offset < 0 {
+ offset += len(str)
+ }
+
+ // Interpret a length of `-1` as indicating that the substring
+ // should start at `offset` and continue until the end of the
+ // string. Any other negative length (other than `-1`) is invalid.
+ if length == -1 {
+ length = len(str)
+ } else if length >= 0 {
+ // From here on, `length` is the exclusive end index.
+ length += offset
+ } else {
+ return nil, fmt.Errorf("length should be a non-negative integer")
+ }
+
+ if offset > len(str) {
+ return nil, fmt.Errorf("offset cannot be larger than the length of the string")
+ }
+
+ if length > len(str) {
+ return nil, fmt.Errorf("'offset + length' cannot be larger than the length of the string")
+ }
+
+ // NOTE: byte-based slicing; multi-byte runes may be split.
+ return str[offset:length], nil
+ },
+ }
+}
diff --git a/vendor/github.com/hashicorp/terraform/config/interpolate_walk.go b/vendor/github.com/hashicorp/terraform/config/interpolate_walk.go
new file mode 100644
index 00000000..ead3d102
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/interpolate_walk.go
@@ -0,0 +1,283 @@
+package config
+
+import (
+ "fmt"
+ "reflect"
+ "strings"
+
+ "github.com/hashicorp/hil"
+ "github.com/hashicorp/hil/ast"
+ "github.com/mitchellh/reflectwalk"
+)
+
+// interpolationWalker implements interfaces for the reflectwalk package
+// (github.com/mitchellh/reflectwalk) that can be used to automatically
+// execute a callback for an interpolation.
+type interpolationWalker struct {
+	// F is the function to call for every interpolation. It can be nil.
+	//
+	// If Replace is true, then the return value of F will be used to
+	// replace the interpolation.
+	F       interpolationWalkerFunc
+	Replace bool
+
+	// ContextF is an advanced version of F that also receives the
+	// location of where it is in the structure. This lets you do
+	// context-aware validation.
+	ContextF interpolationWalkerContextFunc
+
+	// Internal walk state, maintained by the Enter/Exit/Map/Slice
+	// callbacks below:
+	key        []string           // dotted key path components to the current value
+	lastValue  reflect.Value      // value of the map element currently being visited
+	loc        reflectwalk.Location // location reported by the most recent Enter
+	cs         []reflect.Value    // stack of container (map/slice) values
+	csKey      []reflect.Value    // stack of keys/indices into those containers
+	csData     interface{}        // key of the current map element (reflect.Value)
+	sliceIndex []int              // stack of slice indices for nested slices
+	unknownKeys []string          // keys whose values resolved to unknown/computed
+}
+
+// interpolationWalkerFunc is the callback called by interpolationWalk.
+// It is called with any interpolation found (as its parsed HIL AST node).
+// It should return a value to replace the interpolation with, along with
+// any errors.
+//
+// If Replace is set to false in interpolationWalker, then the replace
+// value can be anything as it will have no effect.
+type interpolationWalkerFunc func(ast.Node) (interface{}, error)
+
+// interpolationWalkerContextFunc is called by interpolationWalk if
+// ContextF is set. This receives both the interpolation and the location
+// where the interpolation is.
+//
+// This callback can be used to validate the location of the interpolation
+// within the configuration.
+type interpolationWalkerContextFunc func(reflectwalk.Location, ast.Node)
+
+// Enter records the location being descended into so that Primitive can
+// make location-dependent decisions and report context via ContextF.
+func (w *interpolationWalker) Enter(loc reflectwalk.Location) error {
+	w.loc = loc
+	return nil
+}
+
+// Exit pops whatever bookkeeping state was pushed for the location being
+// left, keeping the internal stacks aligned with the current walk path.
+func (w *interpolationWalker) Exit(loc reflectwalk.Location) error {
+	w.loc = reflectwalk.None
+
+	switch loc {
+	case reflectwalk.Map:
+		w.cs = w.cs[:len(w.cs)-1]
+	case reflectwalk.MapValue:
+		w.key = w.key[:len(w.key)-1]
+		w.csKey = w.csKey[:len(w.csKey)-1]
+	case reflectwalk.Slice:
+		// Split any values that need to be split
+		w.splitSlice()
+		w.cs = w.cs[:len(w.cs)-1]
+	case reflectwalk.SliceElem:
+		w.csKey = w.csKey[:len(w.csKey)-1]
+		w.sliceIndex = w.sliceIndex[:len(w.sliceIndex)-1]
+	}
+
+	return nil
+}
+
+// Map pushes the map onto the container stack; Primitive later uses the
+// top of this stack to write replacement keys/values back into the map.
+func (w *interpolationWalker) Map(m reflect.Value) error {
+	w.cs = append(w.cs, m)
+	return nil
+}
+
+// MapElem records the key and value of the map element about to be walked.
+// When the map lives inside a slice, the key path component is prefixed
+// with the current slice index ("<idx>.<key>") so unknownKeys entries are
+// unambiguous.
+func (w *interpolationWalker) MapElem(m, k, v reflect.Value) error {
+	w.csData = k
+	w.csKey = append(w.csKey, k)
+
+	if l := len(w.sliceIndex); l > 0 {
+		w.key = append(w.key, fmt.Sprintf("%d.%s", w.sliceIndex[l-1], k.String()))
+	} else {
+		w.key = append(w.key, k.String())
+	}
+
+	w.lastValue = v
+	return nil
+}
+
+// Slice pushes the slice onto the container stack; splitSlice consults the
+// top of this stack when the slice is exited.
+func (w *interpolationWalker) Slice(s reflect.Value) error {
+	w.cs = append(w.cs, s)
+	return nil
+}
+
+// SliceElem records the index of the slice element about to be walked so
+// that nested map keys can be qualified with it (see MapElem).
+func (w *interpolationWalker) SliceElem(i int, elem reflect.Value) error {
+	w.csKey = append(w.csKey, reflect.ValueOf(i))
+	w.sliceIndex = append(w.sliceIndex, i)
+	return nil
+}
+
+// Primitive is called for every leaf value in the walked structure. For
+// string values it parses the string as HIL, invokes the ContextF/F
+// callbacks on the resulting AST, and — when Replace is set — writes the
+// callback's result back into the enclosing container in place of the
+// original string. Non-string values are ignored.
+func (w *interpolationWalker) Primitive(v reflect.Value) error {
+	setV := v
+
+	// We only care about strings
+	if v.Kind() == reflect.Interface {
+		// Keep the settable interface value around so the replacement can
+		// be written through it; unwrap to inspect the concrete value.
+		setV = v
+		v = v.Elem()
+	}
+	if v.Kind() != reflect.String {
+		return nil
+	}
+
+	astRoot, err := hil.Parse(v.String())
+	if err != nil {
+		return err
+	}
+
+	// If the AST we got is just a literal string value with the same
+	// value then we ignore it. We have to check if its the same value
+	// because it is possible to input a string, get out a string, and
+	// have it be different. For example: "foo-$${bar}" turns into
+	// "foo-${bar}"
+	if n, ok := astRoot.(*ast.LiteralNode); ok {
+		if s, ok := n.Value.(string); ok && s == v.String() {
+			return nil
+		}
+	}
+
+	if w.ContextF != nil {
+		w.ContextF(w.loc, astRoot)
+	}
+
+	if w.F == nil {
+		return nil
+	}
+
+	replaceVal, err := w.F(astRoot)
+	if err != nil {
+		return fmt.Errorf(
+			"%s in:\n\n%s",
+			err, v.String())
+	}
+
+	if w.Replace {
+		// We need to determine if we need to remove this element
+		// if the result contains any "UnknownVariableValue" which is
+		// set if it is computed. This behavior is different if we're
+		// splitting (in a SliceElem) or not.
+		remove := false
+		if w.loc == reflectwalk.SliceElem {
+			switch typedReplaceVal := replaceVal.(type) {
+			case string:
+				if typedReplaceVal == UnknownVariableValue {
+					remove = true
+				}
+			case []interface{}:
+				if hasUnknownValue(typedReplaceVal) {
+					remove = true
+				}
+			}
+		} else if replaceVal == UnknownVariableValue {
+			remove = true
+		}
+
+		if remove {
+			// Record the dotted path; the value itself is still replaced
+			// below — callers consult unknownKeys afterwards.
+			w.unknownKeys = append(w.unknownKeys, strings.Join(w.key, "."))
+		}
+
+		resultVal := reflect.ValueOf(replaceVal)
+		switch w.loc {
+		case reflectwalk.MapKey:
+			m := w.cs[len(w.cs)-1]
+
+			// Delete the old value
+			var zero reflect.Value
+			m.SetMapIndex(w.csData.(reflect.Value), zero)
+
+			// Set the new key with the existing value
+			m.SetMapIndex(resultVal, w.lastValue)
+
+			// Set the key to be the new key
+			w.csData = resultVal
+		case reflectwalk.MapValue:
+			// If we're in a map, then the only way to set a map value is
+			// to set it directly.
+			m := w.cs[len(w.cs)-1]
+			mk := w.csData.(reflect.Value)
+			m.SetMapIndex(mk, resultVal)
+		default:
+			// Otherwise, we should be addressable
+			setV.Set(resultVal)
+		}
+	}
+
+	return nil
+}
+
+// replaceCurrent replaces the value currently being walked with v, by
+// writing into the parent container (the second-from-top entry on the
+// container stack) under the current key. Only map parents are handled.
+func (w *interpolationWalker) replaceCurrent(v reflect.Value) {
+	// if we don't have at least 2 values, we're not going to find a map, but
+	// we could panic.
+	if len(w.cs) < 2 {
+		return
+	}
+
+	c := w.cs[len(w.cs)-2]
+	switch c.Kind() {
+	case reflect.Map:
+		// Get the key and delete it
+		k := w.csKey[len(w.csKey)-1]
+		c.SetMapIndex(k, v)
+	}
+}
+
+// hasUnknownValue reports whether any element of the given list is the
+// UnknownVariableValue sentinel string, i.e. a computed value that is not
+// known until apply time. Non-string elements are ignored.
+func hasUnknownValue(variable []interface{}) bool {
+	for _, value := range variable {
+		if strVal, ok := value.(string); ok {
+			if strVal == UnknownVariableValue {
+				return true
+			}
+		}
+	}
+	return false
+}
+
+// splitSlice flattens the slice at the top of the container stack: any
+// element that is itself a list (an ast.Variable of TypeList, or a plain
+// []interface{}) is expanded in place so the result is a single flat
+// []interface{}. The flattened slice is written back into the parent
+// container via replaceCurrent. Called from Exit when leaving a slice.
+func (w *interpolationWalker) splitSlice() {
+	raw := w.cs[len(w.cs)-1]
+
+	var s []interface{}
+	switch v := raw.Interface().(type) {
+	case []interface{}:
+		s = v
+	case []map[string]interface{}:
+		// Slices of maps are never split.
+		return
+	}
+
+	// First pass: detect whether any element actually needs expanding;
+	// if not, avoid rebuilding the slice.
+	split := false
+	for _, val := range s {
+		if varVal, ok := val.(ast.Variable); ok && varVal.Type == ast.TypeList {
+			split = true
+		}
+		if _, ok := val.([]interface{}); ok {
+			split = true
+		}
+	}
+
+	if !split {
+		return
+	}
+
+	// Second pass: build the flattened result.
+	result := make([]interface{}, 0)
+	for _, v := range s {
+		switch val := v.(type) {
+		case ast.Variable:
+			switch val.Type {
+			case ast.TypeList:
+				elements := val.Value.([]ast.Variable)
+				for _, element := range elements {
+					result = append(result, element.Value)
+				}
+			default:
+				result = append(result, val.Value)
+			}
+		case []interface{}:
+			for _, element := range val {
+				result = append(result, element)
+			}
+		default:
+			result = append(result, v)
+		}
+	}
+
+	w.replaceCurrent(reflect.ValueOf(result))
+}
diff --git a/vendor/github.com/hashicorp/terraform/config/lang.go b/vendor/github.com/hashicorp/terraform/config/lang.go
new file mode 100644
index 00000000..890d30be
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/lang.go
@@ -0,0 +1,11 @@
+package config
+
+import (
+ "github.com/hashicorp/hil/ast"
+)
+
+// noopNode is a do-nothing implementation of the HIL ast.Node interface.
+// It is used as a placeholder where a node is required but carries no
+// meaning: Accept returns the node unchanged, Pos is the zero position,
+// and Type always reports TypeString with no error.
+type noopNode struct{}
+
+func (n *noopNode) Accept(ast.Visitor) ast.Node { return n }
+func (n *noopNode) Pos() ast.Pos { return ast.Pos{} }
+func (n *noopNode) Type(ast.Scope) (ast.Type, error) { return ast.TypeString, nil }
diff --git a/vendor/github.com/hashicorp/terraform/config/loader.go b/vendor/github.com/hashicorp/terraform/config/loader.go
new file mode 100644
index 00000000..0bfa89c2
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/loader.go
@@ -0,0 +1,224 @@
+package config
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+ "sort"
+ "strings"
+
+ "github.com/hashicorp/hcl"
+)
+
+// ErrNoConfigsFound is the error returned by LoadDir if no
+// Terraform configuration files were found in the given directory.
+type ErrNoConfigsFound struct {
+ Dir string
+}
+
+func (e ErrNoConfigsFound) Error() string {
+ return fmt.Sprintf(
+ "No Terraform configuration files found in directory: %s",
+ e.Dir)
+}
+
+// LoadJSON loads a single Terraform configuration from a given JSON document.
+//
+// The document must be a complete Terraform configuration. This function will
+// NOT try to load any additional modules so only the given document is loaded.
+func LoadJSON(raw json.RawMessage) (*Config, error) {
+	// HCL's parser accepts JSON input directly.
+	obj, err := hcl.Parse(string(raw))
+	if err != nil {
+		return nil, fmt.Errorf(
+			"Error parsing JSON document as HCL: %s", err)
+	}
+
+	// Start building the result
+	hclConfig := &hclConfigurable{
+		Root: obj,
+	}
+
+	return hclConfig.Config()
+}
+
+// LoadFile loads the Terraform configuration from a given file.
+//
+// This file can be any format that Terraform recognizes, and import any
+// other format that Terraform recognizes.
+func LoadFile(path string) (*Config, error) {
+	importTree, err := loadTree(path)
+	if err != nil {
+		return nil, err
+	}
+
+	configTree, err := importTree.ConfigTree()
+
+	// Close the importTree now so that we can clear resources as quickly
+	// as possible.
+	importTree.Close()
+
+	if err != nil {
+		return nil, err
+	}
+
+	// Flatten the import tree into a single *Config.
+	return configTree.Flatten()
+}
+
+// LoadDir loads all the Terraform configuration files in a single
+// directory and appends them together.
+//
+// Special files known as "override files" can also be present, which
+// are merged into the loaded configuration. That is, the non-override
+// files are loaded first to create the configuration. Then, the overrides
+// are merged into the configuration to create the final configuration.
+//
+// Files are loaded in lexical order.
+//
+// Returns ErrNoConfigsFound if the directory contains no loadable
+// configuration files at all.
+func LoadDir(root string) (*Config, error) {
+	files, overrides, err := dirFiles(root)
+	if err != nil {
+		return nil, err
+	}
+	if len(files) == 0 {
+		return nil, &ErrNoConfigsFound{Dir: root}
+	}
+
+	// Determine the absolute path to the directory.
+	rootAbs, err := filepath.Abs(root)
+	if err != nil {
+		return nil, err
+	}
+
+	var result *Config
+
+	// Sort the files and overrides so we have a deterministic order
+	sort.Strings(files)
+	sort.Strings(overrides)
+
+	// Load all the regular files, append them to each other.
+	for _, f := range files {
+		c, err := LoadFile(f)
+		if err != nil {
+			return nil, err
+		}
+
+		if result != nil {
+			// Append (not Merge): regular files are additive.
+			result, err = Append(result, c)
+			if err != nil {
+				return nil, err
+			}
+		} else {
+			result = c
+		}
+	}
+
+	// Load all the overrides, and merge them into the config
+	for _, f := range overrides {
+		c, err := LoadFile(f)
+		if err != nil {
+			return nil, err
+		}
+
+		result, err = Merge(result, c)
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	// Mark the directory
+	result.Dir = rootAbs
+
+	return result, nil
+}
+
+// IsEmptyDir returns true if the directory given has no Terraform
+// configuration files. A directory that does not exist at all is also
+// reported as empty.
+func IsEmptyDir(root string) (bool, error) {
+	if _, err := os.Stat(root); err != nil && os.IsNotExist(err) {
+		return true, nil
+	}
+
+	// Name the results explicitly rather than shadowing the "os" package
+	// (the original bound the overrides slice to the identifier "os").
+	files, overrides, err := dirFiles(root)
+	if err != nil {
+		return false, err
+	}
+
+	return len(files) == 0 && len(overrides) == 0, nil
+}
+
+// ext returns the Terraform configuration extension of the given
+// path (".tf" or ".tf.json"), or a blank string if the path does not
+// carry a recognized configuration extension.
+func ext(path string) string {
+	if strings.HasSuffix(path, ".tf") {
+		return ".tf"
+	} else if strings.HasSuffix(path, ".tf.json") {
+		return ".tf.json"
+	} else {
+		return ""
+	}
+}
+
+// dirFiles scans a directory (non-recursively) for Terraform configuration
+// files and partitions them into regular files and override files
+// ("override.tf[.json]" or "*_override.tf[.json]"). Hidden/editor temp
+// files are skipped (see isIgnoredFile). Returns an error if dir is not
+// a directory.
+func dirFiles(dir string) ([]string, []string, error) {
+	f, err := os.Open(dir)
+	if err != nil {
+		return nil, nil, err
+	}
+	defer f.Close()
+
+	fi, err := f.Stat()
+	if err != nil {
+		return nil, nil, err
+	}
+	if !fi.IsDir() {
+		return nil, nil, fmt.Errorf(
+			"configuration path must be a directory: %s",
+			dir)
+	}
+
+	var files, overrides []string
+	err = nil
+	// Read directory entries in batches of 128 until EOF to bound memory
+	// use on very large directories.
+	for err != io.EOF {
+		var fis []os.FileInfo
+		fis, err = f.Readdir(128)
+		if err != nil && err != io.EOF {
+			return nil, nil, err
+		}
+
+		for _, fi := range fis {
+			// Ignore directories
+			if fi.IsDir() {
+				continue
+			}
+
+			// Only care about files that are valid to load
+			name := fi.Name()
+			extValue := ext(name)
+			if extValue == "" || isIgnoredFile(name) {
+				continue
+			}
+
+			// Determine if we're dealing with an override
+			nameNoExt := name[:len(name)-len(extValue)]
+			override := nameNoExt == "override" ||
+				strings.HasSuffix(nameNoExt, "_override")
+
+			path := filepath.Join(dir, name)
+			if override {
+				overrides = append(overrides, path)
+			} else {
+				files = append(files, path)
+			}
+		}
+	}
+
+	return files, overrides, nil
+}
+
+// isIgnoredFile returns true or false depending on whether the
+// provided file name is a file that should be ignored.
+// (&& binds tighter than ||, so the emacs check requires both "#"s.)
+func isIgnoredFile(name string) bool {
+	return strings.HasPrefix(name, ".") || // Unix-like hidden files
+		strings.HasSuffix(name, "~") || // vim
+		strings.HasPrefix(name, "#") && strings.HasSuffix(name, "#") // emacs
+}
diff --git a/vendor/github.com/hashicorp/terraform/config/loader_hcl.go b/vendor/github.com/hashicorp/terraform/config/loader_hcl.go
new file mode 100644
index 00000000..a40ad5ba
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/loader_hcl.go
@@ -0,0 +1,1091 @@
+package config
+
+import (
+ "fmt"
+ "io/ioutil"
+
+ "github.com/hashicorp/go-multierror"
+ "github.com/hashicorp/hcl"
+ "github.com/hashicorp/hcl/hcl/ast"
+ "github.com/mitchellh/mapstructure"
+)
+
+// hclConfigurable is an implementation of configurable that knows
+// how to turn HCL configuration into a *Config object.
+type hclConfigurable struct {
+	// File is the path the configuration was read from (informational).
+	File string
+	// Root is the parsed HCL AST for the whole file.
+	Root *ast.File
+}
+
+// Config converts the parsed HCL AST into a *Config, dispatching each
+// recognized top-level block type ("terraform", "variable", "atlas",
+// "module", "provider", "resource"/"data", "output") to its loader.
+// Unrecognized top-level keys are recorded in config.unknownKeys rather
+// than rejected.
+func (t *hclConfigurable) Config() (*Config, error) {
+	// The set of top-level block names this loader understands; anything
+	// else lands in unknownKeys below.
+	validKeys := map[string]struct{}{
+		"atlas": struct{}{},
+		"data": struct{}{},
+		"module": struct{}{},
+		"output": struct{}{},
+		"provider": struct{}{},
+		"resource": struct{}{},
+		"terraform": struct{}{},
+		"variable": struct{}{},
+	}
+
+	// Top-level item should be the object list
+	list, ok := t.Root.Node.(*ast.ObjectList)
+	if !ok {
+		return nil, fmt.Errorf("error parsing: file doesn't contain a root object")
+	}
+
+	// Start building up the actual configuration.
+	config := new(Config)
+
+	// Terraform config
+	if o := list.Filter("terraform"); len(o.Items) > 0 {
+		var err error
+		config.Terraform, err = loadTerraformHcl(o)
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	// Build the variables
+	if vars := list.Filter("variable"); len(vars.Items) > 0 {
+		var err error
+		config.Variables, err = loadVariablesHcl(vars)
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	// Get Atlas configuration
+	if atlas := list.Filter("atlas"); len(atlas.Items) > 0 {
+		var err error
+		config.Atlas, err = loadAtlasHcl(atlas)
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	// Build the modules
+	if modules := list.Filter("module"); len(modules.Items) > 0 {
+		var err error
+		config.Modules, err = loadModulesHcl(modules)
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	// Build the provider configs
+	if providers := list.Filter("provider"); len(providers.Items) > 0 {
+		var err error
+		config.ProviderConfigs, err = loadProvidersHcl(providers)
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	// Build the resources
+	{
+		var err error
+		managedResourceConfigs := list.Filter("resource")
+		dataResourceConfigs := list.Filter("data")
+
+		config.Resources = make(
+			[]*Resource, 0,
+			len(managedResourceConfigs.Items)+len(dataResourceConfigs.Items),
+		)
+
+		managedResources, err := loadManagedResourcesHcl(managedResourceConfigs)
+		if err != nil {
+			return nil, err
+		}
+		dataResources, err := loadDataResourcesHcl(dataResourceConfigs)
+		if err != nil {
+			return nil, err
+		}
+
+		// Data resources are appended before managed resources.
+		config.Resources = append(config.Resources, dataResources...)
+		config.Resources = append(config.Resources, managedResources...)
+	}
+
+	// Build the outputs
+	if outputs := list.Filter("output"); len(outputs.Items) > 0 {
+		var err error
+		config.Outputs, err = loadOutputsHcl(outputs)
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	// Check for invalid keys
+	for _, item := range list.Items {
+		if len(item.Keys) == 0 {
+			// Not sure how this would happen, but let's avoid a panic
+			continue
+		}
+
+		k := item.Keys[0].Token.Value().(string)
+		if _, ok := validKeys[k]; ok {
+			continue
+		}
+
+		config.unknownKeys = append(config.unknownKeys, k)
+	}
+
+	return config, nil
+}
+
+// loadFileHcl is a fileLoaderFunc that knows how to read HCL
+// files and turn them into hclConfigurables. The second return value
+// (import paths) is always nil; imports were removed before Terraform
+// 0.1 and the historical code below is kept only as a comment.
+func loadFileHcl(root string) (configurable, []string, error) {
+	// Read the HCL file and prepare for parsing
+	d, err := ioutil.ReadFile(root)
+	if err != nil {
+		return nil, nil, fmt.Errorf(
+			"Error reading %s: %s", root, err)
+	}
+
+	// Parse it
+	hclRoot, err := hcl.Parse(string(d))
+	if err != nil {
+		return nil, nil, fmt.Errorf(
+			"Error parsing %s: %s", root, err)
+	}
+
+	// Start building the result
+	result := &hclConfigurable{
+		File: root,
+		Root: hclRoot,
+	}
+
+	// Dive in, find the imports. This is disabled for now since
+	// imports were removed prior to Terraform 0.1. The code is
+	// remaining here commented for historical purposes.
+	/*
+		imports := obj.Get("import")
+		if imports == nil {
+			result.Object.Ref()
+			return result, nil, nil
+		}
+
+		if imports.Type() != libucl.ObjectTypeString {
+			imports.Close()
+
+			return nil, nil, fmt.Errorf(
+				"Error in %s: all 'import' declarations should be in the format\n"+
+					"`import \"foo\"` (Got type %s)",
+				root,
+				imports.Type())
+		}
+
+		// Gather all the import paths
+		importPaths := make([]string, 0, imports.Len())
+		iter := imports.Iterate(false)
+		for imp := iter.Next(); imp != nil; imp = iter.Next() {
+			path := imp.ToString()
+			if !filepath.IsAbs(path) {
+				// Relative paths are relative to the Terraform file itself
+				dir := filepath.Dir(root)
+				path = filepath.Join(dir, path)
+			}
+
+			importPaths = append(importPaths, path)
+			imp.Close()
+		}
+		iter.Close()
+		imports.Close()
+
+		result.Object.Ref()
+	*/
+
+	return result, nil, nil
+}
+
+// loadTerraformHcl transforms a "terraform" block's ObjectList into the
+// Terraform config struct. Only one terraform block is allowed per module.
+func loadTerraformHcl(list *ast.ObjectList) (*Terraform, error) {
+	if len(list.Items) > 1 {
+		return nil, fmt.Errorf("only one 'terraform' block allowed per module")
+	}
+
+	// Get our one item
+	item := list.Items[0]
+
+	// This block should have an empty top level ObjectItem. If there are keys
+	// here, it's likely because we have a flattened JSON object, and we can
+	// lift this into a nested ObjectList to decode properly.
+	if len(item.Keys) > 0 {
+		item = &ast.ObjectItem{
+			Val: &ast.ObjectType{
+				List: &ast.ObjectList{
+					Items: []*ast.ObjectItem{item},
+				},
+			},
+		}
+	}
+
+	// We need the item value as an ObjectList
+	var listVal *ast.ObjectList
+	if ot, ok := item.Val.(*ast.ObjectType); ok {
+		listVal = ot.List
+	} else {
+		return nil, fmt.Errorf("terraform block: should be an object")
+	}
+
+	// NOTE: We purposely don't validate unknown HCL keys here so that
+	// we can potentially read _future_ Terraform version config (to
+	// still be able to validate the required version).
+	//
+	// We should still keep track of unknown keys to validate later, but
+	// HCL doesn't currently support that.
+
+	var config Terraform
+	if err := hcl.DecodeObject(&config, item.Val); err != nil {
+		return nil, fmt.Errorf(
+			"Error reading terraform config: %s",
+			err)
+	}
+
+	// If we have a backend block, then parse it out
+	// (the original comment said "provisioners" — copy/paste artifact).
+	if os := listVal.Filter("backend"); len(os.Items) > 0 {
+		var err error
+		config.Backend, err = loadTerraformBackendHcl(os)
+		if err != nil {
+			return nil, fmt.Errorf(
+				"Error reading backend config for terraform block: %s",
+				err)
+		}
+	}
+
+	return &config, nil
+}
+
+// loadTerraformBackendHcl loads the Backend configuration from an object
+// list. The single allowed "backend" block must carry exactly one string
+// key (the backend type); the remaining body becomes the backend's raw
+// config, and the backend hash is computed eagerly.
+func loadTerraformBackendHcl(list *ast.ObjectList) (*Backend, error) {
+	if len(list.Items) > 1 {
+		return nil, fmt.Errorf("only one 'backend' block allowed")
+	}
+
+	// Get our one item
+	item := list.Items[0]
+
+	// Verify the keys
+	if len(item.Keys) != 1 {
+		return nil, fmt.Errorf(
+			"position %s: 'backend' must be followed by exactly one string: a type",
+			item.Pos())
+	}
+
+	typ := item.Keys[0].Token.Value().(string)
+
+	// Decode the raw config
+	var config map[string]interface{}
+	if err := hcl.DecodeObject(&config, item.Val); err != nil {
+		return nil, fmt.Errorf(
+			"Error reading backend config: %s",
+			err)
+	}
+
+	rawConfig, err := NewRawConfig(config)
+	if err != nil {
+		return nil, fmt.Errorf(
+			"Error reading backend config: %s",
+			err)
+	}
+
+	b := &Backend{
+		Type: typ,
+		RawConfig: rawConfig,
+	}
+	b.Hash = b.Rehash()
+
+	return b, nil
+}
+
+// loadAtlasHcl transforms an "atlas" block into the AtlasConfig struct.
+// Only one atlas block is allowed.
+func loadAtlasHcl(list *ast.ObjectList) (*AtlasConfig, error) {
+	if len(list.Items) > 1 {
+		return nil, fmt.Errorf("only one 'atlas' block allowed")
+	}
+
+	// Get our one item
+	item := list.Items[0]
+
+	var config AtlasConfig
+	if err := hcl.DecodeObject(&config, item.Val); err != nil {
+		return nil, fmt.Errorf(
+			"Error reading atlas config: %s",
+			err)
+	}
+
+	return &config, nil
+}
+
+// loadModulesHcl recurses into the given HCL object list and pulls out a
+// list of modules.
+//
+// The resulting modules may not be unique, but each module
+// represents exactly one module definition in the HCL configuration.
+// We leave it up to another pass to merge them together.
+func loadModulesHcl(list *ast.ObjectList) ([]*Module, error) {
+	list = list.Children()
+	if len(list.Items) == 0 {
+		return nil, nil
+	}
+
+	// Where all the results will go
+	var result []*Module
+
+	// Now go over all the types and their children in order to get
+	// all of the actual resources.
+	for _, item := range list.Items {
+		k := item.Keys[0].Token.Value().(string)
+
+		var listVal *ast.ObjectList
+		if ot, ok := item.Val.(*ast.ObjectType); ok {
+			listVal = ot.List
+		} else {
+			return nil, fmt.Errorf("module '%s': should be an object", k)
+		}
+
+		var config map[string]interface{}
+		if err := hcl.DecodeObject(&config, item.Val); err != nil {
+			return nil, fmt.Errorf(
+				"Error reading config for %s: %s",
+				k,
+				err)
+		}
+
+		// Remove the fields we handle specially
+		delete(config, "source")
+
+		rawConfig, err := NewRawConfig(config)
+		if err != nil {
+			return nil, fmt.Errorf(
+				"Error reading config for %s: %s",
+				k,
+				err)
+		}
+
+		// If we have a source, then parse it out
+		// (the original comment said "count" — copy/paste artifact).
+		var source string
+		if o := listVal.Filter("source"); len(o.Items) > 0 {
+			err = hcl.DecodeObject(&source, o.Items[0].Val)
+			if err != nil {
+				return nil, fmt.Errorf(
+					"Error parsing source for %s: %s",
+					k,
+					err)
+			}
+		}
+
+		result = append(result, &Module{
+			Name: k,
+			Source: source,
+			RawConfig: rawConfig,
+		})
+	}
+
+	return result, nil
+}
+
+// loadOutputsHcl recurses into the given HCL object list and turns
+// it into a list of outputs. The "depends_on" key is handled specially
+// and removed from each output's raw config.
+func loadOutputsHcl(list *ast.ObjectList) ([]*Output, error) {
+	list = list.Children()
+	if len(list.Items) == 0 {
+		return nil, fmt.Errorf(
+			"'output' must be followed by exactly one string: a name")
+	}
+
+	// Go through each object and turn it into an actual result.
+	result := make([]*Output, 0, len(list.Items))
+	for _, item := range list.Items {
+		n := item.Keys[0].Token.Value().(string)
+
+		var listVal *ast.ObjectList
+		if ot, ok := item.Val.(*ast.ObjectType); ok {
+			listVal = ot.List
+		} else {
+			return nil, fmt.Errorf("output '%s': should be an object", n)
+		}
+
+		var config map[string]interface{}
+		if err := hcl.DecodeObject(&config, item.Val); err != nil {
+			return nil, err
+		}
+
+		// Delete special keys
+		delete(config, "depends_on")
+
+		rawConfig, err := NewRawConfig(config)
+		if err != nil {
+			return nil, fmt.Errorf(
+				"Error reading config for output %s: %s",
+				n,
+				err)
+		}
+
+		// If we have depends fields, then add those in
+		var dependsOn []string
+		if o := listVal.Filter("depends_on"); len(o.Items) > 0 {
+			err := hcl.DecodeObject(&dependsOn, o.Items[0].Val)
+			if err != nil {
+				return nil, fmt.Errorf(
+					"Error reading depends_on for output %q: %s",
+					n,
+					err)
+			}
+		}
+
+		result = append(result, &Output{
+			Name: n,
+			RawConfig: rawConfig,
+			DependsOn: dependsOn,
+		})
+	}
+
+	return result, nil
+}
+
+// loadVariablesHcl recurses into the given HCL object list and turns
+// it into a list of variables. Each variable is validated against
+// NameRegexp, restricted to the keys "type"/"default"/"description",
+// and type-checked via ValidateTypeAndDefault.
+func loadVariablesHcl(list *ast.ObjectList) ([]*Variable, error) {
+	list = list.Children()
+	if len(list.Items) == 0 {
+		return nil, fmt.Errorf(
+			"'variable' must be followed by exactly one string: a name")
+	}
+
+	// hclVariable is the structure each variable is decoded into
+	type hclVariable struct {
+		DeclaredType string `hcl:"type"`
+		Default      interface{}
+		Description  string
+		Fields       []string `hcl:",decodedFields"`
+	}
+
+	// Go through each object and turn it into an actual result.
+	result := make([]*Variable, 0, len(list.Items))
+	for _, item := range list.Items {
+		// Clean up items from JSON
+		unwrapHCLObjectKeysFromJSON(item, 1)
+
+		// Verify the keys
+		if len(item.Keys) != 1 {
+			// BUGFIX: message previously read "exactly one strings".
+			return nil, fmt.Errorf(
+				"position %s: 'variable' must be followed by exactly one string: a name",
+				item.Pos())
+		}
+
+		n := item.Keys[0].Token.Value().(string)
+		if !NameRegexp.MatchString(n) {
+			return nil, fmt.Errorf(
+				"position %s: 'variable' name must match regular expression: %s",
+				item.Pos(), NameRegexp)
+		}
+
+		// Check for invalid keys
+		valid := []string{"type", "default", "description"}
+		if err := checkHCLKeys(item.Val, valid); err != nil {
+			return nil, multierror.Prefix(err, fmt.Sprintf(
+				"variable[%s]:", n))
+		}
+
+		// Decode into hclVariable to get typed values
+		var hclVar hclVariable
+		if err := hcl.DecodeObject(&hclVar, item.Val); err != nil {
+			return nil, err
+		}
+
+		// Defaults turn into a slice of map[string]interface{} and
+		// we need to make sure to convert that down into the
+		// proper type for Config.
+		if ms, ok := hclVar.Default.([]map[string]interface{}); ok {
+			def := make(map[string]interface{})
+			for _, m := range ms {
+				for k, v := range m {
+					def[k] = v
+				}
+			}
+
+			hclVar.Default = def
+		}
+
+		// Build the new variable and do some basic validation
+		newVar := &Variable{
+			Name:         n,
+			DeclaredType: hclVar.DeclaredType,
+			Default:      hclVar.Default,
+			Description:  hclVar.Description,
+		}
+		if err := newVar.ValidateTypeAndDefault(); err != nil {
+			return nil, err
+		}
+
+		result = append(result, newVar)
+	}
+
+	return result, nil
+}
+
+// loadProvidersHcl recurses into the given HCL object list and turns
+// it into a list of provider configs. The "alias" key is handled
+// specially and removed from each provider's raw config.
+func loadProvidersHcl(list *ast.ObjectList) ([]*ProviderConfig, error) {
+	list = list.Children()
+	if len(list.Items) == 0 {
+		return nil, nil
+	}
+
+	// Go through each object and turn it into an actual result.
+	result := make([]*ProviderConfig, 0, len(list.Items))
+	for _, item := range list.Items {
+		n := item.Keys[0].Token.Value().(string)
+
+		var listVal *ast.ObjectList
+		if ot, ok := item.Val.(*ast.ObjectType); ok {
+			listVal = ot.List
+		} else {
+			// BUGFIX: this message previously said "module '%s'", a
+			// copy/paste from loadModulesHcl; this is a provider block.
+			return nil, fmt.Errorf("provider '%s': should be an object", n)
+		}
+
+		var config map[string]interface{}
+		if err := hcl.DecodeObject(&config, item.Val); err != nil {
+			return nil, err
+		}
+
+		delete(config, "alias")
+
+		rawConfig, err := NewRawConfig(config)
+		if err != nil {
+			return nil, fmt.Errorf(
+				"Error reading config for provider config %s: %s",
+				n,
+				err)
+		}
+
+		// If we have an alias field, then add those in
+		var alias string
+		if a := listVal.Filter("alias"); len(a.Items) > 0 {
+			err := hcl.DecodeObject(&alias, a.Items[0].Val)
+			if err != nil {
+				return nil, fmt.Errorf(
+					"Error reading alias for provider[%s]: %s",
+					n,
+					err)
+			}
+		}
+
+		result = append(result, &ProviderConfig{
+			Name:      n,
+			Alias:     alias,
+			RawConfig: rawConfig,
+		})
+	}
+
+	return result, nil
+}
+
+// loadDataResourcesHcl recurses into the given HCL object list and pulls
+// out a list of data sources.
+//
+// The resulting data sources may not be unique, but each one
+// represents exactly one data definition in the HCL configuration.
+// We leave it up to another pass to merge them together.
+func loadDataResourcesHcl(list *ast.ObjectList) ([]*Resource, error) {
+	list = list.Children()
+	if len(list.Items) == 0 {
+		return nil, nil
+	}
+
+	// Where all the results will go
+	var result []*Resource
+
+	// Now go over all the types and their children in order to get
+	// all of the actual resources.
+	for _, item := range list.Items {
+		if len(item.Keys) != 2 {
+			return nil, fmt.Errorf(
+				"position %s: 'data' must be followed by exactly two strings: a type and a name",
+				item.Pos())
+		}
+
+		t := item.Keys[0].Token.Value().(string)
+		k := item.Keys[1].Token.Value().(string)
+
+		var listVal *ast.ObjectList
+		if ot, ok := item.Val.(*ast.ObjectType); ok {
+			listVal = ot.List
+		} else {
+			return nil, fmt.Errorf("data sources %s[%s]: should be an object", t, k)
+		}
+
+		var config map[string]interface{}
+		if err := hcl.DecodeObject(&config, item.Val); err != nil {
+			return nil, fmt.Errorf(
+				"Error reading config for %s[%s]: %s",
+				t,
+				k,
+				err)
+		}
+
+		// Remove the fields we handle specially
+		delete(config, "depends_on")
+		delete(config, "provider")
+		delete(config, "count")
+
+		rawConfig, err := NewRawConfig(config)
+		if err != nil {
+			return nil, fmt.Errorf(
+				"Error reading config for %s[%s]: %s",
+				t,
+				k,
+				err)
+		}
+
+		// If we have a count, then figure it out. The count defaults to
+		// "1" and is itself stored as a raw config so it may interpolate.
+		var count string = "1"
+		if o := listVal.Filter("count"); len(o.Items) > 0 {
+			err = hcl.DecodeObject(&count, o.Items[0].Val)
+			if err != nil {
+				return nil, fmt.Errorf(
+					"Error parsing count for %s[%s]: %s",
+					t,
+					k,
+					err)
+			}
+		}
+		countConfig, err := NewRawConfig(map[string]interface{}{
+			"count": count,
+		})
+		if err != nil {
+			return nil, err
+		}
+		countConfig.Key = "count"
+
+		// If we have depends fields, then add those in
+		var dependsOn []string
+		if o := listVal.Filter("depends_on"); len(o.Items) > 0 {
+			err := hcl.DecodeObject(&dependsOn, o.Items[0].Val)
+			if err != nil {
+				return nil, fmt.Errorf(
+					"Error reading depends_on for %s[%s]: %s",
+					t,
+					k,
+					err)
+			}
+		}
+
+		// If we have a provider, then parse it out
+		var provider string
+		if o := listVal.Filter("provider"); len(o.Items) > 0 {
+			err := hcl.DecodeObject(&provider, o.Items[0].Val)
+			if err != nil {
+				return nil, fmt.Errorf(
+					"Error reading provider for %s[%s]: %s",
+					t,
+					k,
+					err)
+			}
+		}
+
+		// Data resources never have provisioners or lifecycle config,
+		// so those fields are left empty.
+		result = append(result, &Resource{
+			Mode: DataResourceMode,
+			Name: k,
+			Type: t,
+			RawCount: countConfig,
+			RawConfig: rawConfig,
+			Provider: provider,
+			Provisioners: []*Provisioner{},
+			DependsOn: dependsOn,
+			Lifecycle: ResourceLifecycle{},
+		})
+	}
+
+	return result, nil
+}
+
+// Given a handle to a HCL object, this recurses into the structure
+// and pulls out a list of managed resources.
+//
+// The resulting resources may not be unique, but each resource
+// represents exactly one "resource" block in the HCL configuration.
+// We leave it up to another pass to merge them together.
+func loadManagedResourcesHcl(list *ast.ObjectList) ([]*Resource, error) {
+ list = list.Children()
+ if len(list.Items) == 0 {
+ return nil, nil
+ }
+
+ // Where all the results will go
+ var result []*Resource
+
+ // Now go over all the types and their children in order to get
+ // all of the actual resources.
+ for _, item := range list.Items {
+ // GH-4385: We detect a pure provisioner resource and give the user
+ // an error about how to do it cleanly.
+ if len(item.Keys) == 4 && item.Keys[2].Token.Value().(string) == "provisioner" {
+ return nil, fmt.Errorf(
+ "position %s: provisioners in a resource should be wrapped in a list\n\n"+
+ "Example: \"provisioner\": [ { \"local-exec\": ... } ]",
+ item.Pos())
+ }
+
+ // Fix up JSON input
+ unwrapHCLObjectKeysFromJSON(item, 2)
+
+ if len(item.Keys) != 2 {
+ return nil, fmt.Errorf(
+ "position %s: resource must be followed by exactly two strings, a type and a name",
+ item.Pos())
+ }
+
+ t := item.Keys[0].Token.Value().(string)
+ k := item.Keys[1].Token.Value().(string)
+
+ var listVal *ast.ObjectList
+ if ot, ok := item.Val.(*ast.ObjectType); ok {
+ listVal = ot.List
+ } else {
+ return nil, fmt.Errorf("resources %s[%s]: should be an object", t, k)
+ }
+
+ var config map[string]interface{}
+ if err := hcl.DecodeObject(&config, item.Val); err != nil {
+ return nil, fmt.Errorf(
+ "Error reading config for %s[%s]: %s",
+ t,
+ k,
+ err)
+ }
+
+ // Remove the fields we handle specially
+ delete(config, "connection")
+ delete(config, "count")
+ delete(config, "depends_on")
+ delete(config, "provisioner")
+ delete(config, "provider")
+ delete(config, "lifecycle")
+
+ rawConfig, err := NewRawConfig(config)
+ if err != nil {
+ return nil, fmt.Errorf(
+ "Error reading config for %s[%s]: %s",
+ t,
+ k,
+ err)
+ }
+
+ // If we have a count, then figure it out
+ var count string = "1"
+ if o := listVal.Filter("count"); len(o.Items) > 0 {
+ err = hcl.DecodeObject(&count, o.Items[0].Val)
+ if err != nil {
+ return nil, fmt.Errorf(
+ "Error parsing count for %s[%s]: %s",
+ t,
+ k,
+ err)
+ }
+ }
+ countConfig, err := NewRawConfig(map[string]interface{}{
+ "count": count,
+ })
+ if err != nil {
+ return nil, err
+ }
+ countConfig.Key = "count"
+
+ // If we have depends fields, then add those in
+ var dependsOn []string
+ if o := listVal.Filter("depends_on"); len(o.Items) > 0 {
+ err := hcl.DecodeObject(&dependsOn, o.Items[0].Val)
+ if err != nil {
+ return nil, fmt.Errorf(
+ "Error reading depends_on for %s[%s]: %s",
+ t,
+ k,
+ err)
+ }
+ }
+
+ // If we have connection info, then parse those out
+ var connInfo map[string]interface{}
+ if o := listVal.Filter("connection"); len(o.Items) > 0 {
+ err := hcl.DecodeObject(&connInfo, o.Items[0].Val)
+ if err != nil {
+ return nil, fmt.Errorf(
+ "Error reading connection info for %s[%s]: %s",
+ t,
+ k,
+ err)
+ }
+ }
+
+ // If we have provisioners, then parse those out
+ var provisioners []*Provisioner
+ if os := listVal.Filter("provisioner"); len(os.Items) > 0 {
+ var err error
+ provisioners, err = loadProvisionersHcl(os, connInfo)
+ if err != nil {
+ return nil, fmt.Errorf(
+ "Error reading provisioners for %s[%s]: %s",
+ t,
+ k,
+ err)
+ }
+ }
+
+ // If we have a provider, then parse it out
+ var provider string
+ if o := listVal.Filter("provider"); len(o.Items) > 0 {
+ err := hcl.DecodeObject(&provider, o.Items[0].Val)
+ if err != nil {
+ return nil, fmt.Errorf(
+ "Error reading provider for %s[%s]: %s",
+ t,
+ k,
+ err)
+ }
+ }
+
+ // Check if the resource should be re-created before
+ // destroying the existing instance
+ var lifecycle ResourceLifecycle
+ if o := listVal.Filter("lifecycle"); len(o.Items) > 0 {
+ if len(o.Items) > 1 {
+ return nil, fmt.Errorf(
+ "%s[%s]: Multiple lifecycle blocks found, expected one",
+ t, k)
+ }
+
+ // Check for invalid keys
+ valid := []string{"create_before_destroy", "ignore_changes", "prevent_destroy"}
+ if err := checkHCLKeys(o.Items[0].Val, valid); err != nil {
+ return nil, multierror.Prefix(err, fmt.Sprintf(
+ "%s[%s]:", t, k))
+ }
+
+ var raw map[string]interface{}
+ if err = hcl.DecodeObject(&raw, o.Items[0].Val); err != nil {
+ return nil, fmt.Errorf(
+ "Error parsing lifecycle for %s[%s]: %s",
+ t,
+ k,
+ err)
+ }
+
+ if err := mapstructure.WeakDecode(raw, &lifecycle); err != nil {
+ return nil, fmt.Errorf(
+ "Error parsing lifecycle for %s[%s]: %s",
+ t,
+ k,
+ err)
+ }
+ }
+
+ result = append(result, &Resource{
+ Mode: ManagedResourceMode,
+ Name: k,
+ Type: t,
+ RawCount: countConfig,
+ RawConfig: rawConfig,
+ Provisioners: provisioners,
+ Provider: provider,
+ DependsOn: dependsOn,
+ Lifecycle: lifecycle,
+ })
+ }
+
+ return result, nil
+}
+
+func loadProvisionersHcl(list *ast.ObjectList, connInfo map[string]interface{}) ([]*Provisioner, error) {
+ list = list.Children()
+ if len(list.Items) == 0 {
+ return nil, nil
+ }
+
+ // Go through each object and turn it into an actual result.
+ result := make([]*Provisioner, 0, len(list.Items))
+ for _, item := range list.Items {
+ n := item.Keys[0].Token.Value().(string)
+
+ var listVal *ast.ObjectList
+ if ot, ok := item.Val.(*ast.ObjectType); ok {
+ listVal = ot.List
+ } else {
+ return nil, fmt.Errorf("provisioner '%s': should be an object", n)
+ }
+
+ var config map[string]interface{}
+ if err := hcl.DecodeObject(&config, item.Val); err != nil {
+ return nil, err
+ }
+
+ // Parse the "when" value
+ when := ProvisionerWhenCreate
+ if v, ok := config["when"]; ok {
+ switch v {
+ case "create":
+ when = ProvisionerWhenCreate
+ case "destroy":
+ when = ProvisionerWhenDestroy
+ default:
+ return nil, fmt.Errorf(
+ "position %s: 'provisioner' when must be 'create' or 'destroy'",
+ item.Pos())
+ }
+ }
+
+ // Parse the "on_failure" value
+ onFailure := ProvisionerOnFailureFail
+ if v, ok := config["on_failure"]; ok {
+ switch v {
+ case "continue":
+ onFailure = ProvisionerOnFailureContinue
+ case "fail":
+ onFailure = ProvisionerOnFailureFail
+ default:
+ return nil, fmt.Errorf(
+ "position %s: 'provisioner' on_failure must be 'continue' or 'fail'",
+ item.Pos())
+ }
+ }
+
+ // Delete fields we special case
+ delete(config, "connection")
+ delete(config, "when")
+ delete(config, "on_failure")
+
+ rawConfig, err := NewRawConfig(config)
+ if err != nil {
+ return nil, err
+ }
+
+ // Check if we have a provisioner-level connection
+ // block that overrides the resource-level
+ var subConnInfo map[string]interface{}
+ if o := listVal.Filter("connection"); len(o.Items) > 0 {
+ err := hcl.DecodeObject(&subConnInfo, o.Items[0].Val)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ // Inherit from the resource connInfo any keys
+		// that are not explicitly overridden.
+ if connInfo != nil && subConnInfo != nil {
+ for k, v := range connInfo {
+ if _, ok := subConnInfo[k]; !ok {
+ subConnInfo[k] = v
+ }
+ }
+ } else if subConnInfo == nil {
+ subConnInfo = connInfo
+ }
+
+ // Parse the connInfo
+ connRaw, err := NewRawConfig(subConnInfo)
+ if err != nil {
+ return nil, err
+ }
+
+ result = append(result, &Provisioner{
+ Type: n,
+ RawConfig: rawConfig,
+ ConnInfo: connRaw,
+ When: when,
+ OnFailure: onFailure,
+ })
+ }
+
+ return result, nil
+}
+
+/*
+func hclObjectMap(os *hclobj.Object) map[string]ast.ListNode {
+ objects := make(map[string][]*hclobj.Object)
+
+ for _, o := range os.Elem(false) {
+ for _, elem := range o.Elem(true) {
+ val, ok := objects[elem.Key]
+ if !ok {
+ val = make([]*hclobj.Object, 0, 1)
+ }
+
+ val = append(val, elem)
+ objects[elem.Key] = val
+ }
+ }
+
+ return objects
+}
+*/
+
+func checkHCLKeys(node ast.Node, valid []string) error {
+ var list *ast.ObjectList
+ switch n := node.(type) {
+ case *ast.ObjectList:
+ list = n
+ case *ast.ObjectType:
+ list = n.List
+ default:
+ return fmt.Errorf("cannot check HCL keys of type %T", n)
+ }
+
+ validMap := make(map[string]struct{}, len(valid))
+ for _, v := range valid {
+ validMap[v] = struct{}{}
+ }
+
+ var result error
+ for _, item := range list.Items {
+ key := item.Keys[0].Token.Value().(string)
+ if _, ok := validMap[key]; !ok {
+ result = multierror.Append(result, fmt.Errorf(
+ "invalid key: %s", key))
+ }
+ }
+
+ return result
+}
+
+// unwrapHCLObjectKeysFromJSON cleans up an edge case that can occur when
+// parsing JSON as input: if we're parsing JSON then directly nested
+// items will show up as additional "keys".
+//
+// For objects that expect a fixed number of keys, this breaks the
+// decoding process. This function unwraps the object into what it would've
+// looked like if it came directly from HCL by specifying the number of keys
+// you expect.
+//
+// Example:
+//
+// { "foo": { "baz": {} } }
+//
+// Will show up with Keys being: []string{"foo", "baz"}
+// when we really just want the first two. This function will fix this.
+func unwrapHCLObjectKeysFromJSON(item *ast.ObjectItem, depth int) {
+ if len(item.Keys) > depth && item.Keys[0].Token.JSON {
+ for len(item.Keys) > depth {
+ // Pop off the last key
+ n := len(item.Keys)
+ key := item.Keys[n-1]
+ item.Keys[n-1] = nil
+ item.Keys = item.Keys[:n-1]
+
+ // Wrap our value in a list
+ item.Val = &ast.ObjectType{
+ List: &ast.ObjectList{
+ Items: []*ast.ObjectItem{
+ &ast.ObjectItem{
+ Keys: []*ast.ObjectKey{key},
+ Val: item.Val,
+ },
+ },
+ },
+ }
+ }
+ }
+}
diff --git a/vendor/github.com/hashicorp/terraform/config/merge.go b/vendor/github.com/hashicorp/terraform/config/merge.go
new file mode 100644
index 00000000..db214be4
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/merge.go
@@ -0,0 +1,193 @@
+package config
+
+// Merge merges two configurations into a single configuration.
+//
+// Merge allows for the two configurations to have duplicate resources,
+// because the resources will be merged. This differs from a single
+// Config which must only have unique resources.
+func Merge(c1, c2 *Config) (*Config, error) {
+ c := new(Config)
+
+ // Merge unknown keys
+ unknowns := make(map[string]struct{})
+ for _, k := range c1.unknownKeys {
+ _, present := unknowns[k]
+ if !present {
+ unknowns[k] = struct{}{}
+ c.unknownKeys = append(c.unknownKeys, k)
+ }
+ }
+ for _, k := range c2.unknownKeys {
+ _, present := unknowns[k]
+ if !present {
+ unknowns[k] = struct{}{}
+ c.unknownKeys = append(c.unknownKeys, k)
+ }
+ }
+
+ // Merge Atlas configuration. This is a dumb one overrides the other
+ // sort of merge.
+ c.Atlas = c1.Atlas
+ if c2.Atlas != nil {
+ c.Atlas = c2.Atlas
+ }
+
+ // Merge the Terraform configuration
+ if c1.Terraform != nil {
+ c.Terraform = c1.Terraform
+ if c2.Terraform != nil {
+ c.Terraform.Merge(c2.Terraform)
+ }
+ } else {
+ c.Terraform = c2.Terraform
+ }
+
+ // NOTE: Everything below is pretty gross. Due to the lack of generics
+ // in Go, there is some hoop-jumping involved to make this merging a
+ // little more test-friendly and less repetitive. Ironically, making it
+ // less repetitive involves being a little repetitive, but I prefer to
+ // be repetitive with things that are less error prone than things that
+ // are more error prone (more logic). Type conversions to an interface
+ // are pretty low-error.
+
+ var m1, m2, mresult []merger
+
+ // Modules
+ m1 = make([]merger, 0, len(c1.Modules))
+ m2 = make([]merger, 0, len(c2.Modules))
+ for _, v := range c1.Modules {
+ m1 = append(m1, v)
+ }
+ for _, v := range c2.Modules {
+ m2 = append(m2, v)
+ }
+ mresult = mergeSlice(m1, m2)
+ if len(mresult) > 0 {
+ c.Modules = make([]*Module, len(mresult))
+ for i, v := range mresult {
+ c.Modules[i] = v.(*Module)
+ }
+ }
+
+ // Outputs
+ m1 = make([]merger, 0, len(c1.Outputs))
+ m2 = make([]merger, 0, len(c2.Outputs))
+ for _, v := range c1.Outputs {
+ m1 = append(m1, v)
+ }
+ for _, v := range c2.Outputs {
+ m2 = append(m2, v)
+ }
+ mresult = mergeSlice(m1, m2)
+ if len(mresult) > 0 {
+ c.Outputs = make([]*Output, len(mresult))
+ for i, v := range mresult {
+ c.Outputs[i] = v.(*Output)
+ }
+ }
+
+ // Provider Configs
+ m1 = make([]merger, 0, len(c1.ProviderConfigs))
+ m2 = make([]merger, 0, len(c2.ProviderConfigs))
+ for _, v := range c1.ProviderConfigs {
+ m1 = append(m1, v)
+ }
+ for _, v := range c2.ProviderConfigs {
+ m2 = append(m2, v)
+ }
+ mresult = mergeSlice(m1, m2)
+ if len(mresult) > 0 {
+ c.ProviderConfigs = make([]*ProviderConfig, len(mresult))
+ for i, v := range mresult {
+ c.ProviderConfigs[i] = v.(*ProviderConfig)
+ }
+ }
+
+ // Resources
+ m1 = make([]merger, 0, len(c1.Resources))
+ m2 = make([]merger, 0, len(c2.Resources))
+ for _, v := range c1.Resources {
+ m1 = append(m1, v)
+ }
+ for _, v := range c2.Resources {
+ m2 = append(m2, v)
+ }
+ mresult = mergeSlice(m1, m2)
+ if len(mresult) > 0 {
+ c.Resources = make([]*Resource, len(mresult))
+ for i, v := range mresult {
+ c.Resources[i] = v.(*Resource)
+ }
+ }
+
+ // Variables
+ m1 = make([]merger, 0, len(c1.Variables))
+ m2 = make([]merger, 0, len(c2.Variables))
+ for _, v := range c1.Variables {
+ m1 = append(m1, v)
+ }
+ for _, v := range c2.Variables {
+ m2 = append(m2, v)
+ }
+ mresult = mergeSlice(m1, m2)
+ if len(mresult) > 0 {
+ c.Variables = make([]*Variable, len(mresult))
+ for i, v := range mresult {
+ c.Variables[i] = v.(*Variable)
+ }
+ }
+
+ return c, nil
+}
+
+// merger is an interface that must be implemented by types that are
+// merge-able. This simplifies the implementation of Merge for the various
+// components of a Config.
+type merger interface {
+ mergerName() string
+ mergerMerge(merger) merger
+}
+
+// mergeSlice merges a slice of mergers.
+func mergeSlice(m1, m2 []merger) []merger {
+ r := make([]merger, len(m1), len(m1)+len(m2))
+ copy(r, m1)
+
+ m := map[string]struct{}{}
+ for _, v2 := range m2 {
+		// If we already saw it, just append it because it's a
+		// duplicate and invalid...
+ name := v2.mergerName()
+ if _, ok := m[name]; ok {
+ r = append(r, v2)
+ continue
+ }
+ m[name] = struct{}{}
+
+ // Find an original to override
+ var original merger
+ originalIndex := -1
+ for i, v := range m1 {
+ if v.mergerName() == name {
+ originalIndex = i
+ original = v
+ break
+ }
+ }
+
+ var v merger
+ if original == nil {
+ v = v2
+ } else {
+ v = original.mergerMerge(v2)
+ }
+
+ if originalIndex == -1 {
+ r = append(r, v)
+ } else {
+ r[originalIndex] = v
+ }
+ }
+
+ return r
+}
diff --git a/vendor/github.com/hashicorp/terraform/config/module/copy_dir.go b/vendor/github.com/hashicorp/terraform/config/module/copy_dir.go
new file mode 100644
index 00000000..095f61d8
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/module/copy_dir.go
@@ -0,0 +1,114 @@
+package module
+
+import (
+ "io"
+ "os"
+ "path/filepath"
+ "strings"
+)
+
+// copyDir copies the src directory contents into dst. Both directories
+// should already exist.
+func copyDir(dst, src string) error {
+ src, err := filepath.EvalSymlinks(src)
+ if err != nil {
+ return err
+ }
+
+ walkFn := func(path string, info os.FileInfo, err error) error {
+ if err != nil {
+ return err
+ }
+
+ if path == src {
+ return nil
+ }
+
+ if strings.HasPrefix(filepath.Base(path), ".") {
+ // Skip any dot files
+ if info.IsDir() {
+ return filepath.SkipDir
+ } else {
+ return nil
+ }
+ }
+
+ // The "path" has the src prefixed to it. We need to join our
+ // destination with the path without the src on it.
+ dstPath := filepath.Join(dst, path[len(src):])
+
+ // we don't want to try and copy the same file over itself.
+ if eq, err := sameFile(path, dstPath); eq {
+ return nil
+ } else if err != nil {
+ return err
+ }
+
+ // If we have a directory, make that subdirectory, then continue
+ // the walk.
+ if info.IsDir() {
+ if path == filepath.Join(src, dst) {
+ // dst is in src; don't walk it.
+ return nil
+ }
+
+ if err := os.MkdirAll(dstPath, 0755); err != nil {
+ return err
+ }
+
+ return nil
+ }
+
+ // If we have a file, copy the contents.
+ srcF, err := os.Open(path)
+ if err != nil {
+ return err
+ }
+ defer srcF.Close()
+
+ dstF, err := os.Create(dstPath)
+ if err != nil {
+ return err
+ }
+ defer dstF.Close()
+
+ if _, err := io.Copy(dstF, srcF); err != nil {
+ return err
+ }
+
+ // Chmod it
+ return os.Chmod(dstPath, info.Mode())
+ }
+
+ return filepath.Walk(src, walkFn)
+}
+
+// sameFile tries to determine if two paths are the same file.
+// If the paths don't match, we lookup the inode on supported systems.
+func sameFile(a, b string) (bool, error) {
+ if a == b {
+ return true, nil
+ }
+
+ aIno, err := inode(a)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return false, nil
+ }
+ return false, err
+ }
+
+ bIno, err := inode(b)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return false, nil
+ }
+ return false, err
+ }
+
+ if aIno > 0 && aIno == bIno {
+ return true, nil
+ }
+
+ return false, nil
+}
diff --git a/vendor/github.com/hashicorp/terraform/config/module/get.go b/vendor/github.com/hashicorp/terraform/config/module/get.go
new file mode 100644
index 00000000..96b4a63c
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/module/get.go
@@ -0,0 +1,71 @@
+package module
+
+import (
+ "io/ioutil"
+ "os"
+
+ "github.com/hashicorp/go-getter"
+)
+
+// GetMode is an enum that describes how modules are loaded.
+//
+// GetModeLoad says that modules will not be downloaded or updated, they will
+// only be loaded from the storage.
+//
+// GetModeGet says that modules can be initially downloaded if they don't
+// exist, but otherwise to just load from the current version in storage.
+//
+// GetModeUpdate says that modules should be checked for updates and
+// downloaded prior to loading. If there are no updates, we load the version
+// from disk, otherwise we download first and then load.
+type GetMode byte
+
+const (
+ GetModeNone GetMode = iota
+ GetModeGet
+ GetModeUpdate
+)
+
+// GetCopy is the same as Get except that it downloads a copy of the
+// module represented by source.
+//
+// This copy will omit any dot-prefixed files (such as .git/, .hg/) and
+// can't be updated on its own.
+func GetCopy(dst, src string) error {
+ // Create the temporary directory to do the real Get to
+ tmpDir, err := ioutil.TempDir("", "tf")
+ if err != nil {
+ return err
+ }
+ // FIXME: This isn't completely safe. Creating and removing our temp path
+ // exposes where to race to inject files.
+ if err := os.RemoveAll(tmpDir); err != nil {
+ return err
+ }
+ defer os.RemoveAll(tmpDir)
+
+ // Get to that temporary dir
+ if err := getter.Get(tmpDir, src); err != nil {
+ return err
+ }
+
+ // Make sure the destination exists
+ if err := os.MkdirAll(dst, 0755); err != nil {
+ return err
+ }
+
+ // Copy to the final location
+ return copyDir(dst, tmpDir)
+}
+
+func getStorage(s getter.Storage, key string, src string, mode GetMode) (string, bool, error) {
+ // Get the module with the level specified if we were told to.
+ if mode > GetModeNone {
+ if err := s.Get(key, src, mode == GetModeUpdate); err != nil {
+ return "", false, err
+ }
+ }
+
+ // Get the directory where the module is.
+ return s.Dir(key)
+}
diff --git a/vendor/github.com/hashicorp/terraform/config/module/inode.go b/vendor/github.com/hashicorp/terraform/config/module/inode.go
new file mode 100644
index 00000000..8603ee26
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/module/inode.go
@@ -0,0 +1,21 @@
+// +build linux darwin openbsd netbsd solaris
+
+package module
+
+import (
+ "fmt"
+ "os"
+ "syscall"
+)
+
+// lookup the inode of a file on posix systems
+func inode(path string) (uint64, error) {
+ stat, err := os.Stat(path)
+ if err != nil {
+ return 0, err
+ }
+ if st, ok := stat.Sys().(*syscall.Stat_t); ok {
+ return st.Ino, nil
+ }
+ return 0, fmt.Errorf("could not determine file inode")
+}
diff --git a/vendor/github.com/hashicorp/terraform/config/module/inode_freebsd.go b/vendor/github.com/hashicorp/terraform/config/module/inode_freebsd.go
new file mode 100644
index 00000000..0d95730d
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/module/inode_freebsd.go
@@ -0,0 +1,21 @@
+// +build freebsd
+
+package module
+
+import (
+ "fmt"
+ "os"
+ "syscall"
+)
+
+// lookup the inode of a file on posix systems
+func inode(path string) (uint64, error) {
+ stat, err := os.Stat(path)
+ if err != nil {
+ return 0, err
+ }
+ if st, ok := stat.Sys().(*syscall.Stat_t); ok {
+ return uint64(st.Ino), nil
+ }
+ return 0, fmt.Errorf("could not determine file inode")
+}
diff --git a/vendor/github.com/hashicorp/terraform/config/module/inode_windows.go b/vendor/github.com/hashicorp/terraform/config/module/inode_windows.go
new file mode 100644
index 00000000..c0cf4553
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/module/inode_windows.go
@@ -0,0 +1,8 @@
+// +build windows
+
+package module
+
+// no syscall.Stat_t on windows, return 0 for inodes
+func inode(path string) (uint64, error) {
+ return 0, nil
+}
diff --git a/vendor/github.com/hashicorp/terraform/config/module/module.go b/vendor/github.com/hashicorp/terraform/config/module/module.go
new file mode 100644
index 00000000..f8649f6e
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/module/module.go
@@ -0,0 +1,7 @@
+package module
+
+// Module represents the metadata for a single module.
+type Module struct {
+ Name string
+ Source string
+}
diff --git a/vendor/github.com/hashicorp/terraform/config/module/testing.go b/vendor/github.com/hashicorp/terraform/config/module/testing.go
new file mode 100644
index 00000000..fc9e7331
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/module/testing.go
@@ -0,0 +1,38 @@
+package module
+
+import (
+ "io/ioutil"
+ "os"
+ "testing"
+
+ "github.com/hashicorp/go-getter"
+)
+
+// TestTree loads a module at the given path and returns the tree as well
+// as a function that should be deferred to clean up resources.
+func TestTree(t *testing.T, path string) (*Tree, func()) {
+ // Create a temporary directory for module storage
+ dir, err := ioutil.TempDir("", "tf")
+ if err != nil {
+ t.Fatalf("err: %s", err)
+ return nil, nil
+ }
+
+ // Load the module
+ mod, err := NewTreeModule("", path)
+ if err != nil {
+ t.Fatalf("err: %s", err)
+ return nil, nil
+ }
+
+ // Get the child modules
+ s := &getter.FolderStorage{StorageDir: dir}
+ if err := mod.Load(s, GetModeGet); err != nil {
+ t.Fatalf("err: %s", err)
+ return nil, nil
+ }
+
+ return mod, func() {
+ os.RemoveAll(dir)
+ }
+}
diff --git a/vendor/github.com/hashicorp/terraform/config/module/tree.go b/vendor/github.com/hashicorp/terraform/config/module/tree.go
new file mode 100644
index 00000000..b6f90fd9
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/module/tree.go
@@ -0,0 +1,428 @@
+package module
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "path/filepath"
+ "strings"
+ "sync"
+
+ "github.com/hashicorp/go-getter"
+ "github.com/hashicorp/terraform/config"
+)
+
+// RootName is the name of the root tree.
+const RootName = "root"
+
+// Tree represents the module import tree of configurations.
+//
+// This Tree structure can be used to get (download) new modules, load
+// all the modules without getting, flatten the tree into something
+// Terraform can use, etc.
+type Tree struct {
+ name string
+ config *config.Config
+ children map[string]*Tree
+ path []string
+ lock sync.RWMutex
+}
+
+// NewTree returns a new Tree for the given config structure.
+func NewTree(name string, c *config.Config) *Tree {
+ return &Tree{config: c, name: name}
+}
+
+// NewEmptyTree returns a new tree that is empty (contains no configuration).
+func NewEmptyTree() *Tree {
+ t := &Tree{config: &config.Config{}}
+
+ // We do this dummy load so that the tree is marked as "loaded". It
+ // should never fail because this is just about a no-op. If it does fail
+ // we panic so we can know its a bug.
+ if err := t.Load(nil, GetModeGet); err != nil {
+ panic(err)
+ }
+
+ return t
+}
+
+// NewTreeModule is like NewTree except it parses the configuration in
+// the directory and gives it a specific name. Use a blank name "" to specify
+// the root module.
+func NewTreeModule(name, dir string) (*Tree, error) {
+ c, err := config.LoadDir(dir)
+ if err != nil {
+ return nil, err
+ }
+
+ return NewTree(name, c), nil
+}
+
+// Config returns the configuration for this module.
+func (t *Tree) Config() *config.Config {
+ return t.config
+}
+
+// Child returns the child with the given path (by name).
+func (t *Tree) Child(path []string) *Tree {
+ if t == nil {
+ return nil
+ }
+
+ if len(path) == 0 {
+ return t
+ }
+
+ c := t.Children()[path[0]]
+ if c == nil {
+ return nil
+ }
+
+ return c.Child(path[1:])
+}
+
+// Children returns the children of this tree (the modules that are
+// imported by this root).
+//
+// This will only return a non-nil value after Load is called.
+func (t *Tree) Children() map[string]*Tree {
+ t.lock.RLock()
+ defer t.lock.RUnlock()
+ return t.children
+}
+
+// Loaded says whether or not this tree has been loaded or not yet.
+func (t *Tree) Loaded() bool {
+ t.lock.RLock()
+ defer t.lock.RUnlock()
+ return t.children != nil
+}
+
+// Modules returns the list of modules that this tree imports.
+//
+// This is only the imports of _this_ level of the tree. To retrieve the
+// full nested imports, you'll have to traverse the tree.
+func (t *Tree) Modules() []*Module {
+ result := make([]*Module, len(t.config.Modules))
+ for i, m := range t.config.Modules {
+ result[i] = &Module{
+ Name: m.Name,
+ Source: m.Source,
+ }
+ }
+
+ return result
+}
+
+// Name returns the name of the tree. This will be "<root>" for the root
+// tree and then the module name given for any children.
+func (t *Tree) Name() string {
+ if t.name == "" {
+ return RootName
+ }
+
+ return t.name
+}
+
+// Load loads the configuration of the entire tree.
+//
+// The parameters are used to tell the tree where to find modules and
+// whether it can download/update modules along the way.
+//
+// Calling this multiple times will reload the tree.
+//
+// Various semantic-like checks are made along the way of loading since
+// module trees inherently require the configuration to be in a reasonably
+// sane state: no circular dependencies, proper module sources, etc. A full
+// suite of validations can be done by running Validate (after loading).
+func (t *Tree) Load(s getter.Storage, mode GetMode) error {
+ t.lock.Lock()
+ defer t.lock.Unlock()
+
+ // Reset the children if we have any
+ t.children = nil
+
+ modules := t.Modules()
+ children := make(map[string]*Tree)
+
+ // Go through all the modules and get the directory for them.
+ for _, m := range modules {
+ if _, ok := children[m.Name]; ok {
+ return fmt.Errorf(
+ "module %s: duplicated. module names must be unique", m.Name)
+ }
+
+ // Determine the path to this child
+ path := make([]string, len(t.path), len(t.path)+1)
+ copy(path, t.path)
+ path = append(path, m.Name)
+
+ // Split out the subdir if we have one
+ source, subDir := getter.SourceDirSubdir(m.Source)
+
+ source, err := getter.Detect(source, t.config.Dir, getter.Detectors)
+ if err != nil {
+ return fmt.Errorf("module %s: %s", m.Name, err)
+ }
+
+ // Check if the detector introduced something new.
+ source, subDir2 := getter.SourceDirSubdir(source)
+ if subDir2 != "" {
+ subDir = filepath.Join(subDir2, subDir)
+ }
+
+ // Get the directory where this module is so we can load it
+ key := strings.Join(path, ".")
+ key = fmt.Sprintf("root.%s-%s", key, m.Source)
+ dir, ok, err := getStorage(s, key, source, mode)
+ if err != nil {
+ return err
+ }
+ if !ok {
+ return fmt.Errorf(
+ "module %s: not found, may need to be downloaded using 'terraform get'", m.Name)
+ }
+
+ // If we have a subdirectory, then merge that in
+ if subDir != "" {
+ dir = filepath.Join(dir, subDir)
+ }
+
+		// Load the configuration from the module's directory
+ children[m.Name], err = NewTreeModule(m.Name, dir)
+ if err != nil {
+ return fmt.Errorf(
+ "module %s: %s", m.Name, err)
+ }
+
+ // Set the path of this child
+ children[m.Name].path = path
+ }
+
+ // Go through all the children and load them.
+ for _, c := range children {
+ if err := c.Load(s, mode); err != nil {
+ return err
+ }
+ }
+
+ // Set our tree up
+ t.children = children
+
+ return nil
+}
+
+// Path is the full path to this tree.
+func (t *Tree) Path() []string {
+ return t.path
+}
+
+// String gives a nice output to describe the tree.
+func (t *Tree) String() string {
+ var result bytes.Buffer
+ path := strings.Join(t.path, ", ")
+ if path != "" {
+ path = fmt.Sprintf(" (path: %s)", path)
+ }
+ result.WriteString(t.Name() + path + "\n")
+
+ cs := t.Children()
+ if cs == nil {
+ result.WriteString(" not loaded")
+ } else {
+ // Go through each child and get its string value, then indent it
+ // by two.
+ for _, c := range cs {
+ r := strings.NewReader(c.String())
+ scanner := bufio.NewScanner(r)
+ for scanner.Scan() {
+ result.WriteString(" ")
+ result.WriteString(scanner.Text())
+ result.WriteString("\n")
+ }
+ }
+ }
+
+ return result.String()
+}
+
+// Validate does semantic checks on the entire tree of configurations.
+//
+// This will call the respective config.Config.Validate() functions as well
+// as verifying things such as parameters/outputs between the various modules.
+//
+// Load must be called prior to calling Validate or an error will be returned.
+func (t *Tree) Validate() error {
+ if !t.Loaded() {
+ return fmt.Errorf("tree must be loaded before calling Validate")
+ }
+
+ // If something goes wrong, here is our error template
+ newErr := &treeError{Name: []string{t.Name()}}
+
+ // Terraform core does not handle root module children named "root".
+ // We plan to fix this in the future but this bug was brought up in
+ // the middle of a release and we don't want to introduce wide-sweeping
+ // changes at that time.
+ if len(t.path) == 1 && t.name == "root" {
+ return fmt.Errorf("root module cannot contain module named 'root'")
+ }
+
+ // Validate our configuration first.
+ if err := t.config.Validate(); err != nil {
+ newErr.Add(err)
+ }
+
+ // If we're the root, we do extra validation. This validation usually
+ // requires the entire tree (since children don't have parent pointers).
+ if len(t.path) == 0 {
+ if err := t.validateProviderAlias(); err != nil {
+ newErr.Add(err)
+ }
+ }
+
+ // Get the child trees
+ children := t.Children()
+
+ // Validate all our children
+ for _, c := range children {
+ err := c.Validate()
+ if err == nil {
+ continue
+ }
+
+ verr, ok := err.(*treeError)
+ if !ok {
+ // Unknown error, just return...
+ return err
+ }
+
+ // Append ourselves to the error and then return
+ verr.Name = append(verr.Name, t.Name())
+ newErr.AddChild(verr)
+ }
+
+ // Go over all the modules and verify that any parameters are valid
+ // variables into the module in question.
+ for _, m := range t.config.Modules {
+ tree, ok := children[m.Name]
+ if !ok {
+ // This should never happen because Load watches us
+ panic("module not found in children: " + m.Name)
+ }
+
+ // Build the variables that the module defines
+ requiredMap := make(map[string]struct{})
+ varMap := make(map[string]struct{})
+ for _, v := range tree.config.Variables {
+ varMap[v.Name] = struct{}{}
+
+ if v.Required() {
+ requiredMap[v.Name] = struct{}{}
+ }
+ }
+
+ // Compare to the keys in our raw config for the module
+ for k, _ := range m.RawConfig.Raw {
+ if _, ok := varMap[k]; !ok {
+ newErr.Add(fmt.Errorf(
+ "module %s: %s is not a valid parameter",
+ m.Name, k))
+ }
+
+ // Remove the required
+ delete(requiredMap, k)
+ }
+
+ // If we have any required left over, they aren't set.
+ for k, _ := range requiredMap {
+ newErr.Add(fmt.Errorf(
+ "module %s: required variable %q not set",
+ m.Name, k))
+ }
+ }
+
+ // Go over all the variables used and make sure that any module
+ // variables represent outputs properly.
+ for source, vs := range t.config.InterpolatedVariables() {
+ for _, v := range vs {
+ mv, ok := v.(*config.ModuleVariable)
+ if !ok {
+ continue
+ }
+
+ tree, ok := children[mv.Name]
+ if !ok {
+ newErr.Add(fmt.Errorf(
+ "%s: undefined module referenced %s",
+ source, mv.Name))
+ continue
+ }
+
+ found := false
+ for _, o := range tree.config.Outputs {
+ if o.Name == mv.Field {
+ found = true
+ break
+ }
+ }
+ if !found {
+ newErr.Add(fmt.Errorf(
+ "%s: %s is not a valid output for module %s",
+ source, mv.Field, mv.Name))
+ }
+ }
+ }
+
+ return newErr.ErrOrNil()
+}
+
+// treeError is an error used by Tree.Validate to accumulate all
+// validation errors.
+type treeError struct {
+ Name []string
+ Errs []error
+ Children []*treeError
+}
+
+func (e *treeError) Add(err error) {
+ e.Errs = append(e.Errs, err)
+}
+
+func (e *treeError) AddChild(err *treeError) {
+ e.Children = append(e.Children, err)
+}
+
+func (e *treeError) ErrOrNil() error {
+ if len(e.Errs) > 0 || len(e.Children) > 0 {
+ return e
+ }
+ return nil
+}
+
+func (e *treeError) Error() string {
+ name := strings.Join(e.Name, ".")
+ var out bytes.Buffer
+ fmt.Fprintf(&out, "module %s: ", name)
+
+ if len(e.Errs) == 1 {
+		// single-line error
+ out.WriteString(e.Errs[0].Error())
+ } else {
+ // multi-line error
+ for _, err := range e.Errs {
+ fmt.Fprintf(&out, "\n %s", err)
+ }
+ }
+
+ if len(e.Children) > 0 {
+ // start the next error on a new line
+ out.WriteString("\n ")
+ }
+ for _, child := range e.Children {
+ out.WriteString(child.Error())
+ }
+
+ return out.String()
+}
diff --git a/vendor/github.com/hashicorp/terraform/config/module/tree_gob.go b/vendor/github.com/hashicorp/terraform/config/module/tree_gob.go
new file mode 100644
index 00000000..fcd37f4e
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/module/tree_gob.go
@@ -0,0 +1,57 @@
+package module
+
+import (
+ "bytes"
+ "encoding/gob"
+
+ "github.com/hashicorp/terraform/config"
+)
+
+func (t *Tree) GobDecode(bs []byte) error {
+ t.lock.Lock()
+ defer t.lock.Unlock()
+
+ // Decode the gob data
+ var data treeGob
+ dec := gob.NewDecoder(bytes.NewReader(bs))
+ if err := dec.Decode(&data); err != nil {
+ return err
+ }
+
+ // Set the fields
+ t.name = data.Name
+ t.config = data.Config
+ t.children = data.Children
+ t.path = data.Path
+
+ return nil
+}
+
+func (t *Tree) GobEncode() ([]byte, error) {
+ data := &treeGob{
+ Config: t.config,
+ Children: t.children,
+ Name: t.name,
+ Path: t.path,
+ }
+
+ var buf bytes.Buffer
+ enc := gob.NewEncoder(&buf)
+ if err := enc.Encode(data); err != nil {
+ return nil, err
+ }
+
+ return buf.Bytes(), nil
+}
+
// treeGob is used as a structure to Gob encode a tree.
//
// This structure is private so it can't be referenced but the fields are
// public, allowing Gob to properly encode this. When we decode this, we are
// able to turn it into a Tree.
type treeGob struct {
	Config   *config.Config   // parsed configuration of this module
	Children map[string]*Tree // child modules keyed by name
	Name     string           // module name within the parent
	Path     []string         // full module path from the root
}
diff --git a/vendor/github.com/hashicorp/terraform/config/module/validate_provider_alias.go b/vendor/github.com/hashicorp/terraform/config/module/validate_provider_alias.go
new file mode 100644
index 00000000..090d4f7e
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/module/validate_provider_alias.go
@@ -0,0 +1,118 @@
+package module
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/hashicorp/go-multierror"
+ "github.com/hashicorp/terraform/dag"
+)
+
+// validateProviderAlias validates that all provider alias references are
+// defined at some point in the parent tree. This improves UX by catching
+// alias typos at the slight cost of requiring a declaration of usage. This
+// is usually a good tradeoff since not many aliases are used.
+func (t *Tree) validateProviderAlias() error {
+ // If we're not the root, don't perform this validation. We must be the
+ // root since we require full tree visibilty.
+ if len(t.path) != 0 {
+ return nil
+ }
+
+ // We'll use a graph to keep track of defined aliases at each level.
+ // As long as a parent defines an alias, it is okay.
+ var g dag.AcyclicGraph
+ t.buildProviderAliasGraph(&g, nil)
+
+ // Go through the graph and check that the usage is all good.
+ var err error
+ for _, v := range g.Vertices() {
+ pv, ok := v.(*providerAliasVertex)
+ if !ok {
+ // This shouldn't happen, just ignore it.
+ continue
+ }
+
+ // If we're not using any aliases, fast track and just continue
+ if len(pv.Used) == 0 {
+ continue
+ }
+
+ // Grab the ancestors since we're going to have to check if our
+ // parents define any of our aliases.
+ var parents []*providerAliasVertex
+ ancestors, _ := g.Ancestors(v)
+ for _, raw := range ancestors.List() {
+ if pv, ok := raw.(*providerAliasVertex); ok {
+ parents = append(parents, pv)
+ }
+ }
+ for k, _ := range pv.Used {
+ // Check if we define this
+ if _, ok := pv.Defined[k]; ok {
+ continue
+ }
+
+ // Check for a parent
+ found := false
+ for _, parent := range parents {
+ _, found = parent.Defined[k]
+ if found {
+ break
+ }
+ }
+ if found {
+ continue
+ }
+
+ // We didn't find the alias, error!
+ err = multierror.Append(err, fmt.Errorf(
+ "module %s: provider alias must be defined by the module or a parent: %s",
+ strings.Join(pv.Path, "."), k))
+ }
+ }
+
+ return err
+}
+
+func (t *Tree) buildProviderAliasGraph(g *dag.AcyclicGraph, parent dag.Vertex) {
+ // Add all our defined aliases
+ defined := make(map[string]struct{})
+ for _, p := range t.config.ProviderConfigs {
+ defined[p.FullName()] = struct{}{}
+ }
+
+ // Add all our used aliases
+ used := make(map[string]struct{})
+ for _, r := range t.config.Resources {
+ if r.Provider != "" {
+ used[r.Provider] = struct{}{}
+ }
+ }
+
+ // Add it to the graph
+ vertex := &providerAliasVertex{
+ Path: t.Path(),
+ Defined: defined,
+ Used: used,
+ }
+ g.Add(vertex)
+
+ // Connect to our parent if we have one
+ if parent != nil {
+ g.Connect(dag.BasicEdge(vertex, parent))
+ }
+
+ // Build all our children
+ for _, c := range t.Children() {
+ c.buildProviderAliasGraph(g, vertex)
+ }
+}
+
// providerAliasVertex is the vertex for the graph that keeps track of
// defined provider aliases.
type providerAliasVertex struct {
	Path    []string            // module path this vertex represents
	Defined map[string]struct{} // provider names defined by this module
	Used    map[string]struct{} // provider names referenced by this module's resources
}
diff --git a/vendor/github.com/hashicorp/terraform/config/provisioner_enums.go b/vendor/github.com/hashicorp/terraform/config/provisioner_enums.go
new file mode 100644
index 00000000..00fd43fc
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/provisioner_enums.go
@@ -0,0 +1,40 @@
+package config
+
// ProvisionerWhen is an enum for valid values for when to run provisioners.
type ProvisionerWhen int

const (
	ProvisionerWhenInvalid ProvisionerWhen = iota
	ProvisionerWhenCreate
	ProvisionerWhenDestroy
)

// provisionerWhenStrs maps each ProvisionerWhen value to its textual form.
var provisionerWhenStrs = map[ProvisionerWhen]string{
	ProvisionerWhenInvalid: "invalid",
	ProvisionerWhenCreate:  "create",
	ProvisionerWhenDestroy: "destroy",
}

// String returns the textual form of v ("invalid", "create" or "destroy");
// values outside the enum yield the empty string.
func (v ProvisionerWhen) String() string {
	if s, ok := provisionerWhenStrs[v]; ok {
		return s
	}
	return ""
}
+
// ProvisionerOnFailure is an enum for valid values for on_failure options
// for provisioners.
type ProvisionerOnFailure int

const (
	ProvisionerOnFailureInvalid ProvisionerOnFailure = iota
	ProvisionerOnFailureContinue
	ProvisionerOnFailureFail
)

// provisionerOnFailureStrs maps each ProvisionerOnFailure value to its
// textual form.
var provisionerOnFailureStrs = map[ProvisionerOnFailure]string{
	ProvisionerOnFailureInvalid:  "invalid",
	ProvisionerOnFailureContinue: "continue",
	ProvisionerOnFailureFail:     "fail",
}

// String returns the textual form of v ("invalid", "continue" or "fail");
// values outside the enum yield the empty string.
func (v ProvisionerOnFailure) String() string {
	if s, ok := provisionerOnFailureStrs[v]; ok {
		return s
	}
	return ""
}
diff --git a/vendor/github.com/hashicorp/terraform/config/raw_config.go b/vendor/github.com/hashicorp/terraform/config/raw_config.go
new file mode 100644
index 00000000..f8498d85
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/raw_config.go
@@ -0,0 +1,335 @@
+package config
+
+import (
+ "bytes"
+ "encoding/gob"
+ "sync"
+
+ "github.com/hashicorp/hil"
+ "github.com/hashicorp/hil/ast"
+ "github.com/mitchellh/copystructure"
+ "github.com/mitchellh/reflectwalk"
+)
+
+// UnknownVariableValue is a sentinel value that can be used
+// to denote that the value of a variable is unknown at this time.
+// RawConfig uses this information to build up data about
+// unknown keys.
+const UnknownVariableValue = "74D93920-ED26-11E3-AC10-0800200C9A66"
+
// RawConfig is a structure that holds a piece of configuration
// where the overall structure is unknown since it will be used
// to configure a plugin or some other similar external component.
//
// RawConfigs can be interpolated with variables that come from
// other resources, user variables, etc.
//
// RawConfig supports a query-like interface to request
// information from deep within the structure.
type RawConfig struct {
	// Key, when set, selects which entry of the config Value returns.
	Key string
	// Raw is the original, uninterpolated configuration map.
	Raw map[string]interface{}
	// Interpolations and Variables are derived from Raw by init and
	// recomputed whenever the config is re-parsed (e.g. after GobDecode).
	Interpolations []ast.Node
	Variables      map[string]InterpolatedVariable

	// lock guards the derived state below.
	lock sync.Mutex
	// config is the interpolated form of Raw; it aliases Raw until
	// Interpolate is called.
	config map[string]interface{}
	// unknownKeys lists keys pruned from config because their
	// interpolated value was unknown (computed).
	unknownKeys []string
}
+
+// NewRawConfig creates a new RawConfig structure and populates the
+// publicly readable struct fields.
+func NewRawConfig(raw map[string]interface{}) (*RawConfig, error) {
+ result := &RawConfig{Raw: raw}
+ if err := result.init(); err != nil {
+ return nil, err
+ }
+
+ return result, nil
+}
+
+// RawMap returns a copy of the RawConfig.Raw map.
+func (r *RawConfig) RawMap() map[string]interface{} {
+ r.lock.Lock()
+ defer r.lock.Unlock()
+
+ m := make(map[string]interface{})
+ for k, v := range r.Raw {
+ m[k] = v
+ }
+ return m
+}
+
// Copy returns a copy of this RawConfig, uninterpolated.
//
// The copy is built by re-parsing a shallow copy of Raw through
// NewRawConfig, so Interpolations/Variables are recomputed and any
// prior Interpolate results (config, unknownKeys) are intentionally
// dropped. Nested values inside Raw are shared with the receiver.
func (r *RawConfig) Copy() *RawConfig {
	if r == nil {
		return nil
	}

	r.lock.Lock()
	defer r.lock.Unlock()

	// Shallow-copy the raw map.
	newRaw := make(map[string]interface{})
	for k, v := range r.Raw {
		newRaw[k] = v
	}

	// Re-parse; this should be impossible to fail since r.Raw parsed
	// when r was built, hence the panic on error.
	result, err := NewRawConfig(newRaw)
	if err != nil {
		panic("copy failed: " + err.Error())
	}

	result.Key = r.Key
	return result
}
+
// Value returns the value of the configuration if this configuration
// has a Key set. If this does not have a Key set, nil will be returned.
//
// The interpolated config is consulted first; if the key is absent there
// (e.g. it was pruned because its value was unknown), the uninterpolated
// raw value is returned instead.
func (r *RawConfig) Value() interface{} {
	if c := r.Config(); c != nil {
		if v, ok := c[r.Key]; ok {
			return v
		}
	}

	// Fall back to the raw, uninterpolated value.
	r.lock.Lock()
	defer r.lock.Unlock()
	return r.Raw[r.Key]
}
+
// Config returns the entire configuration with the variables
// interpolated from any call to Interpolate.
//
// If any interpolated variables are unknown (value set to
// UnknownVariableValue), the first non-container (map, slice, etc.) element
// will be removed from the config. The keys of unknown variables
// can be found using the UnknownKeys function.
//
// By pruning out unknown keys from the configuration, the raw
// structure will always successfully decode into its ultimate
// structure using something like mapstructure.
//
// The returned map is r's internal state, not a copy; callers must
// not mutate it.
func (r *RawConfig) Config() map[string]interface{} {
	r.lock.Lock()
	defer r.lock.Unlock()
	return r.config
}
+
// Interpolate uses the given mapping of variable values and uses
// those as the values to replace any variables in this raw
// configuration.
//
// Any prior calls to Interpolate are replaced with this one.
//
// If a variable key is missing, this will panic.
func (r *RawConfig) Interpolate(vs map[string]ast.Variable) error {
	r.lock.Lock()
	defer r.lock.Unlock()

	// Build the hil evaluation config once and evaluate each
	// interpolation node against it via the replacement walker.
	config := langEvalConfig(vs)
	return r.interpolate(func(root ast.Node) (interface{}, error) {
		// None of the variables we need are computed, meaning we should
		// be able to properly evaluate.
		result, err := hil.Eval(root, config)
		if err != nil {
			return "", err
		}

		return result.Value, nil
	})
}
+
+// Merge merges another RawConfig into this one (overriding any conflicting
+// values in this config) and returns a new config. The original config
+// is not modified.
+func (r *RawConfig) Merge(other *RawConfig) *RawConfig {
+ r.lock.Lock()
+ defer r.lock.Unlock()
+
+ // Merge the raw configurations
+ raw := make(map[string]interface{})
+ for k, v := range r.Raw {
+ raw[k] = v
+ }
+ for k, v := range other.Raw {
+ raw[k] = v
+ }
+
+ // Create the result
+ result, err := NewRawConfig(raw)
+ if err != nil {
+ panic(err)
+ }
+
+ // Merge the interpolated results
+ result.config = make(map[string]interface{})
+ for k, v := range r.config {
+ result.config[k] = v
+ }
+ for k, v := range other.config {
+ result.config[k] = v
+ }
+
+ // Build the unknown keys
+ if len(r.unknownKeys) > 0 || len(other.unknownKeys) > 0 {
+ unknownKeys := make(map[string]struct{})
+ for _, k := range r.unknownKeys {
+ unknownKeys[k] = struct{}{}
+ }
+ for _, k := range other.unknownKeys {
+ unknownKeys[k] = struct{}{}
+ }
+
+ result.unknownKeys = make([]string, 0, len(unknownKeys))
+ for k, _ := range unknownKeys {
+ result.unknownKeys = append(result.unknownKeys, k)
+ }
+ }
+
+ return result
+}
+
// init parses r.Raw without substituting any values: it records every
// interpolation AST node found in the raw map and indexes the variables
// those interpolations reference. Until Interpolate is called, r.config
// simply aliases r.Raw.
func (r *RawConfig) init() error {
	r.lock.Lock()
	defer r.lock.Unlock()

	// Reset derived state so init is safe to call repeatedly
	// (e.g. from GobDecode).
	r.config = r.Raw
	r.Interpolations = nil
	r.Variables = nil

	// The walker callback only observes; since the walker is created
	// without Replace, the returned "" is discarded and Raw is untouched.
	fn := func(node ast.Node) (interface{}, error) {
		r.Interpolations = append(r.Interpolations, node)
		vars, err := DetectVariables(node)
		if err != nil {
			return "", err
		}

		for _, v := range vars {
			// Lazily allocate so Variables stays nil for configs with
			// no interpolated variables.
			if r.Variables == nil {
				r.Variables = make(map[string]InterpolatedVariable)
			}

			r.Variables[v.FullKey()] = v
		}

		return "", nil
	}

	walker := &interpolationWalker{F: fn}
	if err := reflectwalk.Walk(r.Raw, walker); err != nil {
		return err
	}

	return nil
}
+
// interpolate deep-copies Raw into config and walks the copy with
// Replace enabled, substituting each interpolation with the result of
// fn. Keys whose values were unknown are recorded in unknownKeys by the
// walker. The caller (Interpolate) holds r.lock.
func (r *RawConfig) interpolate(fn interpolationWalkerFunc) error {
	// Deep copy so the original Raw is never mutated by the
	// replacement walk.
	config, err := copystructure.Copy(r.Raw)
	if err != nil {
		return err
	}
	r.config = config.(map[string]interface{})

	w := &interpolationWalker{F: fn, Replace: true}
	err = reflectwalk.Walk(r.config, w)
	if err != nil {
		return err
	}

	r.unknownKeys = w.unknownKeys
	return nil
}
+
// merge overlays r2's raw values on top of a deep copy of r's and
// returns a freshly parsed RawConfig (interpolation state is recomputed
// by NewRawConfig). Either argument may be nil; both nil yields nil.
func (r *RawConfig) merge(r2 *RawConfig) *RawConfig {
	if r == nil && r2 == nil {
		return nil
	}

	if r == nil {
		r = &RawConfig{}
	}

	// NOTE(review): when r.Raw is nil (e.g. the r == nil branch above),
	// the copy/assert/insert sequence below operates on a nil map and
	// looks like it would panic if r2 has entries — presumably callers
	// never hit that combination; confirm.
	rawRaw, err := copystructure.Copy(r.Raw)
	if err != nil {
		panic(err)
	}

	raw := rawRaw.(map[string]interface{})
	if r2 != nil {
		for k, v := range r2.Raw {
			raw[k] = v
		}
	}

	// Re-parse the merged map; panic mirrors Merge's behavior since a
	// parse failure here indicates corrupted inputs.
	result, err := NewRawConfig(raw)
	if err != nil {
		panic(err)
	}

	return result
}
+
// UnknownKeys returns the keys of the configuration that are unknown
// because they had interpolated variables that must be computed.
//
// The returned slice is r's internal state, not a copy; callers must
// not modify it.
func (r *RawConfig) UnknownKeys() []string {
	r.lock.Lock()
	defer r.lock.Unlock()
	return r.unknownKeys
}
+
+// See GobEncode
+func (r *RawConfig) GobDecode(b []byte) error {
+ var data gobRawConfig
+ err := gob.NewDecoder(bytes.NewReader(b)).Decode(&data)
+ if err != nil {
+ return err
+ }
+
+ r.Key = data.Key
+ r.Raw = data.Raw
+
+ return r.init()
+}
+
+// GobEncode is a custom Gob encoder to use so that we only include the
+// raw configuration. Interpolated variables and such are lost and the
+// tree of interpolated variables is recomputed on decode, since it is
+// referentially transparent.
+func (r *RawConfig) GobEncode() ([]byte, error) {
+ r.lock.Lock()
+ defer r.lock.Unlock()
+
+ data := gobRawConfig{
+ Key: r.Key,
+ Raw: r.Raw,
+ }
+
+ var buf bytes.Buffer
+ if err := gob.NewEncoder(&buf).Encode(data); err != nil {
+ return nil, err
+ }
+
+ return buf.Bytes(), nil
+}
+
// gobRawConfig is the wire form of a RawConfig: only the key and the
// raw map are persisted; all derived state is recomputed on decode.
type gobRawConfig struct {
	Key string
	Raw map[string]interface{}
}
+
+// langEvalConfig returns the evaluation configuration we use to execute.
+func langEvalConfig(vs map[string]ast.Variable) *hil.EvalConfig {
+ funcMap := make(map[string]ast.Function)
+ for k, v := range Funcs() {
+ funcMap[k] = v
+ }
+ funcMap["lookup"] = interpolationFuncLookup(vs)
+ funcMap["keys"] = interpolationFuncKeys(vs)
+ funcMap["values"] = interpolationFuncValues(vs)
+
+ return &hil.EvalConfig{
+ GlobalScope: &ast.BasicScope{
+ VarMap: vs,
+ FuncMap: funcMap,
+ },
+ }
+}
diff --git a/vendor/github.com/hashicorp/terraform/config/resource_mode.go b/vendor/github.com/hashicorp/terraform/config/resource_mode.go
new file mode 100644
index 00000000..877c6e84
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/resource_mode.go
@@ -0,0 +1,9 @@
+package config
+
//go:generate stringer -type=ResourceMode -output=resource_mode_string.go resource_mode.go

// ResourceMode distinguishes the two kinds of resource blocks —
// presumably managed resources vs. data sources, per the constant
// names; confirm against resource-handling call sites.
type ResourceMode int

const (
	ManagedResourceMode ResourceMode = iota
	DataResourceMode
)
diff --git a/vendor/github.com/hashicorp/terraform/config/resource_mode_string.go b/vendor/github.com/hashicorp/terraform/config/resource_mode_string.go
new file mode 100644
index 00000000..ea68b4fc
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/resource_mode_string.go
@@ -0,0 +1,16 @@
+// Code generated by "stringer -type=ResourceMode -output=resource_mode_string.go resource_mode.go"; DO NOT EDIT.
+
+package config
+
+import "fmt"
+
+const _ResourceMode_name = "ManagedResourceModeDataResourceMode"
+
+var _ResourceMode_index = [...]uint8{0, 19, 35}
+
+func (i ResourceMode) String() string {
+ if i < 0 || i >= ResourceMode(len(_ResourceMode_index)-1) {
+ return fmt.Sprintf("ResourceMode(%d)", i)
+ }
+ return _ResourceMode_name[_ResourceMode_index[i]:_ResourceMode_index[i+1]]
+}
diff --git a/vendor/github.com/hashicorp/terraform/config/testing.go b/vendor/github.com/hashicorp/terraform/config/testing.go
new file mode 100644
index 00000000..f7bfadd9
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform/config/testing.go
@@ -0,0 +1,15 @@
+package config
+
+import (
+ "testing"
+)
+
+// TestRawConfig is used to create a RawConfig for testing.
+func TestRawConfig(t *testing.T, c map[string]interface{}) *RawConfig {
+ cfg, err := NewRawConfig(c)
+ if err != nil {
+ t.Fatalf("err: %s", err)
+ }
+
+ return cfg
+}