Diffstat (limited to 'vendor/github.com/hashicorp/go-getter')
-rw-r--r--  vendor/github.com/hashicorp/go-getter/LICENSE | 354
-rw-r--r--  vendor/github.com/hashicorp/go-getter/README.md | 253
-rw-r--r--  vendor/github.com/hashicorp/go-getter/client.go | 335
-rw-r--r--  vendor/github.com/hashicorp/go-getter/client_mode.go | 24
-rw-r--r--  vendor/github.com/hashicorp/go-getter/copy_dir.go | 78
-rw-r--r--  vendor/github.com/hashicorp/go-getter/decompress.go | 29
-rw-r--r--  vendor/github.com/hashicorp/go-getter/decompress_bzip2.go | 45
-rw-r--r--  vendor/github.com/hashicorp/go-getter/decompress_gzip.go | 49
-rw-r--r--  vendor/github.com/hashicorp/go-getter/decompress_tar.go | 83
-rw-r--r--  vendor/github.com/hashicorp/go-getter/decompress_tbz2.go | 33
-rw-r--r--  vendor/github.com/hashicorp/go-getter/decompress_testing.go | 135
-rw-r--r--  vendor/github.com/hashicorp/go-getter/decompress_tgz.go | 39
-rw-r--r--  vendor/github.com/hashicorp/go-getter/decompress_zip.go | 96
-rw-r--r--  vendor/github.com/hashicorp/go-getter/detect.go | 97
-rw-r--r--  vendor/github.com/hashicorp/go-getter/detect_bitbucket.go | 66
-rw-r--r--  vendor/github.com/hashicorp/go-getter/detect_file.go | 67
-rw-r--r--  vendor/github.com/hashicorp/go-getter/detect_github.go | 73
-rw-r--r--  vendor/github.com/hashicorp/go-getter/detect_s3.go | 61
-rw-r--r--  vendor/github.com/hashicorp/go-getter/folder_storage.go | 65
-rw-r--r--  vendor/github.com/hashicorp/go-getter/get.go | 139
-rw-r--r--  vendor/github.com/hashicorp/go-getter/get_file.go | 32
-rw-r--r--  vendor/github.com/hashicorp/go-getter/get_file_unix.go | 103
-rw-r--r--  vendor/github.com/hashicorp/go-getter/get_file_windows.go | 120
-rw-r--r--  vendor/github.com/hashicorp/go-getter/get_git.go | 225
-rw-r--r--  vendor/github.com/hashicorp/go-getter/get_hg.go | 131
-rw-r--r--  vendor/github.com/hashicorp/go-getter/get_http.go | 227
-rw-r--r--  vendor/github.com/hashicorp/go-getter/get_mock.go | 52
-rw-r--r--  vendor/github.com/hashicorp/go-getter/get_s3.go | 243
-rw-r--r--  vendor/github.com/hashicorp/go-getter/helper/url/url.go | 14
-rw-r--r--  vendor/github.com/hashicorp/go-getter/helper/url/url_unix.go | 11
-rw-r--r--  vendor/github.com/hashicorp/go-getter/helper/url/url_windows.go | 40
-rw-r--r--  vendor/github.com/hashicorp/go-getter/netrc.go | 67
-rw-r--r--  vendor/github.com/hashicorp/go-getter/source.go | 36
-rw-r--r--  vendor/github.com/hashicorp/go-getter/storage.go | 13
34 files changed, 3435 insertions, 0 deletions
diff --git a/vendor/github.com/hashicorp/go-getter/LICENSE b/vendor/github.com/hashicorp/go-getter/LICENSE
new file mode 100644
index 00000000..c33dcc7c
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/LICENSE
@@ -0,0 +1,354 @@
+Mozilla Public License, version 2.0
+
+1. Definitions
+
+1.1. “Contributor”
+
+ means each individual or legal entity that creates, contributes to the
+ creation of, or owns Covered Software.
+
+1.2. “Contributor Version”
+
+ means the combination of the Contributions of others (if any) used by a
+ Contributor and that particular Contributor’s Contribution.
+
+1.3. “Contribution”
+
+ means Covered Software of a particular Contributor.
+
+1.4. “Covered Software”
+
+ means Source Code Form to which the initial Contributor has attached the
+ notice in Exhibit A, the Executable Form of such Source Code Form, and
+ Modifications of such Source Code Form, in each case including portions
+ thereof.
+
+1.5. “Incompatible With Secondary Licenses”
+ means
+
+ a. that the initial Contributor has attached the notice described in
+ Exhibit B to the Covered Software; or
+
+ b. that the Covered Software was made available under the terms of version
+ 1.1 or earlier of the License, but not also under the terms of a
+ Secondary License.
+
+1.6. “Executable Form”
+
+ means any form of the work other than Source Code Form.
+
+1.7. “Larger Work”
+
+ means a work that combines Covered Software with other material, in a separate
+ file or files, that is not Covered Software.
+
+1.8. “License”
+
+ means this document.
+
+1.9. “Licensable”
+
+ means having the right to grant, to the maximum extent possible, whether at the
+ time of the initial grant or subsequently, any and all of the rights conveyed by
+ this License.
+
+1.10. “Modifications”
+
+ means any of the following:
+
+ a. any file in Source Code Form that results from an addition to, deletion
+ from, or modification of the contents of Covered Software; or
+
+ b. any new file in Source Code Form that contains any Covered Software.
+
+1.11. “Patent Claims” of a Contributor
+
+ means any patent claim(s), including without limitation, method, process,
+ and apparatus claims, in any patent Licensable by such Contributor that
+ would be infringed, but for the grant of the License, by the making,
+ using, selling, offering for sale, having made, import, or transfer of
+ either its Contributions or its Contributor Version.
+
+1.12. “Secondary License”
+
+ means either the GNU General Public License, Version 2.0, the GNU Lesser
+ General Public License, Version 2.1, the GNU Affero General Public
+ License, Version 3.0, or any later versions of those licenses.
+
+1.13. “Source Code Form”
+
+ means the form of the work preferred for making modifications.
+
+1.14. “You” (or “Your”)
+
+ means an individual or a legal entity exercising rights under this
+ License. For legal entities, “You” includes any entity that controls, is
+ controlled by, or is under common control with You. For purposes of this
+ definition, “control” means (a) the power, direct or indirect, to cause
+ the direction or management of such entity, whether by contract or
+ otherwise, or (b) ownership of more than fifty percent (50%) of the
+ outstanding shares or beneficial ownership of such entity.
+
+
+2. License Grants and Conditions
+
+2.1. Grants
+
+ Each Contributor hereby grants You a world-wide, royalty-free,
+ non-exclusive license:
+
+ a. under intellectual property rights (other than patent or trademark)
+ Licensable by such Contributor to use, reproduce, make available,
+ modify, display, perform, distribute, and otherwise exploit its
+ Contributions, either on an unmodified basis, with Modifications, or as
+ part of a Larger Work; and
+
+ b. under Patent Claims of such Contributor to make, use, sell, offer for
+ sale, have made, import, and otherwise transfer either its Contributions
+ or its Contributor Version.
+
+2.2. Effective Date
+
+ The licenses granted in Section 2.1 with respect to any Contribution become
+ effective for each Contribution on the date the Contributor first distributes
+ such Contribution.
+
+2.3. Limitations on Grant Scope
+
+ The licenses granted in this Section 2 are the only rights granted under this
+ License. No additional rights or licenses will be implied from the distribution
+ or licensing of Covered Software under this License. Notwithstanding Section
+ 2.1(b) above, no patent license is granted by a Contributor:
+
+ a. for any code that a Contributor has removed from Covered Software; or
+
+ b. for infringements caused by: (i) Your and any other third party’s
+ modifications of Covered Software, or (ii) the combination of its
+ Contributions with other software (except as part of its Contributor
+ Version); or
+
+ c. under Patent Claims infringed by Covered Software in the absence of its
+ Contributions.
+
+ This License does not grant any rights in the trademarks, service marks, or
+ logos of any Contributor (except as may be necessary to comply with the
+ notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+ No Contributor makes additional grants as a result of Your choice to
+ distribute the Covered Software under a subsequent version of this License
+ (see Section 10.2) or under the terms of a Secondary License (if permitted
+ under the terms of Section 3.3).
+
+2.5. Representation
+
+ Each Contributor represents that the Contributor believes its Contributions
+ are its original creation(s) or it has sufficient rights to grant the
+ rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+ This License is not intended to limit any rights You have under applicable
+ copyright doctrines of fair use, fair dealing, or other equivalents.
+
+2.7. Conditions
+
+ Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
+ Section 2.1.
+
+
+3. Responsibilities
+
+3.1. Distribution of Source Form
+
+ All distribution of Covered Software in Source Code Form, including any
+ Modifications that You create or to which You contribute, must be under the
+ terms of this License. You must inform recipients that the Source Code Form
+ of the Covered Software is governed by the terms of this License, and how
+ they can obtain a copy of this License. You may not attempt to alter or
+ restrict the recipients’ rights in the Source Code Form.
+
+3.2. Distribution of Executable Form
+
+ If You distribute Covered Software in Executable Form then:
+
+ a. such Covered Software must also be made available in Source Code Form,
+ as described in Section 3.1, and You must inform recipients of the
+ Executable Form how they can obtain a copy of such Source Code Form by
+ reasonable means in a timely manner, at a charge no more than the cost
+ of distribution to the recipient; and
+
+ b. You may distribute such Executable Form under the terms of this License,
+ or sublicense it under different terms, provided that the license for
+ the Executable Form does not attempt to limit or alter the recipients’
+ rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+ You may create and distribute a Larger Work under terms of Your choice,
+ provided that You also comply with the requirements of this License for the
+ Covered Software. If the Larger Work is a combination of Covered Software
+ with a work governed by one or more Secondary Licenses, and the Covered
+ Software is not Incompatible With Secondary Licenses, this License permits
+ You to additionally distribute such Covered Software under the terms of
+ such Secondary License(s), so that the recipient of the Larger Work may, at
+ their option, further distribute the Covered Software under the terms of
+ either this License or such Secondary License(s).
+
+3.4. Notices
+
+ You may not remove or alter the substance of any license notices (including
+ copyright notices, patent notices, disclaimers of warranty, or limitations
+ of liability) contained within the Source Code Form of the Covered
+ Software, except that You may alter any license notices to the extent
+ required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+ You may choose to offer, and to charge a fee for, warranty, support,
+ indemnity or liability obligations to one or more recipients of Covered
+ Software. However, You may do so only on Your own behalf, and not on behalf
+ of any Contributor. You must make it absolutely clear that any such
+ warranty, support, indemnity, or liability obligation is offered by You
+ alone, and You hereby agree to indemnify every Contributor for any
+ liability incurred by such Contributor as a result of warranty, support,
+ indemnity or liability terms You offer. You may include additional
+ disclaimers of warranty and limitations of liability specific to any
+ jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+
+ If it is impossible for You to comply with any of the terms of this License
+ with respect to some or all of the Covered Software due to statute, judicial
+ order, or regulation then You must: (a) comply with the terms of this License
+ to the maximum extent possible; and (b) describe the limitations and the code
+ they affect. Such description must be placed in a text file included with all
+ distributions of the Covered Software under this License. Except to the
+ extent prohibited by statute or regulation, such description must be
+ sufficiently detailed for a recipient of ordinary skill to be able to
+ understand it.
+
+5. Termination
+
+5.1. The rights granted under this License will terminate automatically if You
+ fail to comply with any of its terms. However, if You become compliant,
+ then the rights granted under this License from a particular Contributor
+ are reinstated (a) provisionally, unless and until such Contributor
+ explicitly and finally terminates Your grants, and (b) on an ongoing basis,
+ if such Contributor fails to notify You of the non-compliance by some
+ reasonable means prior to 60 days after You have come back into compliance.
+ Moreover, Your grants from a particular Contributor are reinstated on an
+ ongoing basis if such Contributor notifies You of the non-compliance by
+ some reasonable means, this is the first time You have received notice of
+ non-compliance with this License from such Contributor, and You become
+ compliant prior to 30 days after Your receipt of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+ infringement claim (excluding declaratory judgment actions, counter-claims,
+ and cross-claims) alleging that a Contributor Version directly or
+ indirectly infringes any patent, then the rights granted to You by any and
+ all Contributors for the Covered Software under Section 2.1 of this License
+ shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
+ license agreements (excluding distributors and resellers) which have been
+ validly granted by You or Your distributors under this License prior to
+ termination shall survive termination.
+
+6. Disclaimer of Warranty
+
+ Covered Software is provided under this License on an “as is” basis, without
+ warranty of any kind, either expressed, implied, or statutory, including,
+ without limitation, warranties that the Covered Software is free of defects,
+ merchantable, fit for a particular purpose or non-infringing. The entire
+ risk as to the quality and performance of the Covered Software is with You.
+ Should any Covered Software prove defective in any respect, You (not any
+ Contributor) assume the cost of any necessary servicing, repair, or
+ correction. This disclaimer of warranty constitutes an essential part of this
+ License. No use of any Covered Software is authorized under this License
+ except under this disclaimer.
+
+7. Limitation of Liability
+
+ Under no circumstances and under no legal theory, whether tort (including
+ negligence), contract, or otherwise, shall any Contributor, or anyone who
+ distributes Covered Software as permitted above, be liable to You for any
+ direct, indirect, special, incidental, or consequential damages of any
+ character including, without limitation, damages for lost profits, loss of
+ goodwill, work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses, even if such party shall have been
+ informed of the possibility of such damages. This limitation of liability
+ shall not apply to liability for death or personal injury resulting from such
+ party’s negligence to the extent applicable law prohibits such limitation.
+ Some jurisdictions do not allow the exclusion or limitation of incidental or
+ consequential damages, so this exclusion and limitation may not apply to You.
+
+8. Litigation
+
+ Any litigation relating to this License may be brought only in the courts of
+ a jurisdiction where the defendant maintains its principal place of business
+ and such litigation shall be governed by laws of that jurisdiction, without
+ reference to its conflict-of-law provisions. Nothing in this Section shall
+ prevent a party’s ability to bring cross-claims or counter-claims.
+
+9. Miscellaneous
+
+ This License represents the complete agreement concerning the subject matter
+ hereof. If any provision of this License is held to be unenforceable, such
+ provision shall be reformed only to the extent necessary to make it
+ enforceable. Any law or regulation which provides that the language of a
+ contract shall be construed against the drafter shall not be used to construe
+ this License against a Contributor.
+
+
+10. Versions of the License
+
+10.1. New Versions
+
+ Mozilla Foundation is the license steward. Except as provided in Section
+ 10.3, no one other than the license steward has the right to modify or
+ publish new versions of this License. Each version will be given a
+ distinguishing version number.
+
+10.2. Effect of New Versions
+
+ You may distribute the Covered Software under the terms of the version of
+ the License under which You originally received the Covered Software, or
+ under the terms of any subsequent version published by the license
+ steward.
+
+10.3. Modified Versions
+
+ If you create software not governed by this License, and you want to
+ create a new license for such software, you may create and use a modified
+ version of this License if you rename the license and remove any
+ references to the name of the license steward (except to note that such
+ modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses
+ If You choose to distribute Source Code Form that is Incompatible With
+ Secondary Licenses under the terms of this version of the License, the
+ notice described in Exhibit B of this License must be attached.
+
+Exhibit A - Source Code Form License Notice
+
+ This Source Code Form is subject to the
+ terms of the Mozilla Public License, v.
+ 2.0. If a copy of the MPL was not
+ distributed with this file, You can
+ obtain one at
+ http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular file, then
+You may include the notice in a location (such as a LICENSE file in a relevant
+directory) where a recipient would be likely to look for such a notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - “Incompatible With Secondary Licenses” Notice
+
+ This Source Code Form is “Incompatible
+ With Secondary Licenses”, as defined by
+ the Mozilla Public License, v. 2.0.
+
diff --git a/vendor/github.com/hashicorp/go-getter/README.md b/vendor/github.com/hashicorp/go-getter/README.md
new file mode 100644
index 00000000..4a0b6a62
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/README.md
@@ -0,0 +1,253 @@
+# go-getter
+
+[![Build Status](http://img.shields.io/travis/hashicorp/go-getter.svg?style=flat-square)][travis]
+[![Build status](https://ci.appveyor.com/api/projects/status/ulq3qr43n62croyq/branch/master?svg=true)][appveyor]
+[![Go Documentation](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)][godocs]
+
+[travis]: http://travis-ci.org/hashicorp/go-getter
+[godocs]: http://godoc.org/github.com/hashicorp/go-getter
+[appveyor]: https://ci.appveyor.com/project/hashicorp/go-getter/branch/master
+
+go-getter is a library for Go (golang) for downloading files or directories
+from various sources using a URL as the primary form of input.
+
+The power of this library is its flexibility: it can download from a number
+of different sources (file paths, Git, HTTP, Mercurial, etc.) using a single
+string as input. This removes from the implementer the burden of knowing how
+to download from a variety of sources.
+
+The concept of a _detector_ automatically turns invalid URLs into proper
+URLs. For example: "github.com/hashicorp/go-getter" would turn into a
+Git URL. Or "./foo" would turn into a file URL. These are extensible.
+
+This library is used by [Terraform](https://terraform.io) for
+downloading modules, [Otto](https://ottoproject.io) for dependencies and
+Appfile imports, and [Nomad](https://nomadproject.io) for downloading
+binaries.
+
+## Installation and Usage
+
+Package documentation can be found on
+[GoDoc](http://godoc.org/github.com/hashicorp/go-getter).
+
+Installation can be done with a normal `go get`:
+
+```
+$ go get github.com/hashicorp/go-getter
+```
+
+go-getter also has a command you can use to test URL strings:
+
+```
+$ go install github.com/hashicorp/go-getter/cmd/go-getter
+...
+
+$ go-getter github.com/foo/bar ./foo
+...
+```
+
+The command is useful for verifying URL structures.
+
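+When go-getter is used as a library, downloads go through the `Client` type
+(the top-level `Get` and `GetFile` functions are shortcuts for it). A minimal
+sketch of programmatic use is shown below; the source string and destination
+directory are illustrative only:
+
+```go
+package main
+
+import (
+	"log"
+	"os"
+
+	"github.com/hashicorp/go-getter"
+)
+
+func main() {
+	// Pwd is used to resolve relative sources; the Client never defaults
+	// it to the current working directory, so set it explicitly.
+	pwd, err := os.Getwd()
+	if err != nil {
+		log.Fatal(err)
+	}
+
+	// ClientModeAny lets go-getter decide whether the source is a single
+	// file or a directory.
+	client := &getter.Client{
+		Src:  "github.com/hashicorp/go-getter",
+		Dst:  "./go-getter-src",
+		Pwd:  pwd,
+		Mode: getter.ClientModeAny,
+	}
+	if err := client.Get(); err != nil {
+		log.Fatal(err)
+	}
+}
+```
+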
+## URL Format
+
+go-getter uses a single string URL as input to download from a variety of
+protocols. go-getter has various "tricks" with this URL to do certain things.
+This section documents the URL format.
+
+### Supported Protocols and Detectors
+
+**Protocols** are used to download files/directories using a specific
+mechanism. Example protocols are Git and HTTP.
+
+**Detectors** are used to transform a valid or invalid URL into another
+URL if it matches a certain pattern. Example: "github.com/user/repo" is
+automatically transformed into a fully valid Git URL. This allows go-getter
+to be very user friendly.
+
+go-getter out of the box supports the following protocols. Additional protocols
+can be augmented at runtime by implementing the `Getter` interface.
+
+ * Local files
+ * Git
+ * Mercurial
+ * HTTP
+ * Amazon S3
+
+In addition to the above protocols, go-getter has what are called "detectors."
+These take a URL and attempt to automatically choose the best protocol for
+it, which might involve even changing the protocol. The following detection
+is built-in by default:
+
+ * File paths such as "./foo" are automatically changed to absolute
+ file URLs.
+ * GitHub URLs, such as "github.com/mitchellh/vagrant" are automatically
+ changed to Git protocol over HTTP.
+ * BitBucket URLs, such as "bitbucket.org/mitchellh/vagrant" are automatically
+ changed to a Git or Mercurial protocol using the BitBucket API.
+
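+Detection can also be driven directly from Go. The sketch below assumes the
+`Detect` function and the default `Detectors` list from `detect.go`:
+
+```go
+package main
+
+import (
+	"fmt"
+	"log"
+
+	"github.com/hashicorp/go-getter"
+)
+
+func main() {
+	// The second argument (pwd) is only needed to resolve relative file
+	// paths, so it can be empty for a GitHub shorthand.
+	src, err := getter.Detect("github.com/hashicorp/go-getter", "", getter.Detectors)
+	if err != nil {
+		log.Fatal(err)
+	}
+
+	// Prints "git::https://github.com/hashicorp/go-getter.git".
+	fmt.Println(src)
+}
+```
+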
+### Forced Protocol
+
+In some cases, the protocol to use is ambiguous depending on the source
+URL. For example, "http://github.com/mitchellh/vagrant.git" could reference
+an HTTP URL or a Git URL. Forced protocol syntax is used to disambiguate this
+URL.
+
+Forced protocol can be done by prefixing the URL with the protocol followed
+by double colons. For example: `git::http://github.com/mitchellh/vagrant.git`
+would download the given HTTP URL using the Git protocol.
+
+Forced protocols will also override any detectors.
+
+In the absence of a forced protocol, detectors may be run on the URL, transforming
+the protocol anyway. The above example would've used the Git protocol either
+way since the Git detector would've detected it was a GitHub URL.
+
+### Protocol-Specific Options
+
+Each protocol can support protocol-specific options to configure that
+protocol. For example, the `git` protocol supports specifying a `ref`
+query parameter that tells it what ref to checkout for that Git
+repository.
+
+The options are specified as query parameters on the URL (or URL-like string)
+given to go-getter. Using the Git example above, the URL below is a valid
+input to go-getter:
+
+ github.com/hashicorp/go-getter?ref=abcd1234
+
+The protocol-specific options are documented below the URL format
+section. But because they are part of the URL, we point it out here so
+you know they exist.
+
+### Checksumming
+
+For file downloads of any protocol, go-getter can automatically verify
+a checksum for you. Note that checksumming only works for downloading files,
+not directories, but checksumming will work for any protocol.
+
+To checksum a file, append a `checksum` query parameter to the URL.
+The parameter value should be in the format `type:value`, where
+type is "md5", "sha1", "sha256", or "sha512". The "value" should be
+the actual checksum value. go-getter will parse out this query parameter
+automatically and use it to verify the checksum. An example URL
+is shown below:
+
+```
+./foo.txt?checksum=md5:b7d96c89d09d9e204f5fedc4d5d55b21
+```
+
+The checksum query parameter is never sent to the backend protocol
+implementation. It is used at a higher level by go-getter itself.
+
+### Unarchiving
+
+go-getter will automatically unarchive files into a file or directory
+based on the extension of the file being requested (over any protocol).
+This works for both file and directory downloads.
+
+go-getter looks for an `archive` query parameter to specify the format of
+the archive. If this isn't specified, go-getter will use the extension of
+the path to see if it appears archived. Unarchiving can be explicitly
+disabled by setting the `archive` query parameter to `false`.
+
+The following archive formats are supported:
+
+ * `tar.gz` and `tgz`
+ * `tar.bz2` and `tbz2`
+ * `zip`
+ * `gz`
+ * `bz2`
+
+An example URL is shown below:
+
+```
+./foo.zip
+```
+
+This will automatically be inferred to be a ZIP file and will be extracted.
+You can also be explicit about the archive type:
+
+```
+./some/other/path?archive=zip
+```
+
+And finally, you can disable archiving completely:
+
+```
+./some/path?archive=false
+```
+
+You can combine unarchiving with the other features of go-getter such
+as checksumming. The special `archive` query parameter will be removed
+from the URL before going to the final protocol downloader.
+
+## Protocol-Specific Options
+
+This section documents the protocol-specific options that can be specified
+for go-getter. These options should be appended to the input as normal query
+parameters. Depending on the usage of go-getter, applications may provide
+alternate ways of inputting options. For example, [Nomad](https://www.nomadproject.io)
+provides a nice options block for specifying options rather than in the URL.
+
+## General (All Protocols)
+
+The options below are available to all protocols:
+
+ * `archive` - The archive format to use to unarchive this file, or "" (empty
+ string) to disable unarchiving. For more details, see the complete section
+ on archive support above.
+
+ * `checksum` - Checksum to verify the downloaded file or archive. See
+ the entire section on checksumming above for format and more details.
+
+### Local Files (`file`)
+
+None
+
+### Git (`git`)
+
+ * `ref` - The Git ref to checkout. This is a ref, so it can point to
+ a commit SHA, a branch name, etc. If it is a named ref such as a branch
+ name, go-getter will update it to the latest on each get.
+
+ * `sshkey` - An SSH private key to use during clones. The provided key must
+ be a base64-encoded string. For example, to generate a suitable `sshkey`
+ from a private key file on disk, you would run `base64 -w0 <file>`.
+
+ **Note**: Git 2.3+ is required to use this feature.
+
+### Mercurial (`hg`)
+
+ * `rev` - The Mercurial revision to checkout.
+
+### HTTP (`http`)
+
+None
+
+### S3 (`s3`)
+
+S3 takes various access configurations in the URL. Note that it will also
+read these from standard AWS environment variables if they're set. If
+the query parameters are present, these take priority.
+
+ * `aws_access_key_id` - AWS access key.
+ * `aws_access_key_secret` - AWS access key secret.
+ * `aws_access_token` - AWS access token if this is being used.
+
+#### Using IAM Instance Profiles with S3
+
+If you use go-getter and want to use an EC2 IAM Instance Profile to avoid
+using credentials, then just omit these and the profile, if available, will
+be used automatically.
+
+#### S3 Bucket Examples
+
+S3 has several addressing schemes used to reference your bucket. These are
+listed here: http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingBucket.html#access-bucket-intro
+
+Some examples for these addressing schemes:
+- s3::https://s3.amazonaws.com/bucket/foo
+- s3::https://s3-eu-west-1.amazonaws.com/bucket/foo
+- bucket.s3.amazonaws.com/foo
+- bucket.s3-eu-west-1.amazonaws.com/foo/bar
+
diff --git a/vendor/github.com/hashicorp/go-getter/client.go b/vendor/github.com/hashicorp/go-getter/client.go
new file mode 100644
index 00000000..876812a0
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/client.go
@@ -0,0 +1,335 @@
+package getter
+
+import (
+ "bytes"
+ "crypto/md5"
+ "crypto/sha1"
+ "crypto/sha256"
+ "crypto/sha512"
+ "encoding/hex"
+ "fmt"
+ "hash"
+ "io"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "strconv"
+ "strings"
+
+ urlhelper "github.com/hashicorp/go-getter/helper/url"
+)
+
+// Client is a client for downloading things.
+//
+// Top-level functions such as Get are shortcuts for interacting with a client.
+// Using a client directly allows more fine-grained control over how downloading
+// is done, as well as customizing the protocols supported.
+type Client struct {
+ // Src is the source URL to get.
+ //
+ // Dst is the path to save the downloaded thing as. If Dir is set to
+ // true, then this should be a directory. If the directory doesn't exist,
+ // it will be created for you.
+ //
+ // Pwd is the working directory for detection. If this isn't set, some
+ // detection may fail. Client will not default pwd to the current
+ // working directory for security reasons.
+ Src string
+ Dst string
+ Pwd string
+
+ // Mode is the method of download the client will use. See ClientMode
+ // for documentation.
+ Mode ClientMode
+
+ // Detectors is the list of detectors that are tried on the source.
+ // If this is nil, then the default Detectors will be used.
+ Detectors []Detector
+
+ // Decompressors is the map of decompressors supported by this client.
+ // If this is nil, then the default value is the Decompressors global.
+ Decompressors map[string]Decompressor
+
+ // Getters is the map of protocols supported by this client. If this
+ // is nil, then the default Getters variable will be used.
+ Getters map[string]Getter
+
+ // Dir, if true, tells the Client it is downloading a directory (versus
+ // a single file). This distinction is necessary since filenames and
+ // directory names follow the same format so disambiguating is impossible
+ // without knowing ahead of time.
+ //
+ // WARNING: deprecated. If Mode is set, that will take precedence.
+ Dir bool
+}
+
+// Get downloads the configured source to the destination.
+func (c *Client) Get() error {
+ // Store this locally since there are cases we swap this
+ mode := c.Mode
+ if mode == ClientModeInvalid {
+ if c.Dir {
+ mode = ClientModeDir
+ } else {
+ mode = ClientModeFile
+ }
+ }
+
+ // Default decompressor value
+ decompressors := c.Decompressors
+ if decompressors == nil {
+ decompressors = Decompressors
+ }
+
+ // Detect the URL. This is safe if it is already detected.
+ detectors := c.Detectors
+ if detectors == nil {
+ detectors = Detectors
+ }
+ src, err := Detect(c.Src, c.Pwd, detectors)
+ if err != nil {
+ return err
+ }
+
+ // Determine if we have a forced protocol, e.g. "git::http://..."
+ force, src := getForcedGetter(src)
+
+ // If there is a subdir component, then we download the root separately
+ // and then copy over the proper subdir.
+ var realDst string
+ dst := c.Dst
+ src, subDir := SourceDirSubdir(src)
+ if subDir != "" {
+ tmpDir, err := ioutil.TempDir("", "tf")
+ if err != nil {
+ return err
+ }
+ if err := os.RemoveAll(tmpDir); err != nil {
+ return err
+ }
+ defer os.RemoveAll(tmpDir)
+
+ realDst = dst
+ dst = tmpDir
+ }
+
+ u, err := urlhelper.Parse(src)
+ if err != nil {
+ return err
+ }
+ if force == "" {
+ force = u.Scheme
+ }
+
+ getters := c.Getters
+ if getters == nil {
+ getters = Getters
+ }
+
+ g, ok := getters[force]
+ if !ok {
+ return fmt.Errorf(
+ "download not supported for scheme '%s'", force)
+ }
+
+ // We have magic query parameters that we use to signal different features
+ q := u.Query()
+
+ // Determine if we have an archive type
+ archiveV := q.Get("archive")
+ if archiveV != "" {
+ // Delete the parameter since it is a magic parameter we don't
+ // want to pass on to the Getter
+ q.Del("archive")
+ u.RawQuery = q.Encode()
+
+ // If we can parse the value as a bool and it is false, then
+ // set the archive to "-" which should never map to a decompressor
+ if b, err := strconv.ParseBool(archiveV); err == nil && !b {
+ archiveV = "-"
+ }
+ }
+ if archiveV == "" {
+ // We don't appear to... but is it part of the filename?
+ matchingLen := 0
+ for k := range decompressors {
+ if strings.HasSuffix(u.Path, "."+k) && len(k) > matchingLen {
+ archiveV = k
+ matchingLen = len(k)
+ }
+ }
+ }
+
+ // If we have a decompressor, then we need to change the destination
+ // to download to a temporary path. We unarchive this into the final,
+ // real path.
+ var decompressDst string
+ var decompressDir bool
+ decompressor := decompressors[archiveV]
+ if decompressor != nil {
+ // Create a temporary directory to store our archive. We delete
+ // this at the end of everything.
+ td, err := ioutil.TempDir("", "getter")
+ if err != nil {
+ return fmt.Errorf(
+ "Error creating temporary directory for archive: %s", err)
+ }
+ defer os.RemoveAll(td)
+
+ // Swap the download directory to be our temporary path and
+ // store the old values.
+ decompressDst = dst
+ decompressDir = mode != ClientModeFile
+ dst = filepath.Join(td, "archive")
+ mode = ClientModeFile
+ }
+
+ // Determine if we have a checksum
+ var checksumHash hash.Hash
+ var checksumValue []byte
+ if v := q.Get("checksum"); v != "" {
+ // Delete the query parameter if we have it.
+ q.Del("checksum")
+ u.RawQuery = q.Encode()
+
+ // Determine the checksum hash type
+ checksumType := ""
+ idx := strings.Index(v, ":")
+ if idx > -1 {
+ checksumType = v[:idx]
+ }
+ switch checksumType {
+ case "md5":
+ checksumHash = md5.New()
+ case "sha1":
+ checksumHash = sha1.New()
+ case "sha256":
+ checksumHash = sha256.New()
+ case "sha512":
+ checksumHash = sha512.New()
+ default:
+ return fmt.Errorf(
+ "unsupported checksum type: %s", checksumType)
+ }
+
+ // Get the remainder of the value and parse it into bytes
+ b, err := hex.DecodeString(v[idx+1:])
+ if err != nil {
+ return fmt.Errorf("invalid checksum: %s", err)
+ }
+
+ // Set our value
+ checksumValue = b
+ }
+
+ if mode == ClientModeAny {
+ // Ask the getter which client mode to use
+ mode, err = g.ClientMode(u)
+ if err != nil {
+ return err
+ }
+
+ // Destination is the base name of the URL path in "any" mode when
+ // a file source is detected.
+ if mode == ClientModeFile {
+ dst = filepath.Join(dst, filepath.Base(u.Path))
+ }
+ }
+
+ // If we're not downloading a directory, then just download the file
+ // and return.
+ if mode == ClientModeFile {
+ err := g.GetFile(dst, u)
+ if err != nil {
+ return err
+ }
+
+ if checksumHash != nil {
+ if err := checksum(dst, checksumHash, checksumValue); err != nil {
+ return err
+ }
+ }
+
+ if decompressor != nil {
+ // We have a decompressor, so decompress the current destination
+ // into the final destination with the proper mode.
+ err := decompressor.Decompress(decompressDst, dst, decompressDir)
+ if err != nil {
+ return err
+ }
+
+ // Swap the information back
+ dst = decompressDst
+ if decompressDir {
+ mode = ClientModeAny
+ } else {
+ mode = ClientModeFile
+ }
+ }
+
+ // We check the mode again because it can be switched back
+ // if we were unarchiving. If we're still only Get-ing a file, then
+ // we're done.
+ if mode == ClientModeFile {
+ return nil
+ }
+ }
+
+ // If we're at this point we're either downloading a directory or we've
+ // downloaded and unarchived a directory and we're just checking subdir.
+ // If we have a decompressor, we don't Get here because the archive was
+ // already downloaded above.
+ if decompressor == nil {
+ // If we're getting a directory, then this is an error. You cannot
+ // checksum a directory. TODO: test
+ if checksumHash != nil {
+ return fmt.Errorf(
+ "checksum cannot be specified for directory download")
+ }
+
+ // We're downloading a directory, which might require a bit more work
+ // if we're specifying a subdir.
+ err := g.Get(dst, u)
+ if err != nil {
+ err = fmt.Errorf("error downloading '%s': %s", src, err)
+ return err
+ }
+ }
+
+ // If we have a subdir, copy that over
+ if subDir != "" {
+ if err := os.RemoveAll(realDst); err != nil {
+ return err
+ }
+ if err := os.MkdirAll(realDst, 0755); err != nil {
+ return err
+ }
+
+ return copyDir(realDst, filepath.Join(dst, subDir), false)
+ }
+
+ return nil
+}
+
+// checksum is a simple method to compute the checksum of a source file
+// and compare it to the given expected value.
+func checksum(source string, h hash.Hash, v []byte) error {
+ f, err := os.Open(source)
+ if err != nil {
+ return fmt.Errorf("Failed to open file for checksum: %s", err)
+ }
+ defer f.Close()
+
+ if _, err := io.Copy(h, f); err != nil {
+ return fmt.Errorf("Failed to hash: %s", err)
+ }
+
+ if actual := h.Sum(nil); !bytes.Equal(actual, v) {
+ return fmt.Errorf(
+ "Checksums did not match.\nExpected: %s\nGot: %s",
+ hex.EncodeToString(v),
+ hex.EncodeToString(actual))
+ }
+
+ return nil
+}
diff --git a/vendor/github.com/hashicorp/go-getter/client_mode.go b/vendor/github.com/hashicorp/go-getter/client_mode.go
new file mode 100644
index 00000000..7f02509a
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/client_mode.go
@@ -0,0 +1,24 @@
+package getter
+
+// ClientMode is the mode that the client operates in.
+type ClientMode uint
+
+const (
+ ClientModeInvalid ClientMode = iota
+
+ // ClientModeAny downloads anything it can. In this mode, dst must
+ // be a directory. If src is a file, it is saved into the directory
+ // with the basename of the URL. If src is a directory or archive,
+ // it is unpacked directly into dst.
+ ClientModeAny
+
+ // ClientModeFile downloads a single file. In this mode, dst must
+ // be a file path (doesn't have to exist). src must point to a single
+ // file. It is saved as dst.
+ ClientModeFile
+
+ // ClientModeDir downloads a directory. In this mode, dst must be
+ // a directory path (doesn't have to exist). src must point to an
+ // archive or directory (such as in s3).
+ ClientModeDir
+)
diff --git a/vendor/github.com/hashicorp/go-getter/copy_dir.go b/vendor/github.com/hashicorp/go-getter/copy_dir.go
new file mode 100644
index 00000000..2f58e8ae
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/copy_dir.go
@@ -0,0 +1,78 @@
+package getter
+
+import (
+ "io"
+ "os"
+ "path/filepath"
+ "strings"
+)
+
+// copyDir copies the src directory contents into dst. Both directories
+// should already exist.
+//
+// If ignoreDot is set to true, then dot-prefixed files/folders are ignored.
+func copyDir(dst string, src string, ignoreDot bool) error {
+ src, err := filepath.EvalSymlinks(src)
+ if err != nil {
+ return err
+ }
+
+ walkFn := func(path string, info os.FileInfo, err error) error {
+ if err != nil {
+ return err
+ }
+ if path == src {
+ return nil
+ }
+
+ if ignoreDot && strings.HasPrefix(filepath.Base(path), ".") {
+ // Skip any dot files
+ if info.IsDir() {
+ return filepath.SkipDir
+ } else {
+ return nil
+ }
+ }
+
+ // The "path" has the src prefixed to it. We need to join our
+ // destination with the path without the src on it.
+ dstPath := filepath.Join(dst, path[len(src):])
+
+ // If we have a directory, make that subdirectory, then continue
+ // the walk.
+ if info.IsDir() {
+ if path == filepath.Join(src, dst) {
+ // dst is in src; don't walk it.
+ return nil
+ }
+
+ if err := os.MkdirAll(dstPath, 0755); err != nil {
+ return err
+ }
+
+ return nil
+ }
+
+ // If we have a file, copy the contents.
+ srcF, err := os.Open(path)
+ if err != nil {
+ return err
+ }
+ defer srcF.Close()
+
+ dstF, err := os.Create(dstPath)
+ if err != nil {
+ return err
+ }
+ defer dstF.Close()
+
+ if _, err := io.Copy(dstF, srcF); err != nil {
+ return err
+ }
+
+ // Chmod it
+ return os.Chmod(dstPath, info.Mode())
+ }
+
+ return filepath.Walk(src, walkFn)
+}
diff --git a/vendor/github.com/hashicorp/go-getter/decompress.go b/vendor/github.com/hashicorp/go-getter/decompress.go
new file mode 100644
index 00000000..d18174cc
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/decompress.go
@@ -0,0 +1,29 @@
+package getter
+
+// Decompressor defines the interface that must be implemented to add
+// support for decompressing a type.
+type Decompressor interface {
+ // Decompress should decompress src to dst. dir specifies whether dst
+ // is a directory or single file. src is guaranteed to be a single file
+ // that exists. dst is not guaranteed to exist already.
+ Decompress(dst, src string, dir bool) error
+}
+
+// Decompressors is the mapping of extension to the Decompressor implementation
+// that will decompress that extension/type.
+var Decompressors map[string]Decompressor
+
+func init() {
+ tbzDecompressor := new(TarBzip2Decompressor)
+ tgzDecompressor := new(TarGzipDecompressor)
+
+ Decompressors = map[string]Decompressor{
+ "bz2": new(Bzip2Decompressor),
+ "gz": new(GzipDecompressor),
+ "tar.bz2": tbzDecompressor,
+ "tar.gz": tgzDecompressor,
+ "tbz2": tbzDecompressor,
+ "tgz": tgzDecompressor,
+ "zip": new(ZipDecompressor),
+ }
+}
diff --git a/vendor/github.com/hashicorp/go-getter/decompress_bzip2.go b/vendor/github.com/hashicorp/go-getter/decompress_bzip2.go
new file mode 100644
index 00000000..339f4cf7
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/decompress_bzip2.go
@@ -0,0 +1,45 @@
+package getter
+
+import (
+ "compress/bzip2"
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+)
+
+// Bzip2Decompressor is an implementation of Decompressor that can
+// decompress bz2 files.
+type Bzip2Decompressor struct{}
+
+func (d *Bzip2Decompressor) Decompress(dst, src string, dir bool) error {
+ // Directory isn't supported at all
+ if dir {
+ return fmt.Errorf("bzip2-compressed files can only unarchive to a single file")
+ }
+
+ // If we're going into a directory we should make that first
+ if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil {
+ return err
+ }
+
+ // File first
+ f, err := os.Open(src)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ // Bzip2 compression is second
+ bzipR := bzip2.NewReader(f)
+
+ // Copy it out
+ dstF, err := os.Create(dst)
+ if err != nil {
+ return err
+ }
+ defer dstF.Close()
+
+ _, err = io.Copy(dstF, bzipR)
+ return err
+}
diff --git a/vendor/github.com/hashicorp/go-getter/decompress_gzip.go b/vendor/github.com/hashicorp/go-getter/decompress_gzip.go
new file mode 100644
index 00000000..20010540
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/decompress_gzip.go
@@ -0,0 +1,49 @@
+package getter
+
+import (
+ "compress/gzip"
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+)
+
+// GzipDecompressor is an implementation of Decompressor that can
+// decompress gzip files.
+type GzipDecompressor struct{}
+
+func (d *GzipDecompressor) Decompress(dst, src string, dir bool) error {
+ // Directory isn't supported at all
+ if dir {
+ return fmt.Errorf("gzip-compressed files can only unarchive to a single file")
+ }
+
+ // If we're going into a directory we should make that first
+ if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil {
+ return err
+ }
+
+ // File first
+ f, err := os.Open(src)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ // gzip compression is second
+ gzipR, err := gzip.NewReader(f)
+ if err != nil {
+ return err
+ }
+ defer gzipR.Close()
+
+ // Copy it out
+ dstF, err := os.Create(dst)
+ if err != nil {
+ return err
+ }
+ defer dstF.Close()
+
+ _, err = io.Copy(dstF, gzipR)
+ return err
+}
diff --git a/vendor/github.com/hashicorp/go-getter/decompress_tar.go b/vendor/github.com/hashicorp/go-getter/decompress_tar.go
new file mode 100644
index 00000000..61f60431
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/decompress_tar.go
@@ -0,0 +1,83 @@
+package getter
+
+import (
+ "archive/tar"
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+)
+
+// untar is a shared helper for untarring an archive. The reader should provide
+// an uncompressed view of the tar archive.
+func untar(input io.Reader, dst, src string, dir bool) error {
+ tarR := tar.NewReader(input)
+ done := false
+ for {
+ hdr, err := tarR.Next()
+ if err == io.EOF {
+ if !done {
+ // Empty archive
+ return fmt.Errorf("empty archive: %s", src)
+ }
+
+ return nil
+ }
+ if err != nil {
+ return err
+ }
+
+ path := dst
+ if dir {
+ path = filepath.Join(path, hdr.Name)
+ }
+
+ if hdr.FileInfo().IsDir() {
+ if !dir {
+ return fmt.Errorf("expected a single file: %s", src)
+ }
+
+ // A directory, just make the directory and continue unarchiving...
+ if err := os.MkdirAll(path, 0755); err != nil {
+ return err
+ }
+
+ continue
+ } else {
+ // There is no ordering guarantee that a file in a directory is
+ // listed before the directory
+ dstPath := filepath.Dir(path)
+
+ // Check that the directory exists, otherwise create it
+ if _, err := os.Stat(dstPath); os.IsNotExist(err) {
+ if err := os.MkdirAll(dstPath, 0755); err != nil {
+ return err
+ }
+ }
+ }
+
+ // We have a file. If we already decoded, then it is an error
+ if !dir && done {
+ return fmt.Errorf("expected a single file, got multiple: %s", src)
+ }
+
+ // Mark that we're done so a future file in single-file mode errors
+ done = true
+
+ // Open the file for writing
+ dstF, err := os.Create(path)
+ if err != nil {
+ return err
+ }
+ _, err = io.Copy(dstF, tarR)
+ dstF.Close()
+ if err != nil {
+ return err
+ }
+
+ // Chmod the file
+ if err := os.Chmod(path, hdr.FileInfo().Mode()); err != nil {
+ return err
+ }
+ }
+}
diff --git a/vendor/github.com/hashicorp/go-getter/decompress_tbz2.go b/vendor/github.com/hashicorp/go-getter/decompress_tbz2.go
new file mode 100644
index 00000000..5391b5c8
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/decompress_tbz2.go
@@ -0,0 +1,33 @@
+package getter
+
+import (
+ "compress/bzip2"
+ "os"
+ "path/filepath"
+)
+
+// TarBzip2Decompressor is an implementation of Decompressor that can
+// decompress tar.bz2 files.
+type TarBzip2Decompressor struct{}
+
+func (d *TarBzip2Decompressor) Decompress(dst, src string, dir bool) error {
+ // If we're going into a directory we should make that first
+ mkdir := dst
+ if !dir {
+ mkdir = filepath.Dir(dst)
+ }
+ if err := os.MkdirAll(mkdir, 0755); err != nil {
+ return err
+ }
+
+ // File first
+ f, err := os.Open(src)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ // Bzip2 compression is second
+ bzipR := bzip2.NewReader(f)
+ return untar(bzipR, dst, src, dir)
+}
diff --git a/vendor/github.com/hashicorp/go-getter/decompress_testing.go b/vendor/github.com/hashicorp/go-getter/decompress_testing.go
new file mode 100644
index 00000000..82b8ab4f
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/decompress_testing.go
@@ -0,0 +1,135 @@
+package getter
+
+import (
+ "crypto/md5"
+ "encoding/hex"
+ "io"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "reflect"
+ "runtime"
+ "sort"
+ "strings"
+
+ "github.com/mitchellh/go-testing-interface"
+)
+
+// TestDecompressCase is a single test case for testing decompressors
+type TestDecompressCase struct {
+ Input string // Input is the complete path to the input file
+ Dir bool // Dir is whether or not we're testing directory mode
+ Err bool // Err is whether we expect an error or not
+ DirList []string // DirList is the list of files for Dir mode
+ FileMD5 string // FileMD5 is the expected MD5 for a single file
+}
+
+// TestDecompressor is a helper function for testing generic decompressors.
+func TestDecompressor(t testing.T, d Decompressor, cases []TestDecompressCase) {
+ for _, tc := range cases {
+ t.Logf("Testing: %s", tc.Input)
+
+ // Temporary dir to store stuff
+ td, err := ioutil.TempDir("", "getter")
+ if err != nil {
+ t.Fatalf("err: %s", err)
+ }
+
+ // The destination always joins "subdir/result" so that we exercise a new path
+ dst := filepath.Join(td, "subdir", "result")
+
+ // We use a function so defers work
+ func() {
+ defer os.RemoveAll(td)
+
+ // Decompress
+ err := d.Decompress(dst, tc.Input, tc.Dir)
+ if (err != nil) != tc.Err {
+ t.Fatalf("err %s: %s", tc.Input, err)
+ }
+ if tc.Err {
+ return
+ }
+
+ // If it isn't a directory, then check for a single file
+ if !tc.Dir {
+ fi, err := os.Stat(dst)
+ if err != nil {
+ t.Fatalf("err %s: %s", tc.Input, err)
+ }
+ if fi.IsDir() {
+ t.Fatalf("err %s: expected file, got directory", tc.Input)
+ }
+ if tc.FileMD5 != "" {
+ actual := testMD5(t, dst)
+ expected := tc.FileMD5
+ if actual != expected {
+ t.Fatalf("err %s: expected MD5 %s, got %s", tc.Input, expected, actual)
+ }
+ }
+
+ return
+ }
+
+ // Convert expected for windows
+ expected := tc.DirList
+ if runtime.GOOS == "windows" {
+ for i, v := range expected {
+ expected[i] = strings.Replace(v, "/", "\\", -1)
+ }
+ }
+
+ // Directory, check for the correct contents
+ actual := testListDir(t, dst)
+ if !reflect.DeepEqual(actual, expected) {
+ t.Fatalf("bad %s\n\n%#v\n\n%#v", tc.Input, actual, expected)
+ }
+ }()
+ }
+}
+
+func testListDir(t testing.T, path string) []string {
+ var result []string
+ err := filepath.Walk(path, func(sub string, info os.FileInfo, err error) error {
+ if err != nil {
+ return err
+ }
+
+ sub = strings.TrimPrefix(sub, path)
+ if sub == "" {
+ return nil
+ }
+ sub = sub[1:] // Trim the leading path sep.
+
+ // If it is a dir, add trailing sep
+ if info.IsDir() {
+ sub += string(os.PathSeparator)
+ }
+
+ result = append(result, sub)
+ return nil
+ })
+ if err != nil {
+ t.Fatalf("err: %s", err)
+ }
+
+ sort.Strings(result)
+ return result
+}
+
+func testMD5(t testing.T, path string) string {
+ f, err := os.Open(path)
+ if err != nil {
+ t.Fatalf("err: %s", err)
+ }
+ defer f.Close()
+
+ h := md5.New()
+ _, err = io.Copy(h, f)
+ if err != nil {
+ t.Fatalf("err: %s", err)
+ }
+
+ result := h.Sum(nil)
+ return hex.EncodeToString(result)
+}
diff --git a/vendor/github.com/hashicorp/go-getter/decompress_tgz.go b/vendor/github.com/hashicorp/go-getter/decompress_tgz.go
new file mode 100644
index 00000000..65eb70dd
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/decompress_tgz.go
@@ -0,0 +1,39 @@
+package getter
+
+import (
+ "compress/gzip"
+ "fmt"
+ "os"
+ "path/filepath"
+)
+
+// TarGzipDecompressor is an implementation of Decompressor that can
+// decompress tar.gz (tgz) files.
+type TarGzipDecompressor struct{}
+
+func (d *TarGzipDecompressor) Decompress(dst, src string, dir bool) error {
+ // If we're going into a directory we should make that first
+ mkdir := dst
+ if !dir {
+ mkdir = filepath.Dir(dst)
+ }
+ if err := os.MkdirAll(mkdir, 0755); err != nil {
+ return err
+ }
+
+ // File first
+ f, err := os.Open(src)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ // Gzip compression is second
+ gzipR, err := gzip.NewReader(f)
+ if err != nil {
+ return fmt.Errorf("Error opening a gzip reader for %s: %s", src, err)
+ }
+ defer gzipR.Close()
+
+ return untar(gzipR, dst, src, dir)
+}
diff --git a/vendor/github.com/hashicorp/go-getter/decompress_zip.go b/vendor/github.com/hashicorp/go-getter/decompress_zip.go
new file mode 100644
index 00000000..a065c076
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/decompress_zip.go
@@ -0,0 +1,96 @@
+package getter
+
+import (
+ "archive/zip"
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+)
+
+// ZipDecompressor is an implementation of Decompressor that can
+// decompress zip files.
+type ZipDecompressor struct{}
+
+func (d *ZipDecompressor) Decompress(dst, src string, dir bool) error {
+ // If we're going into a directory we should make that first
+ mkdir := dst
+ if !dir {
+ mkdir = filepath.Dir(dst)
+ }
+ if err := os.MkdirAll(mkdir, 0755); err != nil {
+ return err
+ }
+
+ // Open the zip
+ zipR, err := zip.OpenReader(src)
+ if err != nil {
+ return err
+ }
+ defer zipR.Close()
+
+ // Check the zip integrity
+ if len(zipR.File) == 0 {
+ // Empty archive
+ return fmt.Errorf("empty archive: %s", src)
+ }
+ if !dir && len(zipR.File) > 1 {
+ return fmt.Errorf("expected a single file: %s", src)
+ }
+
+ // Go through and unarchive
+ for _, f := range zipR.File {
+ path := dst
+ if dir {
+ path = filepath.Join(path, f.Name)
+ }
+
+ if f.FileInfo().IsDir() {
+ if !dir {
+ return fmt.Errorf("expected a single file: %s", src)
+ }
+
+ // A directory, just make the directory and continue unarchiving...
+ if err := os.MkdirAll(path, 0755); err != nil {
+ return err
+ }
+
+ continue
+ }
+
+ // Create the enclosing directories if we must. ZIP files aren't
+ // required to contain entries for the directories themselves, so
+ // this can happen.
+ if dir {
+ if err := os.MkdirAll(filepath.Dir(path), 0755); err != nil {
+ return err
+ }
+ }
+
+ // Open the file for reading
+ srcF, err := f.Open()
+ if err != nil {
+ return err
+ }
+
+ // Open the file for writing
+ dstF, err := os.Create(path)
+ if err != nil {
+ srcF.Close()
+ return err
+ }
+ _, err = io.Copy(dstF, srcF)
+ srcF.Close()
+ dstF.Close()
+ if err != nil {
+ return err
+ }
+
+ // Chmod the file
+ if err := os.Chmod(path, f.Mode()); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
diff --git a/vendor/github.com/hashicorp/go-getter/detect.go b/vendor/github.com/hashicorp/go-getter/detect.go
new file mode 100644
index 00000000..481b737c
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/detect.go
@@ -0,0 +1,97 @@
+package getter
+
+import (
+ "fmt"
+ "path/filepath"
+
+ "github.com/hashicorp/go-getter/helper/url"
+)
+
+// Detector defines the interface that an invalid URL or a URL with a blank
+// scheme is passed through in order to determine if it's shorthand for
+// something else well-known.
+type Detector interface {
+ // Detect will detect whether the string matches a known pattern to
+ // turn it into a proper URL.
+ Detect(string, string) (string, bool, error)
+}
+
+// Detectors is the list of detectors that are tried on an invalid URL.
+// This is also the order they're tried (index 0 is first).
+var Detectors []Detector
+
+func init() {
+ Detectors = []Detector{
+ new(GitHubDetector),
+ new(BitBucketDetector),
+ new(S3Detector),
+ new(FileDetector),
+ }
+}
+
+// Detect turns a source string into another source string if it is
+// detected to be of a known pattern.
+//
+// The third parameter should be the list of detectors to use in the
+// order to try them. If you don't want to configure this, just use
+// the global Detectors variable.
+//
+// This is safe to be called with an already valid source string: Detect
+// will just return it.
+func Detect(src string, pwd string, ds []Detector) (string, error) {
+ getForce, getSrc := getForcedGetter(src)
+
+ // Separate out the subdir if there is one, we don't pass that to detect
+ getSrc, subDir := SourceDirSubdir(getSrc)
+
+ u, err := url.Parse(getSrc)
+ if err == nil && u.Scheme != "" {
+ // Valid URL
+ return src, nil
+ }
+
+ for _, d := range ds {
+ result, ok, err := d.Detect(getSrc, pwd)
+ if err != nil {
+ return "", err
+ }
+ if !ok {
+ continue
+ }
+
+ var detectForce string
+ detectForce, result = getForcedGetter(result)
+ result, detectSubdir := SourceDirSubdir(result)
+
+ // If we have a subdir from the detection, then prepend it to our
+ // requested subdir.
+ if detectSubdir != "" {
+ if subDir != "" {
+ subDir = filepath.Join(detectSubdir, subDir)
+ } else {
+ subDir = detectSubdir
+ }
+ }
+ if subDir != "" {
+ u, err := url.Parse(result)
+ if err != nil {
+ return "", fmt.Errorf("Error parsing URL: %s", err)
+ }
+ u.Path += "//" + subDir
+ result = u.String()
+ }
+
+ // Preserve the forced getter if it exists. We try to use the
+ // original set force first, followed by any force set by the
+ // detector.
+ if getForce != "" {
+ result = fmt.Sprintf("%s::%s", getForce, result)
+ } else if detectForce != "" {
+ result = fmt.Sprintf("%s::%s", detectForce, result)
+ }
+
+ return result, nil
+ }
+
+ return "", fmt.Errorf("invalid source string: %s", src)
+}
diff --git a/vendor/github.com/hashicorp/go-getter/detect_bitbucket.go b/vendor/github.com/hashicorp/go-getter/detect_bitbucket.go
new file mode 100644
index 00000000..a183a17d
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/detect_bitbucket.go
@@ -0,0 +1,66 @@
+package getter
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "net/url"
+ "strings"
+)
+
+// BitBucketDetector implements Detector to detect BitBucket URLs and turn
+// them into URLs that the Git or Hg Getter can understand.
+type BitBucketDetector struct{}
+
+func (d *BitBucketDetector) Detect(src, _ string) (string, bool, error) {
+ if len(src) == 0 {
+ return "", false, nil
+ }
+
+ if strings.HasPrefix(src, "bitbucket.org/") {
+ return d.detectHTTP(src)
+ }
+
+ return "", false, nil
+}
+
+func (d *BitBucketDetector) detectHTTP(src string) (string, bool, error) {
+ u, err := url.Parse("https://" + src)
+ if err != nil {
+ return "", true, fmt.Errorf("error parsing BitBucket URL: %s", err)
+ }
+
+ // We need to get info on this BitBucket repository to determine whether
+ // it is Git or Hg.
+ var info struct {
+ SCM string `json:"scm"`
+ }
+ infoUrl := "https://api.bitbucket.org/1.0/repositories" + u.Path
+ resp, err := http.Get(infoUrl)
+ if err != nil {
+ return "", true, fmt.Errorf("error looking up BitBucket URL: %s", err)
+ }
+ defer resp.Body.Close()
+ if resp.StatusCode == 403 {
+ // A private repo
+ return "", true, fmt.Errorf(
+ "shorthand BitBucket URL can't be used for private repos, " +
+ "please use a full URL")
+ }
+ dec := json.NewDecoder(resp.Body)
+ if err := dec.Decode(&info); err != nil {
+ return "", true, fmt.Errorf("error looking up BitBucket URL: %s", err)
+ }
+
+ switch info.SCM {
+ case "git":
+ if !strings.HasSuffix(u.Path, ".git") {
+ u.Path += ".git"
+ }
+
+ return "git::" + u.String(), true, nil
+ case "hg":
+ return "hg::" + u.String(), true, nil
+ default:
+ return "", true, fmt.Errorf("unknown BitBucket SCM type: %s", info.SCM)
+ }
+}
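+
+// Illustrative example (repository name hypothetical; requires a network call
+// to the BitBucket API): a shorthand source such as
+//
+//	bitbucket.org/someuser/somerepo
+//
+// is rewritten to "git::https://bitbucket.org/someuser/somerepo.git" when the
+// API reports the repository SCM as Git, or to an "hg::" URL when it is Hg.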
diff --git a/vendor/github.com/hashicorp/go-getter/detect_file.go b/vendor/github.com/hashicorp/go-getter/detect_file.go
new file mode 100644
index 00000000..756ea43f
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/detect_file.go
@@ -0,0 +1,67 @@
+package getter
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "runtime"
+)
+
+// FileDetector implements Detector to detect file paths.
+type FileDetector struct{}
+
+func (d *FileDetector) Detect(src, pwd string) (string, bool, error) {
+ if len(src) == 0 {
+ return "", false, nil
+ }
+
+ if !filepath.IsAbs(src) {
+ if pwd == "" {
+ return "", true, fmt.Errorf(
+ "relative paths require a module with a pwd")
+ }
+
+ // Stat the pwd to determine if it's a symbolic link. If it is,
+ // then the pwd becomes the original directory. Otherwise,
+ // `filepath.Join` below does some weird stuff.
+ //
+ // We just ignore if the pwd doesn't exist. That error will be
+ // caught later when we try to use the URL.
+ if fi, err := os.Lstat(pwd); !os.IsNotExist(err) {
+ if err != nil {
+ return "", true, err
+ }
+ if fi.Mode()&os.ModeSymlink != 0 {
+ pwd, err = os.Readlink(pwd)
+ if err != nil {
+ return "", true, err
+ }
+
+ // The symlink itself might be a relative path, so we have to
+ // resolve this to have a correctly rooted URL.
+ pwd, err = filepath.Abs(pwd)
+ if err != nil {
+ return "", true, err
+ }
+ }
+ }
+
+ src = filepath.Join(pwd, src)
+ }
+
+ return fmtFileURL(src), true, nil
+}
+
+func fmtFileURL(path string) string {
+ if runtime.GOOS == "windows" {
+ // Make sure we're using "/" on Windows. URLs are "/"-based.
+ path = filepath.ToSlash(path)
+ return fmt.Sprintf("file://%s", path)
+ }
+
+ // Make sure that we don't start with "/" since we add that below.
+ if path[0] == '/' {
+ path = path[1:]
+ }
+ return fmt.Sprintf("file:///%s", path)
+}
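+
+// Illustrative example (paths hypothetical): on a Unix system a relative
+// source is resolved against pwd and turned into a file:// URL:
+//
+//	d := new(FileDetector)
+//	src, ok, err := d.Detect("./modules/app", "/home/user/project")
+//	// src == "file:///home/user/project/modules/app", ok == true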
diff --git a/vendor/github.com/hashicorp/go-getter/detect_github.go b/vendor/github.com/hashicorp/go-getter/detect_github.go
new file mode 100644
index 00000000..c084ad9a
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/detect_github.go
@@ -0,0 +1,73 @@
+package getter
+
+import (
+ "fmt"
+ "net/url"
+ "strings"
+)
+
+// GitHubDetector implements Detector to detect GitHub URLs and turn
+// them into URLs that the Git Getter can understand.
+type GitHubDetector struct{}
+
+func (d *GitHubDetector) Detect(src, _ string) (string, bool, error) {
+ if len(src) == 0 {
+ return "", false, nil
+ }
+
+ if strings.HasPrefix(src, "github.com/") {
+ return d.detectHTTP(src)
+ } else if strings.HasPrefix(src, "git@github.com:") {
+ return d.detectSSH(src)
+ }
+
+ return "", false, nil
+}
+
+func (d *GitHubDetector) detectHTTP(src string) (string, bool, error) {
+ parts := strings.Split(src, "/")
+ if len(parts) < 3 {
+ return "", false, fmt.Errorf(
+ "GitHub URLs should be github.com/username/repo")
+ }
+
+ urlStr := fmt.Sprintf("https://%s", strings.Join(parts[:3], "/"))
+ url, err := url.Parse(urlStr)
+ if err != nil {
+ return "", true, fmt.Errorf("error parsing GitHub URL: %s", err)
+ }
+
+ if !strings.HasSuffix(url.Path, ".git") {
+ url.Path += ".git"
+ }
+
+ if len(parts) > 3 {
+ url.Path += "//" + strings.Join(parts[3:], "/")
+ }
+
+ return "git::" + url.String(), true, nil
+}
+
+func (d *GitHubDetector) detectSSH(src string) (string, bool, error) {
+ idx := strings.Index(src, ":")
+ qidx := strings.Index(src, "?")
+ if qidx == -1 {
+ qidx = len(src)
+ }
+
+ var u url.URL
+ u.Scheme = "ssh"
+ u.User = url.User("git")
+ u.Host = "github.com"
+ u.Path = src[idx+1 : qidx]
+ if qidx < len(src) {
+ q, err := url.ParseQuery(src[qidx+1:])
+ if err != nil {
+ return "", true, fmt.Errorf("error parsing GitHub SSH URL: %s", err)
+ }
+
+ u.RawQuery = q.Encode()
+ }
+
+ return "git::" + u.String(), true, nil
+}
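+
+// Illustrative example (repository name hypothetical): a shorthand GitHub
+// source with a trailing path becomes a forced Git URL with a //subdir:
+//
+//	d := new(GitHubDetector)
+//	src, ok, err := d.Detect("github.com/someuser/somerepo/subdir", "")
+//	// src == "git::https://github.com/someuser/somerepo.git//subdir", ok == true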
diff --git a/vendor/github.com/hashicorp/go-getter/detect_s3.go b/vendor/github.com/hashicorp/go-getter/detect_s3.go
new file mode 100644
index 00000000..8e0f4a03
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/detect_s3.go
@@ -0,0 +1,61 @@
+package getter
+
+import (
+ "fmt"
+ "net/url"
+ "strings"
+)
+
+// S3Detector implements Detector to detect S3 URLs and turn
+// them into URLs that the S3 getter can understand.
+type S3Detector struct{}
+
+func (d *S3Detector) Detect(src, _ string) (string, bool, error) {
+ if len(src) == 0 {
+ return "", false, nil
+ }
+
+ if strings.Contains(src, ".amazonaws.com/") {
+ return d.detectHTTP(src)
+ }
+
+ return "", false, nil
+}
+
+func (d *S3Detector) detectHTTP(src string) (string, bool, error) {
+ parts := strings.Split(src, "/")
+ if len(parts) < 2 {
+ return "", false, fmt.Errorf(
+ "URL is not a valid S3 URL")
+ }
+
+ hostParts := strings.Split(parts[0], ".")
+ if len(hostParts) == 3 {
+ return d.detectPathStyle(hostParts[0], parts[1:])
+ } else if len(hostParts) == 4 {
+ return d.detectVhostStyle(hostParts[1], hostParts[0], parts[1:])
+ } else {
+ return "", false, fmt.Errorf(
+ "URL is not a valid S3 URL")
+ }
+}
+
+func (d *S3Detector) detectPathStyle(region string, parts []string) (string, bool, error) {
+ urlStr := fmt.Sprintf("https://%s.amazonaws.com/%s", region, strings.Join(parts, "/"))
+ url, err := url.Parse(urlStr)
+ if err != nil {
+ return "", false, fmt.Errorf("error parsing S3 URL: %s", err)
+ }
+
+ return "s3::" + url.String(), true, nil
+}
+
+func (d *S3Detector) detectVhostStyle(region, bucket string, parts []string) (string, bool, error) {
+ urlStr := fmt.Sprintf("https://%s.amazonaws.com/%s/%s", region, bucket, strings.Join(parts, "/"))
+ url, err := url.Parse(urlStr)
+ if err != nil {
+ return "", false, fmt.Errorf("error parsing S3 URL: %s", err)
+ }
+
+ return "s3::" + url.String(), true, nil
+}
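+
+// Illustrative example (bucket and key hypothetical): a path-style S3 source
+// is rewritten into a forced s3:: URL:
+//
+//	d := new(S3Detector)
+//	src, ok, err := d.Detect("s3-eu-west-1.amazonaws.com/mybucket/path/to/key", "")
+//	// src == "s3::https://s3-eu-west-1.amazonaws.com/mybucket/path/to/key", ok == true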
diff --git a/vendor/github.com/hashicorp/go-getter/folder_storage.go b/vendor/github.com/hashicorp/go-getter/folder_storage.go
new file mode 100644
index 00000000..647ccf45
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/folder_storage.go
@@ -0,0 +1,65 @@
+package getter
+
+import (
+ "crypto/md5"
+ "encoding/hex"
+ "fmt"
+ "os"
+ "path/filepath"
+)
+
+// FolderStorage is an implementation of the Storage interface that manages
+// modules on the disk.
+type FolderStorage struct {
+ // StorageDir is the directory where the modules will be stored.
+ StorageDir string
+}
+
+// Dir implements Storage.Dir
+func (s *FolderStorage) Dir(key string) (d string, e bool, err error) {
+ d = s.dir(key)
+ _, err = os.Stat(d)
+ if err == nil {
+ // Directory exists
+ e = true
+ return
+ }
+ if os.IsNotExist(err) {
+ // Directory doesn't exist
+ d = ""
+ e = false
+ err = nil
+ return
+ }
+
+ // An error
+ d = ""
+ e = false
+ return
+}
+
+// Get implements Storage.Get
+func (s *FolderStorage) Get(key string, source string, update bool) error {
+ dir := s.dir(key)
+ if !update {
+ if _, err := os.Stat(dir); err == nil {
+ // If the directory already exists, then we're done since
+ // we're not updating.
+ return nil
+ } else if !os.IsNotExist(err) {
+ // If the error we got wasn't a file-not-exist error, then
+ // something went wrong and we should report it.
+ return fmt.Errorf("Error reading module directory: %s", err)
+ }
+ }
+
+ // Get the source. This always forces an update.
+ return Get(dir, source)
+}
+
+// dir returns the directory name that we'll use internally to map the
+// given key to on disk.
+func (s *FolderStorage) dir(key string) string {
+ sum := md5.Sum([]byte(key))
+ return filepath.Join(s.StorageDir, hex.EncodeToString(sum[:]))
+}
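+
+// Illustrative usage (key and paths hypothetical): each source is cached under
+// an MD5-derived subdirectory of StorageDir:
+//
+//	s := &FolderStorage{StorageDir: "/tmp/modules"}
+//	if err := s.Get("my-key", "github.com/hashicorp/example", false); err != nil {
+//		// handle error
+//	}
+//	dir, found, err := s.Dir("my-key") // dir == "/tmp/modules/" + hex(md5("my-key"))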
diff --git a/vendor/github.com/hashicorp/go-getter/get.go b/vendor/github.com/hashicorp/go-getter/get.go
new file mode 100644
index 00000000..c3236f55
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/get.go
@@ -0,0 +1,139 @@
+// getter is a package for downloading files or directories from a variety of
+// protocols.
+//
+// getter is unique in its ability to download both directories and files.
+// It also detects certain source strings and rewrites them into protocol-specific URLs. For
+// example, "github.com/hashicorp/go-getter" would turn into a Git URL and
+// use the Git protocol.
+//
+// Protocols and detectors are extensible.
+//
+// To get started, see Client.
+package getter
+
+import (
+ "bytes"
+ "fmt"
+ "net/url"
+ "os/exec"
+ "regexp"
+ "syscall"
+)
+
+// Getter defines the interface that schemes must implement to download
+// things.
+type Getter interface {
+ // Get downloads the given URL into the given directory. This always
+ // assumes that we're updating and gets the latest version that it can.
+ //
+ // The directory may already exist (if we're updating). If it is in a
+ // format that isn't understood, an error should be returned. Get shouldn't
+ // simply nuke the directory.
+ Get(string, *url.URL) error
+
+ // GetFile downloads the given URL into the given path. The URL must
+ // reference a single file. If possible, the Getter should check if
+ // the remote end contains the same file and no-op this operation.
+ GetFile(string, *url.URL) error
+
+ // ClientMode returns the mode based on the given URL. This is used to
+ // allow clients to let the getters decide which mode to use.
+ ClientMode(*url.URL) (ClientMode, error)
+}
+
+// Getters is the mapping of scheme to the Getter implementation that will
+// be used to get a dependency.
+var Getters map[string]Getter
+
+// forcedRegexp is the regular expression that finds forced getters. The
+// syntax is scheme::url, for example: git::https://foo.com
+var forcedRegexp = regexp.MustCompile(`^([A-Za-z0-9]+)::(.+)$`)
+
+func init() {
+ httpGetter := &HttpGetter{Netrc: true}
+
+ Getters = map[string]Getter{
+ "file": new(FileGetter),
+ "git": new(GitGetter),
+ "hg": new(HgGetter),
+ "s3": new(S3Getter),
+ "http": httpGetter,
+ "https": httpGetter,
+ }
+}
+
+// Get downloads the directory specified by src into the folder specified by
+// dst. If dst already exists, Get will attempt to update it.
+//
+// src is a URL, whereas dst is always just a file path to a folder. This
+// folder doesn't need to exist. It will be created if it doesn't exist.
+func Get(dst, src string) error {
+ return (&Client{
+ Src: src,
+ Dst: dst,
+ Dir: true,
+ Getters: Getters,
+ }).Get()
+}
+
+// GetAny downloads a URL into the given destination. Unlike Get or
+// GetFile, both directories and files are supported.
+//
+// dst must be a directory. If src is a file, it will be downloaded
+// into dst with the basename of the URL. If src is a directory or
+// archive, it will be unpacked directly into dst.
+func GetAny(dst, src string) error {
+ return (&Client{
+ Src: src,
+ Dst: dst,
+ Mode: ClientModeAny,
+ Getters: Getters,
+ }).Get()
+}
+
+// GetFile downloads the file specified by src into the path specified by
+// dst.
+func GetFile(dst, src string) error {
+ return (&Client{
+ Src: src,
+ Dst: dst,
+ Dir: false,
+ Getters: Getters,
+ }).Get()
+}
+
+// getRunCommand is a helper that runs a command and captures its combined
+// output so it can be included in the error message if the command fails.
+func getRunCommand(cmd *exec.Cmd) error {
+ var buf bytes.Buffer
+ cmd.Stdout = &buf
+ cmd.Stderr = &buf
+ err := cmd.Run()
+ if err == nil {
+ return nil
+ }
+ if exiterr, ok := err.(*exec.ExitError); ok {
+ // The program has exited with an exit code != 0
+ if status, ok := exiterr.Sys().(syscall.WaitStatus); ok {
+ return fmt.Errorf(
+ "%s exited with %d: %s",
+ cmd.Path,
+ status.ExitStatus(),
+ buf.String())
+ }
+ }
+
+ return fmt.Errorf("error running %s: %s", cmd.Path, buf.String())
+}
+
+// getForcedGetter takes a source and returns the tuple of the forced
+// getter and the raw URL (without the force syntax).
+func getForcedGetter(src string) (string, string) {
+ var forced string
+ if ms := forcedRegexp.FindStringSubmatch(src); ms != nil {
+ forced = ms[1]
+ src = ms[2]
+ }
+
+ return forced, src
+}
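+
+// Illustrative usage (URLs hypothetical): the package-level helpers accept both
+// shorthand sources (resolved by detection) and explicitly forced getters:
+//
+//	// Detection picks the Git getter from the GitHub shorthand.
+//	err := Get("/tmp/dst", "github.com/hashicorp/example")
+//
+//	// The scheme::url force syntax bypasses detection, here forcing Git.
+//	err = Get("/tmp/repo", "git::https://example.com/repo.git")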
diff --git a/vendor/github.com/hashicorp/go-getter/get_file.go b/vendor/github.com/hashicorp/go-getter/get_file.go
new file mode 100644
index 00000000..e5d2d61d
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/get_file.go
@@ -0,0 +1,32 @@
+package getter
+
+import (
+ "net/url"
+ "os"
+)
+
+// FileGetter is a Getter implementation that will download a module from
+// a local file path using the file scheme.
+type FileGetter struct {
+ // Copy, if set to true, will copy data instead of using a symlink
+ Copy bool
+}
+
+func (g *FileGetter) ClientMode(u *url.URL) (ClientMode, error) {
+ path := u.Path
+ if u.RawPath != "" {
+ path = u.RawPath
+ }
+
+ fi, err := os.Stat(path)
+ if err != nil {
+ return 0, err
+ }
+
+ // Check if the source is a directory.
+ if fi.IsDir() {
+ return ClientModeDir, nil
+ }
+
+ return ClientModeFile, nil
+}
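+
+// Illustrative usage (paths hypothetical): with Copy set, GetFile copies the
+// source file rather than symlinking it:
+//
+//	g := &FileGetter{Copy: true}
+//	u, _ := url.Parse("file:///tmp/source.txt")
+//	err := g.GetFile("/tmp/dest.txt", u)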
diff --git a/vendor/github.com/hashicorp/go-getter/get_file_unix.go b/vendor/github.com/hashicorp/go-getter/get_file_unix.go
new file mode 100644
index 00000000..c89a2d5a
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/get_file_unix.go
@@ -0,0 +1,103 @@
+// +build !windows
+
+package getter
+
+import (
+ "fmt"
+ "io"
+ "net/url"
+ "os"
+ "path/filepath"
+)
+
+func (g *FileGetter) Get(dst string, u *url.URL) error {
+ path := u.Path
+ if u.RawPath != "" {
+ path = u.RawPath
+ }
+
+ // The source path must exist and be a directory to be usable.
+ if fi, err := os.Stat(path); err != nil {
+ return fmt.Errorf("source path error: %s", err)
+ } else if !fi.IsDir() {
+ return fmt.Errorf("source path must be a directory")
+ }
+
+ fi, err := os.Lstat(dst)
+ if err != nil && !os.IsNotExist(err) {
+ return err
+ }
+
+ // If the destination already exists, it must be a symlink
+ if err == nil {
+ mode := fi.Mode()
+ if mode&os.ModeSymlink == 0 {
+ return fmt.Errorf("destination exists and is not a symlink")
+ }
+
+ // Remove the destination
+ if err := os.Remove(dst); err != nil {
+ return err
+ }
+ }
+
+ // Create all the parent directories
+ if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil {
+ return err
+ }
+
+ return os.Symlink(path, dst)
+}
+
+func (g *FileGetter) GetFile(dst string, u *url.URL) error {
+ path := u.Path
+ if u.RawPath != "" {
+ path = u.RawPath
+ }
+
+ // The source path must exist and be a file to be usable.
+ if fi, err := os.Stat(path); err != nil {
+ return fmt.Errorf("source path error: %s", err)
+ } else if fi.IsDir() {
+ return fmt.Errorf("source path must be a file")
+ }
+
+ _, err := os.Lstat(dst)
+ if err != nil && !os.IsNotExist(err) {
+ return err
+ }
+
+ // If the destination already exists, remove it so it can be replaced
+ if err == nil {
+ // Remove the destination
+ if err := os.Remove(dst); err != nil {
+ return err
+ }
+ }
+
+ // Create all the parent directories
+ if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil {
+ return err
+ }
+
+ // If we're not copying, just symlink and we're done
+ if !g.Copy {
+ return os.Symlink(path, dst)
+ }
+
+ // Copy
+ srcF, err := os.Open(path)
+ if err != nil {
+ return err
+ }
+ defer srcF.Close()
+
+ dstF, err := os.Create(dst)
+ if err != nil {
+ return err
+ }
+ defer dstF.Close()
+
+ _, err = io.Copy(dstF, srcF)
+ return err
+}
diff --git a/vendor/github.com/hashicorp/go-getter/get_file_windows.go b/vendor/github.com/hashicorp/go-getter/get_file_windows.go
new file mode 100644
index 00000000..f87ed0a0
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/get_file_windows.go
@@ -0,0 +1,120 @@
+// +build windows
+
+package getter
+
+import (
+ "fmt"
+ "io"
+ "net/url"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "strings"
+)
+
+func (g *FileGetter) Get(dst string, u *url.URL) error {
+ path := u.Path
+ if u.RawPath != "" {
+ path = u.RawPath
+ }
+
+ // The source path must exist and be a directory to be usable.
+ if fi, err := os.Stat(path); err != nil {
+ return fmt.Errorf("source path error: %s", err)
+ } else if !fi.IsDir() {
+ return fmt.Errorf("source path must be a directory")
+ }
+
+ fi, err := os.Lstat(dst)
+ if err != nil && !os.IsNotExist(err) {
+ return err
+ }
+
+ // If the destination already exists, it must be a symlink
+ if err == nil {
+ mode := fi.Mode()
+ if mode&os.ModeSymlink == 0 {
+ return fmt.Errorf("destination exists and is not a symlink")
+ }
+
+ // Remove the destination
+ if err := os.Remove(dst); err != nil {
+ return err
+ }
+ }
+
+ // Create all the parent directories
+ if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil {
+ return err
+ }
+
+ sourcePath := toBackslash(path)
+
+ // Use mklink to create a junction point
+ output, err := exec.Command("cmd", "/c", "mklink", "/J", dst, sourcePath).CombinedOutput()
+ if err != nil {
+ return fmt.Errorf("failed to run mklink %v %v: %v %q", dst, sourcePath, err, output)
+ }
+
+ return nil
+}
+
+func (g *FileGetter) GetFile(dst string, u *url.URL) error {
+ path := u.Path
+ if u.RawPath != "" {
+ path = u.RawPath
+ }
+
+ // The source path must exist and be a file to be usable.
+ if fi, err := os.Stat(path); err != nil {
+ return fmt.Errorf("source path error: %s", err)
+ } else if fi.IsDir() {
+ return fmt.Errorf("source path must be a file")
+ }
+
+ _, err := os.Lstat(dst)
+ if err != nil && !os.IsNotExist(err) {
+ return err
+ }
+
+ // If the destination already exists, remove it so it can be replaced
+ if err == nil {
+ // Remove the destination
+ if err := os.Remove(dst); err != nil {
+ return err
+ }
+ }
+
+ // Create all the parent directories
+ if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil {
+ return err
+ }
+
+ // If we're not copying, just symlink and we're done
+ if !g.Copy {
+ return os.Symlink(path, dst)
+ }
+
+ // Copy
+ srcF, err := os.Open(path)
+ if err != nil {
+ return err
+ }
+ defer srcF.Close()
+
+ dstF, err := os.Create(dst)
+ if err != nil {
+ return err
+ }
+ defer dstF.Close()
+
+ _, err = io.Copy(dstF, srcF)
+ return err
+}
+
+// toBackslash returns the result of replacing each slash character
+// in path with a backslash ('\') character. Multiple separators are
+// replaced by multiple backslashes.
+func toBackslash(path string) string {
+ return strings.Replace(path, "/", "\\", -1)
+}
diff --git a/vendor/github.com/hashicorp/go-getter/get_git.go b/vendor/github.com/hashicorp/go-getter/get_git.go
new file mode 100644
index 00000000..07281398
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/get_git.go
@@ -0,0 +1,225 @@
+package getter
+
+import (
+ "encoding/base64"
+ "fmt"
+ "io/ioutil"
+ "net/url"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "strings"
+
+ urlhelper "github.com/hashicorp/go-getter/helper/url"
+ "github.com/hashicorp/go-version"
+)
+
+// GitGetter is a Getter implementation that will download a module from
+// a git repository.
+type GitGetter struct{}
+
+func (g *GitGetter) ClientMode(_ *url.URL) (ClientMode, error) {
+ return ClientModeDir, nil
+}
+
+func (g *GitGetter) Get(dst string, u *url.URL) error {
+ if _, err := exec.LookPath("git"); err != nil {
+ return fmt.Errorf("git must be available and on the PATH")
+ }
+
+ // Extract some query parameters we use
+ var ref, sshKey string
+ q := u.Query()
+ if len(q) > 0 {
+ ref = q.Get("ref")
+ q.Del("ref")
+
+ sshKey = q.Get("sshkey")
+ q.Del("sshkey")
+
+ // Copy the URL
+ var newU url.URL = *u
+ u = &newU
+ u.RawQuery = q.Encode()
+ }
+
+ var sshKeyFile string
+ if sshKey != "" {
+ // Check that the git version is sufficiently new.
+ if err := checkGitVersion("2.3"); err != nil {
+ return fmt.Errorf("Error using ssh key: %v", err)
+ }
+
+ // We have an SSH key - decode it.
+ raw, err := base64.StdEncoding.DecodeString(sshKey)
+ if err != nil {
+ return err
+ }
+
+ // Create a temp file for the key and ensure it is removed.
+ fh, err := ioutil.TempFile("", "go-getter")
+ if err != nil {
+ return err
+ }
+ sshKeyFile = fh.Name()
+ defer os.Remove(sshKeyFile)
+
+ // Set the permissions prior to writing the key material.
+ if err := os.Chmod(sshKeyFile, 0600); err != nil {
+ return err
+ }
+
+ // Write the raw key into the temp file.
+ _, err = fh.Write(raw)
+ fh.Close()
+ if err != nil {
+ return err
+ }
+ }
+
+ // Clone or update the repository
+ _, err := os.Stat(dst)
+ if err != nil && !os.IsNotExist(err) {
+ return err
+ }
+ if err == nil {
+ err = g.update(dst, sshKeyFile, ref)
+ } else {
+ err = g.clone(dst, sshKeyFile, u)
+ }
+ if err != nil {
+ return err
+ }
+
+ // Next: check out the proper tag/branch if one is specified
+ if ref != "" {
+ if err := g.checkout(dst, ref); err != nil {
+ return err
+ }
+ }
+
+ // Lastly, download any/all submodules.
+ return g.fetchSubmodules(dst, sshKeyFile)
+}
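+
+// Illustrative source URL (values hypothetical): the ref and sshkey query
+// parameters select a revision and supply a base64-encoded private key
+// (the latter requires git >= 2.3):
+//
+//	git::ssh://git@example.com/repo.git?ref=v1.2.0&sshkey=BASE64_PRIVATE_KEY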
+
+// GetFile for Git doesn't support updating at this time. It will download
+// the file every time.
+func (g *GitGetter) GetFile(dst string, u *url.URL) error {
+ td, err := ioutil.TempDir("", "getter-git")
+ if err != nil {
+ return err
+ }
+ if err := os.RemoveAll(td); err != nil {
+ return err
+ }
+
+ // Get the filename, and strip the filename from the URL so we can
+ // just get the repository directly.
+ filename := filepath.Base(u.Path)
+ u.Path = filepath.Dir(u.Path)
+
+ // Get the full repository
+ if err := g.Get(td, u); err != nil {
+ return err
+ }
+
+ // Copy the single file
+ u, err = urlhelper.Parse(fmtFileURL(filepath.Join(td, filename)))
+ if err != nil {
+ return err
+ }
+
+ fg := &FileGetter{Copy: true}
+ return fg.GetFile(dst, u)
+}
+
+func (g *GitGetter) checkout(dst string, ref string) error {
+ cmd := exec.Command("git", "checkout", ref)
+ cmd.Dir = dst
+ return getRunCommand(cmd)
+}
+
+func (g *GitGetter) clone(dst, sshKeyFile string, u *url.URL) error {
+ cmd := exec.Command("git", "clone", u.String(), dst)
+ setupGitEnv(cmd, sshKeyFile)
+ return getRunCommand(cmd)
+}
+
+func (g *GitGetter) update(dst, sshKeyFile, ref string) error {
+ // Determine if we're a branch. If we're NOT a branch, then we just
+ // switch to master prior to checking out
+ cmd := exec.Command("git", "show-ref", "-q", "--verify", "refs/heads/"+ref)
+ cmd.Dir = dst
+
+ if getRunCommand(cmd) != nil {
+ // Not a branch, switch to master. This will also catch non-existent
+ // branches, in which case we want to switch to master and then
+ // checkout the proper branch later.
+ ref = "master"
+ }
+
+ // We have to be on a branch to pull
+ if err := g.checkout(dst, ref); err != nil {
+ return err
+ }
+
+ cmd = exec.Command("git", "pull", "--ff-only")
+ cmd.Dir = dst
+ setupGitEnv(cmd, sshKeyFile)
+ return getRunCommand(cmd)
+}
+
+// fetchSubmodules downloads any configured submodules recursively.
+func (g *GitGetter) fetchSubmodules(dst, sshKeyFile string) error {
+ cmd := exec.Command("git", "submodule", "update", "--init", "--recursive")
+ cmd.Dir = dst
+ setupGitEnv(cmd, sshKeyFile)
+ return getRunCommand(cmd)
+}
+
+// setupGitEnv sets up the environment for the given command. This is used to
+// pass configuration data to git and ssh and enables advanced cloning methods.
+func setupGitEnv(cmd *exec.Cmd, sshKeyFile string) {
+ var sshOpts []string
+
+ if sshKeyFile != "" {
+ // We have an SSH key temp file configured, tell ssh about this.
+ sshOpts = append(sshOpts, "-i", sshKeyFile)
+ }
+
+ cmd.Env = append(os.Environ(),
+ // Set the ssh command to use for clones.
+ "GIT_SSH_COMMAND=ssh "+strings.Join(sshOpts, " "),
+ )
+}
+
+// checkGitVersion is used to check the version of git installed on the system
+// against a known minimum version. Returns an error if the installed version
+// is older than the given minimum.
+func checkGitVersion(min string) error {
+ want, err := version.NewVersion(min)
+ if err != nil {
+ return err
+ }
+
+ out, err := exec.Command("git", "version").Output()
+ if err != nil {
+ return err
+ }
+
+ fields := strings.Fields(string(out))
+ if len(fields) != 3 {
+ return fmt.Errorf("Unexpected 'git version' output: %q", string(out))
+ }
+
+ have, err := version.NewVersion(fields[2])
+ if err != nil {
+ return err
+ }
+
+ if have.LessThan(want) {
+ return fmt.Errorf("Required git version = %s, have %s", want, have)
+ }
+
+ return nil
+}
diff --git a/vendor/github.com/hashicorp/go-getter/get_hg.go b/vendor/github.com/hashicorp/go-getter/get_hg.go
new file mode 100644
index 00000000..820bdd48
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/get_hg.go
@@ -0,0 +1,131 @@
+package getter
+
+import (
+ "fmt"
+ "io/ioutil"
+ "net/url"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "runtime"
+
+ urlhelper "github.com/hashicorp/go-getter/helper/url"
+)
+
+// HgGetter is a Getter implementation that will download a module from
+// a Mercurial repository.
+type HgGetter struct{}
+
+func (g *HgGetter) ClientMode(_ *url.URL) (ClientMode, error) {
+ return ClientModeDir, nil
+}
+
+func (g *HgGetter) Get(dst string, u *url.URL) error {
+ if _, err := exec.LookPath("hg"); err != nil {
+ return fmt.Errorf("hg must be available and on the PATH")
+ }
+
+ newURL, err := urlhelper.Parse(u.String())
+ if err != nil {
+ return err
+ }
+ if fixWindowsDrivePath(newURL) {
+ // See valid file path form on http://www.selenic.com/hg/help/urls
+ newURL.Path = fmt.Sprintf("/%s", newURL.Path)
+ }
+
+ // Extract some query parameters we use
+ var rev string
+ q := newURL.Query()
+ if len(q) > 0 {
+ rev = q.Get("rev")
+ q.Del("rev")
+
+ newURL.RawQuery = q.Encode()
+ }
+
+ _, err = os.Stat(dst)
+ if err != nil && !os.IsNotExist(err) {
+ return err
+ }
+ if err != nil {
+ if err := g.clone(dst, newURL); err != nil {
+ return err
+ }
+ }
+
+ if err := g.pull(dst, newURL); err != nil {
+ return err
+ }
+
+ return g.update(dst, newURL, rev)
+}
+
+// GetFile for Hg doesn't support updating at this time. It will download
+// the file every time.
+func (g *HgGetter) GetFile(dst string, u *url.URL) error {
+ td, err := ioutil.TempDir("", "getter-hg")
+ if err != nil {
+ return err
+ }
+ if err := os.RemoveAll(td); err != nil {
+ return err
+ }
+
+ // Get the filename, and strip the filename from the URL so we can
+ // just get the repository directly.
+ filename := filepath.Base(u.Path)
+ u.Path = filepath.ToSlash(filepath.Dir(u.Path))
+
+ // If we're on Windows, we need to set the host to "localhost" for hg
+ if runtime.GOOS == "windows" {
+ u.Host = "localhost"
+ }
+
+ // Get the full repository
+ if err := g.Get(td, u); err != nil {
+ return err
+ }
+
+ // Copy the single file
+ u, err = urlhelper.Parse(fmtFileURL(filepath.Join(td, filename)))
+ if err != nil {
+ return err
+ }
+
+ fg := &FileGetter{Copy: true}
+ return fg.GetFile(dst, u)
+}
+
+func (g *HgGetter) clone(dst string, u *url.URL) error {
+ cmd := exec.Command("hg", "clone", "-U", u.String(), dst)
+ return getRunCommand(cmd)
+}
+
+func (g *HgGetter) pull(dst string, u *url.URL) error {
+ cmd := exec.Command("hg", "pull")
+ cmd.Dir = dst
+ return getRunCommand(cmd)
+}
+
+func (g *HgGetter) update(dst string, u *url.URL, rev string) error {
+ args := []string{"update"}
+ if rev != "" {
+ args = append(args, rev)
+ }
+
+ cmd := exec.Command("hg", args...)
+ cmd.Dir = dst
+ return getRunCommand(cmd)
+}
+
+func fixWindowsDrivePath(u *url.URL) bool {
+ // hg assumes a file:/// prefix for Windows drive letter file paths.
+ // (e.g. file:///c:/foo/bar)
+ // If the URL Path does not begin with a '/' character, the resulting URL
+ // path will have a file:// prefix. (e.g. file://c:/foo/bar)
+ // See http://www.selenic.com/hg/help/urls and the examples listed in
+ // http://selenic.com/repo/hg-stable/file/1265a3a71d75/mercurial/util.py#l1936
+ return runtime.GOOS == "windows" && u.Scheme == "file" &&
+ len(u.Path) > 1 && u.Path[0] != '/' && u.Path[1] == ':'
+}
diff --git a/vendor/github.com/hashicorp/go-getter/get_http.go b/vendor/github.com/hashicorp/go-getter/get_http.go
new file mode 100644
index 00000000..661d8989
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/get_http.go
@@ -0,0 +1,227 @@
+package getter
+
+import (
+ "encoding/xml"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "net/http"
+ "net/url"
+ "os"
+ "path/filepath"
+ "strings"
+)
+
+// HttpGetter is a Getter implementation that will download from an HTTP
+// endpoint.
+//
+// For file downloads, HTTP is used directly.
+//
+// The protocol for downloading a directory from an HTTP endpoint is as follows:
+//
+// An HTTP GET request is made to the URL with the additional GET parameter
+// "terraform-get=1". This lets you handle that scenario specially if you
+// wish. The response must be a 2xx.
+//
+// First, the response is checked for an "X-Terraform-Get" header, which
+// should contain a source URL to download.
+//
+// If the header is not present, the body is searched for a meta tag named
+// "terraform-get" whose content attribute should be a source URL.
+//
+// The source URL, whether from the header or meta tag, must be a fully
+// formed URL. The shorthand syntax of "github.com/foo/bar" or relative
+// paths are not allowed.
+type HttpGetter struct {
+ // Netrc, if true, will lookup and use auth information found
+ // in the user's netrc file if available.
+ Netrc bool
+}
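+
+// Illustrative directory responses (URLs hypothetical): the server may answer
+// the terraform-get=1 request with either a header
+//
+//	X-Terraform-Get: https://example.com/module.tar.gz
+//
+// or a meta tag in the response body:
+//
+//	<meta name="terraform-get" content="https://example.com/module.tar.gz">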
+
+func (g *HttpGetter) ClientMode(u *url.URL) (ClientMode, error) {
+ if strings.HasSuffix(u.Path, "/") {
+ return ClientModeDir, nil
+ }
+ return ClientModeFile, nil
+}
+
+func (g *HttpGetter) Get(dst string, u *url.URL) error {
+ // Copy the URL so we can modify it
+ var newU url.URL = *u
+ u = &newU
+
+ if g.Netrc {
+ // Add auth from netrc if we can
+ if err := addAuthFromNetrc(u); err != nil {
+ return err
+ }
+ }
+
+ // Add terraform-get to the parameter.
+ q := u.Query()
+ q.Add("terraform-get", "1")
+ u.RawQuery = q.Encode()
+
+ // Get the URL
+ resp, err := http.Get(u.String())
+ if err != nil {
+ return err
+ }
+ defer resp.Body.Close()
+ if resp.StatusCode < 200 || resp.StatusCode >= 300 {
+ return fmt.Errorf("bad response code: %d", resp.StatusCode)
+ }
+
+ // Extract the source URL
+ var source string
+ if v := resp.Header.Get("X-Terraform-Get"); v != "" {
+ source = v
+ } else {
+ source, err = g.parseMeta(resp.Body)
+ if err != nil {
+ return err
+ }
+ }
+ if source == "" {
+ return fmt.Errorf("no source URL was returned")
+ }
+
+ // If there is a subdir component, then we download the root separately
+ // into a temporary directory, then copy over the proper subdir.
+ source, subDir := SourceDirSubdir(source)
+ if subDir == "" {
+ return Get(dst, source)
+ }
+
+ // We have a subdir, time to jump some hoops
+ return g.getSubdir(dst, source, subDir)
+}
+
+func (g *HttpGetter) GetFile(dst string, u *url.URL) error {
+
+ if g.Netrc {
+ // Add auth from netrc if we can
+ if err := addAuthFromNetrc(u); err != nil {
+ return err
+ }
+ }
+
+ resp, err := http.Get(u.String())
+ if err != nil {
+ return err
+ }
+ defer resp.Body.Close()
+ if resp.StatusCode != 200 {
+ return fmt.Errorf("bad response code: %d", resp.StatusCode)
+ }
+
+ // Create all the parent directories
+ if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil {
+ return err
+ }
+
+ f, err := os.Create(dst)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ _, err = io.Copy(f, resp.Body)
+ return err
+}
+
+// getSubdir downloads the source into the destination, but with
+// the proper subdir.
+func (g *HttpGetter) getSubdir(dst, source, subDir string) error {
+ // Create a temporary directory to store the full source
+ td, err := ioutil.TempDir("", "tf")
+ if err != nil {
+ return err
+ }
+ defer os.RemoveAll(td)
+
+ // Download that into the given directory
+ if err := Get(td, source); err != nil {
+ return err
+ }
+
+ // Make sure the subdir path actually exists
+ sourcePath := filepath.Join(td, subDir)
+ if _, err := os.Stat(sourcePath); err != nil {
+ return fmt.Errorf(
+ "Error downloading %s: %s", source, err)
+ }
+
+ // Copy the subdirectory into our actual destination.
+ if err := os.RemoveAll(dst); err != nil {
+ return err
+ }
+
+ // Make the final destination
+ if err := os.MkdirAll(dst, 0755); err != nil {
+ return err
+ }
+
+ return copyDir(dst, sourcePath, false)
+}
+
+// parseMeta looks for the first meta tag in the given reader that
+// will give us the source URL.
+func (g *HttpGetter) parseMeta(r io.Reader) (string, error) {
+ d := xml.NewDecoder(r)
+ d.CharsetReader = charsetReader
+ d.Strict = false
+ var err error
+ var t xml.Token
+ for {
+ t, err = d.Token()
+ if err != nil {
+ if err == io.EOF {
+ err = nil
+ }
+ return "", err
+ }
+ if e, ok := t.(xml.StartElement); ok && strings.EqualFold(e.Name.Local, "body") {
+ return "", nil
+ }
+ if e, ok := t.(xml.EndElement); ok && strings.EqualFold(e.Name.Local, "head") {
+ return "", nil
+ }
+ e, ok := t.(xml.StartElement)
+ if !ok || !strings.EqualFold(e.Name.Local, "meta") {
+ continue
+ }
+ if attrValue(e.Attr, "name") != "terraform-get" {
+ continue
+ }
+ if f := attrValue(e.Attr, "content"); f != "" {
+ return f, nil
+ }
+ }
+}
+
+// attrValue returns the attribute value for the case-insensitive key
+// `name', or the empty string if nothing is found.
+func attrValue(attrs []xml.Attr, name string) string {
+ for _, a := range attrs {
+ if strings.EqualFold(a.Name.Local, name) {
+ return a.Value
+ }
+ }
+ return ""
+}
+
+// charsetReader returns a reader for the given charset. Currently
+// it only supports UTF-8 and ASCII. Otherwise, it returns a meaningful
+// error so the caller can see why the document could not be parsed if the
+// encoding is not supported. Note that, in
+// order to reduce potential errors, ASCII is treated as UTF-8 (i.e. characters
+// greater than 0x7f are not rejected).
+func charsetReader(charset string, input io.Reader) (io.Reader, error) {
+ switch strings.ToLower(charset) {
+ case "ascii":
+ return input, nil
+ default:
+ return nil, fmt.Errorf("can't decode XML document using charset %q", charset)
+ }
+}
diff --git a/vendor/github.com/hashicorp/go-getter/get_mock.go b/vendor/github.com/hashicorp/go-getter/get_mock.go
new file mode 100644
index 00000000..882e694d
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/get_mock.go
@@ -0,0 +1,52 @@
+package getter
+
+import (
+ "net/url"
+)
+
+// MockGetter is an implementation of Getter that can be used for tests.
+type MockGetter struct {
+ // Proxy, if set, will be called after recording the calls below.
+ // If it isn't set, then the *Err values will be returned.
+ Proxy Getter
+
+ GetCalled bool
+ GetDst string
+ GetURL *url.URL
+ GetErr error
+
+ GetFileCalled bool
+ GetFileDst string
+ GetFileURL *url.URL
+ GetFileErr error
+}
+
+func (g *MockGetter) Get(dst string, u *url.URL) error {
+ g.GetCalled = true
+ g.GetDst = dst
+ g.GetURL = u
+
+ if g.Proxy != nil {
+ return g.Proxy.Get(dst, u)
+ }
+
+ return g.GetErr
+}
+
+func (g *MockGetter) GetFile(dst string, u *url.URL) error {
+ g.GetFileCalled = true
+ g.GetFileDst = dst
+ g.GetFileURL = u
+
+ if g.Proxy != nil {
+ return g.Proxy.GetFile(dst, u)
+ }
+ return g.GetFileErr
+}
+
+func (g *MockGetter) ClientMode(u *url.URL) (ClientMode, error) {
+ if l := len(u.Path); l > 0 && u.Path[l-1:] == "/" {
+ return ClientModeDir, nil
+ }
+ return ClientModeFile, nil
+}
diff --git a/vendor/github.com/hashicorp/go-getter/get_s3.go b/vendor/github.com/hashicorp/go-getter/get_s3.go
new file mode 100644
index 00000000..d3bffeb1
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/get_s3.go
@@ -0,0 +1,243 @@
+package getter
+
+import (
+ "fmt"
+ "io"
+ "net/url"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/aws/credentials"
+ "github.com/aws/aws-sdk-go/aws/credentials/ec2rolecreds"
+ "github.com/aws/aws-sdk-go/aws/ec2metadata"
+ "github.com/aws/aws-sdk-go/aws/session"
+ "github.com/aws/aws-sdk-go/service/s3"
+)
+
+// S3Getter is a Getter implementation that will download a module from
+// an S3 bucket.
+type S3Getter struct{}
+
+func (g *S3Getter) ClientMode(u *url.URL) (ClientMode, error) {
+ // Parse URL
+ region, bucket, path, _, creds, err := g.parseUrl(u)
+ if err != nil {
+ return 0, err
+ }
+
+ // Create client config
+ config := g.getAWSConfig(region, creds)
+ sess := session.New(config)
+ client := s3.New(sess)
+
+ // List the object(s) at the given prefix
+ req := &s3.ListObjectsInput{
+ Bucket: aws.String(bucket),
+ Prefix: aws.String(path),
+ }
+ resp, err := client.ListObjects(req)
+ if err != nil {
+ return 0, err
+ }
+
+ for _, o := range resp.Contents {
+ // Use file mode on exact match.
+ if *o.Key == path {
+ return ClientModeFile, nil
+ }
+
+ // Use dir mode if child keys are found.
+ if strings.HasPrefix(*o.Key, path+"/") {
+ return ClientModeDir, nil
+ }
+ }
+
+ // There was no match, so just return file mode. The download is going
+ // to fail but we will let S3 return the proper error later.
+ return ClientModeFile, nil
+}
+
+func (g *S3Getter) Get(dst string, u *url.URL) error {
+ // Parse URL
+ region, bucket, path, _, creds, err := g.parseUrl(u)
+ if err != nil {
+ return err
+ }
+
+ // Remove destination if it already exists
+ _, err = os.Stat(dst)
+ if err != nil && !os.IsNotExist(err) {
+ return err
+ }
+
+ if err == nil {
+ // Remove the destination
+ if err := os.RemoveAll(dst); err != nil {
+ return err
+ }
+ }
+
+ // Create all the parent directories
+ if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil {
+ return err
+ }
+
+ config := g.getAWSConfig(region, creds)
+ sess := session.New(config)
+ client := s3.New(sess)
+
+ // List files in path, keep listing until no more objects are found
+ lastMarker := ""
+ hasMore := true
+ for hasMore {
+ req := &s3.ListObjectsInput{
+ Bucket: aws.String(bucket),
+ Prefix: aws.String(path),
+ }
+ if lastMarker != "" {
+ req.Marker = aws.String(lastMarker)
+ }
+
+ resp, err := client.ListObjects(req)
+ if err != nil {
+ return err
+ }
+
+ hasMore = aws.BoolValue(resp.IsTruncated)
+
+ // Get each object storing each file relative to the destination path
+ for _, object := range resp.Contents {
+ lastMarker = aws.StringValue(object.Key)
+ objPath := aws.StringValue(object.Key)
+
+ // If the key ends with a slash, assume it is a directory and skip it
+ if strings.HasSuffix(objPath, "/") {
+ continue
+ }
+
+ // Get the object destination path
+ objDst, err := filepath.Rel(path, objPath)
+ if err != nil {
+ return err
+ }
+ objDst = filepath.Join(dst, objDst)
+
+ if err := g.getObject(client, objDst, bucket, objPath, ""); err != nil {
+ return err
+ }
+ }
+ }
+
+ return nil
+}
+
+func (g *S3Getter) GetFile(dst string, u *url.URL) error {
+ region, bucket, path, version, creds, err := g.parseUrl(u)
+ if err != nil {
+ return err
+ }
+
+ config := g.getAWSConfig(region, creds)
+ sess := session.New(config)
+ client := s3.New(sess)
+ return g.getObject(client, dst, bucket, path, version)
+}
+
+func (g *S3Getter) getObject(client *s3.S3, dst, bucket, key, version string) error {
+ req := &s3.GetObjectInput{
+ Bucket: aws.String(bucket),
+ Key: aws.String(key),
+ }
+ if version != "" {
+ req.VersionId = aws.String(version)
+ }
+
+ resp, err := client.GetObject(req)
+ if err != nil {
+ return err
+ }
+
+ // Create all the parent directories
+ if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil {
+ return err
+ }
+
+ f, err := os.Create(dst)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ _, err = io.Copy(f, resp.Body)
+ return err
+}
+
+func (g *S3Getter) getAWSConfig(region string, creds *credentials.Credentials) *aws.Config {
+ conf := &aws.Config{}
+ if creds == nil {
+ // Grab the metadata URL
+ metadataURL := os.Getenv("AWS_METADATA_URL")
+ if metadataURL == "" {
+ metadataURL = "http://169.254.169.254:80/latest"
+ }
+
+ creds = credentials.NewChainCredentials(
+ []credentials.Provider{
+ &credentials.EnvProvider{},
+ &credentials.SharedCredentialsProvider{Filename: "", Profile: ""},
+ &ec2rolecreds.EC2RoleProvider{
+ Client: ec2metadata.New(session.New(&aws.Config{
+ Endpoint: aws.String(metadataURL),
+ })),
+ },
+ })
+ }
+
+ conf.Credentials = creds
+ if region != "" {
+ conf.Region = aws.String(region)
+ }
+
+ return conf
+}
+
+func (g *S3Getter) parseUrl(u *url.URL) (region, bucket, path, version string, creds *credentials.Credentials, err error) {
+ // Expected host style: s3.amazonaws.com. They always have 3 parts,
+ // although the first may differ if we're accessing a specific region.
+ hostParts := strings.Split(u.Host, ".")
+ if len(hostParts) != 3 {
+ err = fmt.Errorf("URL is not a valid S3 URL")
+ return
+ }
+
+ // Parse the region out of the first part of the host
+ region = strings.TrimPrefix(strings.TrimPrefix(hostParts[0], "s3-"), "s3")
+ if region == "" {
+ region = "us-east-1"
+ }
+
+ pathParts := strings.SplitN(u.Path, "/", 3)
+ if len(pathParts) != 3 {
+ err = fmt.Errorf("URL is not a valid S3 URL")
+ return
+ }
+
+ bucket = pathParts[1]
+ path = pathParts[2]
+ version = u.Query().Get("version")
+
+ _, hasAwsId := u.Query()["aws_access_key_id"]
+ _, hasAwsSecret := u.Query()["aws_access_key_secret"]
+ _, hasAwsToken := u.Query()["aws_access_token"]
+ if hasAwsId || hasAwsSecret || hasAwsToken {
+ creds = credentials.NewStaticCredentials(
+ u.Query().Get("aws_access_key_id"),
+ u.Query().Get("aws_access_key_secret"),
+ u.Query().Get("aws_access_token"),
+ )
+ }
+
+ return
+}
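+
+// Illustrative S3 source URLs (bucket and key hypothetical) in the host style
+// accepted by parseUrl; credentials may also be passed via the
+// aws_access_key_id, aws_access_key_secret and aws_access_token query
+// parameters:
+//
+//	https://s3.amazonaws.com/mybucket/path/to/key
+//	https://s3-us-west-2.amazonaws.com/mybucket/path/to/key?version=1234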
diff --git a/vendor/github.com/hashicorp/go-getter/helper/url/url.go b/vendor/github.com/hashicorp/go-getter/helper/url/url.go
new file mode 100644
index 00000000..02497c25
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/helper/url/url.go
@@ -0,0 +1,14 @@
+package url
+
+import (
+ "net/url"
+)
+
+// Parse parses rawURL into a URL structure.
+// The rawURL may be relative or absolute.
+//
+// Parse is a wrapper for the Go stdlib net/url Parse function, but returns
+// Windows "safe" URLs on Windows platforms.
+func Parse(rawURL string) (*url.URL, error) {
+ return parse(rawURL)
+}
diff --git a/vendor/github.com/hashicorp/go-getter/helper/url/url_unix.go b/vendor/github.com/hashicorp/go-getter/helper/url/url_unix.go
new file mode 100644
index 00000000..ed1352a9
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/helper/url/url_unix.go
@@ -0,0 +1,11 @@
+// +build !windows
+
+package url
+
+import (
+ "net/url"
+)
+
+func parse(rawURL string) (*url.URL, error) {
+ return url.Parse(rawURL)
+}
diff --git a/vendor/github.com/hashicorp/go-getter/helper/url/url_windows.go b/vendor/github.com/hashicorp/go-getter/helper/url/url_windows.go
new file mode 100644
index 00000000..4655226f
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/helper/url/url_windows.go
@@ -0,0 +1,40 @@
+package url
+
+import (
+ "fmt"
+ "net/url"
+ "path/filepath"
+ "strings"
+)
+
+func parse(rawURL string) (*url.URL, error) {
+ // Make sure we're using "/" since URLs are "/"-based.
+ rawURL = filepath.ToSlash(rawURL)
+
+ u, err := url.Parse(rawURL)
+ if err != nil {
+ return nil, err
+ }
+
+ if len(rawURL) > 1 && rawURL[1] == ':' {
+ // Assume we're dealing with a drive letter file path where the drive
+ // letter has been parsed into the URL Scheme, and the rest of the path
+ // has been parsed into the URL Path without the leading ':' character.
+ u.Path = fmt.Sprintf("%s:%s", string(rawURL[0]), u.Path)
+ u.Scheme = ""
+ }
+
+ if len(u.Host) > 1 && u.Host[1] == ':' && strings.HasPrefix(rawURL, "file://") {
+ // Assume we're dealing with a drive letter file path where the drive
+ // letter has been parsed into the URL Host.
+ u.Path = fmt.Sprintf("%s%s", u.Host, u.Path)
+ u.Host = ""
+ }
+
+ // Remove leading slash for absolute file paths.
+ if len(u.Path) > 2 && u.Path[0] == '/' && u.Path[2] == ':' {
+ u.Path = u.Path[1:]
+ }
+
+ return u, err
+}
diff --git a/vendor/github.com/hashicorp/go-getter/netrc.go b/vendor/github.com/hashicorp/go-getter/netrc.go
new file mode 100644
index 00000000..c7f6a3fb
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/netrc.go
@@ -0,0 +1,67 @@
+package getter
+
+import (
+ "fmt"
+ "net/url"
+ "os"
+ "runtime"
+
+ "github.com/bgentry/go-netrc/netrc"
+ "github.com/mitchellh/go-homedir"
+)
+
+// addAuthFromNetrc adds auth information to the URL from the user's
+// netrc file if it can be found. This will only add the auth info
+// if the URL doesn't already have a username specified.
+func addAuthFromNetrc(u *url.URL) error {
+ // If the URL already has auth information, do nothing
+ if u.User != nil && u.User.Username() != "" {
+ return nil
+ }
+
+ // Get the netrc file path
+ path := os.Getenv("NETRC")
+ if path == "" {
+ filename := ".netrc"
+ if runtime.GOOS == "windows" {
+ filename = "_netrc"
+ }
+
+ var err error
+ path, err = homedir.Expand("~/" + filename)
+ if err != nil {
+ return err
+ }
+ }
+
+ // If the netrc path is not a file, then do nothing
+ if fi, err := os.Stat(path); err != nil {
+ // File doesn't exist, do nothing
+ if os.IsNotExist(err) {
+ return nil
+ }
+
+ // Some other error!
+ return err
+ } else if fi.IsDir() {
+ // File is directory, ignore
+ return nil
+ }
+
+ // Load up the netrc file
+ net, err := netrc.ParseFile(path)
+ if err != nil {
+ return fmt.Errorf("Error parsing netrc file at %q: %s", path, err)
+ }
+
+ machine := net.FindMachine(u.Host)
+ if machine == nil {
+ // Machine not found, no problem
+ return nil
+ }
+
+ // Set the user info
+ u.User = url.UserPassword(machine.Login, machine.Password)
+ return nil
+}
diff --git a/vendor/github.com/hashicorp/go-getter/source.go b/vendor/github.com/hashicorp/go-getter/source.go
new file mode 100644
index 00000000..4d5ee3cc
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/source.go
@@ -0,0 +1,36 @@
+package getter
+
+import (
+ "strings"
+)
+
+// SourceDirSubdir takes a source and returns a tuple of the URL without
+// the subdir and the subdir itself.
+func SourceDirSubdir(src string) (string, string) {
+ // Calculate an offset to avoid accidentally marking the scheme
+ // as the dir.
+ var offset int
+ if idx := strings.Index(src, "://"); idx > -1 {
+ offset = idx + 3
+ }
+
+ // First see if we even have an explicit subdir
+ idx := strings.Index(src[offset:], "//")
+ if idx == -1 {
+ return src, ""
+ }
+
+ idx += offset
+ subdir := src[idx+2:]
+ src = src[:idx]
+
+ // Next, check if we have query parameters and push them onto the
+ // URL.
+ if idx = strings.Index(subdir, "?"); idx > -1 {
+ query := subdir[idx:]
+ subdir = subdir[:idx]
+ src += query
+ }
+
+ return src, subdir
+}
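+
+// Illustrative split (values hypothetical): a "//" after the scheme marks the
+// subdirectory, and any query string stays attached to the source:
+//
+//	src, subDir := SourceDirSubdir("https://example.com/repo//sub/dir?ref=v1")
+//	// src == "https://example.com/repo?ref=v1", subDir == "sub/dir"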
diff --git a/vendor/github.com/hashicorp/go-getter/storage.go b/vendor/github.com/hashicorp/go-getter/storage.go
new file mode 100644
index 00000000..2bc6b9ec
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-getter/storage.go
@@ -0,0 +1,13 @@
+package getter
+
+// Storage is an interface that knows how to look up downloaded directories
+// as well as download and update directories from their sources into the
+// proper location.
+type Storage interface {
+ // Dir returns the directory on local disk where the directory source
+ // can be loaded from.
+ Dir(string) (string, bool, error)
+
+ // Get will download and optionally update the given directory.
+ Get(string, string, bool) error
+}