path: root/vendor/github.com/hashicorp/go-getter/client.go

package getter

import (
	"bytes"
	"crypto/md5"
	"crypto/sha1"
	"crypto/sha256"
	"crypto/sha512"
	"encoding/hex"
	"fmt"
	"hash"
	"io"
	"io/ioutil"
	"os"
	"path/filepath"
	"strconv"
	"strings"

	urlhelper "github.com/hashicorp/go-getter/helper/url"
)

// Client is a client for downloading things.
//
// Top-level functions such as Get are shortcuts for interacting with a client.
// Using a client directly allows more fine-grained control over how downloading
// is done, as well as customizing the protocols supported.
type Client struct {
	// Src is the source URL to get.
	Src string

	// Dst is the path to save the downloaded thing as. If Dir is set to
	// true, then this should be a directory. If the directory doesn't exist,
	// it will be created for you.
	Dst string

	// Pwd is the working directory for detection. If this isn't set, some
	// detection may fail. Client will not default pwd to the current
	// working directory for security reasons.
	Pwd string

	// Mode is the method of download the client will use. See ClientMode
	// for documentation.
	Mode ClientMode

	// Detectors is the list of detectors that are tried on the source.
	// If this is nil, then the default Detectors will be used.
	Detectors []Detector

	// Decompressors is the map of decompressors supported by this client.
	// If this is nil, then the default value is the Decompressors global.
	Decompressors map[string]Decompressor

	// Getters is the map of protocols supported by this client. If this
	// is nil, then the default Getters variable will be used.
	Getters map[string]Getter

	// Dir, if true, tells the Client it is downloading a directory (versus
	// a single file). This distinction is necessary because file and
	// directory names follow the same format, so they cannot be
	// disambiguated without knowing ahead of time.
	//
	// WARNING: deprecated. If Mode is set, that will take precedence.
	Dir bool
}
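
// A minimal sketch of using a Client directly (illustrative only: the source
// URL, destination, working directory, and mode below are assumed values, not
// anything defined by this file):
//
//	client := &Client{
//		Src:  "git::https://example.com/repo.git//modules/foo",
//		Dst:  "/tmp/foo",
//		Pwd:  "/tmp",
//		Mode: ClientModeDir,
//	}
//	if err := client.Get(); err != nil {
//		// handle the download error
//	}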

// Get downloads the configured source to the destination.
func (c *Client) Get() error {
	// Store this locally since there are cases where we swap this value
	mode := c.Mode
	if mode == ClientModeInvalid {
		if c.Dir {
			mode = ClientModeDir
		} else {
			mode = ClientModeFile
		}
	}

	// Default decompressor value
	decompressors := c.Decompressors
	if decompressors == nil {
		decompressors = Decompressors
	}

	// Detect the URL. This is safe if it is already detected.
	detectors := c.Detectors
	if detectors == nil {
		detectors = Detectors
	}
	src, err := Detect(c.Src, c.Pwd, detectors)
	if err != nil {
		return err
	}

	// Determine if we have a forced protocol, e.g. "git::http://..."
	force, src := getForcedGetter(src)

	// If there is a subdir component, then we download the root separately
	// and then copy over the proper subdir.
	var realDst string
	dst := c.Dst
	src, subDir := SourceDirSubdir(src)
	if subDir != "" {
		tmpDir, err := ioutil.TempDir("", "tf")
		if err != nil {
			return err
		}
		if err := os.RemoveAll(tmpDir); err != nil {
			return err
		}
		defer os.RemoveAll(tmpDir)

		realDst = dst
		dst = tmpDir
	}
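	// Illustrative source string (an assumed example, not a value from this
	// file): for "github.com/hashicorp/example//subdir", SourceDirSubdir
	// splits off "subdir", so the root is fetched into tmpDir and only the
	// subdir is copied into realDst at the end of Get.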

	u, err := urlhelper.Parse(src)
	if err != nil {
		return err
	}
	if force == "" {
		force = u.Scheme
	}

	getters := c.Getters
	if getters == nil {
		getters = Getters
	}

	g, ok := getters[force]
	if !ok {
		return fmt.Errorf(
			"download not supported for scheme '%s'", force)
	}

	// We have magic query parameters that we use to signal different features
	q := u.Query()

	// Determine if we have an archive type
	archiveV := q.Get("archive")
	if archiveV != "" {
		// Delete the parameter since it is a magic parameter we don't
		// want to pass on to the Getter
		q.Del("archive")
		u.RawQuery = q.Encode()

		// If we can parse the value as a bool and it is false, then
		// set the archive to "-" which should never map to a decompressor
		if b, err := strconv.ParseBool(archiveV); err == nil && !b {
			archiveV = "-"
		}
	}
	if archiveV == "" {
		// We don't appear to... but is it part of the filename?
		matchingLen := 0
		for k := range decompressors {
			if strings.HasSuffix(u.Path, "."+k) && len(k) > matchingLen {
				archiveV = k
				matchingLen = len(k)
			}
		}
	}
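	// Illustrative source strings (assumed examples, not values from this
	// file): "?archive=false" forces no decompression ("-" never matches a
	// decompressor), "?archive=tar.gz" forces that decompressor (assuming a
	// "tar.gz" entry exists in the map), and a bare "./foo.tar.gz" is matched
	// by its file suffix above.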

	// If we have a decompressor, then we need to change the destination
	// to download to a temporary path. We unarchive this into the final,
	// real path.
	var decompressDst string
	var decompressDir bool
	decompressor := decompressors[archiveV]
	if decompressor != nil {
		// Create a temporary directory to store our archive. We delete
		// this at the end of everything.
		td, err := ioutil.TempDir("", "getter")
		if err != nil {
			return fmt.Errorf(
				"Error creating temporary directory for archive: %s", err)
		}
		defer os.RemoveAll(td)

		// Swap the download directory to be our temporary path and
		// store the old values.
		decompressDst = dst
		decompressDir = mode != ClientModeFile
		dst = filepath.Join(td, "archive")
		mode = ClientModeFile
	}

	// Determine if we have a checksum
	var checksumHash hash.Hash
	var checksumValue []byte
	if v := q.Get("checksum"); v != "" {
		// Delete the query parameter if we have it.
		q.Del("checksum")
		u.RawQuery = q.Encode()

		// Determine the checksum hash type
		checksumType := ""
		idx := strings.Index(v, ":")
		if idx > -1 {
			checksumType = v[:idx]
		}
		switch checksumType {
		case "md5":
			checksumHash = md5.New()
		case "sha1":
			checksumHash = sha1.New()
		case "sha256":
			checksumHash = sha256.New()
		case "sha512":
			checksumHash = sha512.New()
		default:
			return fmt.Errorf(
				"unsupported checksum type: %s", checksumType)
		}

		// Get the remainder of the value and parse it into bytes
		b, err := hex.DecodeString(v[idx+1:])
		if err != nil {
			return fmt.Errorf("invalid checksum: %s", err)
		}

		// Set our value
		checksumValue = b
	}
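	// Illustrative query format (the digest is an assumption, not a value
	// from this file): "?checksum=sha256:<64 hex characters>" selects
	// sha256.New() above and decodes the hex digest into checksumValue for
	// verification after the download completes.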

	if mode == ClientModeAny {
		// Ask the getter which client mode to use
		mode, err = g.ClientMode(u)
		if err != nil {
			return err
		}

		// Destination is the base name of the URL path in "any" mode when
		// a file source is detected.
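		// Illustrative example (the values are assumptions): with dst
		// "/tmp/out" and a URL path ending in "foo.txt", dst becomes
		// "/tmp/out/foo.txt".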
		if mode == ClientModeFile {
			dst = filepath.Join(dst, filepath.Base(u.Path))
		}
	}

	// If we're not downloading a directory, then just download the file
	// and return.
	if mode == ClientModeFile {
		err := g.GetFile(dst, u)
		if err != nil {
			return err
		}

		if checksumHash != nil {
			if err := checksum(dst, checksumHash, checksumValue); err != nil {
				return err
			}
		}

		if decompressor != nil {
			// We have a decompressor, so decompress the current destination
			// into the final destination with the proper mode.
			err := decompressor.Decompress(decompressDst, dst, decompressDir)
			if err != nil {
				return err
			}

			// Swap the information back
			dst = decompressDst
			if decompressDir {
				mode = ClientModeAny
			} else {
				mode = ClientModeFile
			}
		}

		// We check the mode again because it can be switched back to a
		// directory mode if we were unarchiving. If we're still only
		// Get-ing a file, then we're done.
		if mode == ClientModeFile {
			return nil
		}
	}

	// If we're at this point, we're either downloading a directory or we've
	// downloaded and unarchived a directory and are just handling the subdir.
	// If we have a decompressor, we don't Get here because the Get already
	// happened above.
	if decompressor == nil {
		// If we're getting a directory, then this is an error. You cannot
		// checksum a directory. TODO: test
		if checksumHash != nil {
			return fmt.Errorf(
				"checksum cannot be specified for directory download")
		}

		// We're downloading a directory, which might require a bit more work
		// if we're specifying a subdir.
		err := g.Get(dst, u)
		if err != nil {
			err = fmt.Errorf("error downloading '%s': %s", src, err)
			return err
		}
	}

	// If we have a subdir, copy that over
	if subDir != "" {
		if err := os.RemoveAll(realDst); err != nil {
			return err
		}
		if err := os.MkdirAll(realDst, 0755); err != nil {
			return err
		}

		return copyDir(realDst, filepath.Join(dst, subDir), false)
	}

	return nil
}

// checksum is a simple helper that computes the checksum of a source file
// and compares it to the given expected value.
func checksum(source string, h hash.Hash, v []byte) error {
	f, err := os.Open(source)
	if err != nil {
		return fmt.Errorf("Failed to open file for checksum: %s", err)
	}
	defer f.Close()

	if _, err := io.Copy(h, f); err != nil {
		return fmt.Errorf("Failed to hash: %s", err)
	}

	if actual := h.Sum(nil); !bytes.Equal(actual, v) {
		return fmt.Errorf(
			"Checksums did not match.\nExpected: %s\nGot: %s",
			hex.EncodeToString(v),
			hex.EncodeToString(actual))
	}

	return nil
}
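
// A minimal usage sketch for checksum (illustrative; the path and digest
// variable are assumptions, not defined in this file):
//
//	want, err := hex.DecodeString(expectedHexDigest)
//	if err == nil {
//		err = checksum("/tmp/archive", sha256.New(), want)
//	}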