diff options
author | appilon <apilon@hashicorp.com> | 2019-02-27 16:43:31 -0500 |
---|---|---|
committer | GitHub <noreply@github.com> | 2019-02-27 16:43:31 -0500 |
commit | 844b5a68d8af4791755b8f0ad293cc99f5959183 (patch) | |
tree | 255c250a5c9d4801c74092d33b7337d8c14438ff /vendor/github.com/hashicorp/go-getter | |
parent | 303b299eeb6b06e939e35905e4b34cb410dd9dc3 (diff) | |
parent | 15c0b25d011f37e7c20aeca9eaf461f78285b8d9 (diff) | |
download | terraform-provider-statuscake-844b5a68d8af4791755b8f0ad293cc99f5959183.tar.gz terraform-provider-statuscake-844b5a68d8af4791755b8f0ad293cc99f5959183.tar.zst terraform-provider-statuscake-844b5a68d8af4791755b8f0ad293cc99f5959183.zip |
Merge pull request #27 from terraform-providers/go-modules-2019-02-22
[MODULES] Switch to Go Modules
Diffstat (limited to 'vendor/github.com/hashicorp/go-getter')
21 files changed, 557 insertions, 194 deletions
diff --git a/vendor/github.com/hashicorp/go-getter/.travis.yml b/vendor/github.com/hashicorp/go-getter/.travis.yml new file mode 100644 index 0000000..da804c2 --- /dev/null +++ b/vendor/github.com/hashicorp/go-getter/.travis.yml | |||
@@ -0,0 +1,23 @@ | |||
1 | sudo: false | ||
2 | |||
3 | addons: | ||
4 | apt: | ||
5 | sources: | ||
6 | - sourceline: 'ppa:git-core/ppa' | ||
7 | packages: | ||
8 | - git | ||
9 | |||
10 | language: go | ||
11 | |||
12 | go: | ||
13 | - 1.8.x | ||
14 | - 1.9.x | ||
15 | - master | ||
16 | |||
17 | branches: | ||
18 | only: | ||
19 | - master | ||
20 | |||
21 | matrix: | ||
22 | allow_failures: | ||
23 | - go: master | ||
diff --git a/vendor/github.com/hashicorp/go-getter/README.md b/vendor/github.com/hashicorp/go-getter/README.md index 4a0b6a6..40ace74 100644 --- a/vendor/github.com/hashicorp/go-getter/README.md +++ b/vendor/github.com/hashicorp/go-getter/README.md | |||
@@ -21,8 +21,7 @@ URLs. For example: "github.com/hashicorp/go-getter" would turn into a | |||
21 | Git URL. Or "./foo" would turn into a file URL. These are extensible. | 21 | Git URL. Or "./foo" would turn into a file URL. These are extensible. |
22 | 22 | ||
23 | This library is used by [Terraform](https://terraform.io) for | 23 | This library is used by [Terraform](https://terraform.io) for |
24 | downloading modules, [Otto](https://ottoproject.io) for dependencies and | 24 | downloading modules and [Nomad](https://nomadproject.io) for downloading |
25 | Appfile imports, and [Nomad](https://nomadproject.io) for downloading | ||
26 | binaries. | 25 | binaries. |
27 | 26 | ||
28 | ## Installation and Usage | 27 | ## Installation and Usage |
@@ -119,6 +118,37 @@ The protocol-specific options are documented below the URL format | |||
119 | section. But because they are part of the URL, we point it out here so | 118 | section. But because they are part of the URL, we point it out here so |
120 | you know they exist. | 119 | you know they exist. |
121 | 120 | ||
121 | ### Subdirectories | ||
122 | |||
123 | If you want to download only a specific subdirectory from a downloaded | ||
124 | directory, you can specify a subdirectory after a double-slash `//`. | ||
125 | go-getter will first download the URL specified _before_ the double-slash | ||
126 | (as if you didn't specify a double-slash), but will then copy the | ||
127 | path after the double slash into the target directory. | ||
128 | |||
129 | For example, if you're downloading this GitHub repository, but you only | ||
130 | want to download the `test-fixtures` directory, you can do the following: | ||
131 | |||
132 | ``` | ||
133 | https://github.com/hashicorp/go-getter.git//test-fixtures | ||
134 | ``` | ||
135 | |||
136 | If you downloaded this to the `/tmp` directory, then the file | ||
137 | `/tmp/archive.gz` would exist. Notice that this file is in the `test-fixtures` | ||
138 | directory in this repository, but because we specified a subdirectory, | ||
139 | go-getter automatically copied only that directory contents. | ||
140 | |||
141 | Subdirectory paths may also use filesystem glob patterns. | ||
142 | The path must match _exactly one_ entry or go-getter will return an error. | ||
143 | This is useful if you're not sure of the exact directory name but it follows | ||
144 | a predictable naming structure. | ||
145 | |||
146 | For example, the following URL would also work: | ||
147 | |||
148 | ``` | ||
149 | https://github.com/hashicorp/go-getter.git//test-* | ||
150 | ``` | ||
151 | |||
122 | ### Checksumming | 152 | ### Checksumming |
123 | 153 | ||
124 | For file downloads of any protocol, go-getter can automatically verify | 154 | For file downloads of any protocol, go-getter can automatically verify |
@@ -154,9 +184,11 @@ The following archive formats are supported: | |||
154 | 184 | ||
155 | * `tar.gz` and `tgz` | 185 | * `tar.gz` and `tgz` |
156 | * `tar.bz2` and `tbz2` | 186 | * `tar.bz2` and `tbz2` |
187 | * `tar.xz` and `txz` | ||
157 | * `zip` | 188 | * `zip` |
158 | * `gz` | 189 | * `gz` |
159 | * `bz2` | 190 | * `bz2` |
191 | * `xz` | ||
160 | 192 | ||
161 | For example, an example URL is shown below: | 193 | For example, an example URL is shown below: |
162 | 194 | ||
@@ -200,6 +232,9 @@ The options below are available to all protocols: | |||
200 | * `checksum` - Checksum to verify the downloaded file or archive. See | 232 | * `checksum` - Checksum to verify the downloaded file or archive. See |
201 | the entire section on checksumming above for format and more details. | 233 | the entire section on checksumming above for format and more details. |
202 | 234 | ||
235 | * `filename` - When in file download mode, allows specifying the name of the | ||
236 | downloaded file on disk. Has no effect in directory mode. | ||
237 | |||
203 | ### Local Files (`file`) | 238 | ### Local Files (`file`) |
204 | 239 | ||
205 | None | 240 | None |
@@ -222,13 +257,17 @@ None | |||
222 | 257 | ||
223 | ### HTTP (`http`) | 258 | ### HTTP (`http`) |
224 | 259 | ||
225 | None | 260 | #### Basic Authentication |
261 | |||
262 | To use HTTP basic authentication with go-getter, simply prepend `username:password@` to the | ||
263 | hostname in the URL such as `https://Aladdin:OpenSesame@www.example.com/index.html`. All special | ||
264 | characters, including the username and password, must be URL encoded. | ||
226 | 265 | ||
227 | ### S3 (`s3`) | 266 | ### S3 (`s3`) |
228 | 267 | ||
229 | S3 takes various access configurations in the URL. Note that it will also | 268 | S3 takes various access configurations in the URL. Note that it will also |
230 | read these from standard AWS environment variables if they're set. If | 269 | read these from standard AWS environment variables if they're set. S3 compliant servers like Minio |
231 | the query parameters are present, these take priority. | 270 | are also supported. If the query parameters are present, these take priority. |
232 | 271 | ||
233 | * `aws_access_key_id` - AWS access key. | 272 | * `aws_access_key_id` - AWS access key. |
234 | * `aws_access_key_secret` - AWS access key secret. | 273 | * `aws_access_key_secret` - AWS access key secret. |
@@ -240,6 +279,14 @@ If you use go-getter and want to use an EC2 IAM Instance Profile to avoid | |||
240 | using credentials, then just omit these and the profile, if available will | 279 | using credentials, then just omit these and the profile, if available will |
241 | be used automatically. | 280 | be used automatically. |
242 | 281 | ||
282 | ### Using S3 with Minio | ||
283 | If you use go-getter for Minio support, you must consider the following: | ||
284 | |||
285 | * `aws_access_key_id` (required) - Minio access key. | ||
286 | * `aws_access_key_secret` (required) - Minio access key secret. | ||
287 | * `region` (optional - defaults to us-east-1) - Region identifier to use. | ||
288 | * `version` (optional - defaults to Minio default) - Configuration file format. | ||
289 | |||
243 | #### S3 Bucket Examples | 290 | #### S3 Bucket Examples |
244 | 291 | ||
245 | S3 has several addressing schemes used to reference your bucket. These are | 292 | S3 has several addressing schemes used to reference your bucket. These are |
@@ -250,4 +297,5 @@ Some examples for these addressing schemes: | |||
250 | - s3::https://s3-eu-west-1.amazonaws.com/bucket/foo | 297 | - s3::https://s3-eu-west-1.amazonaws.com/bucket/foo |
251 | - bucket.s3.amazonaws.com/foo | 298 | - bucket.s3.amazonaws.com/foo |
252 | - bucket.s3-eu-west-1.amazonaws.com/foo/bar | 299 | - bucket.s3-eu-west-1.amazonaws.com/foo/bar |
300 | - "s3::http://127.0.0.1:9000/test-bucket/hello.txt?aws_access_key_id=KEYID&aws_access_key_secret=SECRETKEY&region=us-east-2" | ||
253 | 301 | ||
diff --git a/vendor/github.com/hashicorp/go-getter/appveyor.yml b/vendor/github.com/hashicorp/go-getter/appveyor.yml index 159dad4..ec48d45 100644 --- a/vendor/github.com/hashicorp/go-getter/appveyor.yml +++ b/vendor/github.com/hashicorp/go-getter/appveyor.yml | |||
@@ -1,5 +1,5 @@ | |||
1 | version: "build-{branch}-{build}" | 1 | version: "build-{branch}-{build}" |
2 | image: Visual Studio 2015 | 2 | image: Visual Studio 2017 |
3 | clone_folder: c:\gopath\github.com\hashicorp\go-getter | 3 | clone_folder: c:\gopath\github.com\hashicorp\go-getter |
4 | environment: | 4 | environment: |
5 | GOPATH: c:\gopath | 5 | GOPATH: c:\gopath |
diff --git a/vendor/github.com/hashicorp/go-getter/client.go b/vendor/github.com/hashicorp/go-getter/client.go index 876812a..300301c 100644 --- a/vendor/github.com/hashicorp/go-getter/client.go +++ b/vendor/github.com/hashicorp/go-getter/client.go | |||
@@ -17,6 +17,7 @@ import ( | |||
17 | "strings" | 17 | "strings" |
18 | 18 | ||
19 | urlhelper "github.com/hashicorp/go-getter/helper/url" | 19 | urlhelper "github.com/hashicorp/go-getter/helper/url" |
20 | "github.com/hashicorp/go-safetemp" | ||
20 | ) | 21 | ) |
21 | 22 | ||
22 | // Client is a client for downloading things. | 23 | // Client is a client for downloading things. |
@@ -100,17 +101,14 @@ func (c *Client) Get() error { | |||
100 | dst := c.Dst | 101 | dst := c.Dst |
101 | src, subDir := SourceDirSubdir(src) | 102 | src, subDir := SourceDirSubdir(src) |
102 | if subDir != "" { | 103 | if subDir != "" { |
103 | tmpDir, err := ioutil.TempDir("", "tf") | 104 | td, tdcloser, err := safetemp.Dir("", "getter") |
104 | if err != nil { | 105 | if err != nil { |
105 | return err | 106 | return err |
106 | } | 107 | } |
107 | if err := os.RemoveAll(tmpDir); err != nil { | 108 | defer tdcloser.Close() |
108 | return err | ||
109 | } | ||
110 | defer os.RemoveAll(tmpDir) | ||
111 | 109 | ||
112 | realDst = dst | 110 | realDst = dst |
113 | dst = tmpDir | 111 | dst = td |
114 | } | 112 | } |
115 | 113 | ||
116 | u, err := urlhelper.Parse(src) | 114 | u, err := urlhelper.Parse(src) |
@@ -232,7 +230,18 @@ func (c *Client) Get() error { | |||
232 | // Destination is the base name of the URL path in "any" mode when | 230 | // Destination is the base name of the URL path in "any" mode when |
233 | // a file source is detected. | 231 | // a file source is detected. |
234 | if mode == ClientModeFile { | 232 | if mode == ClientModeFile { |
235 | dst = filepath.Join(dst, filepath.Base(u.Path)) | 233 | filename := filepath.Base(u.Path) |
234 | |||
235 | // Determine if we have a custom file name | ||
236 | if v := q.Get("filename"); v != "" { | ||
237 | // Delete the query parameter if we have it. | ||
238 | q.Del("filename") | ||
239 | u.RawQuery = q.Encode() | ||
240 | |||
241 | filename = v | ||
242 | } | ||
243 | |||
244 | dst = filepath.Join(dst, filename) | ||
236 | } | 245 | } |
237 | } | 246 | } |
238 | 247 | ||
@@ -305,7 +314,13 @@ func (c *Client) Get() error { | |||
305 | return err | 314 | return err |
306 | } | 315 | } |
307 | 316 | ||
308 | return copyDir(realDst, filepath.Join(dst, subDir), false) | 317 | // Process any globs |
318 | subDir, err := SubdirGlob(dst, subDir) | ||
319 | if err != nil { | ||
320 | return err | ||
321 | } | ||
322 | |||
323 | return copyDir(realDst, subDir, false) | ||
309 | } | 324 | } |
310 | 325 | ||
311 | return nil | 326 | return nil |
diff --git a/vendor/github.com/hashicorp/go-getter/decompress.go b/vendor/github.com/hashicorp/go-getter/decompress.go index d18174c..198bb0e 100644 --- a/vendor/github.com/hashicorp/go-getter/decompress.go +++ b/vendor/github.com/hashicorp/go-getter/decompress.go | |||
@@ -1,7 +1,15 @@ | |||
1 | package getter | 1 | package getter |
2 | 2 | ||
3 | import ( | ||
4 | "strings" | ||
5 | ) | ||
6 | |||
3 | // Decompressor defines the interface that must be implemented to add | 7 | // Decompressor defines the interface that must be implemented to add |
4 | // support for decompressing a type. | 8 | // support for decompressing a type. |
9 | // | ||
10 | // Important: if you're implementing a decompressor, please use the | ||
11 | // containsDotDot helper in this file to ensure that files can't be | ||
12 | // decompressed outside of the specified directory. | ||
5 | type Decompressor interface { | 13 | type Decompressor interface { |
6 | // Decompress should decompress src to dst. dir specifies whether dst | 14 | // Decompress should decompress src to dst. dir specifies whether dst |
7 | // is a directory or single file. src is guaranteed to be a single file | 15 | // is a directory or single file. src is guaranteed to be a single file |
@@ -16,14 +24,35 @@ var Decompressors map[string]Decompressor | |||
16 | func init() { | 24 | func init() { |
17 | tbzDecompressor := new(TarBzip2Decompressor) | 25 | tbzDecompressor := new(TarBzip2Decompressor) |
18 | tgzDecompressor := new(TarGzipDecompressor) | 26 | tgzDecompressor := new(TarGzipDecompressor) |
27 | txzDecompressor := new(TarXzDecompressor) | ||
19 | 28 | ||
20 | Decompressors = map[string]Decompressor{ | 29 | Decompressors = map[string]Decompressor{ |
21 | "bz2": new(Bzip2Decompressor), | 30 | "bz2": new(Bzip2Decompressor), |
22 | "gz": new(GzipDecompressor), | 31 | "gz": new(GzipDecompressor), |
32 | "xz": new(XzDecompressor), | ||
23 | "tar.bz2": tbzDecompressor, | 33 | "tar.bz2": tbzDecompressor, |
24 | "tar.gz": tgzDecompressor, | 34 | "tar.gz": tgzDecompressor, |
35 | "tar.xz": txzDecompressor, | ||
25 | "tbz2": tbzDecompressor, | 36 | "tbz2": tbzDecompressor, |
26 | "tgz": tgzDecompressor, | 37 | "tgz": tgzDecompressor, |
38 | "txz": txzDecompressor, | ||
27 | "zip": new(ZipDecompressor), | 39 | "zip": new(ZipDecompressor), |
28 | } | 40 | } |
29 | } | 41 | } |
42 | |||
// containsDotDot checks if the filepath value v contains a ".." entry.
// This will check filepath components by splitting along / or \. This
// function is copied directly from the Go net/http implementation.
func containsDotDot(v string) bool {
	// Fast path: if ".." never appears as a substring, no path component
	// can possibly be "..".
	if !strings.Contains(v, "..") {
		return false
	}
	parts := strings.FieldsFunc(v, isSlashRune)
	for _, part := range parts {
		if part == ".." {
			return true
		}
	}
	return false
}

// isSlashRune reports whether r is a path separator (forward slash or backslash).
func isSlashRune(r rune) bool {
	return r == '/' || r == '\\'
}
diff --git a/vendor/github.com/hashicorp/go-getter/decompress_gzip.go b/vendor/github.com/hashicorp/go-getter/decompress_gzip.go index 2001054..5ebf709 100644 --- a/vendor/github.com/hashicorp/go-getter/decompress_gzip.go +++ b/vendor/github.com/hashicorp/go-getter/decompress_gzip.go | |||
@@ -9,7 +9,7 @@ import ( | |||
9 | ) | 9 | ) |
10 | 10 | ||
11 | // GzipDecompressor is an implementation of Decompressor that can | 11 | // GzipDecompressor is an implementation of Decompressor that can |
12 | // decompress bz2 files. | 12 | // decompress gzip files. |
13 | type GzipDecompressor struct{} | 13 | type GzipDecompressor struct{} |
14 | 14 | ||
15 | func (d *GzipDecompressor) Decompress(dst, src string, dir bool) error { | 15 | func (d *GzipDecompressor) Decompress(dst, src string, dir bool) error { |
diff --git a/vendor/github.com/hashicorp/go-getter/decompress_tar.go b/vendor/github.com/hashicorp/go-getter/decompress_tar.go new file mode 100644 index 0000000..39cb392 --- /dev/null +++ b/vendor/github.com/hashicorp/go-getter/decompress_tar.go | |||
@@ -0,0 +1,138 @@ | |||
1 | package getter | ||
2 | |||
3 | import ( | ||
4 | "archive/tar" | ||
5 | "fmt" | ||
6 | "io" | ||
7 | "os" | ||
8 | "path/filepath" | ||
9 | ) | ||
10 | |||
// untar is a shared helper for untarring an archive. The reader should provide
// an uncompressed view of the tar archive. dst is the destination path: when
// dir is true it is treated as a directory to extract into; otherwise the
// archive must contain exactly one regular file, written to dst. src is used
// only for error messages.
func untar(input io.Reader, dst, src string, dir bool) error {
	tarR := tar.NewReader(input)
	// done tracks whether at least one file entry has been written; in
	// single-file mode a second file entry is an error.
	done := false
	// Directory headers are collected so their atime/mtime can be restored
	// after extraction (writing files into a directory updates its mtime).
	dirHdrs := []*tar.Header{}
	for {
		hdr, err := tarR.Next()
		if err == io.EOF {
			if !done {
				// Empty archive
				return fmt.Errorf("empty archive: %s", src)
			}

			break
		}
		if err != nil {
			return err
		}

		if hdr.Typeflag == tar.TypeXGlobalHeader || hdr.Typeflag == tar.TypeXHeader {
			// don't unpack extended headers as files
			continue
		}

		path := dst
		if dir {
			// Disallow parent traversal
			if containsDotDot(hdr.Name) {
				return fmt.Errorf("entry contains '..': %s", hdr.Name)
			}

			path = filepath.Join(path, hdr.Name)
		}

		if hdr.FileInfo().IsDir() {
			if !dir {
				// A directory entry cannot satisfy single-file mode.
				return fmt.Errorf("expected a single file: %s", src)
			}

			// A directory, just make the directory and continue unarchiving...
			if err := os.MkdirAll(path, 0755); err != nil {
				return err
			}

			// Record the directory information so that we may set its attributes
			// after all files have been extracted
			dirHdrs = append(dirHdrs, hdr)

			continue
		} else {
			// There is no ordering guarantee that a file in a directory is
			// listed before the directory
			dstPath := filepath.Dir(path)

			// Check that the directory exists, otherwise create it
			if _, err := os.Stat(dstPath); os.IsNotExist(err) {
				if err := os.MkdirAll(dstPath, 0755); err != nil {
					return err
				}
			}
		}

		// We have a file. If we already decoded, then it is an error
		if !dir && done {
			return fmt.Errorf("expected a single file, got multiple: %s", src)
		}

		// Mark that we're done so future in single file mode errors
		done = true

		// Open the file for writing
		dstF, err := os.Create(path)
		if err != nil {
			return err
		}
		_, err = io.Copy(dstF, tarR)
		dstF.Close()
		if err != nil {
			return err
		}

		// Chmod the file
		if err := os.Chmod(path, hdr.FileInfo().Mode()); err != nil {
			return err
		}

		// Set the access and modification time
		// NOTE(review): hdr.AccessTime may be the zero time for tar formats
		// that don't record atime — confirm os.Chtimes tolerates that on all
		// supported platforms.
		if err := os.Chtimes(path, hdr.AccessTime, hdr.ModTime); err != nil {
			return err
		}
	}

	// Adding a file or subdirectory changes the mtime of a directory
	// We therefore wait until we've extracted everything and then set the mtime and atime attributes
	for _, dirHdr := range dirHdrs {
		path := filepath.Join(dst, dirHdr.Name)
		if err := os.Chtimes(path, dirHdr.AccessTime, dirHdr.ModTime); err != nil {
			return err
		}
	}

	return nil
}
115 | |||
116 | // tarDecompressor is an implementation of Decompressor that can | ||
117 | // unpack tar files. | ||
118 | type tarDecompressor struct{} | ||
119 | |||
120 | func (d *tarDecompressor) Decompress(dst, src string, dir bool) error { | ||
121 | // If we're going into a directory we should make that first | ||
122 | mkdir := dst | ||
123 | if !dir { | ||
124 | mkdir = filepath.Dir(dst) | ||
125 | } | ||
126 | if err := os.MkdirAll(mkdir, 0755); err != nil { | ||
127 | return err | ||
128 | } | ||
129 | |||
130 | // File first | ||
131 | f, err := os.Open(src) | ||
132 | if err != nil { | ||
133 | return err | ||
134 | } | ||
135 | defer f.Close() | ||
136 | |||
137 | return untar(f, dst, src, dir) | ||
138 | } | ||
diff --git a/vendor/github.com/hashicorp/go-getter/decompress_tbz2.go b/vendor/github.com/hashicorp/go-getter/decompress_tbz2.go index c46ed44..5391b5c 100644 --- a/vendor/github.com/hashicorp/go-getter/decompress_tbz2.go +++ b/vendor/github.com/hashicorp/go-getter/decompress_tbz2.go | |||
@@ -1,10 +1,7 @@ | |||
1 | package getter | 1 | package getter |
2 | 2 | ||
3 | import ( | 3 | import ( |
4 | "archive/tar" | ||
5 | "compress/bzip2" | 4 | "compress/bzip2" |
6 | "fmt" | ||
7 | "io" | ||
8 | "os" | 5 | "os" |
9 | "path/filepath" | 6 | "path/filepath" |
10 | ) | 7 | ) |
@@ -32,64 +29,5 @@ func (d *TarBzip2Decompressor) Decompress(dst, src string, dir bool) error { | |||
32 | 29 | ||
33 | // Bzip2 compression is second | 30 | // Bzip2 compression is second |
34 | bzipR := bzip2.NewReader(f) | 31 | bzipR := bzip2.NewReader(f) |
35 | 32 | return untar(bzipR, dst, src, dir) | |
36 | // Once bzip decompressed we have a tar format | ||
37 | tarR := tar.NewReader(bzipR) | ||
38 | done := false | ||
39 | for { | ||
40 | hdr, err := tarR.Next() | ||
41 | if err == io.EOF { | ||
42 | if !done { | ||
43 | // Empty archive | ||
44 | return fmt.Errorf("empty archive: %s", src) | ||
45 | } | ||
46 | |||
47 | return nil | ||
48 | } | ||
49 | if err != nil { | ||
50 | return err | ||
51 | } | ||
52 | |||
53 | path := dst | ||
54 | if dir { | ||
55 | path = filepath.Join(path, hdr.Name) | ||
56 | } | ||
57 | |||
58 | if hdr.FileInfo().IsDir() { | ||
59 | if dir { | ||
60 | return fmt.Errorf("expected a single file: %s", src) | ||
61 | } | ||
62 | |||
63 | // A directory, just make the directory and continue unarchiving... | ||
64 | if err := os.MkdirAll(path, 0755); err != nil { | ||
65 | return err | ||
66 | } | ||
67 | |||
68 | continue | ||
69 | } | ||
70 | |||
71 | // We have a file. If we already decoded, then it is an error | ||
72 | if !dir && done { | ||
73 | return fmt.Errorf("expected a single file, got multiple: %s", src) | ||
74 | } | ||
75 | |||
76 | // Mark that we're done so future in single file mode errors | ||
77 | done = true | ||
78 | |||
79 | // Open the file for writing | ||
80 | dstF, err := os.Create(path) | ||
81 | if err != nil { | ||
82 | return err | ||
83 | } | ||
84 | _, err = io.Copy(dstF, tarR) | ||
85 | dstF.Close() | ||
86 | if err != nil { | ||
87 | return err | ||
88 | } | ||
89 | |||
90 | // Chmod the file | ||
91 | if err := os.Chmod(path, hdr.FileInfo().Mode()); err != nil { | ||
92 | return err | ||
93 | } | ||
94 | } | ||
95 | } | 33 | } |
diff --git a/vendor/github.com/hashicorp/go-getter/decompress_testing.go b/vendor/github.com/hashicorp/go-getter/decompress_testing.go index 686d6c2..91cf33d 100644 --- a/vendor/github.com/hashicorp/go-getter/decompress_testing.go +++ b/vendor/github.com/hashicorp/go-getter/decompress_testing.go | |||
@@ -11,7 +11,9 @@ import ( | |||
11 | "runtime" | 11 | "runtime" |
12 | "sort" | 12 | "sort" |
13 | "strings" | 13 | "strings" |
14 | "testing" | 14 | "time" |
15 | |||
16 | "github.com/mitchellh/go-testing-interface" | ||
15 | ) | 17 | ) |
16 | 18 | ||
17 | // TestDecompressCase is a single test case for testing decompressors | 19 | // TestDecompressCase is a single test case for testing decompressors |
@@ -21,10 +23,11 @@ type TestDecompressCase struct { | |||
21 | Err bool // Err is whether we expect an error or not | 23 | Err bool // Err is whether we expect an error or not |
22 | DirList []string // DirList is the list of files for Dir mode | 24 | DirList []string // DirList is the list of files for Dir mode |
23 | FileMD5 string // FileMD5 is the expected MD5 for a single file | 25 | FileMD5 string // FileMD5 is the expected MD5 for a single file |
26 | Mtime *time.Time // Mtime is the optionally expected mtime for a single file (or all files if in Dir mode) | ||
24 | } | 27 | } |
25 | 28 | ||
26 | // TestDecompressor is a helper function for testing generic decompressors. | 29 | // TestDecompressor is a helper function for testing generic decompressors. |
27 | func TestDecompressor(t *testing.T, d Decompressor, cases []TestDecompressCase) { | 30 | func TestDecompressor(t testing.T, d Decompressor, cases []TestDecompressCase) { |
28 | for _, tc := range cases { | 31 | for _, tc := range cases { |
29 | t.Logf("Testing: %s", tc.Input) | 32 | t.Logf("Testing: %s", tc.Input) |
30 | 33 | ||
@@ -67,6 +70,14 @@ func TestDecompressor(t *testing.T, d Decompressor, cases []TestDecompressCase) | |||
67 | } | 70 | } |
68 | } | 71 | } |
69 | 72 | ||
73 | if tc.Mtime != nil { | ||
74 | actual := fi.ModTime() | ||
75 | expected := *tc.Mtime | ||
76 | if actual != expected { | ||
77 | t.Fatalf("err %s: expected mtime '%s' for %s, got '%s'", tc.Input, expected.String(), dst, actual.String()) | ||
78 | } | ||
79 | } | ||
80 | |||
70 | return | 81 | return |
71 | } | 82 | } |
72 | 83 | ||
@@ -83,11 +94,26 @@ func TestDecompressor(t *testing.T, d Decompressor, cases []TestDecompressCase) | |||
83 | if !reflect.DeepEqual(actual, expected) { | 94 | if !reflect.DeepEqual(actual, expected) { |
84 | t.Fatalf("bad %s\n\n%#v\n\n%#v", tc.Input, actual, expected) | 95 | t.Fatalf("bad %s\n\n%#v\n\n%#v", tc.Input, actual, expected) |
85 | } | 96 | } |
97 | // Check for correct atime/mtime | ||
98 | for _, dir := range actual { | ||
99 | path := filepath.Join(dst, dir) | ||
100 | if tc.Mtime != nil { | ||
101 | fi, err := os.Stat(path) | ||
102 | if err != nil { | ||
103 | t.Fatalf("err: %s", err) | ||
104 | } | ||
105 | actual := fi.ModTime() | ||
106 | expected := *tc.Mtime | ||
107 | if actual != expected { | ||
108 | t.Fatalf("err %s: expected mtime '%s' for %s, got '%s'", tc.Input, expected.String(), path, actual.String()) | ||
109 | } | ||
110 | } | ||
111 | } | ||
86 | }() | 112 | }() |
87 | } | 113 | } |
88 | } | 114 | } |
89 | 115 | ||
90 | func testListDir(t *testing.T, path string) []string { | 116 | func testListDir(t testing.T, path string) []string { |
91 | var result []string | 117 | var result []string |
92 | err := filepath.Walk(path, func(sub string, info os.FileInfo, err error) error { | 118 | err := filepath.Walk(path, func(sub string, info os.FileInfo, err error) error { |
93 | if err != nil { | 119 | if err != nil { |
@@ -102,7 +128,7 @@ func testListDir(t *testing.T, path string) []string { | |||
102 | 128 | ||
103 | // If it is a dir, add trailing sep | 129 | // If it is a dir, add trailing sep |
104 | if info.IsDir() { | 130 | if info.IsDir() { |
105 | sub += "/" | 131 | sub += string(os.PathSeparator) |
106 | } | 132 | } |
107 | 133 | ||
108 | result = append(result, sub) | 134 | result = append(result, sub) |
@@ -116,7 +142,7 @@ func testListDir(t *testing.T, path string) []string { | |||
116 | return result | 142 | return result |
117 | } | 143 | } |
118 | 144 | ||
119 | func testMD5(t *testing.T, path string) string { | 145 | func testMD5(t testing.T, path string) string { |
120 | f, err := os.Open(path) | 146 | f, err := os.Open(path) |
121 | if err != nil { | 147 | if err != nil { |
122 | t.Fatalf("err: %s", err) | 148 | t.Fatalf("err: %s", err) |
diff --git a/vendor/github.com/hashicorp/go-getter/decompress_tgz.go b/vendor/github.com/hashicorp/go-getter/decompress_tgz.go index e8b1c31..65eb70d 100644 --- a/vendor/github.com/hashicorp/go-getter/decompress_tgz.go +++ b/vendor/github.com/hashicorp/go-getter/decompress_tgz.go | |||
@@ -1,10 +1,8 @@ | |||
1 | package getter | 1 | package getter |
2 | 2 | ||
3 | import ( | 3 | import ( |
4 | "archive/tar" | ||
5 | "compress/gzip" | 4 | "compress/gzip" |
6 | "fmt" | 5 | "fmt" |
7 | "io" | ||
8 | "os" | 6 | "os" |
9 | "path/filepath" | 7 | "path/filepath" |
10 | ) | 8 | ) |
@@ -37,63 +35,5 @@ func (d *TarGzipDecompressor) Decompress(dst, src string, dir bool) error { | |||
37 | } | 35 | } |
38 | defer gzipR.Close() | 36 | defer gzipR.Close() |
39 | 37 | ||
40 | // Once gzip decompressed we have a tar format | 38 | return untar(gzipR, dst, src, dir) |
41 | tarR := tar.NewReader(gzipR) | ||
42 | done := false | ||
43 | for { | ||
44 | hdr, err := tarR.Next() | ||
45 | if err == io.EOF { | ||
46 | if !done { | ||
47 | // Empty archive | ||
48 | return fmt.Errorf("empty archive: %s", src) | ||
49 | } | ||
50 | |||
51 | return nil | ||
52 | } | ||
53 | if err != nil { | ||
54 | return err | ||
55 | } | ||
56 | |||
57 | path := dst | ||
58 | if dir { | ||
59 | path = filepath.Join(path, hdr.Name) | ||
60 | } | ||
61 | |||
62 | if hdr.FileInfo().IsDir() { | ||
63 | if !dir { | ||
64 | return fmt.Errorf("expected a single file: %s", src) | ||
65 | } | ||
66 | |||
67 | // A directory, just make the directory and continue unarchiving... | ||
68 | if err := os.MkdirAll(path, 0755); err != nil { | ||
69 | return err | ||
70 | } | ||
71 | |||
72 | continue | ||
73 | } | ||
74 | |||
75 | // We have a file. If we already decoded, then it is an error | ||
76 | if !dir && done { | ||
77 | return fmt.Errorf("expected a single file, got multiple: %s", src) | ||
78 | } | ||
79 | |||
80 | // Mark that we're done so future in single file mode errors | ||
81 | done = true | ||
82 | |||
83 | // Open the file for writing | ||
84 | dstF, err := os.Create(path) | ||
85 | if err != nil { | ||
86 | return err | ||
87 | } | ||
88 | _, err = io.Copy(dstF, tarR) | ||
89 | dstF.Close() | ||
90 | if err != nil { | ||
91 | return err | ||
92 | } | ||
93 | |||
94 | // Chmod the file | ||
95 | if err := os.Chmod(path, hdr.FileInfo().Mode()); err != nil { | ||
96 | return err | ||
97 | } | ||
98 | } | ||
99 | } | 39 | } |
diff --git a/vendor/github.com/hashicorp/go-getter/decompress_txz.go b/vendor/github.com/hashicorp/go-getter/decompress_txz.go new file mode 100644 index 0000000..5e151c1 --- /dev/null +++ b/vendor/github.com/hashicorp/go-getter/decompress_txz.go | |||
@@ -0,0 +1,39 @@ | |||
1 | package getter | ||
2 | |||
3 | import ( | ||
4 | "fmt" | ||
5 | "os" | ||
6 | "path/filepath" | ||
7 | |||
8 | "github.com/ulikunitz/xz" | ||
9 | ) | ||
10 | |||
11 | // TarXzDecompressor is an implementation of Decompressor that can | ||
12 | // decompress tar.xz files. | ||
13 | type TarXzDecompressor struct{} | ||
14 | |||
15 | func (d *TarXzDecompressor) Decompress(dst, src string, dir bool) error { | ||
16 | // If we're going into a directory we should make that first | ||
17 | mkdir := dst | ||
18 | if !dir { | ||
19 | mkdir = filepath.Dir(dst) | ||
20 | } | ||
21 | if err := os.MkdirAll(mkdir, 0755); err != nil { | ||
22 | return err | ||
23 | } | ||
24 | |||
25 | // File first | ||
26 | f, err := os.Open(src) | ||
27 | if err != nil { | ||
28 | return err | ||
29 | } | ||
30 | defer f.Close() | ||
31 | |||
32 | // xz compression is second | ||
33 | txzR, err := xz.NewReader(f) | ||
34 | if err != nil { | ||
35 | return fmt.Errorf("Error opening an xz reader for %s: %s", src, err) | ||
36 | } | ||
37 | |||
38 | return untar(txzR, dst, src, dir) | ||
39 | } | ||
diff --git a/vendor/github.com/hashicorp/go-getter/decompress_xz.go b/vendor/github.com/hashicorp/go-getter/decompress_xz.go new file mode 100644 index 0000000..4e37aba --- /dev/null +++ b/vendor/github.com/hashicorp/go-getter/decompress_xz.go | |||
@@ -0,0 +1,49 @@ | |||
1 | package getter | ||
2 | |||
3 | import ( | ||
4 | "fmt" | ||
5 | "io" | ||
6 | "os" | ||
7 | "path/filepath" | ||
8 | |||
9 | "github.com/ulikunitz/xz" | ||
10 | ) | ||
11 | |||
12 | // XzDecompressor is an implementation of Decompressor that can | ||
13 | // decompress xz files. | ||
14 | type XzDecompressor struct{} | ||
15 | |||
16 | func (d *XzDecompressor) Decompress(dst, src string, dir bool) error { | ||
17 | // Directory isn't supported at all | ||
18 | if dir { | ||
19 | return fmt.Errorf("xz-compressed files can only unarchive to a single file") | ||
20 | } | ||
21 | |||
22 | // If we're going into a directory we should make that first | ||
23 | if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil { | ||
24 | return err | ||
25 | } | ||
26 | |||
27 | // File first | ||
28 | f, err := os.Open(src) | ||
29 | if err != nil { | ||
30 | return err | ||
31 | } | ||
32 | defer f.Close() | ||
33 | |||
34 | // xz compression is second | ||
35 | xzR, err := xz.NewReader(f) | ||
36 | if err != nil { | ||
37 | return err | ||
38 | } | ||
39 | |||
40 | // Copy it out | ||
41 | dstF, err := os.Create(dst) | ||
42 | if err != nil { | ||
43 | return err | ||
44 | } | ||
45 | defer dstF.Close() | ||
46 | |||
47 | _, err = io.Copy(dstF, xzR) | ||
48 | return err | ||
49 | } | ||
diff --git a/vendor/github.com/hashicorp/go-getter/decompress_zip.go b/vendor/github.com/hashicorp/go-getter/decompress_zip.go index a065c07..b0e70ca 100644 --- a/vendor/github.com/hashicorp/go-getter/decompress_zip.go +++ b/vendor/github.com/hashicorp/go-getter/decompress_zip.go | |||
@@ -42,6 +42,11 @@ func (d *ZipDecompressor) Decompress(dst, src string, dir bool) error { | |||
42 | for _, f := range zipR.File { | 42 | for _, f := range zipR.File { |
43 | path := dst | 43 | path := dst |
44 | if dir { | 44 | if dir { |
45 | // Disallow parent traversal | ||
46 | if containsDotDot(f.Name) { | ||
47 | return fmt.Errorf("entry contains '..': %s", f.Name) | ||
48 | } | ||
49 | |||
45 | path = filepath.Join(path, f.Name) | 50 | path = filepath.Join(path, f.Name) |
46 | } | 51 | } |
47 | 52 | ||
diff --git a/vendor/github.com/hashicorp/go-getter/detect.go b/vendor/github.com/hashicorp/go-getter/detect.go index 481b737..c369551 100644 --- a/vendor/github.com/hashicorp/go-getter/detect.go +++ b/vendor/github.com/hashicorp/go-getter/detect.go | |||
@@ -72,12 +72,18 @@ func Detect(src string, pwd string, ds []Detector) (string, error) { | |||
72 | subDir = detectSubdir | 72 | subDir = detectSubdir |
73 | } | 73 | } |
74 | } | 74 | } |
75 | |||
75 | if subDir != "" { | 76 | if subDir != "" { |
76 | u, err := url.Parse(result) | 77 | u, err := url.Parse(result) |
77 | if err != nil { | 78 | if err != nil { |
78 | return "", fmt.Errorf("Error parsing URL: %s", err) | 79 | return "", fmt.Errorf("Error parsing URL: %s", err) |
79 | } | 80 | } |
80 | u.Path += "//" + subDir | 81 | u.Path += "//" + subDir |
82 | |||
83 | // a subdir may contain wildcards, but in order to support them we | ||
84 | // have to ensure the path isn't escaped. | ||
85 | u.RawPath = u.Path | ||
86 | |||
81 | result = u.String() | 87 | result = u.String() |
82 | } | 88 | } |
83 | 89 | ||
diff --git a/vendor/github.com/hashicorp/go-getter/detect_file.go b/vendor/github.com/hashicorp/go-getter/detect_file.go index 756ea43..4ef41ea 100644 --- a/vendor/github.com/hashicorp/go-getter/detect_file.go +++ b/vendor/github.com/hashicorp/go-getter/detect_file.go | |||
@@ -32,7 +32,7 @@ func (d *FileDetector) Detect(src, pwd string) (string, bool, error) { | |||
32 | return "", true, err | 32 | return "", true, err |
33 | } | 33 | } |
34 | if fi.Mode()&os.ModeSymlink != 0 { | 34 | if fi.Mode()&os.ModeSymlink != 0 { |
35 | pwd, err = os.Readlink(pwd) | 35 | pwd, err = filepath.EvalSymlinks(pwd) |
36 | if err != nil { | 36 | if err != nil { |
37 | return "", true, err | 37 | return "", true, err |
38 | } | 38 | } |
diff --git a/vendor/github.com/hashicorp/go-getter/get.go b/vendor/github.com/hashicorp/go-getter/get.go index c3236f5..e6053d9 100644 --- a/vendor/github.com/hashicorp/go-getter/get.go +++ b/vendor/github.com/hashicorp/go-getter/get.go | |||
@@ -18,6 +18,8 @@ import ( | |||
18 | "os/exec" | 18 | "os/exec" |
19 | "regexp" | 19 | "regexp" |
20 | "syscall" | 20 | "syscall" |
21 | |||
22 | cleanhttp "github.com/hashicorp/go-cleanhttp" | ||
21 | ) | 23 | ) |
22 | 24 | ||
23 | // Getter defines the interface that schemes must implement to download | 25 | // Getter defines the interface that schemes must implement to download |
@@ -49,8 +51,13 @@ var Getters map[string]Getter | |||
49 | // syntax is schema::url, example: git::https://foo.com | 51 | // syntax is schema::url, example: git::https://foo.com |
50 | var forcedRegexp = regexp.MustCompile(`^([A-Za-z0-9]+)::(.+)$`) | 52 | var forcedRegexp = regexp.MustCompile(`^([A-Za-z0-9]+)::(.+)$`) |
51 | 53 | ||
54 | // httpClient is the default client to be used by HttpGetters. | ||
55 | var httpClient = cleanhttp.DefaultClient() | ||
56 | |||
52 | func init() { | 57 | func init() { |
53 | httpGetter := &HttpGetter{Netrc: true} | 58 | httpGetter := &HttpGetter{ |
59 | Netrc: true, | ||
60 | } | ||
54 | 61 | ||
55 | Getters = map[string]Getter{ | 62 | Getters = map[string]Getter{ |
56 | "file": new(FileGetter), | 63 | "file": new(FileGetter), |
diff --git a/vendor/github.com/hashicorp/go-getter/get_git.go b/vendor/github.com/hashicorp/go-getter/get_git.go index 0728139..cb1d029 100644 --- a/vendor/github.com/hashicorp/go-getter/get_git.go +++ b/vendor/github.com/hashicorp/go-getter/get_git.go | |||
@@ -11,6 +11,7 @@ import ( | |||
11 | "strings" | 11 | "strings" |
12 | 12 | ||
13 | urlhelper "github.com/hashicorp/go-getter/helper/url" | 13 | urlhelper "github.com/hashicorp/go-getter/helper/url" |
14 | "github.com/hashicorp/go-safetemp" | ||
14 | "github.com/hashicorp/go-version" | 15 | "github.com/hashicorp/go-version" |
15 | ) | 16 | ) |
16 | 17 | ||
@@ -105,13 +106,11 @@ func (g *GitGetter) Get(dst string, u *url.URL) error { | |||
105 | // GetFile for Git doesn't support updating at this time. It will download | 106 | // GetFile for Git doesn't support updating at this time. It will download |
106 | // the file every time. | 107 | // the file every time. |
107 | func (g *GitGetter) GetFile(dst string, u *url.URL) error { | 108 | func (g *GitGetter) GetFile(dst string, u *url.URL) error { |
108 | td, err := ioutil.TempDir("", "getter-git") | 109 | td, tdcloser, err := safetemp.Dir("", "getter") |
109 | if err != nil { | 110 | if err != nil { |
110 | return err | 111 | return err |
111 | } | 112 | } |
112 | if err := os.RemoveAll(td); err != nil { | 113 | defer tdcloser.Close() |
113 | return err | ||
114 | } | ||
115 | 114 | ||
116 | // Get the filename, and strip the filename from the URL so we can | 115 | // Get the filename, and strip the filename from the URL so we can |
117 | // just get the repository directly. | 116 | // just get the repository directly. |
@@ -180,17 +179,34 @@ func (g *GitGetter) fetchSubmodules(dst, sshKeyFile string) error { | |||
180 | // setupGitEnv sets up the environment for the given command. This is used to | 179 | // setupGitEnv sets up the environment for the given command. This is used to |
181 | // pass configuration data to git and ssh and enables advanced cloning methods. | 180 | // pass configuration data to git and ssh and enables advanced cloning methods. |
182 | func setupGitEnv(cmd *exec.Cmd, sshKeyFile string) { | 181 | func setupGitEnv(cmd *exec.Cmd, sshKeyFile string) { |
183 | var sshOpts []string | 182 | const gitSSHCommand = "GIT_SSH_COMMAND=" |
183 | var sshCmd []string | ||
184 | |||
185 | // If we have an existing GIT_SSH_COMMAND, we need to append our options. | ||
186 | // We will also remove our old entry to make sure the behavior is the same | ||
187 | // with versions of Go < 1.9. | ||
188 | env := os.Environ() | ||
189 | for i, v := range env { | ||
190 | if strings.HasPrefix(v, gitSSHCommand) { | ||
191 | sshCmd = []string{v} | ||
192 | |||
193 | env[i], env[len(env)-1] = env[len(env)-1], env[i] | ||
194 | env = env[:len(env)-1] | ||
195 | break | ||
196 | } | ||
197 | } | ||
198 | |||
199 | if len(sshCmd) == 0 { | ||
200 | sshCmd = []string{gitSSHCommand + "ssh"} | ||
201 | } | ||
184 | 202 | ||
185 | if sshKeyFile != "" { | 203 | if sshKeyFile != "" { |
186 | // We have an SSH key temp file configured, tell ssh about this. | 204 | // We have an SSH key temp file configured, tell ssh about this. |
187 | sshOpts = append(sshOpts, "-i", sshKeyFile) | 205 | sshCmd = append(sshCmd, "-i", sshKeyFile) |
188 | } | 206 | } |
189 | 207 | ||
190 | cmd.Env = append(os.Environ(), | 208 | env = append(env, strings.Join(sshCmd, " ")) |
191 | // Set the ssh command to use for clones. | 209 | cmd.Env = env |
192 | "GIT_SSH_COMMAND=ssh "+strings.Join(sshOpts, " "), | ||
193 | ) | ||
194 | } | 210 | } |
195 | 211 | ||
196 | // checkGitVersion is used to check the version of git installed on the system | 212 | // checkGitVersion is used to check the version of git installed on the system |
diff --git a/vendor/github.com/hashicorp/go-getter/get_hg.go b/vendor/github.com/hashicorp/go-getter/get_hg.go index 820bdd4..f386922 100644 --- a/vendor/github.com/hashicorp/go-getter/get_hg.go +++ b/vendor/github.com/hashicorp/go-getter/get_hg.go | |||
@@ -2,7 +2,6 @@ package getter | |||
2 | 2 | ||
3 | import ( | 3 | import ( |
4 | "fmt" | 4 | "fmt" |
5 | "io/ioutil" | ||
6 | "net/url" | 5 | "net/url" |
7 | "os" | 6 | "os" |
8 | "os/exec" | 7 | "os/exec" |
@@ -10,6 +9,7 @@ import ( | |||
10 | "runtime" | 9 | "runtime" |
11 | 10 | ||
12 | urlhelper "github.com/hashicorp/go-getter/helper/url" | 11 | urlhelper "github.com/hashicorp/go-getter/helper/url" |
12 | "github.com/hashicorp/go-safetemp" | ||
13 | ) | 13 | ) |
14 | 14 | ||
15 | // HgGetter is a Getter implementation that will download a module from | 15 | // HgGetter is a Getter implementation that will download a module from |
@@ -64,13 +64,13 @@ func (g *HgGetter) Get(dst string, u *url.URL) error { | |||
64 | // GetFile for Hg doesn't support updating at this time. It will download | 64 | // GetFile for Hg doesn't support updating at this time. It will download |
65 | // the file every time. | 65 | // the file every time. |
66 | func (g *HgGetter) GetFile(dst string, u *url.URL) error { | 66 | func (g *HgGetter) GetFile(dst string, u *url.URL) error { |
67 | td, err := ioutil.TempDir("", "getter-hg") | 67 | // Create a temporary directory to store the full source. This has to be |
68 | // a non-existent directory. | ||
69 | td, tdcloser, err := safetemp.Dir("", "getter") | ||
68 | if err != nil { | 70 | if err != nil { |
69 | return err | 71 | return err |
70 | } | 72 | } |
71 | if err := os.RemoveAll(td); err != nil { | 73 | defer tdcloser.Close() |
72 | return err | ||
73 | } | ||
74 | 74 | ||
75 | // Get the filename, and strip the filename from the URL so we can | 75 | // Get the filename, and strip the filename from the URL so we can |
76 | // just get the repository directly. | 76 | // just get the repository directly. |
diff --git a/vendor/github.com/hashicorp/go-getter/get_http.go b/vendor/github.com/hashicorp/go-getter/get_http.go index 3c02034..d2e2879 100644 --- a/vendor/github.com/hashicorp/go-getter/get_http.go +++ b/vendor/github.com/hashicorp/go-getter/get_http.go | |||
@@ -4,12 +4,13 @@ import ( | |||
4 | "encoding/xml" | 4 | "encoding/xml" |
5 | "fmt" | 5 | "fmt" |
6 | "io" | 6 | "io" |
7 | "io/ioutil" | ||
8 | "net/http" | 7 | "net/http" |
9 | "net/url" | 8 | "net/url" |
10 | "os" | 9 | "os" |
11 | "path/filepath" | 10 | "path/filepath" |
12 | "strings" | 11 | "strings" |
12 | |||
13 | "github.com/hashicorp/go-safetemp" | ||
13 | ) | 14 | ) |
14 | 15 | ||
15 | // HttpGetter is a Getter implementation that will download from an HTTP | 16 | // HttpGetter is a Getter implementation that will download from an HTTP |
@@ -36,6 +37,10 @@ type HttpGetter struct { | |||
36 | // Netrc, if true, will lookup and use auth information found | 37 | // Netrc, if true, will lookup and use auth information found |
37 | // in the user's netrc file if available. | 38 | // in the user's netrc file if available. |
38 | Netrc bool | 39 | Netrc bool |
40 | |||
41 | // Client is the http.Client to use for Get requests. | ||
42 | // This defaults to a cleanhttp.DefaultClient if left unset. | ||
43 | Client *http.Client | ||
39 | } | 44 | } |
40 | 45 | ||
41 | func (g *HttpGetter) ClientMode(u *url.URL) (ClientMode, error) { | 46 | func (g *HttpGetter) ClientMode(u *url.URL) (ClientMode, error) { |
@@ -57,13 +62,17 @@ func (g *HttpGetter) Get(dst string, u *url.URL) error { | |||
57 | } | 62 | } |
58 | } | 63 | } |
59 | 64 | ||
65 | if g.Client == nil { | ||
66 | g.Client = httpClient | ||
67 | } | ||
68 | |||
60 | // Add terraform-get to the parameter. | 69 | // Add terraform-get to the parameter. |
61 | q := u.Query() | 70 | q := u.Query() |
62 | q.Add("terraform-get", "1") | 71 | q.Add("terraform-get", "1") |
63 | u.RawQuery = q.Encode() | 72 | u.RawQuery = q.Encode() |
64 | 73 | ||
65 | // Get the URL | 74 | // Get the URL |
66 | resp, err := http.Get(u.String()) | 75 | resp, err := g.Client.Get(u.String()) |
67 | if err != nil { | 76 | if err != nil { |
68 | return err | 77 | return err |
69 | } | 78 | } |
@@ -98,7 +107,18 @@ func (g *HttpGetter) Get(dst string, u *url.URL) error { | |||
98 | } | 107 | } |
99 | 108 | ||
100 | func (g *HttpGetter) GetFile(dst string, u *url.URL) error { | 109 | func (g *HttpGetter) GetFile(dst string, u *url.URL) error { |
101 | resp, err := http.Get(u.String()) | 110 | if g.Netrc { |
111 | // Add auth from netrc if we can | ||
112 | if err := addAuthFromNetrc(u); err != nil { | ||
113 | return err | ||
114 | } | ||
115 | } | ||
116 | |||
117 | if g.Client == nil { | ||
118 | g.Client = httpClient | ||
119 | } | ||
120 | |||
121 | resp, err := g.Client.Get(u.String()) | ||
102 | if err != nil { | 122 | if err != nil { |
103 | return err | 123 | return err |
104 | } | 124 | } |
@@ -116,29 +136,40 @@ func (g *HttpGetter) GetFile(dst string, u *url.URL) error { | |||
116 | if err != nil { | 136 | if err != nil { |
117 | return err | 137 | return err |
118 | } | 138 | } |
119 | defer f.Close() | ||
120 | 139 | ||
121 | _, err = io.Copy(f, resp.Body) | 140 | n, err := io.Copy(f, resp.Body) |
141 | if err == nil && n < resp.ContentLength { | ||
142 | err = io.ErrShortWrite | ||
143 | } | ||
144 | if err1 := f.Close(); err == nil { | ||
145 | err = err1 | ||
146 | } | ||
122 | return err | 147 | return err |
123 | } | 148 | } |
124 | 149 | ||
125 | // getSubdir downloads the source into the destination, but with | 150 | // getSubdir downloads the source into the destination, but with |
126 | // the proper subdir. | 151 | // the proper subdir. |
127 | func (g *HttpGetter) getSubdir(dst, source, subDir string) error { | 152 | func (g *HttpGetter) getSubdir(dst, source, subDir string) error { |
128 | // Create a temporary directory to store the full source | 153 | // Create a temporary directory to store the full source. This has to be |
129 | td, err := ioutil.TempDir("", "tf") | 154 | // a non-existent directory. |
155 | td, tdcloser, err := safetemp.Dir("", "getter") | ||
130 | if err != nil { | 156 | if err != nil { |
131 | return err | 157 | return err |
132 | } | 158 | } |
133 | defer os.RemoveAll(td) | 159 | defer tdcloser.Close() |
134 | 160 | ||
135 | // Download that into the given directory | 161 | // Download that into the given directory |
136 | if err := Get(td, source); err != nil { | 162 | if err := Get(td, source); err != nil { |
137 | return err | 163 | return err |
138 | } | 164 | } |
139 | 165 | ||
166 | // Process any globbing | ||
167 | sourcePath, err := SubdirGlob(td, subDir) | ||
168 | if err != nil { | ||
169 | return err | ||
170 | } | ||
171 | |||
140 | // Make sure the subdir path actually exists | 172 | // Make sure the subdir path actually exists |
141 | sourcePath := filepath.Join(td, subDir) | ||
142 | if _, err := os.Stat(sourcePath); err != nil { | 173 | if _, err := os.Stat(sourcePath); err != nil { |
143 | return fmt.Errorf( | 174 | return fmt.Errorf( |
144 | "Error downloading %s: %s", source, err) | 175 | "Error downloading %s: %s", source, err) |
diff --git a/vendor/github.com/hashicorp/go-getter/get_s3.go b/vendor/github.com/hashicorp/go-getter/get_s3.go index d3bffeb..ebb3217 100644 --- a/vendor/github.com/hashicorp/go-getter/get_s3.go +++ b/vendor/github.com/hashicorp/go-getter/get_s3.go | |||
@@ -28,7 +28,7 @@ func (g *S3Getter) ClientMode(u *url.URL) (ClientMode, error) { | |||
28 | } | 28 | } |
29 | 29 | ||
30 | // Create client config | 30 | // Create client config |
31 | config := g.getAWSConfig(region, creds) | 31 | config := g.getAWSConfig(region, u, creds) |
32 | sess := session.New(config) | 32 | sess := session.New(config) |
33 | client := s3.New(sess) | 33 | client := s3.New(sess) |
34 | 34 | ||
@@ -84,7 +84,7 @@ func (g *S3Getter) Get(dst string, u *url.URL) error { | |||
84 | return err | 84 | return err |
85 | } | 85 | } |
86 | 86 | ||
87 | config := g.getAWSConfig(region, creds) | 87 | config := g.getAWSConfig(region, u, creds) |
88 | sess := session.New(config) | 88 | sess := session.New(config) |
89 | client := s3.New(sess) | 89 | client := s3.New(sess) |
90 | 90 | ||
@@ -139,7 +139,7 @@ func (g *S3Getter) GetFile(dst string, u *url.URL) error { | |||
139 | return err | 139 | return err |
140 | } | 140 | } |
141 | 141 | ||
142 | config := g.getAWSConfig(region, creds) | 142 | config := g.getAWSConfig(region, u, creds) |
143 | sess := session.New(config) | 143 | sess := session.New(config) |
144 | client := s3.New(sess) | 144 | client := s3.New(sess) |
145 | return g.getObject(client, dst, bucket, path, version) | 145 | return g.getObject(client, dst, bucket, path, version) |
@@ -174,7 +174,7 @@ func (g *S3Getter) getObject(client *s3.S3, dst, bucket, key, version string) er | |||
174 | return err | 174 | return err |
175 | } | 175 | } |
176 | 176 | ||
177 | func (g *S3Getter) getAWSConfig(region string, creds *credentials.Credentials) *aws.Config { | 177 | func (g *S3Getter) getAWSConfig(region string, url *url.URL, creds *credentials.Credentials) *aws.Config { |
178 | conf := &aws.Config{} | 178 | conf := &aws.Config{} |
179 | if creds == nil { | 179 | if creds == nil { |
180 | // Grab the metadata URL | 180 | // Grab the metadata URL |
@@ -195,6 +195,14 @@ func (g *S3Getter) getAWSConfig(region string, creds *credentials.Credentials) * | |||
195 | }) | 195 | }) |
196 | } | 196 | } |
197 | 197 | ||
198 | if creds != nil { | ||
199 | conf.Endpoint = &url.Host | ||
200 | conf.S3ForcePathStyle = aws.Bool(true) | ||
201 | if url.Scheme == "http" { | ||
202 | conf.DisableSSL = aws.Bool(true) | ||
203 | } | ||
204 | } | ||
205 | |||
198 | conf.Credentials = creds | 206 | conf.Credentials = creds |
199 | if region != "" { | 207 | if region != "" { |
200 | conf.Region = aws.String(region) | 208 | conf.Region = aws.String(region) |
@@ -204,29 +212,48 @@ func (g *S3Getter) getAWSConfig(region string, creds *credentials.Credentials) * | |||
204 | } | 212 | } |
205 | 213 | ||
206 | func (g *S3Getter) parseUrl(u *url.URL) (region, bucket, path, version string, creds *credentials.Credentials, err error) { | 214 | func (g *S3Getter) parseUrl(u *url.URL) (region, bucket, path, version string, creds *credentials.Credentials, err error) { |
207 | // Expected host style: s3.amazonaws.com. They always have 3 parts, | 215 | // This just check whether we are dealing with S3 or |
208 | // although the first may differ if we're accessing a specific region. | 216 | // any other S3 compliant service. S3 has a predictable |
209 | hostParts := strings.Split(u.Host, ".") | 217 | // url as others do not |
210 | if len(hostParts) != 3 { | 218 | if strings.Contains(u.Host, "amazonaws.com") { |
211 | err = fmt.Errorf("URL is not a valid S3 URL") | 219 | // Expected host style: s3.amazonaws.com. They always have 3 parts, |
212 | return | 220 | // although the first may differ if we're accessing a specific region. |
213 | } | 221 | hostParts := strings.Split(u.Host, ".") |
222 | if len(hostParts) != 3 { | ||
223 | err = fmt.Errorf("URL is not a valid S3 URL") | ||
224 | return | ||
225 | } | ||
214 | 226 | ||
215 | // Parse the region out of the first part of the host | 227 | // Parse the region out of the first part of the host |
216 | region = strings.TrimPrefix(strings.TrimPrefix(hostParts[0], "s3-"), "s3") | 228 | region = strings.TrimPrefix(strings.TrimPrefix(hostParts[0], "s3-"), "s3") |
217 | if region == "" { | 229 | if region == "" { |
218 | region = "us-east-1" | 230 | region = "us-east-1" |
219 | } | 231 | } |
220 | 232 | ||
221 | pathParts := strings.SplitN(u.Path, "/", 3) | 233 | pathParts := strings.SplitN(u.Path, "/", 3) |
222 | if len(pathParts) != 3 { | 234 | if len(pathParts) != 3 { |
223 | err = fmt.Errorf("URL is not a valid S3 URL") | 235 | err = fmt.Errorf("URL is not a valid S3 URL") |
224 | return | 236 | return |
225 | } | 237 | } |
238 | |||
239 | bucket = pathParts[1] | ||
240 | path = pathParts[2] | ||
241 | version = u.Query().Get("version") | ||
226 | 242 | ||
227 | bucket = pathParts[1] | 243 | } else { |
228 | path = pathParts[2] | 244 | pathParts := strings.SplitN(u.Path, "/", 3) |
229 | version = u.Query().Get("version") | 245 | if len(pathParts) != 3 { |
246 | err = fmt.Errorf("URL is not a valid S3 complaint URL") | ||
247 | return | ||
248 | } | ||
249 | bucket = pathParts[1] | ||
250 | path = pathParts[2] | ||
251 | version = u.Query().Get("version") | ||
252 | region = u.Query().Get("region") | ||
253 | if region == "" { | ||
254 | region = "us-east-1" | ||
255 | } | ||
256 | } | ||
230 | 257 | ||
231 | _, hasAwsId := u.Query()["aws_access_key_id"] | 258 | _, hasAwsId := u.Query()["aws_access_key_id"] |
232 | _, hasAwsSecret := u.Query()["aws_access_key_secret"] | 259 | _, hasAwsSecret := u.Query()["aws_access_key_secret"] |
diff --git a/vendor/github.com/hashicorp/go-getter/source.go b/vendor/github.com/hashicorp/go-getter/source.go index 4d5ee3c..c63f2bb 100644 --- a/vendor/github.com/hashicorp/go-getter/source.go +++ b/vendor/github.com/hashicorp/go-getter/source.go | |||
@@ -1,6 +1,8 @@ | |||
1 | package getter | 1 | package getter |
2 | 2 | ||
3 | import ( | 3 | import ( |
4 | "fmt" | ||
5 | "path/filepath" | ||
4 | "strings" | 6 | "strings" |
5 | ) | 7 | ) |
6 | 8 | ||
@@ -34,3 +36,27 @@ func SourceDirSubdir(src string) (string, string) { | |||
34 | 36 | ||
35 | return src, subdir | 37 | return src, subdir |
36 | } | 38 | } |
39 | |||
40 | // SubdirGlob returns the actual subdir with globbing processed. | ||
41 | // | ||
42 | // dst should be a destination directory that is already populated (the | ||
43 | // download is complete) and subDir should be the set subDir. If subDir | ||
44 | // is an empty string, this returns an empty string. | ||
45 | // | ||
46 | // The returned path is the full absolute path. | ||
47 | func SubdirGlob(dst, subDir string) (string, error) { | ||
48 | matches, err := filepath.Glob(filepath.Join(dst, subDir)) | ||
49 | if err != nil { | ||
50 | return "", err | ||
51 | } | ||
52 | |||
53 | if len(matches) == 0 { | ||
54 | return "", fmt.Errorf("subdir %q not found", subDir) | ||
55 | } | ||
56 | |||
57 | if len(matches) > 1 { | ||
58 | return "", fmt.Errorf("subdir %q matches multiple paths", subDir) | ||
59 | } | ||
60 | |||
61 | return matches[0], nil | ||
62 | } | ||