git.immae.eu Git - github/fretlink/terraform-provider-statuscake.git/commitdiff
vendor: github.com/hashicorp/terraform/...@v0.10.0
author Radek Simko <radek.simko@gmail.com>
Thu, 10 Aug 2017 12:38:14 +0000 (14:38 +0200)
committer Radek Simko <radek.simko@gmail.com>
Thu, 10 Aug 2017 12:38:14 +0000 (14:38 +0200)
101 files changed:
vendor/github.com/blang/semver/LICENSE [new file with mode: 0644]
vendor/github.com/blang/semver/README.md [new file with mode: 0644]
vendor/github.com/blang/semver/json.go [new file with mode: 0644]
vendor/github.com/blang/semver/package.json [new file with mode: 0644]
vendor/github.com/blang/semver/range.go [new file with mode: 0644]
vendor/github.com/blang/semver/semver.go [new file with mode: 0644]
vendor/github.com/blang/semver/sort.go [new file with mode: 0644]
vendor/github.com/blang/semver/sql.go [new file with mode: 0644]
vendor/github.com/hashicorp/go-cleanhttp/LICENSE [new file with mode: 0644]
vendor/github.com/hashicorp/go-cleanhttp/README.md [new file with mode: 0644]
vendor/github.com/hashicorp/go-cleanhttp/cleanhttp.go [new file with mode: 0644]
vendor/github.com/hashicorp/go-cleanhttp/doc.go [new file with mode: 0644]
vendor/github.com/hashicorp/terraform/config/config.go
vendor/github.com/hashicorp/terraform/config/interpolate_funcs.go
vendor/github.com/hashicorp/terraform/config/loader.go
vendor/github.com/hashicorp/terraform/config/loader_hcl.go
vendor/github.com/hashicorp/terraform/config/module/tree.go
vendor/github.com/hashicorp/terraform/config/providers.go [new file with mode: 0644]
vendor/github.com/hashicorp/terraform/flatmap/expand.go
vendor/github.com/hashicorp/terraform/helper/resource/id.go
vendor/github.com/hashicorp/terraform/helper/resource/testing.go
vendor/github.com/hashicorp/terraform/helper/schema/provider.go
vendor/github.com/hashicorp/terraform/helper/schema/provisioner.go
vendor/github.com/hashicorp/terraform/helper/schema/resource.go
vendor/github.com/hashicorp/terraform/helper/schema/schema.go
vendor/github.com/hashicorp/terraform/helper/shadow/closer.go
vendor/github.com/hashicorp/terraform/helper/shadow/value.go
vendor/github.com/hashicorp/terraform/moduledeps/dependencies.go [new file with mode: 0644]
vendor/github.com/hashicorp/terraform/moduledeps/doc.go [new file with mode: 0644]
vendor/github.com/hashicorp/terraform/moduledeps/module.go [new file with mode: 0644]
vendor/github.com/hashicorp/terraform/moduledeps/provider.go [new file with mode: 0644]
vendor/github.com/hashicorp/terraform/plugin/client.go [new file with mode: 0644]
vendor/github.com/hashicorp/terraform/plugin/discovery/error.go [new file with mode: 0644]
vendor/github.com/hashicorp/terraform/plugin/discovery/find.go [new file with mode: 0644]
vendor/github.com/hashicorp/terraform/plugin/discovery/get.go [new file with mode: 0644]
vendor/github.com/hashicorp/terraform/plugin/discovery/meta.go [new file with mode: 0644]
vendor/github.com/hashicorp/terraform/plugin/discovery/meta_set.go [new file with mode: 0644]
vendor/github.com/hashicorp/terraform/plugin/discovery/requirements.go [new file with mode: 0644]
vendor/github.com/hashicorp/terraform/plugin/discovery/signature.go [new file with mode: 0644]
vendor/github.com/hashicorp/terraform/plugin/discovery/version.go [new file with mode: 0644]
vendor/github.com/hashicorp/terraform/plugin/discovery/version_set.go [new file with mode: 0644]
vendor/github.com/hashicorp/terraform/terraform/context.go
vendor/github.com/hashicorp/terraform/terraform/diff.go
vendor/github.com/hashicorp/terraform/terraform/eval_diff.go
vendor/github.com/hashicorp/terraform/terraform/graph_builder_plan.go
vendor/github.com/hashicorp/terraform/terraform/graph_builder_refresh.go
vendor/github.com/hashicorp/terraform/terraform/interpolate.go
vendor/github.com/hashicorp/terraform/terraform/module_dependencies.go [new file with mode: 0644]
vendor/github.com/hashicorp/terraform/terraform/node_resource_refresh.go
vendor/github.com/hashicorp/terraform/terraform/plan.go
vendor/github.com/hashicorp/terraform/terraform/resource_address.go
vendor/github.com/hashicorp/terraform/terraform/resource_provider.go
vendor/github.com/hashicorp/terraform/terraform/state.go
vendor/github.com/hashicorp/terraform/terraform/test_failure [new file with mode: 0644]
vendor/github.com/hashicorp/terraform/terraform/transform_resource_refresh_plannable.go [deleted file]
vendor/github.com/hashicorp/terraform/terraform/transform_targets.go
vendor/github.com/hashicorp/terraform/terraform/util.go
vendor/github.com/hashicorp/terraform/terraform/version.go
vendor/golang.org/x/crypto/cast5/cast5.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/armor/armor.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/armor/encode.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/canonical_text.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/elgamal/elgamal.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/errors/errors.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/keys.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/packet/compressed.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/packet/config.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/packet/encrypted_key.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/packet/literal.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/packet/ocfb.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/packet/one_pass_signature.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/packet/opaque.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/packet/packet.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/packet/private_key.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/packet/public_key.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/packet/public_key_v3.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/packet/reader.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/packet/signature.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/packet/signature_v3.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/packet/symmetric_key_encrypted.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/packet/symmetrically_encrypted.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/packet/userattribute.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/packet/userid.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/read.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/s2k/s2k.go [new file with mode: 0644]
vendor/golang.org/x/crypto/openpgp/write.go [new file with mode: 0644]
vendor/golang.org/x/net/LICENSE [new file with mode: 0644]
vendor/golang.org/x/net/PATENTS [new file with mode: 0644]
vendor/golang.org/x/net/html/atom/atom.go [new file with mode: 0644]
vendor/golang.org/x/net/html/atom/table.go [new file with mode: 0644]
vendor/golang.org/x/net/html/const.go [new file with mode: 0644]
vendor/golang.org/x/net/html/doc.go [new file with mode: 0644]
vendor/golang.org/x/net/html/doctype.go [new file with mode: 0644]
vendor/golang.org/x/net/html/entity.go [new file with mode: 0644]
vendor/golang.org/x/net/html/escape.go [new file with mode: 0644]
vendor/golang.org/x/net/html/foreign.go [new file with mode: 0644]
vendor/golang.org/x/net/html/node.go [new file with mode: 0644]
vendor/golang.org/x/net/html/parse.go [new file with mode: 0644]
vendor/golang.org/x/net/html/render.go [new file with mode: 0644]
vendor/golang.org/x/net/html/token.go [new file with mode: 0644]
vendor/vendor.json

diff --git a/vendor/github.com/blang/semver/LICENSE b/vendor/github.com/blang/semver/LICENSE
new file mode 100644 (file)
index 0000000..5ba5c86
--- /dev/null
@@ -0,0 +1,22 @@
+The MIT License
+
+Copyright (c) 2014 Benedikt Lang <github at benediktlang.de>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
diff --git a/vendor/github.com/blang/semver/README.md b/vendor/github.com/blang/semver/README.md
new file mode 100644 (file)
index 0000000..08b2e4a
--- /dev/null
@@ -0,0 +1,194 @@
+semver for golang [![Build Status](https://travis-ci.org/blang/semver.svg?branch=master)](https://travis-ci.org/blang/semver) [![GoDoc](https://godoc.org/github.com/blang/semver?status.png)](https://godoc.org/github.com/blang/semver) [![Coverage Status](https://img.shields.io/coveralls/blang/semver.svg)](https://coveralls.io/r/blang/semver?branch=master)
+======
+
+semver is a [Semantic Versioning](http://semver.org/) library written in golang. It fully covers spec version `2.0.0`.
+
+Usage
+-----
+```bash
+$ go get github.com/blang/semver
+```
+Note: Always vendor your dependencies or pin to a specific version tag.
+
+```go
+import "github.com/blang/semver"
+v1, err := semver.Make("1.0.0-beta")
+v2, err := semver.Make("2.0.0-beta")
+v1.Compare(v2)
+```
+
+Also check the [GoDocs](http://godoc.org/github.com/blang/semver).
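+
+The snippet above elides error handling; a complete, runnable variant (a minimal sketch, using only the API shown above):
+
+```go
+package main
+
+import (
+    "fmt"
+
+    "github.com/blang/semver"
+)
+
+func main() {
+    v1, err := semver.Make("1.0.0-beta")
+    if err != nil {
+        fmt.Printf("error parsing v1: %s\n", err)
+        return
+    }
+    v2 := semver.MustParse("2.0.0-beta")
+    // Compare returns -1, 0 or 1.
+    fmt.Println(v1.Compare(v2)) // -1
+    fmt.Println(v1.LT(v2))      // true
+}
+```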
+
+Why should I use this lib?
+-----
+
+- Fully spec compatible
+- No reflection
+- No regex
+- Fully tested (Coverage >99%)
+- Readable parsing/validation errors
+- Fast (See [Benchmarks](#benchmarks))
+- Only Stdlib
+- Uses values instead of pointers
+- Many features, see below
+
+
+Features
+-----
+
+- Parsing and validation at all levels
+- Comparator-like comparisons
+- Compare Helper Methods
+- InPlace manipulation
+- Ranges `>=1.0.0 <2.0.0 || >=3.0.0 !3.0.1-beta.1`
+- Wildcards `>=1.x`, `<=2.5.x`
+- Sortable (implements sort.Interface)
+- database/sql compatible (sql.Scanner/Valuer)
+- encoding/json compatible (json.Marshaler/Unmarshaler)
+
+Ranges
+------
+
+A `Range` is a set of conditions which specify which versions satisfy the range.
+
+A condition is composed of an operator and a version. The supported operators are:
+
+- `<1.0.0` Less than `1.0.0`
+- `<=1.0.0` Less than or equal to `1.0.0`
+- `>1.0.0` Greater than `1.0.0`
+- `>=1.0.0` Greater than or equal to `1.0.0`
+- `1.0.0`, `=1.0.0`, `==1.0.0` Equal to `1.0.0`
+- `!1.0.0`, `!=1.0.0` Not equal to `1.0.0`. Excludes version `1.0.0`.
+
+Note that spaces between the operator and the version will be gracefully tolerated.
+
+A `Range` can consist of multiple conditions separated by spaces:
+
+Ranges can be linked by logical AND:
+
+  - `>1.0.0 <2.0.0` would match between both ranges, so `1.1.1` and `1.8.7` but not `1.0.0` or `2.0.0`
+  - `>1.0.0 <3.0.0 !2.0.3-beta.2` would match every version between `1.0.0` and `3.0.0` except `2.0.3-beta.2`
+
+Ranges can also be linked by logical OR:
+
+  - `<2.0.0 || >=3.0.0` would match `1.x.x` and `3.x.x` but not `2.x.x`
+
+AND has a higher precedence than OR. It's not possible to use brackets.
+
+Ranges can be combined by both AND and OR
+
+  - `>1.0.0 <2.0.0 || >3.0.0 !4.2.1` would match `1.2.3`, `1.9.9`, `3.1.1`, but not `4.2.1`, `2.1.1`
+
+Range usage:
+
+```go
+v, err := semver.Parse("1.2.3")
+expectedRange, err := semver.ParseRange(">1.0.0 <2.0.0 || >=3.0.0")
+if expectedRange(v) {
+    // valid
+}
+```
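+
+Wildcards from the feature list can be used in ranges as well; a minimal runnable sketch (the version values are only illustrative):
+
+```go
+package main
+
+import (
+    "fmt"
+
+    "github.com/blang/semver"
+)
+
+func main() {
+    // ">=1.2.x" is expanded to ">=1.2.0" before evaluation.
+    expectedRange, err := semver.ParseRange(">=1.2.x <2.0.0 || >=3.0.0")
+    if err != nil {
+        fmt.Printf("error parsing range: %s\n", err)
+        return
+    }
+    fmt.Println(expectedRange(semver.MustParse("1.4.7"))) // true
+    fmt.Println(expectedRange(semver.MustParse("2.1.0"))) // false
+    fmt.Println(expectedRange(semver.MustParse("3.0.0"))) // true
+}
+```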
+
+Example
+-----
+
+Have a look at full examples in [examples/main.go](examples/main.go)
+
+```go
+import "github.com/blang/semver"
+
+v, err := semver.Make("0.0.1-alpha.preview+123.github")
+fmt.Printf("Major: %d\n", v.Major)
+fmt.Printf("Minor: %d\n", v.Minor)
+fmt.Printf("Patch: %d\n", v.Patch)
+fmt.Printf("Pre: %s\n", v.Pre)
+fmt.Printf("Build: %s\n", v.Build)
+
+// Prerelease versions array
+if len(v.Pre) > 0 {
+    fmt.Println("Prerelease versions:")
+    for i, pre := range v.Pre {
+        fmt.Printf("%d: %q\n", i, pre)
+    }
+}
+
+// Build meta data array
+if len(v.Build) > 0 {
+    fmt.Println("Build meta data:")
+    for i, build := range v.Build {
+        fmt.Printf("%d: %q\n", i, build)
+    }
+}
+
+v001, err := semver.Make("0.0.1")
+// Compare using helpers: v.GT(v2), v.LT, v.GTE, v.LTE
+v001.GT(v) == true
+v.LT(v001) == true
+v.GTE(v) == true
+v.LTE(v) == true
+
+// Or use v.Compare(v2) for comparisons (-1, 0, 1):
+v001.Compare(v) == 1
+v.Compare(v001) == -1
+v.Compare(v) == 0
+
+// Manipulate Version in place:
+v.Pre[0], err = semver.NewPRVersion("beta")
+if err != nil {
+    fmt.Printf("Error parsing pre release version: %q", err)
+}
+
+fmt.Println("\nValidate versions:")
+v.Build[0] = "?"
+
+err = v.Validate()
+if err != nil {
+    fmt.Printf("Validation failed: %s\n", err)
+}
+```
+
+
+Benchmarks
+-----
+
+    BenchmarkParseSimple-4           5000000    390    ns/op    48 B/op   1 allocs/op
+    BenchmarkParseComplex-4          1000000   1813    ns/op   256 B/op   7 allocs/op
+    BenchmarkParseAverage-4          1000000   1171    ns/op   163 B/op   4 allocs/op
+    BenchmarkStringSimple-4         20000000    119    ns/op    16 B/op   1 allocs/op
+    BenchmarkStringLarger-4         10000000    206    ns/op    32 B/op   2 allocs/op
+    BenchmarkStringComplex-4         5000000    324    ns/op    80 B/op   3 allocs/op
+    BenchmarkStringAverage-4         5000000    273    ns/op    53 B/op   2 allocs/op
+    BenchmarkValidateSimple-4      200000000      9.33 ns/op     0 B/op   0 allocs/op
+    BenchmarkValidateComplex-4       3000000    469    ns/op     0 B/op   0 allocs/op
+    BenchmarkValidateAverage-4       5000000    256    ns/op     0 B/op   0 allocs/op
+    BenchmarkCompareSimple-4       100000000     11.8  ns/op     0 B/op   0 allocs/op
+    BenchmarkCompareComplex-4       50000000     30.8  ns/op     0 B/op   0 allocs/op
+    BenchmarkCompareAverage-4       30000000     41.5  ns/op     0 B/op   0 allocs/op
+    BenchmarkSort-4                  3000000    419    ns/op   256 B/op   2 allocs/op
+    BenchmarkRangeParseSimple-4      2000000    850    ns/op   192 B/op   5 allocs/op
+    BenchmarkRangeParseAverage-4     1000000   1677    ns/op   400 B/op  10 allocs/op
+    BenchmarkRangeParseComplex-4      300000   5214    ns/op  1440 B/op  30 allocs/op
+    BenchmarkRangeMatchSimple-4     50000000     25.6  ns/op     0 B/op   0 allocs/op
+    BenchmarkRangeMatchAverage-4    30000000     56.4  ns/op     0 B/op   0 allocs/op
+    BenchmarkRangeMatchComplex-4    10000000    153    ns/op     0 B/op   0 allocs/op
+
+See benchmark cases at [semver_test.go](semver_test.go)
+
+
+Motivation
+-----
+
+I simply couldn't find any lib supporting the full spec. Others were just wrong or used reflection and regex, which I don't like.
+
+
+Contribution
+-----
+
+Feel free to make a pull request. For bigger changes, create an issue first to discuss it.
+
+
+License
+-----
+
+See [LICENSE](LICENSE) file.
diff --git a/vendor/github.com/blang/semver/json.go b/vendor/github.com/blang/semver/json.go
new file mode 100644 (file)
index 0000000..a74bf7c
--- /dev/null
@@ -0,0 +1,23 @@
+package semver
+
+import (
+       "encoding/json"
+)
+
+// MarshalJSON implements the encoding/json.Marshaler interface.
+func (v Version) MarshalJSON() ([]byte, error) {
+       return json.Marshal(v.String())
+}
+
+// UnmarshalJSON implements the encoding/json.Unmarshaler interface.
+func (v *Version) UnmarshalJSON(data []byte) (err error) {
+       var versionString string
+
+       if err = json.Unmarshal(data, &versionString); err != nil {
+               return
+       }
+
+       *v, err = Parse(versionString)
+
+       return
+}
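
The file above only wires `Version` into `encoding/json`; a minimal usage sketch (the `release` struct and its fields are illustrative, not part of the vendored code):

```go
package main

import (
    "encoding/json"
    "fmt"

    "github.com/blang/semver"
)

// release is a hypothetical struct used only to demonstrate the Marshaler/Unmarshaler hooks.
type release struct {
    Name    string         `json:"name"`
    Version semver.Version `json:"version"`
}

func main() {
    r := release{Name: "example", Version: semver.MustParse("1.2.3-rc.1+build.5")}
    out, err := json.Marshal(r)
    if err != nil {
        panic(err)
    }
    fmt.Println(string(out)) // {"name":"example","version":"1.2.3-rc.1+build.5"}

    var back release
    if err := json.Unmarshal(out, &back); err != nil {
        panic(err)
    }
    fmt.Println(back.Version.Equals(r.Version)) // true
}
```
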
diff --git a/vendor/github.com/blang/semver/package.json b/vendor/github.com/blang/semver/package.json
new file mode 100644 (file)
index 0000000..1cf8ebd
--- /dev/null
@@ -0,0 +1,17 @@
+{
+  "author": "blang",
+  "bugs": {
+    "URL": "https://github.com/blang/semver/issues",
+    "url": "https://github.com/blang/semver/issues"
+  },
+  "gx": {
+    "dvcsimport": "github.com/blang/semver"
+  },
+  "gxVersion": "0.10.0",
+  "language": "go",
+  "license": "MIT",
+  "name": "semver",
+  "releaseCmd": "git commit -a -m \"gx publish $VERSION\"",
+  "version": "3.5.1"
+}
+
diff --git a/vendor/github.com/blang/semver/range.go b/vendor/github.com/blang/semver/range.go
new file mode 100644 (file)
index 0000000..fca406d
--- /dev/null
@@ -0,0 +1,416 @@
+package semver
+
+import (
+       "fmt"
+       "strconv"
+       "strings"
+       "unicode"
+)
+
+type wildcardType int
+
+const (
+       noneWildcard  wildcardType = iota
+       majorWildcard wildcardType = 1
+       minorWildcard wildcardType = 2
+       patchWildcard wildcardType = 3
+)
+
+func wildcardTypefromInt(i int) wildcardType {
+       switch i {
+       case 1:
+               return majorWildcard
+       case 2:
+               return minorWildcard
+       case 3:
+               return patchWildcard
+       default:
+               return noneWildcard
+       }
+}
+
+type comparator func(Version, Version) bool
+
+var (
+       compEQ comparator = func(v1 Version, v2 Version) bool {
+               return v1.Compare(v2) == 0
+       }
+       compNE = func(v1 Version, v2 Version) bool {
+               return v1.Compare(v2) != 0
+       }
+       compGT = func(v1 Version, v2 Version) bool {
+               return v1.Compare(v2) == 1
+       }
+       compGE = func(v1 Version, v2 Version) bool {
+               return v1.Compare(v2) >= 0
+       }
+       compLT = func(v1 Version, v2 Version) bool {
+               return v1.Compare(v2) == -1
+       }
+       compLE = func(v1 Version, v2 Version) bool {
+               return v1.Compare(v2) <= 0
+       }
+)
+
+type versionRange struct {
+       v Version
+       c comparator
+}
+
+// rangeFunc creates a Range from the given versionRange.
+func (vr *versionRange) rangeFunc() Range {
+       return Range(func(v Version) bool {
+               return vr.c(v, vr.v)
+       })
+}
+
+// Range represents a range of versions.
+// A Range can be used to check if a Version satisfies it:
+//
+//     expectedRange, err := semver.ParseRange(">1.0.0 <2.0.0")
+//     expectedRange(semver.MustParse("1.1.1")) // returns true
+type Range func(Version) bool
+
+// OR combines the existing Range with another Range using logical OR.
+func (rf Range) OR(f Range) Range {
+       return Range(func(v Version) bool {
+               return rf(v) || f(v)
+       })
+}
+
+// AND combines the existing Range with another Range using logical AND.
+func (rf Range) AND(f Range) Range {
+       return Range(func(v Version) bool {
+               return rf(v) && f(v)
+       })
+}
+
+// ParseRange parses a range and returns a Range.
+// If the range could not be parsed an error is returned.
+//
+// Valid ranges are:
+//   - "<1.0.0"
+//   - "<=1.0.0"
+//   - ">1.0.0"
+//   - ">=1.0.0"
+//   - "1.0.0", "=1.0.0", "==1.0.0"
+//   - "!1.0.0", "!=1.0.0"
+//
+// A Range can consist of multiple ranges separated by space:
+// Ranges can be linked by logical AND:
+//   - ">1.0.0 <2.0.0" would match between both ranges, so "1.1.1" and "1.8.7" but not "1.0.0" or "2.0.0"
+//   - ">1.0.0 <3.0.0 !2.0.3-beta.2" would match every version between 1.0.0 and 3.0.0 except 2.0.3-beta.2
+//
+// Ranges can also be linked by logical OR:
+//   - "<2.0.0 || >=3.0.0" would match "1.x.x" and "3.x.x" but not "2.x.x"
+//
+// AND has a higher precedence than OR. It's not possible to use brackets.
+//
+// Ranges can be combined by both AND and OR
+//
+//  - `>1.0.0 <2.0.0 || >3.0.0 !4.2.1` would match `1.2.3`, `1.9.9`, `3.1.1`, but not `4.2.1`, `2.1.1`
+func ParseRange(s string) (Range, error) {
+       parts := splitAndTrim(s)
+       orParts, err := splitORParts(parts)
+       if err != nil {
+               return nil, err
+       }
+       expandedParts, err := expandWildcardVersion(orParts)
+       if err != nil {
+               return nil, err
+       }
+       var orFn Range
+       for _, p := range expandedParts {
+               var andFn Range
+               for _, ap := range p {
+                       opStr, vStr, err := splitComparatorVersion(ap)
+                       if err != nil {
+                               return nil, err
+                       }
+                       vr, err := buildVersionRange(opStr, vStr)
+                       if err != nil {
+                               return nil, fmt.Errorf("Could not parse Range %q: %s", ap, err)
+                       }
+                       rf := vr.rangeFunc()
+
+                       // Set function
+                       if andFn == nil {
+                               andFn = rf
+                       } else { // Combine with existing function
+                               andFn = andFn.AND(rf)
+                       }
+               }
+               if orFn == nil {
+                       orFn = andFn
+               } else {
+                       orFn = orFn.OR(andFn)
+               }
+
+       }
+       return orFn, nil
+}
+
+// splitORParts splits the already cleaned parts by '||'.
+// Checks for invalid positions of the operator and returns an
+// error if found.
+func splitORParts(parts []string) ([][]string, error) {
+       var ORparts [][]string
+       last := 0
+       for i, p := range parts {
+               if p == "||" {
+                       if i == 0 {
+                               return nil, fmt.Errorf("First element in range is '||'")
+                       }
+                       ORparts = append(ORparts, parts[last:i])
+                       last = i + 1
+               }
+       }
+       if last == len(parts) {
+               return nil, fmt.Errorf("Last element in range is '||'")
+       }
+       ORparts = append(ORparts, parts[last:])
+       return ORparts, nil
+}
+
+// buildVersionRange takes an operator and a version string
+// and builds a versionRange, or returns an error.
+func buildVersionRange(opStr, vStr string) (*versionRange, error) {
+       c := parseComparator(opStr)
+       if c == nil {
+               return nil, fmt.Errorf("Could not parse comparator %q in %q", opStr, strings.Join([]string{opStr, vStr}, ""))
+       }
+       v, err := Parse(vStr)
+       if err != nil {
+               return nil, fmt.Errorf("Could not parse version %q in %q: %s", vStr, strings.Join([]string{opStr, vStr}, ""), err)
+       }
+
+       return &versionRange{
+               v: v,
+               c: c,
+       }, nil
+
+}
+
+// inArray checks if a byte is contained in an array of bytes
+func inArray(s byte, list []byte) bool {
+       for _, el := range list {
+               if el == s {
+                       return true
+               }
+       }
+       return false
+}
+
+// splitAndTrim splits a range string by spaces and removes leftover whitespace
+func splitAndTrim(s string) (result []string) {
+       last := 0
+       var lastChar byte
+       excludeFromSplit := []byte{'>', '<', '='}
+       for i := 0; i < len(s); i++ {
+               if s[i] == ' ' && !inArray(lastChar, excludeFromSplit) {
+                       if last < i-1 {
+                               result = append(result, s[last:i])
+                       }
+                       last = i + 1
+               } else if s[i] != ' ' {
+                       lastChar = s[i]
+               }
+       }
+       if last < len(s)-1 {
+               result = append(result, s[last:])
+       }
+
+       for i, v := range result {
+               result[i] = strings.Replace(v, " ", "", -1)
+       }
+
+       // parts := strings.Split(s, " ")
+       // for _, x := range parts {
+       //      if s := strings.TrimSpace(x); len(s) != 0 {
+       //              result = append(result, s)
+       //      }
+       // }
+       return
+}
+
+// splitComparatorVersion splits the comparator from the version.
+// Input must be free of leading or trailing spaces.
+func splitComparatorVersion(s string) (string, string, error) {
+       i := strings.IndexFunc(s, unicode.IsDigit)
+       if i == -1 {
+               return "", "", fmt.Errorf("Could not get version from string: %q", s)
+       }
+       return strings.TrimSpace(s[0:i]), s[i:], nil
+}
+
+// getWildcardType will return the type of wildcard that the
+// passed version contains
+func getWildcardType(vStr string) wildcardType {
+       parts := strings.Split(vStr, ".")
+       nparts := len(parts)
+       wildcard := parts[nparts-1]
+
+       possibleWildcardType := wildcardTypefromInt(nparts)
+       if wildcard == "x" {
+               return possibleWildcardType
+       }
+
+       return noneWildcard
+}
+
+// createVersionFromWildcard will convert a wildcard version
+// into a regular version, replacing 'x's with '0's, handling
+// special cases like '1.x.x' and '1.x'
+func createVersionFromWildcard(vStr string) string {
+       // handle 1.x.x
+       vStr2 := strings.Replace(vStr, ".x.x", ".x", 1)
+       vStr2 = strings.Replace(vStr2, ".x", ".0", 1)
+       parts := strings.Split(vStr2, ".")
+
+       // handle 1.x
+       if len(parts) == 2 {
+               return vStr2 + ".0"
+       }
+
+       return vStr2
+}
+
+// incrementMajorVersion will increment the major version
+// of the passed version
+func incrementMajorVersion(vStr string) (string, error) {
+       parts := strings.Split(vStr, ".")
+       i, err := strconv.Atoi(parts[0])
+       if err != nil {
+               return "", err
+       }
+       parts[0] = strconv.Itoa(i + 1)
+
+       return strings.Join(parts, "."), nil
+}
+
+// incrementMinorVersion will increment the minor version
+// of the passed version
+func incrementMinorVersion(vStr string) (string, error) {
+       parts := strings.Split(vStr, ".")
+       i, err := strconv.Atoi(parts[1])
+       if err != nil {
+               return "", err
+       }
+       parts[1] = strconv.Itoa(i + 1)
+
+       return strings.Join(parts, "."), nil
+}
+
+// expandWildcardVersion will expand wildcards inside versions
+// following these rules:
+//
+// * when dealing with patch wildcards:
+// >= 1.2.x    will become    >= 1.2.0
+// <= 1.2.x    will become    <  1.3.0
+// >  1.2.x    will become    >= 1.3.0
+// <  1.2.x    will become    <  1.2.0
+// != 1.2.x    will become    <  1.2.0 >= 1.3.0
+//
+// * when dealing with minor wildcards:
+// >= 1.x      will become    >= 1.0.0
+// <= 1.x      will become    <  2.0.0
+// >  1.x      will become    >= 2.0.0
+// <  1.x      will become    <  1.0.0
+// != 1.x      will become    <  1.0.0 >= 2.0.0
+//
+// * when dealing with wildcards without
+// version operator:
+// 1.2.x       will become    >= 1.2.0 < 1.3.0
+// 1.x         will become    >= 1.0.0 < 2.0.0
+func expandWildcardVersion(parts [][]string) ([][]string, error) {
+       var expandedParts [][]string
+       for _, p := range parts {
+               var newParts []string
+               for _, ap := range p {
+                       if strings.Index(ap, "x") != -1 {
+                               opStr, vStr, err := splitComparatorVersion(ap)
+                               if err != nil {
+                                       return nil, err
+                               }
+
+                               versionWildcardType := getWildcardType(vStr)
+                               flatVersion := createVersionFromWildcard(vStr)
+
+                               var resultOperator string
+                               var shouldIncrementVersion bool
+                               switch opStr {
+                               case ">":
+                                       resultOperator = ">="
+                                       shouldIncrementVersion = true
+                               case ">=":
+                                       resultOperator = ">="
+                               case "<":
+                                       resultOperator = "<"
+                               case "<=":
+                                       resultOperator = "<"
+                                       shouldIncrementVersion = true
+                               case "", "=", "==":
+                                       newParts = append(newParts, ">="+flatVersion)
+                                       resultOperator = "<"
+                                       shouldIncrementVersion = true
+                               case "!=", "!":
+                                       newParts = append(newParts, "<"+flatVersion)
+                                       resultOperator = ">="
+                                       shouldIncrementVersion = true
+                               }
+
+                               var resultVersion string
+                               if shouldIncrementVersion {
+                                       switch versionWildcardType {
+                                       case patchWildcard:
+                                               resultVersion, _ = incrementMinorVersion(flatVersion)
+                                       case minorWildcard:
+                                               resultVersion, _ = incrementMajorVersion(flatVersion)
+                                       }
+                               } else {
+                                       resultVersion = flatVersion
+                               }
+
+                               ap = resultOperator + resultVersion
+                       }
+                       newParts = append(newParts, ap)
+               }
+               expandedParts = append(expandedParts, newParts)
+       }
+
+       return expandedParts, nil
+}
+
+func parseComparator(s string) comparator {
+       switch s {
+       case "==":
+               fallthrough
+       case "":
+               fallthrough
+       case "=":
+               return compEQ
+       case ">":
+               return compGT
+       case ">=":
+               return compGE
+       case "<":
+               return compLT
+       case "<=":
+               return compLE
+       case "!":
+               fallthrough
+       case "!=":
+               return compNE
+       }
+
+       return nil
+}
+
+// MustParseRange is like ParseRange but panics if the range cannot be parsed.
+func MustParseRange(s string) Range {
+       r, err := ParseRange(s)
+       if err != nil {
+               panic(`semver: ParseRange(` + s + `): ` + err.Error())
+       }
+       return r
+}
diff --git a/vendor/github.com/blang/semver/semver.go b/vendor/github.com/blang/semver/semver.go
new file mode 100644 (file)
index 0000000..8ee0842
--- /dev/null
@@ -0,0 +1,418 @@
+package semver
+
+import (
+       "errors"
+       "fmt"
+       "strconv"
+       "strings"
+)
+
+const (
+       numbers  string = "0123456789"
+       alphas          = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-"
+       alphanum        = alphas + numbers
+)
+
+// SpecVersion is the latest fully supported spec version of semver
+var SpecVersion = Version{
+       Major: 2,
+       Minor: 0,
+       Patch: 0,
+}
+
+// Version represents a semver compatible version
+type Version struct {
+       Major uint64
+       Minor uint64
+       Patch uint64
+       Pre   []PRVersion
+       Build []string // No Precedence
+}
+
+// Version to string
+func (v Version) String() string {
+       b := make([]byte, 0, 5)
+       b = strconv.AppendUint(b, v.Major, 10)
+       b = append(b, '.')
+       b = strconv.AppendUint(b, v.Minor, 10)
+       b = append(b, '.')
+       b = strconv.AppendUint(b, v.Patch, 10)
+
+       if len(v.Pre) > 0 {
+               b = append(b, '-')
+               b = append(b, v.Pre[0].String()...)
+
+               for _, pre := range v.Pre[1:] {
+                       b = append(b, '.')
+                       b = append(b, pre.String()...)
+               }
+       }
+
+       if len(v.Build) > 0 {
+               b = append(b, '+')
+               b = append(b, v.Build[0]...)
+
+               for _, build := range v.Build[1:] {
+                       b = append(b, '.')
+                       b = append(b, build...)
+               }
+       }
+
+       return string(b)
+}
+
+// Equals checks if v is equal to o.
+func (v Version) Equals(o Version) bool {
+       return (v.Compare(o) == 0)
+}
+
+// EQ checks if v is equal to o.
+func (v Version) EQ(o Version) bool {
+       return (v.Compare(o) == 0)
+}
+
+// NE checks if v is not equal to o.
+func (v Version) NE(o Version) bool {
+       return (v.Compare(o) != 0)
+}
+
+// GT checks if v is greater than o.
+func (v Version) GT(o Version) bool {
+       return (v.Compare(o) == 1)
+}
+
+// GTE checks if v is greater than or equal to o.
+func (v Version) GTE(o Version) bool {
+       return (v.Compare(o) >= 0)
+}
+
+// GE checks if v is greater than or equal to o.
+func (v Version) GE(o Version) bool {
+       return (v.Compare(o) >= 0)
+}
+
+// LT checks if v is less than o.
+func (v Version) LT(o Version) bool {
+       return (v.Compare(o) == -1)
+}
+
+// LTE checks if v is less than or equal to o.
+func (v Version) LTE(o Version) bool {
+       return (v.Compare(o) <= 0)
+}
+
+// LE checks if v is less than or equal to o.
+func (v Version) LE(o Version) bool {
+       return (v.Compare(o) <= 0)
+}
+
+// Compare compares Versions v to o:
+// -1 == v is less than o
+// 0 == v is equal to o
+// 1 == v is greater than o
+func (v Version) Compare(o Version) int {
+       if v.Major != o.Major {
+               if v.Major > o.Major {
+                       return 1
+               }
+               return -1
+       }
+       if v.Minor != o.Minor {
+               if v.Minor > o.Minor {
+                       return 1
+               }
+               return -1
+       }
+       if v.Patch != o.Patch {
+               if v.Patch > o.Patch {
+                       return 1
+               }
+               return -1
+       }
+
+       // Quick comparison if a version has no prerelease versions
+       if len(v.Pre) == 0 && len(o.Pre) == 0 {
+               return 0
+       } else if len(v.Pre) == 0 && len(o.Pre) > 0 {
+               return 1
+       } else if len(v.Pre) > 0 && len(o.Pre) == 0 {
+               return -1
+       }
+
+       i := 0
+       for ; i < len(v.Pre) && i < len(o.Pre); i++ {
+               if comp := v.Pre[i].Compare(o.Pre[i]); comp == 0 {
+                       continue
+               } else if comp == 1 {
+                       return 1
+               } else {
+                       return -1
+               }
+       }
+
+       // If all compared prerelease identifiers are equal, the version with more identifiers is greater
+       if i == len(v.Pre) && i == len(o.Pre) {
+               return 0
+       } else if i == len(v.Pre) && i < len(o.Pre) {
+               return -1
+       } else {
+               return 1
+       }
+
+}
+
+// Validate validates v and returns an error if it is invalid
+func (v Version) Validate() error {
+       // Major, Minor, Patch already validated using uint64
+
+       for _, pre := range v.Pre {
+               if !pre.IsNum { //Numeric prerelease versions already uint64
+                       if len(pre.VersionStr) == 0 {
+                               return fmt.Errorf("Prerelease can not be empty %q", pre.VersionStr)
+                       }
+                       if !containsOnly(pre.VersionStr, alphanum) {
+                               return fmt.Errorf("Invalid character(s) found in prerelease %q", pre.VersionStr)
+                       }
+               }
+       }
+
+       for _, build := range v.Build {
+               if len(build) == 0 {
+                       return fmt.Errorf("Build meta data can not be empty %q", build)
+               }
+               if !containsOnly(build, alphanum) {
+                       return fmt.Errorf("Invalid character(s) found in build meta data %q", build)
+               }
+       }
+
+       return nil
+}
+
+// New is an alias for Parse that returns a pointer; it parses a version string and returns a validated Version or an error
+func New(s string) (vp *Version, err error) {
+       v, err := Parse(s)
+       vp = &v
+       return
+}
+
+// Make is an alias for Parse; it parses a version string and returns a validated Version or an error
+func Make(s string) (Version, error) {
+       return Parse(s)
+}
+
+// ParseTolerant allows for certain version specifications that do not strictly adhere to semver
+// specs to be parsed by this library. It does so by normalizing versions before passing them to
+// Parse(). It currently trims spaces, removes a "v" prefix, and adds a 0 patch number to versions
+// with only major and minor components specified.
+func ParseTolerant(s string) (Version, error) {
+       s = strings.TrimSpace(s)
+       s = strings.TrimPrefix(s, "v")
+
+       // Split into major.minor.(patch+pr+meta)
+       parts := strings.SplitN(s, ".", 3)
+       if len(parts) < 3 {
+               if strings.ContainsAny(parts[len(parts)-1], "+-") {
+                       return Version{}, errors.New("Short version cannot contain PreRelease/Build meta data")
+               }
+               for len(parts) < 3 {
+                       parts = append(parts, "0")
+               }
+               s = strings.Join(parts, ".")
+       }
+
+       return Parse(s)
+}
+
+// Parse parses version string and returns a validated Version or error
+func Parse(s string) (Version, error) {
+       if len(s) == 0 {
+               return Version{}, errors.New("Version string empty")
+       }
+
+       // Split into major.minor.(patch+pr+meta)
+       parts := strings.SplitN(s, ".", 3)
+       if len(parts) != 3 {
+               return Version{}, errors.New("No Major.Minor.Patch elements found")
+       }
+
+       // Major
+       if !containsOnly(parts[0], numbers) {
+               return Version{}, fmt.Errorf("Invalid character(s) found in major number %q", parts[0])
+       }
+       if hasLeadingZeroes(parts[0]) {
+               return Version{}, fmt.Errorf("Major number must not contain leading zeroes %q", parts[0])
+       }
+       major, err := strconv.ParseUint(parts[0], 10, 64)
+       if err != nil {
+               return Version{}, err
+       }
+
+       // Minor
+       if !containsOnly(parts[1], numbers) {
+               return Version{}, fmt.Errorf("Invalid character(s) found in minor number %q", parts[1])
+       }
+       if hasLeadingZeroes(parts[1]) {
+               return Version{}, fmt.Errorf("Minor number must not contain leading zeroes %q", parts[1])
+       }
+       minor, err := strconv.ParseUint(parts[1], 10, 64)
+       if err != nil {
+               return Version{}, err
+       }
+
+       v := Version{}
+       v.Major = major
+       v.Minor = minor
+
+       var build, prerelease []string
+       patchStr := parts[2]
+
+       if buildIndex := strings.IndexRune(patchStr, '+'); buildIndex != -1 {
+               build = strings.Split(patchStr[buildIndex+1:], ".")
+               patchStr = patchStr[:buildIndex]
+       }
+
+       if preIndex := strings.IndexRune(patchStr, '-'); preIndex != -1 {
+               prerelease = strings.Split(patchStr[preIndex+1:], ".")
+               patchStr = patchStr[:preIndex]
+       }
+
+       if !containsOnly(patchStr, numbers) {
+               return Version{}, fmt.Errorf("Invalid character(s) found in patch number %q", patchStr)
+       }
+       if hasLeadingZeroes(patchStr) {
+               return Version{}, fmt.Errorf("Patch number must not contain leading zeroes %q", patchStr)
+       }
+       patch, err := strconv.ParseUint(patchStr, 10, 64)
+       if err != nil {
+               return Version{}, err
+       }
+
+       v.Patch = patch
+
+       // Prerelease
+       for _, prstr := range prerelease {
+               parsedPR, err := NewPRVersion(prstr)
+               if err != nil {
+                       return Version{}, err
+               }
+               v.Pre = append(v.Pre, parsedPR)
+       }
+
+       // Build meta data
+       for _, str := range build {
+               if len(str) == 0 {
+                       return Version{}, errors.New("Build meta data is empty")
+               }
+               if !containsOnly(str, alphanum) {
+                       return Version{}, fmt.Errorf("Invalid character(s) found in build meta data %q", str)
+               }
+               v.Build = append(v.Build, str)
+       }
+
+       return v, nil
+}
+
+// MustParse is like Parse but panics if the version cannot be parsed.
+func MustParse(s string) Version {
+       v, err := Parse(s)
+       if err != nil {
+               panic(`semver: Parse(` + s + `): ` + err.Error())
+       }
+       return v
+}
+
+// PRVersion represents a PreRelease Version
+type PRVersion struct {
+       VersionStr string
+       VersionNum uint64
+       IsNum      bool
+}
+
+// NewPRVersion creates a new valid prerelease version
+func NewPRVersion(s string) (PRVersion, error) {
+       if len(s) == 0 {
+               return PRVersion{}, errors.New("Prerelease is empty")
+       }
+       v := PRVersion{}
+       if containsOnly(s, numbers) {
+               if hasLeadingZeroes(s) {
+                       return PRVersion{}, fmt.Errorf("Numeric PreRelease version must not contain leading zeroes %q", s)
+               }
+               num, err := strconv.ParseUint(s, 10, 64)
+
+               // Might never be hit, but just in case
+               if err != nil {
+                       return PRVersion{}, err
+               }
+               v.VersionNum = num
+               v.IsNum = true
+       } else if containsOnly(s, alphanum) {
+               v.VersionStr = s
+               v.IsNum = false
+       } else {
+               return PRVersion{}, fmt.Errorf("Invalid character(s) found in prerelease %q", s)
+       }
+       return v, nil
+}
+
+// IsNumeric checks if prerelease-version is numeric
+func (v PRVersion) IsNumeric() bool {
+       return v.IsNum
+}
+
+// Compare compares two PreRelease Versions v and o:
+// -1 == v is less than o
+// 0 == v is equal to o
+// 1 == v is greater than o
+func (v PRVersion) Compare(o PRVersion) int {
+       if v.IsNum && !o.IsNum {
+               return -1
+       } else if !v.IsNum && o.IsNum {
+               return 1
+       } else if v.IsNum && o.IsNum {
+               if v.VersionNum == o.VersionNum {
+                       return 0
+               } else if v.VersionNum > o.VersionNum {
+                       return 1
+               } else {
+                       return -1
+               }
+       } else { // both are Alphas
+               if v.VersionStr == o.VersionStr {
+                       return 0
+               } else if v.VersionStr > o.VersionStr {
+                       return 1
+               } else {
+                       return -1
+               }
+       }
+}
+
+// PreRelease version to string
+func (v PRVersion) String() string {
+       if v.IsNum {
+               return strconv.FormatUint(v.VersionNum, 10)
+       }
+       return v.VersionStr
+}
+
+func containsOnly(s string, set string) bool {
+       return strings.IndexFunc(s, func(r rune) bool {
+               return !strings.ContainsRune(set, r)
+       }) == -1
+}
+
+func hasLeadingZeroes(s string) bool {
+       return len(s) > 1 && s[0] == '0'
+}
+
+// NewBuildVersion creates a new valid build version
+func NewBuildVersion(s string) (string, error) {
+       if len(s) == 0 {
+               return "", errors.New("Buildversion is empty")
+       }
+       if !containsOnly(s, alphanum) {
+               return "", fmt.Errorf("Invalid character(s) found in build meta data %q", s)
+       }
+       return s, nil
+}
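
ParseTolerant, documented above, normalizes loose version strings before handing them to Parse; a minimal sketch (the input strings are only illustrative):

```go
package main

import (
    "fmt"

    "github.com/blang/semver"
)

func main() {
    for _, raw := range []string{" v1.2 ", "v1.2.3", "1.2.3-beta.1"} {
        v, err := semver.ParseTolerant(raw)
        if err != nil {
            fmt.Printf("%q: %s\n", raw, err)
            continue
        }
        fmt.Printf("%q -> %s\n", raw, v) // e.g. " v1.2 " -> 1.2.0
    }
}
```
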
diff --git a/vendor/github.com/blang/semver/sort.go b/vendor/github.com/blang/semver/sort.go
new file mode 100644 (file)
index 0000000..e18f880
--- /dev/null
@@ -0,0 +1,28 @@
+package semver
+
+import (
+       "sort"
+)
+
+// Versions represents multiple versions.
+type Versions []Version
+
+// Len returns length of version collection
+func (s Versions) Len() int {
+       return len(s)
+}
+
+// Swap swaps two versions inside the collection by its indices
+func (s Versions) Swap(i, j int) {
+       s[i], s[j] = s[j], s[i]
+}
+
+// Less checks if version at index i is less than version at index j
+func (s Versions) Less(i, j int) bool {
+       return s[i].LT(s[j])
+}
+
+// Sort sorts a slice of versions
+func Sort(versions []Version) {
+       sort.Sort(Versions(versions))
+}
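
Since Versions implements sort.Interface, a slice can be ordered by semver precedence rather than lexicographically; a minimal sketch:

```go
package main

import (
    "fmt"

    "github.com/blang/semver"
)

func main() {
    versions := []semver.Version{
        semver.MustParse("1.10.0"),
        semver.MustParse("1.2.0"),
        semver.MustParse("1.2.0-alpha"),
    }
    // Sort applies semver precedence: prereleases sort before releases, numbers compare numerically.
    semver.Sort(versions)
    fmt.Println(versions) // [1.2.0-alpha 1.2.0 1.10.0]
}
```
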
diff --git a/vendor/github.com/blang/semver/sql.go b/vendor/github.com/blang/semver/sql.go
new file mode 100644 (file)
index 0000000..eb4d802
--- /dev/null
@@ -0,0 +1,30 @@
+package semver
+
+import (
+       "database/sql/driver"
+       "fmt"
+)
+
+// Scan implements the database/sql.Scanner interface.
+func (v *Version) Scan(src interface{}) (err error) {
+       var str string
+       switch src := src.(type) {
+       case string:
+               str = src
+       case []byte:
+               str = string(src)
+       default:
+               return fmt.Errorf("Version.Scan: cannot convert %T to string.", src)
+       }
+
+       if t, err := Parse(str); err == nil {
+               *v = t
+       }
+
+       return
+}
+
+// Value implements the database/sql/driver.Valuer interface.
+func (v Version) Value() (driver.Value, error) {
+       return v.String(), nil
+}
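
Scan and Value above let a Version be stored in a string column via database/sql; a minimal sketch exercising both hooks directly, without a real database (values are illustrative):

```go
package main

import (
    "fmt"

    "github.com/blang/semver"
)

func main() {
    // Value() is what a SQL driver receives when the column is written.
    v := semver.MustParse("2.3.4")
    val, err := v.Value()
    if err != nil {
        panic(err)
    }
    fmt.Println(val) // 2.3.4

    // Scan() is what database/sql calls when the column is read back.
    var out semver.Version
    if err := out.Scan([]byte("2.3.4")); err != nil {
        panic(err)
    }
    fmt.Println(out.Equals(v)) // true
}
```
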
diff --git a/vendor/github.com/hashicorp/go-cleanhttp/LICENSE b/vendor/github.com/hashicorp/go-cleanhttp/LICENSE
new file mode 100644 (file)
index 0000000..e87a115
--- /dev/null
@@ -0,0 +1,363 @@
+Mozilla Public License, version 2.0
+
+1. Definitions
+
+1.1. "Contributor"
+
+     means each individual or legal entity that creates, contributes to the
+     creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+
+     means the combination of the Contributions of others (if any) used by a
+     Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+
+     means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+
+     means Source Code Form to which the initial Contributor has attached the
+     notice in Exhibit A, the Executable Form of such Source Code Form, and
+     Modifications of such Source Code Form, in each case including portions
+     thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+     means
+
+     a. that the initial Contributor has attached the notice described in
+        Exhibit B to the Covered Software; or
+
+     b. that the Covered Software was made available under the terms of
+        version 1.1 or earlier of the License, but not also under the terms of
+        a Secondary License.
+
+1.6. "Executable Form"
+
+     means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+
+     means a work that combines Covered Software with other material, in a
+     separate file or files, that is not Covered Software.
+
+1.8. "License"
+
+     means this document.
+
+1.9. "Licensable"
+
+     means having the right to grant, to the maximum extent possible, whether
+     at the time of the initial grant or subsequently, any and all of the
+     rights conveyed by this License.
+
+1.10. "Modifications"
+
+     means any of the following:
+
+     a. any file in Source Code Form that results from an addition to,
+        deletion from, or modification of the contents of Covered Software; or
+
+     b. any new file in Source Code Form that contains any Covered Software.
+
+1.11. "Patent Claims" of a Contributor
+
+      means any patent claim(s), including without limitation, method,
+      process, and apparatus claims, in any patent Licensable by such
+      Contributor that would be infringed, but for the grant of the License,
+      by the making, using, selling, offering for sale, having made, import,
+      or transfer of either its Contributions or its Contributor Version.
+
+1.12. "Secondary License"
+
+      means either the GNU General Public License, Version 2.0, the GNU Lesser
+      General Public License, Version 2.1, the GNU Affero General Public
+      License, Version 3.0, or any later versions of those licenses.
+
+1.13. "Source Code Form"
+
+      means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+
+      means an individual or a legal entity exercising rights under this
+      License. For legal entities, "You" includes any entity that controls, is
+      controlled by, or is under common control with You. For purposes of this
+      definition, "control" means (a) the power, direct or indirect, to cause
+      the direction or management of such entity, whether by contract or
+      otherwise, or (b) ownership of more than fifty percent (50%) of the
+      outstanding shares or beneficial ownership of such entity.
+
+
+2. License Grants and Conditions
+
+2.1. Grants
+
+     Each Contributor hereby grants You a world-wide, royalty-free,
+     non-exclusive license:
+
+     a. under intellectual property rights (other than patent or trademark)
+        Licensable by such Contributor to use, reproduce, make available,
+        modify, display, perform, distribute, and otherwise exploit its
+        Contributions, either on an unmodified basis, with Modifications, or
+        as part of a Larger Work; and
+
+     b. under Patent Claims of such Contributor to make, use, sell, offer for
+        sale, have made, import, and otherwise transfer either its
+        Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+     The licenses granted in Section 2.1 with respect to any Contribution
+     become effective for each Contribution on the date the Contributor first
+     distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+     The licenses granted in this Section 2 are the only rights granted under
+     this License. No additional rights or licenses will be implied from the
+     distribution or licensing of Covered Software under this License.
+     Notwithstanding Section 2.1(b) above, no patent license is granted by a
+     Contributor:
+
+     a. for any code that a Contributor has removed from Covered Software; or
+
+     b. for infringements caused by: (i) Your and any other third party's
+        modifications of Covered Software, or (ii) the combination of its
+        Contributions with other software (except as part of its Contributor
+        Version); or
+
+     c. under Patent Claims infringed by Covered Software in the absence of
+        its Contributions.
+
+     This License does not grant any rights in the trademarks, service marks,
+     or logos of any Contributor (except as may be necessary to comply with
+     the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+     No Contributor makes additional grants as a result of Your choice to
+     distribute the Covered Software under a subsequent version of this
+     License (see Section 10.2) or under the terms of a Secondary License (if
+     permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+     Each Contributor represents that the Contributor believes its
+     Contributions are its original creation(s) or it has sufficient rights to
+     grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+     This License is not intended to limit any rights You have under
+     applicable copyright doctrines of fair use, fair dealing, or other
+     equivalents.
+
+2.7. Conditions
+
+     Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
+     Section 2.1.
+
+
+3. Responsibilities
+
+3.1. Distribution of Source Form
+
+     All distribution of Covered Software in Source Code Form, including any
+     Modifications that You create or to which You contribute, must be under
+     the terms of this License. You must inform recipients that the Source
+     Code Form of the Covered Software is governed by the terms of this
+     License, and how they can obtain a copy of this License. You may not
+     attempt to alter or restrict the recipients' rights in the Source Code
+     Form.
+
+3.2. Distribution of Executable Form
+
+     If You distribute Covered Software in Executable Form then:
+
+     a. such Covered Software must also be made available in Source Code Form,
+        as described in Section 3.1, and You must inform recipients of the
+        Executable Form how they can obtain a copy of such Source Code Form by
+        reasonable means in a timely manner, at a charge no more than the cost
+        of distribution to the recipient; and
+
+     b. You may distribute such Executable Form under the terms of this
+        License, or sublicense it under different terms, provided that the
+        license for the Executable Form does not attempt to limit or alter the
+        recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+     You may create and distribute a Larger Work under terms of Your choice,
+     provided that You also comply with the requirements of this License for
+     the Covered Software. If the Larger Work is a combination of Covered
+     Software with a work governed by one or more Secondary Licenses, and the
+     Covered Software is not Incompatible With Secondary Licenses, this
+     License permits You to additionally distribute such Covered Software
+     under the terms of such Secondary License(s), so that the recipient of
+     the Larger Work may, at their option, further distribute the Covered
+     Software under the terms of either this License or such Secondary
+     License(s).
+
+3.4. Notices
+
+     You may not remove or alter the substance of any license notices
+     (including copyright notices, patent notices, disclaimers of warranty, or
+     limitations of liability) contained within the Source Code Form of the
+     Covered Software, except that You may alter any license notices to the
+     extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+     You may choose to offer, and to charge a fee for, warranty, support,
+     indemnity or liability obligations to one or more recipients of Covered
+     Software. However, You may do so only on Your own behalf, and not on
+     behalf of any Contributor. You must make it absolutely clear that any
+     such warranty, support, indemnity, or liability obligation is offered by
+     You alone, and You hereby agree to indemnify every Contributor for any
+     liability incurred by such Contributor as a result of warranty, support,
+     indemnity or liability terms You offer. You may include additional
+     disclaimers of warranty and limitations of liability specific to any
+     jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+
+   If it is impossible for You to comply with any of the terms of this License
+   with respect to some or all of the Covered Software due to statute,
+   judicial order, or regulation then You must: (a) comply with the terms of
+   this License to the maximum extent possible; and (b) describe the
+   limitations and the code they affect. Such description must be placed in a
+   text file included with all distributions of the Covered Software under
+   this License. Except to the extent prohibited by statute or regulation,
+   such description must be sufficiently detailed for a recipient of ordinary
+   skill to be able to understand it.
+
+5. Termination
+
+5.1. The rights granted under this License will terminate automatically if You
+     fail to comply with any of its terms. However, if You become compliant,
+     then the rights granted under this License from a particular Contributor
+     are reinstated (a) provisionally, unless and until such Contributor
+     explicitly and finally terminates Your grants, and (b) on an ongoing
+     basis, if such Contributor fails to notify You of the non-compliance by
+     some reasonable means prior to 60 days after You have come back into
+     compliance. Moreover, Your grants from a particular Contributor are
+     reinstated on an ongoing basis if such Contributor notifies You of the
+     non-compliance by some reasonable means, this is the first time You have
+     received notice of non-compliance with this License from such
+     Contributor, and You become compliant prior to 30 days after Your receipt
+     of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+     infringement claim (excluding declaratory judgment actions,
+     counter-claims, and cross-claims) alleging that a Contributor Version
+     directly or indirectly infringes any patent, then the rights granted to
+     You by any and all Contributors for the Covered Software under Section
+     2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
+     license agreements (excluding distributors and resellers) which have been
+     validly granted by You or Your distributors under this License prior to
+     termination shall survive termination.
+
+6. Disclaimer of Warranty
+
+   Covered Software is provided under this License on an "as is" basis,
+   without warranty of any kind, either expressed, implied, or statutory,
+   including, without limitation, warranties that the Covered Software is free
+   of defects, merchantable, fit for a particular purpose or non-infringing.
+   The entire risk as to the quality and performance of the Covered Software
+   is with You. Should any Covered Software prove defective in any respect,
+   You (not any Contributor) assume the cost of any necessary servicing,
+   repair, or correction. This disclaimer of warranty constitutes an essential
+   part of this License. No use of  any Covered Software is authorized under
+   this License except under this disclaimer.
+
+7. Limitation of Liability
+
+   Under no circumstances and under no legal theory, whether tort (including
+   negligence), contract, or otherwise, shall any Contributor, or anyone who
+   distributes Covered Software as permitted above, be liable to You for any
+   direct, indirect, special, incidental, or consequential damages of any
+   character including, without limitation, damages for lost profits, loss of
+   goodwill, work stoppage, computer failure or malfunction, or any and all
+   other commercial damages or losses, even if such party shall have been
+   informed of the possibility of such damages. This limitation of liability
+   shall not apply to liability for death or personal injury resulting from
+   such party's negligence to the extent applicable law prohibits such
+   limitation. Some jurisdictions do not allow the exclusion or limitation of
+   incidental or consequential damages, so this exclusion and limitation may
+   not apply to You.
+
+8. Litigation
+
+   Any litigation relating to this License may be brought only in the courts
+   of a jurisdiction where the defendant maintains its principal place of
+   business and such litigation shall be governed by laws of that
+   jurisdiction, without reference to its conflict-of-law provisions. Nothing
+   in this Section shall prevent a party's ability to bring cross-claims or
+   counter-claims.
+
+9. Miscellaneous
+
+   This License represents the complete agreement concerning the subject
+   matter hereof. If any provision of this License is held to be
+   unenforceable, such provision shall be reformed only to the extent
+   necessary to make it enforceable. Any law or regulation which provides that
+   the language of a contract shall be construed against the drafter shall not
+   be used to construe this License against a Contributor.
+
+
+10. Versions of the License
+
+10.1. New Versions
+
+      Mozilla Foundation is the license steward. Except as provided in Section
+      10.3, no one other than the license steward has the right to modify or
+      publish new versions of this License. Each version will be given a
+      distinguishing version number.
+
+10.2. Effect of New Versions
+
+      You may distribute the Covered Software under the terms of the version
+      of the License under which You originally received the Covered Software,
+      or under the terms of any subsequent version published by the license
+      steward.
+
+10.3. Modified Versions
+
+      If you create software not governed by this License, and you want to
+      create a new license for such software, you may create and use a
+      modified version of this License if you rename the license and remove
+      any references to the name of the license steward (except to note that
+      such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+      Licenses If You choose to distribute Source Code Form that is
+      Incompatible With Secondary Licenses under the terms of this version of
+      the License, the notice described in Exhibit B of this License must be
+      attached.
+
+Exhibit A - Source Code Form License Notice
+
+      This Source Code Form is subject to the
+      terms of the Mozilla Public License, v.
+      2.0. If a copy of the MPL was not
+      distributed with this file, You can
+      obtain one at
+      http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular file,
+then You may include the notice in a location (such as a LICENSE file in a
+relevant directory) where a recipient would be likely to look for such a
+notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+
+      This Source Code Form is "Incompatible
+      With Secondary Licenses", as defined by
+      the Mozilla Public License, v. 2.0.
+
diff --git a/vendor/github.com/hashicorp/go-cleanhttp/README.md b/vendor/github.com/hashicorp/go-cleanhttp/README.md
new file mode 100644 (file)
index 0000000..036e531
--- /dev/null
@@ -0,0 +1,30 @@
+# cleanhttp
+
+Functions for accessing "clean" Go http.Client values
+
+-------------
+
+The Go standard library contains a default `http.Client` called
+`http.DefaultClient`. It is a common idiom in Go code to start with
+`http.DefaultClient` and tweak it as necessary, and in fact, this is
+encouraged; from the `http` package documentation:
+
+> The Client's Transport typically has internal state (cached TCP connections),
+so Clients should be reused instead of created as needed. Clients are safe for
+concurrent use by multiple goroutines.
+
+Unfortunately, this is a shared value, and it is not uncommon for libraries to
+assume that they are free to modify it at will. With enough dependencies, it
+can be very easy to encounter strange problems and race conditions due to
+manipulation of this shared value across libraries and goroutines (clients are
+safe for concurrent use, but writing values to the client struct itself is not
+protected).
+
+Making things worse is the fact that a bare `http.Client` will use a default
+`http.Transport` called `http.DefaultTransport`, which is another global value
+that behaves the same way. So it is not simply enough to replace
+`http.DefaultClient` with `&http.Client{}`.
+
+This repository provides some simple functions to get a "clean" `http.Client`
+-- one that uses the same default values as the Go standard library, but
+returns a client that does not share any state with other clients.
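
The README above warns against mutating the shared http.DefaultClient. As a point of reference, a minimal sketch of swapping it for a private client from this package (the URL is a placeholder, not part of this commit):

    package main

    import (
        "fmt"
        "net/http"

        cleanhttp "github.com/hashicorp/go-cleanhttp"
    )

    func main() {
        // Same defaults as the standard library, but no state shared with
        // http.DefaultClient or with clients created by other packages.
        client := cleanhttp.DefaultClient()

        resp, err := client.Get("https://example.com/") // placeholder URL
        if err != nil {
            fmt.Println("request failed:", err)
            return
        }
        defer resp.Body.Close()
        fmt.Println("status:", resp.Status)
    }
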
diff --git a/vendor/github.com/hashicorp/go-cleanhttp/cleanhttp.go b/vendor/github.com/hashicorp/go-cleanhttp/cleanhttp.go
new file mode 100644 (file)
index 0000000..7d8a57c
--- /dev/null
@@ -0,0 +1,56 @@
+package cleanhttp
+
+import (
+       "net"
+       "net/http"
+       "runtime"
+       "time"
+)
+
+// DefaultTransport returns a new http.Transport with similar default values to
+// http.DefaultTransport, but with idle connections and keepalives disabled.
+func DefaultTransport() *http.Transport {
+       transport := DefaultPooledTransport()
+       transport.DisableKeepAlives = true
+       transport.MaxIdleConnsPerHost = -1
+       return transport
+}
+
+// DefaultPooledTransport returns a new http.Transport with similar default
+// values to http.DefaultTransport. Do not use this for transient transports as
+// it can leak file descriptors over time. Only use this for transports that
+// will be re-used for the same host(s).
+func DefaultPooledTransport() *http.Transport {
+       transport := &http.Transport{
+               Proxy: http.ProxyFromEnvironment,
+               DialContext: (&net.Dialer{
+                       Timeout:   30 * time.Second,
+                       KeepAlive: 30 * time.Second,
+               }).DialContext,
+               MaxIdleConns:          100,
+               IdleConnTimeout:       90 * time.Second,
+               TLSHandshakeTimeout:   10 * time.Second,
+               ExpectContinueTimeout: 1 * time.Second,
+               MaxIdleConnsPerHost:   runtime.GOMAXPROCS(0) + 1,
+       }
+       return transport
+}
+
+// DefaultClient returns a new http.Client with similar default values to
+// http.Client, but with a non-shared Transport, idle connections disabled, and
+// keepalives disabled.
+func DefaultClient() *http.Client {
+       return &http.Client{
+               Transport: DefaultTransport(),
+       }
+}
+
+// DefaultPooledClient returns a new http.Client with similar default values to
+// http.Client, but with a shared Transport. Do not use this function for
+// transient clients as it can leak file descriptors over time. Only use this
+// for clients that will be re-used for the same host(s).
+func DefaultPooledClient() *http.Client {
+       return &http.Client{
+               Transport: DefaultPooledTransport(),
+       }
+}
diff --git a/vendor/github.com/hashicorp/go-cleanhttp/doc.go b/vendor/github.com/hashicorp/go-cleanhttp/doc.go
new file mode 100644 (file)
index 0000000..0584109
--- /dev/null
@@ -0,0 +1,20 @@
+// Package cleanhttp offers convenience utilities for acquiring "clean"
+// http.Transport and http.Client structs.
+//
+// Values set on http.DefaultClient and http.DefaultTransport affect all
+// callers. This can have detrimental effects, especially in TLS contexts,
+// where client or root certificates set to talk to multiple endpoints can end
+// up displacing each other, leading to hard-to-debug issues. This package
+// provides non-shared http.Client and http.Transport structs to ensure that
+// the configuration will not be overwritten by other parts of the application
+// or dependencies.
+//
+// The DefaultClient and DefaultTransport functions disable idle connections
+// and keepalives. Without ensuring that idle connections are closed before
+// garbage collection, short-term clients/transports can leak file descriptors,
+// eventually leading to "too many open files" errors. If you will be
+// connecting to the same hosts repeatedly from the same client, you can use
+// DefaultPooledClient to receive a client that has connection pooling
+// semantics similar to http.DefaultClient.
+//
+package cleanhttp
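
The doc comment above draws the line between the keep-alive-disabled DefaultClient/DefaultTransport and their pooled variants. A hedged sketch of acting on that advice, including the TLS point, assuming the same import path:

    package main

    import (
        "crypto/tls"
        "net/http"

        cleanhttp "github.com/hashicorp/go-cleanhttp"
    )

    func main() {
        // Long-lived client talking to the same host(s) repeatedly:
        // connection pooling is the right choice here.
        pooled := cleanhttp.DefaultPooledClient()

        // TLS settings can be adjusted on a clean transport without
        // touching http.DefaultTransport and surprising other packages.
        transport := cleanhttp.DefaultPooledTransport()
        transport.TLSClientConfig = &tls.Config{MinVersion: tls.VersionTLS12}
        custom := &http.Client{Transport: transport}

        _, _ = pooled, custom
    }
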
index a157824290b9b2fdd075476ae38b11e092f8a7d9..3f756dcf4d85f68af638471c8c4077edc8ecbb68 100644 (file)
@@ -12,6 +12,7 @@ import (
        "github.com/hashicorp/hil"
        "github.com/hashicorp/hil/ast"
        "github.com/hashicorp/terraform/helper/hilmapstructure"
+       "github.com/hashicorp/terraform/plugin/discovery"
        "github.com/mitchellh/reflectwalk"
 )
 
@@ -64,6 +65,7 @@ type Module struct {
 type ProviderConfig struct {
        Name      string
        Alias     string
+       Version   string
        RawConfig *RawConfig
 }
 
@@ -238,6 +240,33 @@ func (r *Resource) Id() string {
        }
 }
 
+// ProviderFullName returns the full name of the provider for this resource,
+// which may either be specified explicitly using the "provider" meta-argument
+// or implied by the prefix on the resource type name.
+func (r *Resource) ProviderFullName() string {
+       return ResourceProviderFullName(r.Type, r.Provider)
+}
+
+// ResourceProviderFullName returns the full (dependable) name of the
+// provider for a hypothetical resource with the given resource type and
+// explicit provider string. If the explicit provider string is empty then
+// the provider name is inferred from the resource type name.
+func ResourceProviderFullName(resourceType, explicitProvider string) string {
+       if explicitProvider != "" {
+               return explicitProvider
+       }
+
+       idx := strings.IndexRune(resourceType, '_')
+       if idx == -1 {
+               // If no underscores, the resource name is assumed to be
+               // also the provider name, e.g. if the provider exposes
+               // only a single resource of each type.
+               return resourceType
+       }
+
+       return resourceType[:idx]
+}
+
 // Validate does some basic semantic checking of the configuration.
 func (c *Config) Validate() error {
        if c == nil {
@@ -349,7 +378,8 @@ func (c *Config) Validate() error {
                }
        }
 
-       // Check that providers aren't declared multiple times.
+       // Check that providers aren't declared multiple times and that their
+       // version constraints, where present, are syntactically valid.
        providerSet := make(map[string]struct{})
        for _, p := range c.ProviderConfigs {
                name := p.FullName()
@@ -360,6 +390,16 @@ func (c *Config) Validate() error {
                        continue
                }
 
+               if p.Version != "" {
+                       _, err := discovery.ConstraintStr(p.Version).Parse()
+                       if err != nil {
+                               errs = append(errs, fmt.Errorf(
+                                       "provider.%s: invalid version constraint %q: %s",
+                                       name, p.Version, err,
+                               ))
+                       }
+               }
+
                providerSet[name] = struct{}{}
        }
 
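
The new ResourceProviderFullName helper and the version-constraint check above can be exercised on their own. A small sketch, assuming the vendored packages are importable at their usual paths; the "~> 1.0" constraint is illustrative:

    package main

    import (
        "fmt"

        "github.com/hashicorp/terraform/config"
        "github.com/hashicorp/terraform/plugin/discovery"
    )

    func main() {
        // Provider implied by the prefix on the resource type name.
        fmt.Println(config.ResourceProviderFullName("statuscake_test", "")) // "statuscake"

        // An explicit "provider" meta-argument wins over the prefix.
        fmt.Println(config.ResourceProviderFullName("aws_instance", "aws.west")) // "aws.west"

        // The same parser Validate now uses for the provider "version" field.
        if _, err := discovery.ConstraintStr("~> 1.0").Parse(); err != nil {
            fmt.Println("invalid constraint:", err)
        }
    }
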
index 7b7b3f2620bcb9818ce2a5fe186c66aa248a17eb..a298cf2d3dca9bec09321043e2f1878a4446f498 100644 (file)
@@ -70,6 +70,7 @@ func Funcs() map[string]ast.Function {
                "coalescelist": interpolationFuncCoalesceList(),
                "compact":      interpolationFuncCompact(),
                "concat":       interpolationFuncConcat(),
+               "contains":     interpolationFuncContains(),
                "dirname":      interpolationFuncDirname(),
                "distinct":     interpolationFuncDistinct(),
                "element":      interpolationFuncElement(),
@@ -356,6 +357,22 @@ func interpolationFuncCoalesceList() ast.Function {
        }
 }
 
+// interpolationFuncContains returns true if an element is in the list
+// and returns false otherwise.
+func interpolationFuncContains() ast.Function {
+       return ast.Function{
+               ArgTypes:   []ast.Type{ast.TypeList, ast.TypeString},
+               ReturnType: ast.TypeBool,
+               Callback: func(args []interface{}) (interface{}, error) {
+                       _, err := interpolationFuncIndex().Callback(args)
+                       if err != nil {
+                               return false, nil
+                       }
+                       return true, nil
+               },
+       }
+}
+
 // interpolationFuncConcat implements the "concat" function that concatenates
 // multiple lists.
 func interpolationFuncConcat() ast.Function {
index 0bfa89c255dd6785c06d9ecf7f2e8f7f51c76cc7..5dd7d4689817722513c49c32fa27f59997a05d00 100644 (file)
@@ -194,7 +194,7 @@ func dirFiles(dir string) ([]string, []string, error) {
                        // Only care about files that are valid to load
                        name := fi.Name()
                        extValue := ext(name)
-                       if extValue == "" || isIgnoredFile(name) {
+                       if extValue == "" || IsIgnoredFile(name) {
                                continue
                        }
 
@@ -215,9 +215,9 @@ func dirFiles(dir string) ([]string, []string, error) {
        return files, overrides, nil
 }
 
-// isIgnoredFile returns true or false depending on whether the
+// IsIgnoredFile returns true or false depending on whether the
 // provided file name is a file that should be ignored.
-func isIgnoredFile(name string) bool {
+func IsIgnoredFile(name string) bool {
        return strings.HasPrefix(name, ".") || // Unix-like hidden files
                strings.HasSuffix(name, "~") || // vim
                strings.HasPrefix(name, "#") && strings.HasSuffix(name, "#") // emacs
index 9abb1960f30e88be67f193ddd87f4eb79dc827b8..e85e493555a849bc422e9f3b419205e39c05cc79 100644 (file)
@@ -17,6 +17,20 @@ type hclConfigurable struct {
        Root *ast.File
 }
 
+var ReservedResourceFields = []string{
+       "connection",
+       "count",
+       "depends_on",
+       "lifecycle",
+       "provider",
+       "provisioner",
+}
+
+var ReservedProviderFields = []string{
+       "alias",
+       "version",
+}
+
 func (t *hclConfigurable) Config() (*Config, error) {
        validKeys := map[string]struct{}{
                "atlas":     struct{}{},
@@ -562,6 +576,7 @@ func loadProvidersHcl(list *ast.ObjectList) ([]*ProviderConfig, error) {
                }
 
                delete(config, "alias")
+               delete(config, "version")
 
                rawConfig, err := NewRawConfig(config)
                if err != nil {
@@ -583,9 +598,22 @@ func loadProvidersHcl(list *ast.ObjectList) ([]*ProviderConfig, error) {
                        }
                }
 
+               // If we have a version field then extract it
+               var version string
+               if a := listVal.Filter("version"); len(a.Items) > 0 {
+                       err := hcl.DecodeObject(&version, a.Items[0].Val)
+                       if err != nil {
+                               return nil, fmt.Errorf(
+                                       "Error reading version for provider[%s]: %s",
+                                       n,
+                                       err)
+                       }
+               }
+
                result = append(result, &ProviderConfig{
                        Name:      n,
                        Alias:     alias,
+                       Version:   version,
                        RawConfig: rawConfig,
                })
        }
index b6f90fd9305b2aafcc255f5cfa2aee9547874395..4b0b153f726dbd2a7f0b9cc39372d611768b6da0 100644 (file)
@@ -92,6 +92,25 @@ func (t *Tree) Children() map[string]*Tree {
        return t.children
 }
 
+// DeepEach calls the provided callback for the receiver and then all of
+// its descendents in the tree, allowing an operation to be performed on
+// all modules in the tree.
+//
+// Parents will be visited before their children but otherwise the order is
+// not defined.
+func (t *Tree) DeepEach(cb func(*Tree)) {
+       t.lock.RLock()
+       defer t.lock.RUnlock()
+       t.deepEach(cb)
+}
+
+func (t *Tree) deepEach(cb func(*Tree)) {
+       cb(t)
+       for _, c := range t.children {
+               c.deepEach(cb)
+       }
+}
+
 // Loaded says whether or not this tree has been loaded or not yet.
 func (t *Tree) Loaded() bool {
        t.lock.RLock()
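
DeepEach above walks the receiver and every descendent module, parents first. A minimal sketch of using it, assuming tree is a *module.Tree that has already been loaded:

    package main

    import "github.com/hashicorp/terraform/config/module"

    // countModules returns the number of modules in the tree, root included.
    // Parents are visited before their children; sibling order is undefined.
    func countModules(tree *module.Tree) int {
        n := 0
        tree.DeepEach(func(t *module.Tree) { n++ })
        return n
    }
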
diff --git a/vendor/github.com/hashicorp/terraform/config/providers.go b/vendor/github.com/hashicorp/terraform/config/providers.go
new file mode 100644 (file)
index 0000000..7a50782
--- /dev/null
@@ -0,0 +1,103 @@
+package config
+
+import "github.com/blang/semver"
+
+// ProviderVersionConstraint presents a constraint for a particular
+// provider, identified by its full name.
+type ProviderVersionConstraint struct {
+       Constraint   string
+       ProviderType string
+}
+
+// ProviderVersionConstraints is a map from provider full name to its associated
+// ProviderVersionConstraint, as produced by Config.RequiredProviders.
+type ProviderVersionConstraints map[string]ProviderVersionConstraint
+
+// RequiredProviders returns the ProviderVersionConstraints for this
+// module.
+//
+// This includes both providers that are explicitly requested by provider
+// blocks and those that are used implicitly by instantiating one of their
+// resource types. In the latter case, the returned semver Range will
+// accept any version of the provider.
+func (c *Config) RequiredProviders() ProviderVersionConstraints {
+       ret := make(ProviderVersionConstraints, len(c.ProviderConfigs))
+
+       configs := c.ProviderConfigsByFullName()
+
+       // In order to find the *implied* dependencies (those without explicit
+       // "provider" blocks) we need to walk over all of the resources and
+       // cross-reference with the provider configs.
+       for _, rc := range c.Resources {
+               providerName := rc.ProviderFullName()
+               var providerType string
+
+               // Default to (effectively) no constraint whatsoever, but we might
+               // override if there's an explicit constraint in config.
+               constraint := ">=0.0.0"
+
+               config, ok := configs[providerName]
+               if ok {
+                       if config.Version != "" {
+                               constraint = config.Version
+                       }
+                       providerType = config.Name
+               } else {
+                       providerType = providerName
+               }
+
+               ret[providerName] = ProviderVersionConstraint{
+                       ProviderType: providerType,
+                       Constraint:   constraint,
+               }
+       }
+
+       return ret
+}
+
+// RequiredRanges returns a semver.Range for each distinct provider type in
+// the constraint map. If the same provider type appears more than once
+// (e.g. because aliases are in use) then their respective constraints are
+// combined such that they must *all* apply.
+//
+// The result of this method can be passed to the
+// PluginMetaSet.ConstrainVersions method within the plugin/discovery
+// package in order to filter down the available plugins to those which
+// satisfy the given constraints.
+//
+// This function will panic if any of the constraints within cannot be
+// parsed as semver ranges. This is guaranteed to never happen for a
+// constraint set that was built from a configuration that passed validation.
+func (cons ProviderVersionConstraints) RequiredRanges() map[string]semver.Range {
+       ret := make(map[string]semver.Range, len(cons))
+
+       for _, con := range cons {
+               spec := semver.MustParseRange(con.Constraint)
+               if existing, exists := ret[con.ProviderType]; exists {
+                       ret[con.ProviderType] = existing.AND(spec)
+               } else {
+                       ret[con.ProviderType] = spec
+               }
+       }
+
+       return ret
+}
+
+// ProviderConfigsByFullName returns a map from provider full names (as
+// returned by ProviderConfig.FullName()) to the corresponding provider
+// configs.
+//
+// This function returns no information beyond what's already in
+// c.ProviderConfigs, but returns it in a more convenient shape. If there
+// is more than one provider config with the same full name then the result
+// is undefined, but that is guaranteed not to happen for any config that
+// has passed validation.
+func (c *Config) ProviderConfigsByFullName() map[string]*ProviderConfig {
+       ret := make(map[string]*ProviderConfig, len(c.ProviderConfigs))
+
+       for _, pc := range c.ProviderConfigs {
+               ret[pc.FullName()] = pc
+       }
+
+       return ret
+}
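
RequiredRanges above folds multiple constraints on one provider type together with semver's AND combinator. The combination itself looks like this with the vendored github.com/blang/semver package (the version strings are illustrative):

    package main

    import (
        "fmt"

        "github.com/blang/semver"
    )

    func main() {
        a := semver.MustParseRange(">=1.2.0")
        b := semver.MustParseRange("<2.0.0")
        both := a.AND(b) // a candidate version must satisfy both constraints

        fmt.Println(both(semver.MustParse("1.5.0"))) // true
        fmt.Println(both(semver.MustParse("2.1.0"))) // false
    }
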
index e0b81b6410edb92ae71f4ff1dd6ccac03c6bc0cc..1449065e9d84efe3baab5cd470d1befa872c104d 100644 (file)
@@ -60,6 +60,11 @@ func expandArray(m map[string]string, prefix string) []interface{} {
                return []interface{}{}
        }
 
+       // NOTE: "num" is not necessarily accurate, e.g. if a user tampers
+       // with state, so the following code should not crash when given a
+       // number of items more or less than what's given in num. The
+       // num key is mainly just a hint that this is a list or set.
+
        // The Schema "Set" type stores its values in an array format, but
        // using numeric hash values instead of ordinal keys. Take the set
        // of keys regardless of value, and expand them in numeric order.
@@ -101,7 +106,7 @@ func expandArray(m map[string]string, prefix string) []interface{} {
        }
        sort.Ints(keysList)
 
-       result := make([]interface{}, num)
+       result := make([]interface{}, len(keysList))
        for i, key := range keysList {
                keyString := strconv.Itoa(key)
                if computed[keyString] {
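
For context on the change above, a hedged sketch of the flatmap shape involved; the "tags" key and values are illustrative. With the count hint out of sync, the expansion is now sized by the keys that actually exist:

    package main

    import (
        "fmt"

        "github.com/hashicorp/terraform/flatmap"
    )

    func main() {
        // A tampered or stale state: the "#" hint claims three items,
        // but only two are present.
        m := map[string]string{
            "tags.#": "3",
            "tags.0": "alpha",
            "tags.1": "beta",
        }
        fmt.Println(flatmap.Expand(m, "tags")) // [alpha beta]
    }
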
index 629582b3a2da4c2fb0ce41f49e39c8df6534b7a6..1cde67c1aa7eaa044040707e24b3223b6e753857 100644 (file)
@@ -1,21 +1,17 @@
 package resource
 
 import (
-       "crypto/rand"
        "fmt"
-       "math/big"
+       "strings"
        "sync"
+       "time"
 )
 
 const UniqueIdPrefix = `terraform-`
 
-// idCounter is a randomly seeded monotonic counter for generating ordered
-// unique ids.  It uses a big.Int so we can easily increment a long numeric
-// string.  The max possible hex value here with 12 random bytes is
-// "01000000000000000000000000", so there's no chance of rollover during
-// operation.
+// idCounter is a monotonic counter for generating ordered unique ids.
 var idMutex sync.Mutex
-var idCounter = big.NewInt(0).SetBytes(randomBytes(12))
+var idCounter uint32
 
 // Helper for a resource to generate a unique identifier w/ default prefix
 func UniqueId() string {
@@ -25,15 +21,20 @@ func UniqueId() string {
 // Helper for a resource to generate a unique identifier w/ given prefix
 //
 // After the prefix, the ID consists of an incrementing 26 digit value (to match
-// previous timestamp output).
+// previous timestamp output).  The value is a UTC timestamp followed by an
+// incrementing 8 hex digit counter. The timestamp means that multiple IDs
+// created with the same prefix will sort in the order of their creation, even
+// across multiple terraform executions, as long as the clock is not turned back
+// between calls, and as long as any given terraform execution generates fewer
+// than 4 billion IDs.
 func PrefixedUniqueId(prefix string) string {
+       // Be precise to 4 digits of fractional seconds, but remove the dot before the
+       // fractional seconds.
+       timestamp := strings.Replace(
+               time.Now().UTC().Format("20060102150405.0000"), ".", "", 1)
+
        idMutex.Lock()
        defer idMutex.Unlock()
-       return fmt.Sprintf("%s%026x", prefix, idCounter.Add(idCounter, big.NewInt(1)))
-}
-
-func randomBytes(n int) []byte {
-       b := make([]byte, n)
-       rand.Read(b)
-       return b
+       idCounter++
+       return fmt.Sprintf("%s%s%08x", prefix, timestamp, idCounter)
 }
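
The rewritten generator keeps 26 characters after the prefix, now an 18 digit UTC timestamp (with 4 fractional-second digits) followed by an 8 hex digit counter. A quick sketch of calling it; the printed values below only show the expected shape:

    package main

    import (
        "fmt"

        "github.com/hashicorp/terraform/helper/resource"
    )

    func main() {
        // e.g. "terraform-20170810123814123400000001"
        fmt.Println(resource.UniqueId())

        // e.g. "web-20170810123814123400000002"
        fmt.Println(resource.PrefixedUniqueId("web-"))
    }
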
index ebdbde2b5d31425709eea4f843d53352631b4890..d7de1a030a4193d9c78bb6fc44c5bb42615591ef 100644 (file)
@@ -383,11 +383,11 @@ func Test(t TestT, c TestCase) {
                c.PreCheck()
        }
 
-       ctxProviders, err := testProviderFactories(c)
+       providerResolver, err := testProviderResolver(c)
        if err != nil {
                t.Fatal(err)
        }
-       opts := terraform.ContextOpts{Providers: ctxProviders}
+       opts := terraform.ContextOpts{ProviderResolver: providerResolver}
 
        // A single state variable to track the lifecycle, starting with no state
        var state *terraform.State
@@ -400,15 +400,22 @@ func Test(t TestT, c TestCase) {
                var err error
                log.Printf("[WARN] Test: Executing step %d", i)
 
-               // Determine the test mode to execute
-               if step.Config != "" {
-                       state, err = testStepConfig(opts, state, step)
-               } else if step.ImportState {
-                       state, err = testStepImportState(opts, state, step)
-               } else {
+               if step.Config == "" && !step.ImportState {
                        err = fmt.Errorf(
                                "unknown test mode for step. Please see TestStep docs\n\n%#v",
                                step)
+               } else {
+                       if step.ImportState {
+                               if step.Config == "" {
+                                       step.Config = testProviderConfig(c)
+                               }
+
+                               // Can optionally set step.Config in addition to
+                               // step.ImportState, to provide config for the import.
+                               state, err = testStepImportState(opts, state, step)
+                       } else {
+                               state, err = testStepConfig(opts, state, step)
+                       }
                }
 
                // If there was an error, exit
@@ -496,16 +503,29 @@ func Test(t TestT, c TestCase) {
        }
 }
 
-// testProviderFactories is a helper to build the ResourceProviderFactory map
+// testProviderConfig takes the list of Providers in a TestCase and returns a
+// config with only empty provider blocks. This is useful for Import, where no
+// config is provided, but the providers must be defined.
+func testProviderConfig(c TestCase) string {
+       var lines []string
+       for p := range c.Providers {
+               lines = append(lines, fmt.Sprintf("provider %q {}\n", p))
+       }
+
+       return strings.Join(lines, "")
+}
+
+// testProviderResolver is a helper to build a ResourceProviderResolver
+// with pre-instantiated ResourceProviders, so that we can reset them for the
 // test, while only calling the factory function once.
 // Any errors are stored so that they can be returned by the factory in
 // terraform to match non-test behavior.
-func testProviderFactories(c TestCase) (map[string]terraform.ResourceProviderFactory, error) {
-       ctxProviders := c.ProviderFactories // make(map[string]terraform.ResourceProviderFactory)
+func testProviderResolver(c TestCase) (terraform.ResourceProviderResolver, error) {
+       ctxProviders := c.ProviderFactories
        if ctxProviders == nil {
                ctxProviders = make(map[string]terraform.ResourceProviderFactory)
        }
+
        // add any fixed providers
        for k, p := range c.Providers {
                ctxProviders[k] = terraform.ResourceProviderFactoryFixed(p)
@@ -527,7 +547,7 @@ func testProviderFactories(c TestCase) (map[string]terraform.ResourceProviderFac
                }
        }
 
-       return ctxProviders, nil
+       return terraform.ResourceProviderResolverFixed(ctxProviders), nil
 }
 
 // UnitTest is a helper to force the acceptance testing harness to run in the
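
The testProviderConfig/testProviderResolver changes above let an ImportState step run without an explicit Config, since empty provider blocks are synthesized for it. A hedged sketch of how a provider's acceptance test might lean on that; the helper names and resource address are placeholders, not part of this commit:

    func TestAccStatusCakeTest_import(t *testing.T) {
        resource.Test(t, resource.TestCase{
            PreCheck:  func() { testAccPreCheck(t) }, // placeholder helper
            Providers: testAccProviders,              // placeholder provider map
            Steps: []resource.TestStep{
                {
                    Config: testAccTestConfigBasic, // placeholder config
                },
                {
                    ResourceName:      "statuscake_test.example", // placeholder address
                    ImportState:       true,
                    ImportStateVerify: true,
                    // Config omitted: empty provider blocks are generated
                    // for the import step automatically.
                },
            },
        })
    }
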
index d52d2f5f06362ac0bfdd56f1648c56cd63f69ffd..fb28b4151d4fae65c261b0ee7227889bcb0c2d50 100644 (file)
@@ -8,6 +8,7 @@ import (
        "sync"
 
        "github.com/hashicorp/go-multierror"
+       "github.com/hashicorp/terraform/config"
        "github.com/hashicorp/terraform/terraform"
 )
 
@@ -89,6 +90,13 @@ func (p *Provider) InternalValidate() error {
                validationErrors = multierror.Append(validationErrors, err)
        }
 
+       // Provider-specific checks
+       for k, _ := range sm {
+               if isReservedProviderFieldName(k) {
+                       return fmt.Errorf("%s is a reserved field name for a provider", k)
+               }
+       }
+
        for k, r := range p.ResourcesMap {
                if err := r.InternalValidate(nil, true); err != nil {
                        validationErrors = multierror.Append(validationErrors, fmt.Errorf("resource %s: %s", k, err))
@@ -104,6 +112,15 @@ func (p *Provider) InternalValidate() error {
        return validationErrors
 }
 
+func isReservedProviderFieldName(name string) bool {
+       for _, reservedName := range config.ReservedProviderFields {
+               if name == reservedName {
+                       return true
+               }
+       }
+       return false
+}
+
 // Meta returns the metadata associated with this provider that was
 // returned by the Configure call. It will be nil until Configure is called.
 func (p *Provider) Meta() interface{} {
index 856c6758a8a128fd997283829d096596820c6ee2..476192e9d2de352dbc05b8eb5a93234e25520ef6 100644 (file)
@@ -43,7 +43,7 @@ type Provisioner struct {
 
        // ValidateFunc is a function for extended validation. This is optional
        // and should be used when individual field validation is not enough.
-       ValidateFunc func(*ResourceData) ([]string, []error)
+       ValidateFunc func(*terraform.ResourceConfig) ([]string, []error)
 
        stopCtx       context.Context
        stopCtxCancel context.CancelFunc
@@ -121,32 +121,6 @@ func (p *Provisioner) Stop() error {
        return nil
 }
 
-func (p *Provisioner) Validate(config *terraform.ResourceConfig) ([]string, []error) {
-       if err := p.InternalValidate(); err != nil {
-               return nil, []error{fmt.Errorf(
-                       "Internal validation of the provisioner failed! This is always a bug\n"+
-                               "with the provisioner itself, and not a user issue. Please report\n"+
-                               "this bug:\n\n%s", err)}
-       }
-       w := []string{}
-       e := []error{}
-       if p.Schema != nil {
-               w2, e2 := schemaMap(p.Schema).Validate(config)
-               w = append(w, w2...)
-               e = append(e, e2...)
-       }
-       if p.ValidateFunc != nil {
-               data := &ResourceData{
-                       schema: p.Schema,
-                       config: config,
-               }
-               w2, e2 := p.ValidateFunc(data)
-               w = append(w, w2...)
-               e = append(e, e2...)
-       }
-       return w, e
-}
-
 // Apply implementation of terraform.ResourceProvisioner interface.
 func (p *Provisioner) Apply(
        o terraform.UIOutput,
@@ -204,3 +178,27 @@ func (p *Provisioner) Apply(
        ctx = context.WithValue(ctx, ProvRawStateKey, s)
        return p.ApplyFunc(ctx)
 }
+
+// Validate implements the terraform.ResourceProvisioner interface.
+func (p *Provisioner) Validate(c *terraform.ResourceConfig) (ws []string, es []error) {
+       if err := p.InternalValidate(); err != nil {
+               return nil, []error{fmt.Errorf(
+                       "Internal validation of the provisioner failed! This is always a bug\n"+
+                               "with the provisioner itself, and not a user issue. Please report\n"+
+                               "this bug:\n\n%s", err)}
+       }
+
+       if p.Schema != nil {
+               w, e := schemaMap(p.Schema).Validate(c)
+               ws = append(ws, w...)
+               es = append(es, e...)
+       }
+
+       if p.ValidateFunc != nil {
+               w, e := p.ValidateFunc(c)
+               ws = append(ws, w...)
+               es = append(es, e...)
+       }
+
+       return ws, es
+}
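
With the change above, a provisioner's ValidateFunc receives the raw *terraform.ResourceConfig rather than a *ResourceData. A hedged sketch of a provisioner written against the new signature; the "command" field is illustrative:

    package example

    import (
        "context"
        "fmt"

        "github.com/hashicorp/terraform/helper/schema"
        "github.com/hashicorp/terraform/terraform"
    )

    func Provisioner() terraform.ResourceProvisioner {
        return &schema.Provisioner{
            Schema: map[string]*schema.Schema{
                "command": {Type: schema.TypeString, Required: true},
            },
            // New signature: the raw config, not a *ResourceData.
            ValidateFunc: func(c *terraform.ResourceConfig) (ws []string, es []error) {
                if v, ok := c.Get("command"); ok && v == "" {
                    es = append(es, fmt.Errorf("\"command\" must not be empty"))
                }
                return ws, es
            },
            ApplyFunc: func(ctx context.Context) error {
                return nil // real work would go here
            },
        }
    }
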
index c8105588c8e9f9fb4505573d245d6a3a26b05f51..ddba1096d88017b654e63e0dac33923bd7633ae7 100644 (file)
@@ -6,6 +6,7 @@ import (
        "log"
        "strconv"
 
+       "github.com/hashicorp/terraform/config"
        "github.com/hashicorp/terraform/terraform"
 )
 
@@ -142,6 +143,12 @@ func (r *Resource) Apply(
                if err := rt.DiffDecode(d); err != nil {
                        log.Printf("[ERR] Error decoding ResourceTimeout: %s", err)
                }
+       } else if s != nil {
+               if _, ok := s.Meta[TimeoutKey]; ok {
+                       if err := rt.StateDecode(s); err != nil {
+                               log.Printf("[ERR] Error decoding ResourceTimeout: %s", err)
+                       }
+               }
        } else {
                log.Printf("[DEBUG] No meta timeoutkey found in Apply()")
        }
@@ -388,9 +395,25 @@ func (r *Resource) InternalValidate(topSchemaMap schemaMap, writable bool) error
                }
        }
 
+       // Resource-specific checks
+       for k, _ := range tsm {
+               if isReservedResourceFieldName(k) {
+                       return fmt.Errorf("%s is a reserved field name for a resource", k)
+               }
+       }
+
        return schemaMap(r.Schema).InternalValidate(tsm)
 }
 
+func isReservedResourceFieldName(name string) bool {
+       for _, reservedName := range config.ReservedResourceFields {
+               if name == reservedName {
+                       return true
+               }
+       }
+       return false
+}
+
 // Data returns a ResourceData struct for this Resource. Each return value
 // is a separate copy and can be safely modified differently.
 //
index 632672ae069381ba0c9f91daeaacce78b26e5faa..acb5618ba2656b259ce165faf7f1ccb6913ec012 100644 (file)
@@ -15,6 +15,7 @@ import (
        "fmt"
        "os"
        "reflect"
+       "regexp"
        "sort"
        "strconv"
        "strings"
@@ -661,7 +662,13 @@ func (m schemaMap) InternalValidate(topSchemaMap schemaMap) error {
                if v.ValidateFunc != nil {
                        switch v.Type {
                        case TypeList, TypeSet:
-                               return fmt.Errorf("ValidateFunc is not yet supported on lists or sets.")
+                               return fmt.Errorf("%s: ValidateFunc is not yet supported on lists or sets.", k)
+                       }
+               }
+
+               if v.Deprecated == "" && v.Removed == "" {
+                       if !isValidFieldName(k) {
+                               return fmt.Errorf("%s: Field name may only contain lowercase alphanumeric characters & underscores.", k)
                        }
                }
        }
@@ -669,6 +676,11 @@ func (m schemaMap) InternalValidate(topSchemaMap schemaMap) error {
        return nil
 }
 
+func isValidFieldName(name string) bool {
+       re := regexp.MustCompile("^[a-z0-9_]+$")
+       return re.MatchString(name)
+}
+
 func (m schemaMap) diff(
        k string,
        schema *Schema,
index 7edd5e75db399561ac18a2396769e0dbad4cb2a1..edc1e2a93016bed9238b1d15a7e45007629dc153 100644 (file)
@@ -39,6 +39,8 @@ func (w *closeWalker) Struct(reflect.Value) error {
        return nil
 }
 
+var closerType = reflect.TypeOf((*io.Closer)(nil)).Elem()
+
 func (w *closeWalker) StructField(f reflect.StructField, v reflect.Value) error {
        // Not sure why this would be but lets avoid some panics
        if !v.IsValid() {
@@ -56,17 +58,18 @@ func (w *closeWalker) StructField(f reflect.StructField, v reflect.Value) error
                return nil
        }
 
-       // We're looking for an io.Closer
-       raw := v.Interface()
-       if raw == nil {
-               return nil
+       var closer io.Closer
+       if v.Type().Implements(closerType) {
+               closer = v.Interface().(io.Closer)
+       } else if v.CanAddr() {
+               // The Close method may require a pointer receiver, but we only have a value.
+               v := v.Addr()
+               if v.Type().Implements(closerType) {
+                       closer = v.Interface().(io.Closer)
+               }
        }
 
-       closer, ok := raw.(io.Closer)
-       if !ok && v.CanAddr() {
-               closer, ok = v.Addr().Interface().(io.Closer)
-       }
-       if !ok {
+       if closer == nil {
                return reflectwalk.SkipEntry
        }
 
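
The rewritten walker checks both the value and, when addressable, its pointer, because Close is frequently declared on a pointer receiver. The distinction in isolation:

    package main

    import (
        "fmt"
        "io"
        "reflect"
    )

    type handle struct{}

    func (h *handle) Close() error { return nil } // pointer receiver

    func main() {
        closerType := reflect.TypeOf((*io.Closer)(nil)).Elem()

        valType := reflect.TypeOf(handle{})
        fmt.Println(valType.Implements(closerType))                // false
        fmt.Println(reflect.PtrTo(valType).Implements(closerType)) // true
    }
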
index 2413335b801a075cb1a7cf87ad7faec806f6ddff..178b7e78ade04e60fb47c11ff319d6051bc2eda0 100644 (file)
@@ -26,6 +26,14 @@ type Value struct {
        valueSet bool
 }
 
+func (v *Value) Lock() {
+       v.lock.Lock()
+}
+
+func (v *Value) Unlock() {
+       v.lock.Unlock()
+}
+
 // Close closes the value. This can never fail. For a definition of
 // "close" see the struct docs.
 func (w *Value) Close() error {
diff --git a/vendor/github.com/hashicorp/terraform/moduledeps/dependencies.go b/vendor/github.com/hashicorp/terraform/moduledeps/dependencies.go
new file mode 100644 (file)
index 0000000..87c8431
--- /dev/null
@@ -0,0 +1,43 @@
+package moduledeps
+
+import (
+       "github.com/hashicorp/terraform/plugin/discovery"
+)
+
+// Providers describes a set of provider dependencies for a given module.
+//
+// Each named provider instance can have one version constraint.
+type Providers map[ProviderInstance]ProviderDependency
+
+// ProviderDependency describes the dependency for a particular provider
+// instance, including both the set of allowed versions and the reason for
+// the dependency.
+type ProviderDependency struct {
+       Constraints discovery.Constraints
+       Reason      ProviderDependencyReason
+}
+
+// ProviderDependencyReason is an enumeration of reasons why a dependency might be
+// present.
+type ProviderDependencyReason int
+
+const (
+       // ProviderDependencyExplicit means that there is an explicit "provider"
+       // block in the configuration for this module.
+       ProviderDependencyExplicit ProviderDependencyReason = iota
+
+       // ProviderDependencyImplicit means that there is no explicit "provider"
+       // block but there is at least one resource that uses this provider.
+       ProviderDependencyImplicit
+
+       // ProviderDependencyInherited is a special case of
+       // ProviderDependencyImplicit where a parent module has defined a
+       // configuration for the provider that has been inherited by at least one
+       // resource in this module.
+       ProviderDependencyInherited
+
+       // ProviderDependencyFromState means that this provider is not currently
+       // referenced by configuration at all, but some existing instances in
+       // the state still depend on it.
+       ProviderDependencyFromState
+)
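
A Providers map pairs each provider instance with its allowed versions and the reason the dependency exists. A small sketch of building one by hand; the provider name and constraint string are illustrative:

    package main

    import (
        "fmt"

        "github.com/hashicorp/terraform/moduledeps"
        "github.com/hashicorp/terraform/plugin/discovery"
    )

    func main() {
        constraints, err := discovery.ConstraintStr("~> 1.0").Parse()
        if err != nil {
            panic(err)
        }

        providers := moduledeps.Providers{
            "statuscake": moduledeps.ProviderDependency{
                Constraints: constraints,
                Reason:      moduledeps.ProviderDependencyExplicit,
            },
        }
        fmt.Println(len(providers)) // 1
    }
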
diff --git a/vendor/github.com/hashicorp/terraform/moduledeps/doc.go b/vendor/github.com/hashicorp/terraform/moduledeps/doc.go
new file mode 100644 (file)
index 0000000..7eff083
--- /dev/null
@@ -0,0 +1,7 @@
+// Package moduledeps contains types that can be used to describe the
+// providers required for all of the modules in a module tree.
+//
+// It does not itself contain the functionality for populating such
+// data structures; that's in Terraform core, since this package intentionally
+// does not depend on terraform core to avoid package dependency cycles.
+package moduledeps
diff --git a/vendor/github.com/hashicorp/terraform/moduledeps/module.go b/vendor/github.com/hashicorp/terraform/moduledeps/module.go
new file mode 100644 (file)
index 0000000..d6cbaf5
--- /dev/null
@@ -0,0 +1,204 @@
+package moduledeps
+
+import (
+       "sort"
+       "strings"
+
+       "github.com/hashicorp/terraform/plugin/discovery"
+)
+
+// Module represents the dependencies of a single module, as well as being
+// a node in a tree of such structures representing the dependencies of
+// an entire configuration.
+type Module struct {
+       Name      string
+       Providers Providers
+       Children  []*Module
+}
+
+// WalkFunc is a callback type for use with Module.WalkTree
+type WalkFunc func(path []string, parent *Module, current *Module) error
+
+// WalkTree calls the given callback once for the receiver and then
+// once for each descendent, in an order such that parents are called
+// before their children and siblings are called in the order they
+// appear in the Children slice.
+//
+// When calling the callback, parent will be nil for the first call
+// for the receiving module, and then set to the direct parent of
+// each module for the subsequent calls.
+//
+// The path given to the callback is valid only until the callback
+// returns, after which it will be mutated and reused. Callbacks must
+// therefore copy the path slice if they wish to retain it.
+//
+// If the given callback returns an error, the walk will be aborted at
+// that point and that error returned to the caller.
+//
+// This function is not thread-safe for concurrent modifications of the
+// data structure, so it's the caller's responsibility to arrange for that
+// should it be needed.
+//
+// It is safe for a callback to modify the descendents of the "current"
+// module, including the ordering of the Children slice itself, but the
+// callback MUST NOT modify the parent module.
+func (m *Module) WalkTree(cb WalkFunc) error {
+       return walkModuleTree(make([]string, 0, 1), nil, m, cb)
+}
+
+func walkModuleTree(path []string, parent *Module, current *Module, cb WalkFunc) error {
+       path = append(path, current.Name)
+       err := cb(path, parent, current)
+       if err != nil {
+               return err
+       }
+
+       for _, child := range current.Children {
+               err := walkModuleTree(path, current, child, cb)
+               if err != nil {
+                       return err
+               }
+       }
+       return nil
+}
+
+// SortChildren sorts the Children slice into lexicographic order by
+// name, in-place.
+//
+// This is primarily useful prior to calling WalkTree so that the walk
+// will proceed in a consistent order.
+func (m *Module) SortChildren() {
+       sort.Sort(sortModules{m.Children})
+}
+
+// SortDescendents is a convenience wrapper for calling SortChildren on
+// the receiver and all of its descendent modules.
+func (m *Module) SortDescendents() {
+       m.WalkTree(func(path []string, parent *Module, current *Module) error {
+               current.SortChildren()
+               return nil
+       })
+}
+
+type sortModules struct {
+       modules []*Module
+}
+
+func (s sortModules) Len() int {
+       return len(s.modules)
+}
+
+func (s sortModules) Less(i, j int) bool {
+       cmp := strings.Compare(s.modules[i].Name, s.modules[j].Name)
+       return cmp < 0
+}
+
+func (s sortModules) Swap(i, j int) {
+       s.modules[i], s.modules[j] = s.modules[j], s.modules[i]
+}
+
+// PluginRequirements produces a PluginRequirements structure that can
+// be used with discovery.PluginMetaSet.ConstrainVersions to identify
+// suitable plugins to satisfy the module's provider dependencies.
+//
+// This method only considers the direct requirements of the receiver.
+// Use AllPluginRequirements to flatten the dependencies for the
+// entire tree of modules.
+//
+// Requirements returned by this method include only version constraints,
+// and apply no particular SHA256 hash constraint.
+func (m *Module) PluginRequirements() discovery.PluginRequirements {
+       ret := make(discovery.PluginRequirements)
+       for inst, dep := range m.Providers {
+               // m.Providers is keyed on provider names, such as "aws.foo".
+               // a PluginRequirements wants keys to be provider *types*, such
+               // as "aws". If there are multiple aliases for the same
+               // provider then we will flatten them into a single requirement
+               // by combining their constraint sets.
+               pty := inst.Type()
+               if existing, exists := ret[pty]; exists {
+                       ret[pty].Versions = existing.Versions.Append(dep.Constraints)
+               } else {
+                       ret[pty] = &discovery.PluginConstraints{
+                               Versions: dep.Constraints,
+                       }
+               }
+       }
+       return ret
+}
+
+// AllPluginRequirements calls PluginRequirements for the receiver and all
+// of its descendents, and merges the result into a single PluginRequirements
+// structure that would satisfy all of the modules together.
+//
+// Requirements returned by this method include only version constraints,
+// and apply no particular SHA256 hash constraint.
+func (m *Module) AllPluginRequirements() discovery.PluginRequirements {
+       var ret discovery.PluginRequirements
+       m.WalkTree(func(path []string, parent *Module, current *Module) error {
+               ret = ret.Merge(current.PluginRequirements())
+               return nil
+       })
+       return ret
+}
+
+// Equal returns true if the receiver is the root of an identical tree
+// to the other given Module. This is a deep comparison that considers
+// the equality of all downstream modules too.
+//
+// The children are considered to be ordered, so callers may wish to use
+// SortDescendents first to normalize the order of the slices of child nodes.
+//
+// The implementation of this function is not optimized since it is provided
+// primarily for use in tests.
+func (m *Module) Equal(other *Module) bool {
+       // take care of nils first
+       if m == nil && other == nil {
+               return true
+       } else if (m == nil && other != nil) || (m != nil && other == nil) {
+               return false
+       }
+
+       if m.Name != other.Name {
+               return false
+       }
+
+       if len(m.Providers) != len(other.Providers) {
+               return false
+       }
+       if len(m.Children) != len(other.Children) {
+               return false
+       }
+
+       // Can't use reflect.DeepEqual on this provider structure because
+       // the nested Constraints objects contain function pointers that
+       // never compare as equal. So we'll need to walk it the long way.
+       for inst, dep := range m.Providers {
+               if _, exists := other.Providers[inst]; !exists {
+                       return false
+               }
+
+               if dep.Reason != other.Providers[inst].Reason {
+                       return false
+               }
+
+               // Constraints are not too easy to compare robustly, so
+               // we'll just use their string representations as a proxy
+               // for now.
+               if dep.Constraints.String() != other.Providers[inst].Constraints.String() {
+                       return false
+               }
+       }
+
+       // Above we already checked that we have the same number of children
+       // in each module, so now we just need to check that they are
+       // recursively equal.
+       for i := range m.Children {
+               if !m.Children[i].Equal(other.Children[i]) {
+                       return false
+               }
+       }
+
+       // If we fall out here then they are equal
+       return true
+}
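
WalkTree visits parents before children and reuses the path slice between calls. A minimal sketch, building a tiny tree by hand:

    package main

    import (
        "fmt"
        "strings"

        "github.com/hashicorp/terraform/moduledeps"
    )

    func main() {
        root := &moduledeps.Module{
            Name: "root",
            Children: []*moduledeps.Module{
                {Name: "network"},
                {Name: "app"},
            },
        }
        root.SortChildren() // deterministic walk order

        root.WalkTree(func(path []string, parent, current *moduledeps.Module) error {
            // path is reused after this callback returns; copy it if it
            // must be retained.
            fmt.Printf("%s: %d providers\n", strings.Join(path, "."), len(current.Providers))
            return nil
        })
    }
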
diff --git a/vendor/github.com/hashicorp/terraform/moduledeps/provider.go b/vendor/github.com/hashicorp/terraform/moduledeps/provider.go
new file mode 100644 (file)
index 0000000..89ceefb
--- /dev/null
@@ -0,0 +1,30 @@
+package moduledeps
+
+import (
+       "strings"
+)
+
+// ProviderInstance describes a particular provider instance by its full name,
+// like "null" or "aws.foo".
+type ProviderInstance string
+
+// Type returns the provider type of this instance. For example, for an instance
+// named "aws.foo" the type is "aws".
+func (p ProviderInstance) Type() string {
+       t := string(p)
+       if dotPos := strings.Index(t, "."); dotPos != -1 {
+               t = t[:dotPos]
+       }
+       return t
+}
+
+// Alias returns the alias of this provider, if any. An instance named "aws.foo"
+// has the alias "foo", while an instance named just "docker" has no alias,
+// so the empty string would be returned.
+func (p ProviderInstance) Alias() string {
+       t := string(p)
+       if dotPos := strings.Index(t, "."); dotPos != -1 {
+               return t[dotPos+1:]
+       }
+       return ""
+}
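
Type and Alias simply split an instance name on its first dot:

    package main

    import (
        "fmt"

        "github.com/hashicorp/terraform/moduledeps"
    )

    func main() {
        inst := moduledeps.ProviderInstance("aws.west")
        fmt.Println(inst.Type())  // "aws"
        fmt.Println(inst.Alias()) // "west"

        plain := moduledeps.ProviderInstance("statuscake")
        fmt.Println(plain.Type())        // "statuscake"
        fmt.Println(plain.Alias() == "") // true: no alias
    }
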
diff --git a/vendor/github.com/hashicorp/terraform/plugin/client.go b/vendor/github.com/hashicorp/terraform/plugin/client.go
new file mode 100644 (file)
index 0000000..3a5cb7a
--- /dev/null
@@ -0,0 +1,24 @@
+package plugin
+
+import (
+       "os/exec"
+
+       plugin "github.com/hashicorp/go-plugin"
+       "github.com/hashicorp/terraform/plugin/discovery"
+)
+
+// ClientConfig returns a configuration object that can be used to instantiate
+// a client for the plugin described by the given metadata.
+func ClientConfig(m discovery.PluginMeta) *plugin.ClientConfig {
+       return &plugin.ClientConfig{
+               Cmd:             exec.Command(m.Path),
+               HandshakeConfig: Handshake,
+               Managed:         true,
+               Plugins:         PluginMap,
+       }
+}
+
+// Client returns a plugin client for the plugin described by the given metadata.
+func Client(m discovery.PluginMeta) *plugin.Client {
+       return plugin.NewClient(ClientConfig(m))
+}
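
Client wraps a discovered plugin binary in a managed go-plugin client. A hedged sketch; the path below is a placeholder and would normally come from the discovery package:

    package main

    import (
        tfplugin "github.com/hashicorp/terraform/plugin"
        "github.com/hashicorp/terraform/plugin/discovery"
    )

    func main() {
        meta := discovery.PluginMeta{
            Name:    "statuscake",
            Version: "0.1.0",
            Path:    "/placeholder/terraform-provider-statuscake_v0.1.0_x4", // placeholder
        }

        // ClientConfig sets Managed: true, so plugin.CleanupClients can also
        // reap this process, but killing it explicitly is still fine.
        client := tfplugin.Client(meta)
        defer client.Kill()
    }
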
diff --git a/vendor/github.com/hashicorp/terraform/plugin/discovery/error.go b/vendor/github.com/hashicorp/terraform/plugin/discovery/error.go
new file mode 100644 (file)
index 0000000..df855a7
--- /dev/null
@@ -0,0 +1,30 @@
+package discovery
+
+// Error is a type used to describe situations that the caller must handle
+// since they indicate some form of user error.
+//
+// The functions and methods that return these specialized errors indicate so
+// in their documentation. The Error type should not itself be used directly,
+// but rather errors should be compared using the == operator with the
+// error constants in this package.
+//
+// Values of this type are _not_ used when the error being reported is an
+// operational error (server unavailable, etc) or indicative of a bug in
+// this package or its caller.
+type Error string
+
+// ErrorNoSuitableVersion indicates that a suitable version (meeting given
+// constraints) is not available.
+const ErrorNoSuitableVersion = Error("no suitable version is available")
+
+// ErrorNoVersionCompatible indicates that all of the available versions
+// that otherwise met constraints are not compatible with the current
+// version of Terraform.
+const ErrorNoVersionCompatible = Error("no available version is compatible with this version of Terraform")
+
+// ErrorNoSuchProvider indicates that no provider exists with the given name.
+const ErrorNoSuchProvider = Error("no provider exists with the given name")
+
+func (err Error) Error() string {
+       return string(err)
+}
diff --git a/vendor/github.com/hashicorp/terraform/plugin/discovery/find.go b/vendor/github.com/hashicorp/terraform/plugin/discovery/find.go
new file mode 100644 (file)
index 0000000..f5bc4c1
--- /dev/null
@@ -0,0 +1,168 @@
+package discovery
+
+import (
+       "io/ioutil"
+       "log"
+       "path/filepath"
+       "strings"
+)
+
+// FindPlugins looks in the given directories for files whose filenames
+// suggest that they are plugins of the given kind (e.g. "provider") and
+// returns a PluginMetaSet representing the discovered potential-plugins.
+//
+// Currently this supports two different naming schemes. The current
+// standard naming scheme is a subdirectory called $GOOS-$GOARCH containing
+// files named terraform-$KIND-$NAME_v$VERSION. The legacy naming scheme is
+// files directly in the given directory whose names are like
+// terraform-$KIND-$NAME.
+//
+// Only one plugin will be returned for each unique plugin (name, version)
+// pair, with preference given to files found in earlier directories.
+//
+// This is a convenience wrapper around FindPluginPaths and ResolvePluginPaths.
+func FindPlugins(kind string, dirs []string) PluginMetaSet {
+       return ResolvePluginPaths(FindPluginPaths(kind, dirs))
+}
+
+// FindPluginPaths looks in the given directories for files whose filenames
+// suggest that they are plugins of the given kind (e.g. "provider").
+//
+// The return value is a list of absolute paths that appear to refer to
+// plugins in the given directories, based only on what can be inferred
+// from the naming scheme. The paths returned are ordered such that files
+// in later dirs appear after files in earlier dirs in the given directory
+// list. Within the same directory plugins are returned in a consistent but
+// undefined order.
+func FindPluginPaths(kind string, dirs []string) []string {
+       // This is just a thin wrapper around findPluginPaths so that we can
+       // use the latter in tests with a fake machineName so we can use our
+       // test fixtures.
+       return findPluginPaths(kind, dirs)
+}
+
+func findPluginPaths(kind string, dirs []string) []string {
+       prefix := "terraform-" + kind + "-"
+
+       ret := make([]string, 0, len(dirs))
+
+       for _, dir := range dirs {
+               items, err := ioutil.ReadDir(dir)
+               if err != nil {
+                       // Ignore missing dirs, non-dirs, etc
+                       continue
+               }
+
+               log.Printf("[DEBUG] checking for %s in %q", kind, dir)
+
+               for _, item := range items {
+                       fullName := item.Name()
+
+                       if !strings.HasPrefix(fullName, prefix) {
+                               log.Printf("[DEBUG] skipping %q, not a %s", fullName, kind)
+                               continue
+                       }
+
+                       // New-style paths must have a version segment in filename
+                       if strings.Contains(strings.ToLower(fullName), "_v") {
+                               absPath, err := filepath.Abs(filepath.Join(dir, fullName))
+                               if err != nil {
+                                       log.Printf("[ERROR] plugin filepath error: %s", err)
+                                       continue
+                               }
+
+                               log.Printf("[DEBUG] found %s %q", kind, fullName)
+                               ret = append(ret, filepath.Clean(absPath))
+                               continue
+                       }
+
+                       // Legacy style with files directly in the base directory
+                       absPath, err := filepath.Abs(filepath.Join(dir, fullName))
+                       if err != nil {
+                               log.Printf("[ERROR] plugin filepath error: %s", err)
+                               continue
+                       }
+
+                       log.Printf("[WARNING] found legacy %s %q", kind, fullName)
+
+                       ret = append(ret, filepath.Clean(absPath))
+               }
+       }
+
+       return ret
+}
+
+// ResolvePluginPaths takes a list of paths to plugin executables (as returned
+// by e.g. FindPluginPaths) and produces a PluginMetaSet describing the
+// referenced plugins.
+//
+// If the same combination of plugin name and version appears multiple times,
+// the earlier reference will be preferred. Several different versions of
+// the same plugin name may be returned, in which case the methods of
+// PluginMetaSet can be used to filter down.
+func ResolvePluginPaths(paths []string) PluginMetaSet {
+       s := make(PluginMetaSet)
+
+       type nameVersion struct {
+               Name    string
+               Version string
+       }
+       found := make(map[nameVersion]struct{})
+
+       for _, path := range paths {
+               baseName := strings.ToLower(filepath.Base(path))
+               if !strings.HasPrefix(baseName, "terraform-") {
+                       // Should never happen with reasonable input
+                       continue
+               }
+
+               baseName = baseName[10:]
+               firstDash := strings.Index(baseName, "-")
+               if firstDash == -1 {
+                       // Should never happen with reasonable input
+                       continue
+               }
+
+               baseName = baseName[firstDash+1:]
+               if baseName == "" {
+                       // Should never happen with reasonable input
+                       continue
+               }
+
+               // Trim the .exe suffix used on Windows before we start wrangling
+               // the remainder of the path.
+               if strings.HasSuffix(baseName, ".exe") {
+                       baseName = baseName[:len(baseName)-4]
+               }
+
+               parts := strings.SplitN(baseName, "_v", 2)
+               name := parts[0]
+               version := VersionZero
+               if len(parts) == 2 {
+                       version = parts[1]
+               }
+
+               // Auto-installed plugins contain an extra name portion representing
+               // the expected plugin version, which we must trim off.
+               if underX := strings.Index(version, "_x"); underX != -1 {
+                       version = version[:underX]
+               }
+
+               if _, ok := found[nameVersion{name, version}]; ok {
+                       // Skip duplicate versions of the same plugin
+                       // (We do this during this step because after this we will be
+                       // dealing with sets and thus lose our ordering with which to
+                       // decide preference.)
+                       continue
+               }
+
+               s.Add(PluginMeta{
+                       Name:    name,
+                       Version: VersionStr(version),
+                       Path:    path,
+               })
+               found[nameVersion{name, version}] = struct{}{}
+       }
+
+       return s
+}
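A minimal usage sketch of the discovery helpers above; the search directories are hypothetical, and FindPlugins silently skips any that cannot be read:

package main

import (
    "fmt"

    "github.com/hashicorp/terraform/plugin/discovery"
)

func main() {
    // Hypothetical plugin search directories.
    dirs := []string{"/usr/local/lib/terraform/plugins", "./terraform.d/plugins"}

    // Discover candidate provider plugins and split out any with
    // unparseable version strings.
    metas := discovery.FindPlugins("provider", dirs)
    valid, invalid := metas.ValidateVersions()

    fmt.Printf("found %d usable plugins (%d with invalid versions)\n",
        valid.Count(), invalid.Count())
}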
diff --git a/vendor/github.com/hashicorp/terraform/plugin/discovery/get.go b/vendor/github.com/hashicorp/terraform/plugin/discovery/get.go
new file mode 100644 (file)
index 0000000..241b5cb
--- /dev/null
@@ -0,0 +1,424 @@
+package discovery
+
+import (
+       "errors"
+       "fmt"
+       "io/ioutil"
+       "log"
+       "net/http"
+       "os"
+       "runtime"
+       "strconv"
+       "strings"
+
+       "golang.org/x/net/html"
+
+       cleanhttp "github.com/hashicorp/go-cleanhttp"
+       getter "github.com/hashicorp/go-getter"
+       multierror "github.com/hashicorp/go-multierror"
+)
+
+// Releases are located by parsing the html listing from releases.hashicorp.com.
+//
+// The URL for releases follows the pattern:
+//    https://releases.hashicorp.com/terraform-provider-name/<x.y.z>/terraform-provider-name_<x.y.z>_<os>_<arch>.<ext>
+//
+// The plugin protocol version will be saved with the release and returned in
+// the header X-Terraform-Protocol-Version.
+
+const protocolVersionHeader = "x-terraform-protocol-version"
+
+var releaseHost = "https://releases.hashicorp.com"
+
+var httpClient = cleanhttp.DefaultClient()
+
+// An Installer maintains a local cache of plugins by downloading them
+// from an online repository.
+type Installer interface {
+       Get(name string, req Constraints) (PluginMeta, error)
+       PurgeUnused(used map[string]PluginMeta) (removed PluginMetaSet, err error)
+}
+
+// ProviderInstaller is an Installer implementation that knows how to
+// download Terraform providers from the official HashiCorp releases service
+// into a local directory. The files downloaded are compliant with the
+// naming scheme expected by FindPlugins, so the target directory of a
+// provider installer can be used as one of several plugin discovery sources.
+type ProviderInstaller struct {
+       Dir string
+
+       PluginProtocolVersion uint
+
+       // OS and Arch specify the OS and architecture that should be used when
+       // installing plugins. They use the same labels as the runtime.GOOS and
+       // runtime.GOARCH variables, whose values are used as defaults if either
+       // field is the empty string.
+       OS   string
+       Arch string
+
+       // Skip checksum and signature verification
+       SkipVerify bool
+}
+
+// Get implements Installer, attempting to download and install a Terraform
+// provider matching the given constraints.
+//
+// This method may return one of a number of sentinel errors from this
+// package to indicate issues that are likely to be resolvable via user action:
+//
+//     ErrorNoSuchProvider: no provider with the given name exists in the repository.
+//     ErrorNoSuitableVersion: the provider exists but no available version matches constraints.
+//     ErrorNoVersionCompatible: a plugin was found within the constraints but it is
+//                               incompatible with the current Terraform version.
+//
+// These errors should be recognized and handled as special cases by the caller
+// to present a suitable user-oriented error message.
+//
+// All other errors indicate an internal problem that is likely _not_ solvable
+// through user action, or at least not within Terraform's scope. Error messages
+// are produced under the assumption that if presented to the user they will
+// be presented alongside context about what is being installed, and thus the
+// error messages do not redundantly include such information.
+func (i *ProviderInstaller) Get(provider string, req Constraints) (PluginMeta, error) {
+       versions, err := i.listProviderVersions(provider)
+       // TODO: return multiple errors
+       if err != nil {
+               return PluginMeta{}, err
+       }
+
+       if len(versions) == 0 {
+               return PluginMeta{}, ErrorNoSuitableVersion
+       }
+
+       versions = allowedVersions(versions, req)
+       if len(versions) == 0 {
+               return PluginMeta{}, ErrorNoSuitableVersion
+       }
+
+       // sort them newest to oldest
+       Versions(versions).Sort()
+
+       // take the first matching plugin we find
+       for _, v := range versions {
+               url := i.providerURL(provider, v.String())
+
+               if !i.SkipVerify {
+                       sha256, err := i.getProviderChecksum(provider, v.String())
+                       if err != nil {
+                               return PluginMeta{}, err
+                       }
+
+                       // add the checksum parameter for go-getter to verify the download for us.
+                       if sha256 != "" {
+                               url = url + "?checksum=sha256:" + sha256
+                       }
+               }
+
+               log.Printf("[DEBUG] fetching provider info for %s version %s", provider, v)
+               if checkPlugin(url, i.PluginProtocolVersion) {
+                       log.Printf("[DEBUG] getting provider %q version %q at %s", provider, v, url)
+                       err := getter.Get(i.Dir, url)
+                       if err != nil {
+                               return PluginMeta{}, err
+                       }
+
+                       // Find what we just installed
+                       // (This is weird, because go-getter doesn't directly return
+                       //  information about what was extracted, and we just extracted
+                       //  the archive directly into a shared dir here.)
+                       log.Printf("[DEBUG] looking for the %s %s plugin we just installed", provider, v)
+                       metas := FindPlugins("provider", []string{i.Dir})
+                       log.Printf("[DEBUG] all plugins found %#v", metas)
+                       metas, _ = metas.ValidateVersions()
+                       metas = metas.WithName(provider).WithVersion(v)
+                       log.Printf("[DEBUG] filtered plugins %#v", metas)
+                       if metas.Count() == 0 {
+                               // This should never happen. Suggests that the release archive
+                               // contains an executable file whose name doesn't match the
+                               // expected convention.
+                               return PluginMeta{}, fmt.Errorf(
+                                       "failed to find installed plugin version %s; this is a bug in Terraform and should be reported",
+                                       v,
+                               )
+                       }
+
+                       if metas.Count() > 1 {
+                               // This should also never happen, and suggests that a
+                               // particular version was re-released with a different
+                               // executable filename. We consider releases as immutable, so
+                               // this is an error.
+                               return PluginMeta{}, fmt.Errorf(
+                                       "multiple plugins installed for version %s; this is a bug in Terraform and should be reported",
+                                       v,
+                               )
+                       }
+
+                       // By now we know we have exactly one meta, and so "Newest" will
+                       // return that one.
+                       return metas.Newest(), nil
+               }
+
+               log.Printf("[INFO] incompatible ProtocolVersion for %s version %s", provider, v)
+       }
+
+       return PluginMeta{}, ErrorNoVersionCompatible
+}
+
+func (i *ProviderInstaller) PurgeUnused(used map[string]PluginMeta) (PluginMetaSet, error) {
+       purge := make(PluginMetaSet)
+
+       present := FindPlugins("provider", []string{i.Dir})
+       for meta := range present {
+               chosen, ok := used[meta.Name]
+               if !ok {
+                       purge.Add(meta)
+               }
+               if chosen.Path != meta.Path {
+                       purge.Add(meta)
+               }
+       }
+
+       removed := make(PluginMetaSet)
+       var errs error
+       for meta := range purge {
+               path := meta.Path
+               err := os.Remove(path)
+               if err != nil {
+                       errs = multierror.Append(errs, fmt.Errorf(
+                               "failed to remove unused provider plugin %s: %s",
+                               path, err,
+                       ))
+               } else {
+                       removed.Add(meta)
+               }
+       }
+
+       return removed, errs
+}
+
+// Plugins are referred to by the short name, but all URLs and files will use
+// the full name prefixed with terraform-<plugin_type>-
+func (i *ProviderInstaller) providerName(name string) string {
+       return "terraform-provider-" + name
+}
+
+func (i *ProviderInstaller) providerFileName(name, version string) string {
+       os := i.OS
+       arch := i.Arch
+       if os == "" {
+               os = runtime.GOOS
+       }
+       if arch == "" {
+               arch = runtime.GOARCH
+       }
+       return fmt.Sprintf("%s_%s_%s_%s.zip", i.providerName(name), version, os, arch)
+}
+
+// providerVersionsURL returns the path to the released versions directory for the provider:
+// https://releases.hashicorp.com/terraform-provider-name/
+func (i *ProviderInstaller) providerVersionsURL(name string) string {
+       return releaseHost + "/" + i.providerName(name) + "/"
+}
+
+// providerURL returns the full path to the provider file, using the current OS
+// and ARCH:
+// .../terraform-provider-name_<x.y.z>/terraform-provider-name_<x.y.z>_<os>_<arch>.<ext>
+func (i *ProviderInstaller) providerURL(name, version string) string {
+       return fmt.Sprintf("%s%s/%s", i.providerVersionsURL(name), version, i.providerFileName(name, version))
+}
+
+func (i *ProviderInstaller) providerChecksumURL(name, version string) string {
+       fileName := fmt.Sprintf("%s_%s_SHA256SUMS", i.providerName(name), version)
+       u := fmt.Sprintf("%s%s/%s", i.providerVersionsURL(name), version, fileName)
+       return u
+}
+
+func (i *ProviderInstaller) getProviderChecksum(name, version string) (string, error) {
+       checksums, err := getPluginSHA256SUMs(i.providerChecksumURL(name, version))
+       if err != nil {
+               return "", err
+       }
+
+       return checksumForFile(checksums, i.providerFileName(name, version)), nil
+}
+
+// checkPlugin makes a HEAD request to the provided url and reports whether the
+// advertised plugin protocol version matches the requested one. If the header
+// is not present, we assume the latest version will be compatible, and leave
+// the check for discovery or execution.
+func checkPlugin(url string, pluginProtocolVersion uint) bool {
+       resp, err := httpClient.Head(url)
+       if err != nil {
+               log.Printf("[ERROR] error fetching plugin headers: %s", err)
+               return false
+       }
+
+       if resp.StatusCode != http.StatusOK {
+               log.Println("[ERROR] non-200 status fetching plugin headers:", resp.Status)
+               return false
+       }
+
+       proto := resp.Header.Get(protocolVersionHeader)
+       if proto == "" {
+               // The header isn't present, but we don't make this error fatal since
+               // the latest version will probably work.
+               log.Printf("[WARNING] missing %s from: %s", protocolVersionHeader, url)
+               return true
+       }
+
+       protoVersion, err := strconv.Atoi(proto)
+       if err != nil {
+               log.Printf("[ERROR] invalid ProtocolVersion: %s", proto)
+               return false
+       }
+
+       return protoVersion == int(pluginProtocolVersion)
+}
+
+// list the versions available for the named plugin
+func (i *ProviderInstaller) listProviderVersions(name string) ([]Version, error) {
+       versions, err := listPluginVersions(i.providerVersionsURL(name))
+       if err != nil {
+               // listPluginVersions returns a verbose error message indicating
+               // what was being accessed and what failed
+               return nil, err
+       }
+       return versions, nil
+}
+
+var errVersionNotFound = errors.New("version not found")
+
+// take the list of available versions for a plugin, and filter out those that
+// don't fit the constraints.
+func allowedVersions(available []Version, required Constraints) []Version {
+       var allowed []Version
+
+       for _, v := range available {
+               if required.Allows(v) {
+                       allowed = append(allowed, v)
+               }
+       }
+
+       return allowed
+}
+
+// return a list of the plugin versions at the given URL
+func listPluginVersions(url string) ([]Version, error) {
+       resp, err := httpClient.Get(url)
+       if err != nil {
+               // http library produces a verbose error message that includes the
+               // URL being accessed, etc.
+               return nil, err
+       }
+       defer resp.Body.Close()
+
+       if resp.StatusCode != http.StatusOK {
+               body, _ := ioutil.ReadAll(resp.Body)
+               log.Printf("[ERROR] failed to fetch plugin versions from %s\n%s\n%s", url, resp.Status, body)
+
+               switch resp.StatusCode {
+               case http.StatusNotFound, http.StatusForbidden:
+                       // These are treated as indicative of the given name not being
+                       // a valid provider name at all.
+                       return nil, ErrorNoSuchProvider
+
+               default:
+                       // All other errors are assumed to be operational problems.
+                       return nil, fmt.Errorf("error accessing %s: %s", url, resp.Status)
+               }
+
+       }
+
+       body, err := html.Parse(resp.Body)
+       if err != nil {
+               log.Fatal(err)
+       }
+
+       names := []string{}
+
+       // all we need to do is list links on the directory listing page that look like plugins
+       var f func(*html.Node)
+       f = func(n *html.Node) {
+               if n.Type == html.ElementNode && n.Data == "a" {
+                       c := n.FirstChild
+                       if c != nil && c.Type == html.TextNode && strings.HasPrefix(c.Data, "terraform-") {
+                               names = append(names, c.Data)
+                               return
+                       }
+               }
+               for c := n.FirstChild; c != nil; c = c.NextSibling {
+                       f(c)
+               }
+       }
+       f(body)
+
+       return versionsFromNames(names), nil
+}
+
+// parse the list of directory names into a list of available versions
+func versionsFromNames(names []string) []Version {
+       var versions []Version
+       for _, name := range names {
+               parts := strings.SplitN(name, "_", 2)
+               if len(parts) == 2 && parts[1] != "" {
+                       v, err := VersionStr(parts[1]).Parse()
+                       if err != nil {
+                               // filter invalid versions scraped from the page
+                               log.Printf("[WARN] invalid version found for %q: %s", name, err)
+                               continue
+                       }
+
+                       versions = append(versions, v)
+               }
+       }
+
+       return versions
+}
+
+func checksumForFile(sums []byte, name string) string {
+       for _, line := range strings.Split(string(sums), "\n") {
+               parts := strings.Fields(line)
+               if len(parts) > 1 && parts[1] == name {
+                       return parts[0]
+               }
+       }
+       return ""
+}
+
+// fetch the SHA256SUMS file provided, and verify its signature.
+func getPluginSHA256SUMs(sumsURL string) ([]byte, error) {
+       sigURL := sumsURL + ".sig"
+
+       sums, err := getFile(sumsURL)
+       if err != nil {
+               return nil, fmt.Errorf("error fetching checksums: %s", err)
+       }
+
+       sig, err := getFile(sigURL)
+       if err != nil {
+               return nil, fmt.Errorf("error fetching checksums signature: %s", err)
+       }
+
+       if err := verifySig(sums, sig); err != nil {
+               return nil, err
+       }
+
+       return sums, nil
+}
+
+func getFile(url string) ([]byte, error) {
+       resp, err := httpClient.Get(url)
+       if err != nil {
+               return nil, err
+       }
+       defer resp.Body.Close()
+
+       if resp.StatusCode != http.StatusOK {
+               return nil, fmt.Errorf("%s", resp.Status)
+       }
+
+       data, err := ioutil.ReadAll(resp.Body)
+       if err != nil {
+               return data, err
+       }
+       return data, nil
+}
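A hedged sketch of driving the installer above; the target directory, protocol version, constraint, and provider name are illustrative assumptions, and Get will reach out to releases.hashicorp.com:

package main

import (
    "log"

    "github.com/hashicorp/terraform/plugin/discovery"
)

func main() {
    // Hypothetical plugin cache directory and protocol version; SkipVerify is
    // left false so the SHA256SUMS file and its signature are checked.
    installer := &discovery.ProviderInstaller{
        Dir:                   ".terraform/plugins",
        PluginProtocolVersion: 4,
    }

    // Install the newest release of a hypothetical provider that satisfies
    // the version constraint.
    meta, err := installer.Get("example", discovery.ConstraintStr(">= 1.0.0").MustParse())
    if err != nil {
        log.Fatal(err)
    }
    log.Printf("installed %s %s at %s", meta.Name, meta.Version, meta.Path)
}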
diff --git a/vendor/github.com/hashicorp/terraform/plugin/discovery/meta.go b/vendor/github.com/hashicorp/terraform/plugin/discovery/meta.go
new file mode 100644 (file)
index 0000000..bdcebcb
--- /dev/null
@@ -0,0 +1,41 @@
+package discovery
+
+import (
+       "crypto/sha256"
+       "io"
+       "os"
+)
+
+// PluginMeta is metadata about a plugin, useful for launching the plugin
+// and for understanding which plugins are available.
+type PluginMeta struct {
+       // Name is the name of the plugin, e.g. as inferred from the plugin
+       // binary's filename, or by explicit configuration.
+       Name string
+
+       // Version is the semver version of the plugin, expressed as a string
+       // that might not be semver-valid.
+       Version VersionStr
+
+       // Path is the absolute path of the executable that can be launched
+       // to provide the RPC server for this plugin.
+       Path string
+}
+
+// SHA256 returns a SHA256 hash of the content of the referenced executable
+// file, or an error if the file's contents cannot be read.
+func (m PluginMeta) SHA256() ([]byte, error) {
+       f, err := os.Open(m.Path)
+       if err != nil {
+               return nil, err
+       }
+       defer f.Close()
+
+       h := sha256.New()
+       _, err = io.Copy(h, f)
+       if err != nil {
+               return nil, err
+       }
+
+       return h.Sum(nil), nil
+}
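A small sketch of hashing a discovered plugin binary with PluginMeta.SHA256, e.g. to compare against a locked digest later; the name, version, and path are hypothetical:

package main

import (
    "fmt"
    "log"

    "github.com/hashicorp/terraform/plugin/discovery"
)

func main() {
    // Hypothetical plugin metadata pointing at an executable on disk.
    meta := discovery.PluginMeta{
        Name:    "example",
        Version: discovery.VersionStr("1.0.0"),
        Path:    "/plugins/terraform-provider-example_v1.0.0",
    }

    digest, err := meta.SHA256()
    if err != nil {
        log.Fatal(err)
    }
    fmt.Printf("%x\n", digest)
}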
diff --git a/vendor/github.com/hashicorp/terraform/plugin/discovery/meta_set.go b/vendor/github.com/hashicorp/terraform/plugin/discovery/meta_set.go
new file mode 100644 (file)
index 0000000..181ea1f
--- /dev/null
@@ -0,0 +1,195 @@
+package discovery
+
+// A PluginMetaSet is a set of PluginMeta objects meeting certain criteria.
+//
+// Methods on this type allow filtering of the set to produce subsets that
+// meet more restrictive criteria.
+type PluginMetaSet map[PluginMeta]struct{}
+
+// Add inserts the given PluginMeta into the receiving set. This is a no-op
+// if the given meta is already present.
+func (s PluginMetaSet) Add(p PluginMeta) {
+       s[p] = struct{}{}
+}
+
+// Remove removes the given PluginMeta from the receiving set. This is a no-op
+// if the given meta is not already present.
+func (s PluginMetaSet) Remove(p PluginMeta) {
+       delete(s, p)
+}
+
+// Has returns true if the given meta is in the receiving set, or false
+// otherwise.
+func (s PluginMetaSet) Has(p PluginMeta) bool {
+       _, ok := s[p]
+       return ok
+}
+
+// Count returns the number of metas in the set
+func (s PluginMetaSet) Count() int {
+       return len(s)
+}
+
+// ValidateVersions returns two new PluginMetaSets, separating those whose
+// version strings are syntactically valid semver from those that aren't.
+//
+// Eliminating invalid versions from consideration (and possibly warning about
+// them) is usually the first step of working with a meta set after discovery
+// has completed.
+func (s PluginMetaSet) ValidateVersions() (valid, invalid PluginMetaSet) {
+       valid = make(PluginMetaSet)
+       invalid = make(PluginMetaSet)
+       for p := range s {
+               if _, err := p.Version.Parse(); err == nil {
+                       valid.Add(p)
+               } else {
+                       invalid.Add(p)
+               }
+       }
+       return
+}
+
+// WithName returns the subset of metas that have the given name.
+func (s PluginMetaSet) WithName(name string) PluginMetaSet {
+       ns := make(PluginMetaSet)
+       for p := range s {
+               if p.Name == name {
+                       ns.Add(p)
+               }
+       }
+       return ns
+}
+
+// WithVersion returns the subset of metas that have the given version.
+//
+// This should be used only with the "valid" result from ValidateVersions;
+// it will ignore any plugin metas that have invalid version strings.
+func (s PluginMetaSet) WithVersion(version Version) PluginMetaSet {
+       ns := make(PluginMetaSet)
+       for p := range s {
+               gotVersion, err := p.Version.Parse()
+               if err != nil {
+                       continue
+               }
+               if gotVersion.Equal(version) {
+                       ns.Add(p)
+               }
+       }
+       return ns
+}
+
+// ByName groups the metas in the set by their Names, returning a map.
+func (s PluginMetaSet) ByName() map[string]PluginMetaSet {
+       ret := make(map[string]PluginMetaSet)
+       for p := range s {
+               if _, ok := ret[p.Name]; !ok {
+                       ret[p.Name] = make(PluginMetaSet)
+               }
+               ret[p.Name].Add(p)
+       }
+       return ret
+}
+
+// Newest returns the one item from the set that has the newest Version value.
+//
+// The result is meaningful only if the set is already filtered such that
+// all of the metas have the same Name.
+//
+// If there isn't at least one meta in the set then this function will panic.
+// Use Count() to ensure that there is at least one value before calling.
+//
+// If any of the metas have invalid version strings then this function will
+// panic. Use ValidateVersions() first to filter out metas with invalid
+// versions.
+//
+// If two metas have the same Version then one is arbitrarily chosen. This
+// situation should be avoided by pre-filtering the set.
+func (s PluginMetaSet) Newest() PluginMeta {
+       if len(s) == 0 {
+               panic("can't call NewestStable on empty PluginMetaSet")
+       }
+
+       var first = true
+       var winner PluginMeta
+       var winnerVersion Version
+       for p := range s {
+               version, err := p.Version.Parse()
+               if err != nil {
+                       panic(err)
+               }
+
+               if first || version.NewerThan(winnerVersion) {
+                       winner = p
+                       winnerVersion = version
+                       first = false
+               }
+       }
+
+       return winner
+}
+
+// ConstrainVersions takes a set of requirements and attempts to
+// return a map from name to a set of metas that have the matching
+// name and an appropriate version.
+//
+// If a given requirement matches *no* plugins then its PluginMetaSet
+// in the returned map will be empty.
+//
+// All viable metas are returned, so the caller can apply any desired
+// filtering to reduce them to a single option, for example by calling
+// Newest() to obtain the highest available version.
+//
+// If any of the metas in the set have invalid version strings then this
+// function will panic. Use ValidateVersions() first to filter out metas with
+// invalid versions.
+func (s PluginMetaSet) ConstrainVersions(reqd PluginRequirements) map[string]PluginMetaSet {
+       ret := make(map[string]PluginMetaSet)
+       for p := range s {
+               name := p.Name
+               allowedVersions, ok := reqd[name]
+               if !ok {
+                       continue
+               }
+               if _, ok := ret[p.Name]; !ok {
+                       ret[p.Name] = make(PluginMetaSet)
+               }
+               version, err := p.Version.Parse()
+               if err != nil {
+                       panic(err)
+               }
+               if allowedVersions.Allows(version) {
+                       ret[p.Name].Add(p)
+               }
+       }
+       return ret
+}
+
+// OverridePaths returns a new set where any existing plugins with the given
+// names are removed and replaced with the single path given in the map.
+//
+// This is here only to continue to support the legacy way of overriding
+// plugin binaries in the .terraformrc file. It treats all given plugins
+// as pre-versioning (version 0.0.0). This mechanism will eventually be
+// phased out, with vendor directories being the intended replacement.
+func (s PluginMetaSet) OverridePaths(paths map[string]string) PluginMetaSet {
+       ret := make(PluginMetaSet)
+       for p := range s {
+               if _, ok := paths[p.Name]; ok {
+                       // Skip plugins that we're overriding
+                       continue
+               }
+
+               ret.Add(p)
+       }
+
+       // Now add the metadata for overriding plugins
+       for name, path := range paths {
+               ret.Add(PluginMeta{
+                       Name:    name,
+                       Version: VersionZero,
+                       Path:    path,
+               })
+       }
+
+       return ret
+}
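A sketch of the usual filtering chain on a PluginMetaSet, assuming a few hypothetical metas for one provider name:

package main

import (
    "fmt"

    "github.com/hashicorp/terraform/plugin/discovery"
)

func main() {
    // Hypothetical discovered metas, including one with an unparseable version.
    s := make(discovery.PluginMetaSet)
    s.Add(discovery.PluginMeta{Name: "example", Version: "1.0.0", Path: "/plugins/terraform-provider-example_v1.0.0"})
    s.Add(discovery.PluginMeta{Name: "example", Version: "1.2.0", Path: "/plugins/terraform-provider-example_v1.2.0"})
    s.Add(discovery.PluginMeta{Name: "example", Version: "not-a-version", Path: "/plugins/terraform-provider-example"})

    // Drop invalid versions, narrow to the name we care about, then pick the
    // newest remaining release.
    valid, _ := s.ValidateVersions()
    newest := valid.WithName("example").Newest()
    fmt.Println(newest.Version) // 1.2.0
}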
diff --git a/vendor/github.com/hashicorp/terraform/plugin/discovery/requirements.go b/vendor/github.com/hashicorp/terraform/plugin/discovery/requirements.go
new file mode 100644 (file)
index 0000000..75430fd
--- /dev/null
@@ -0,0 +1,105 @@
+package discovery
+
+import (
+       "bytes"
+)
+
+// PluginRequirements describes a set of plugins (assumed to be of a consistent
+// kind) that are required to exist and have versions within the given
+// corresponding sets.
+type PluginRequirements map[string]*PluginConstraints
+
+// PluginConstraints represents an element of PluginRequirements describing
+// the constraints for a single plugin.
+type PluginConstraints struct {
+       // Specifies that the plugin's version must be within the given
+       // constraints.
+       Versions Constraints
+
+       // If non-nil, the hash of the on-disk plugin executable must exactly
+       // match the SHA256 hash given here.
+       SHA256 []byte
+}
+
+// Allows returns true if the given version is within the receiver's version
+// constraints.
+func (s *PluginConstraints) Allows(v Version) bool {
+       return s.Versions.Allows(v)
+}
+
+// AcceptsSHA256 returns true if the given executable SHA256 hash is acceptable,
+// either because it matches the constraint or because there is no such
+// constraint.
+func (s *PluginConstraints) AcceptsSHA256(digest []byte) bool {
+       if s.SHA256 == nil {
+               return true
+       }
+       return bytes.Equal(s.SHA256, digest)
+}
+
+// Merge takes the contents of the receiver and the other given requirements
+// object and merges them together into a single requirements structure
+// that satisfies both sets of requirements.
+//
+// Note that it doesn't make sense to merge two PluginRequirements with
+// differing required plugin SHA256 hashes, since the result will never
+// match any plugin.
+func (r PluginRequirements) Merge(other PluginRequirements) PluginRequirements {
+       ret := make(PluginRequirements)
+       for n, c := range r {
+               ret[n] = &PluginConstraints{
+                       Versions: Constraints{}.Append(c.Versions),
+                       SHA256:   c.SHA256,
+               }
+       }
+       for n, c := range other {
+               if existing, exists := ret[n]; exists {
+                       ret[n].Versions = ret[n].Versions.Append(c.Versions)
+
+                       if existing.SHA256 != nil {
+                               if c.SHA256 != nil && !bytes.Equal(c.SHA256, existing.SHA256) {
+                                       // If we've been asked to merge two constraints with
+                                       // different SHA256 hashes then we'll produce a dummy value
+                                       // that can never match anything. This is a silly edge case
+                                       // that no reasonable caller should hit.
+                                       ret[n].SHA256 = []byte(invalidProviderHash)
+                               }
+                       } else {
+                               ret[n].SHA256 = c.SHA256 // might still be nil
+                       }
+               } else {
+                       ret[n] = &PluginConstraints{
+                               Versions: Constraints{}.Append(c.Versions),
+                               SHA256:   c.SHA256,
+                       }
+               }
+       }
+       return ret
+}
+
+// LockExecutables applies additional constraints to the receiver that
+// require plugin executables with specific SHA256 digests. This modifies
+// the receiver in-place, since it's intended to be applied after
+// version constraints have been resolved.
+//
+// The given map must include a key for every plugin that is already
+// required. If not, any missing keys will cause the corresponding plugin
+// to never match, though the direct caller doesn't necessarily need to
+// guarantee this as long as the downstream code _applying_ these constraints
+// is able to deal with the non-match in some way.
+func (r PluginRequirements) LockExecutables(sha256s map[string][]byte) {
+       for name, cons := range r {
+               digest := sha256s[name]
+
+               if digest == nil {
+                       // Prevent any match, which will then presumably cause the
+                       // downstream consumer of this requirements to report an error.
+                       cons.SHA256 = []byte(invalidProviderHash)
+                       continue
+               }
+
+               cons.SHA256 = digest
+       }
+}
+
+const invalidProviderHash = "<invalid>"
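A sketch of combining requirements from two hypothetical modules and then pinning the accepted executable digest:

package main

import (
    "fmt"

    "github.com/hashicorp/terraform/plugin/discovery"
)

func main() {
    // Two hypothetical requirement sets, e.g. from two modules that use the
    // same provider with different version constraints.
    a := discovery.PluginRequirements{
        "example": &discovery.PluginConstraints{Versions: discovery.ConstraintStr(">= 1.0").MustParse()},
    }
    b := discovery.PluginRequirements{
        "example": &discovery.PluginConstraints{Versions: discovery.ConstraintStr("< 2.0").MustParse()},
    }

    // Merge produces the intersection of both constraint sets.
    merged := a.Merge(b)
    fmt.Println(merged["example"].Versions.String())

    // Pin the accepted executable by SHA256 digest (hypothetical digest bytes).
    merged.LockExecutables(map[string][]byte{"example": {0xde, 0xad, 0xbe, 0xef}})
}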
diff --git a/vendor/github.com/hashicorp/terraform/plugin/discovery/signature.go b/vendor/github.com/hashicorp/terraform/plugin/discovery/signature.go
new file mode 100644 (file)
index 0000000..b6686a5
--- /dev/null
@@ -0,0 +1,53 @@
+package discovery
+
+import (
+       "bytes"
+       "log"
+       "strings"
+
+       "golang.org/x/crypto/openpgp"
+)
+
+// Verify the data using the provided openpgp detached signature and the
+// embedded hashicorp public key.
+func verifySig(data, sig []byte) error {
+       el, err := openpgp.ReadArmoredKeyRing(strings.NewReader(hashiPublicKey))
+       if err != nil {
+               log.Fatal(err)
+       }
+
+       _, err = openpgp.CheckDetachedSignature(el, bytes.NewReader(data), bytes.NewReader(sig))
+       return err
+}
+
+// this is the public key that signs the checksums file for releases.
+const hashiPublicKey = `-----BEGIN PGP PUBLIC KEY BLOCK-----
+Version: GnuPG v1
+
+mQENBFMORM0BCADBRyKO1MhCirazOSVwcfTr1xUxjPvfxD3hjUwHtjsOy/bT6p9f
+W2mRPfwnq2JB5As+paL3UGDsSRDnK9KAxQb0NNF4+eVhr/EJ18s3wwXXDMjpIifq
+fIm2WyH3G+aRLTLPIpscUNKDyxFOUbsmgXAmJ46Re1fn8uKxKRHbfa39aeuEYWFA
+3drdL1WoUngvED7f+RnKBK2G6ZEpO+LDovQk19xGjiMTtPJrjMjZJ3QXqPvx5wca
+KSZLr4lMTuoTI/ZXyZy5bD4tShiZz6KcyX27cD70q2iRcEZ0poLKHyEIDAi3TM5k
+SwbbWBFd5RNPOR0qzrb/0p9ksKK48IIfH2FvABEBAAG0K0hhc2hpQ29ycCBTZWN1
+cml0eSA8c2VjdXJpdHlAaGFzaGljb3JwLmNvbT6JATgEEwECACIFAlMORM0CGwMG
+CwkIBwMCBhUIAgkKCwQWAgMBAh4BAheAAAoJEFGFLYc0j/xMyWIIAIPhcVqiQ59n
+Jc07gjUX0SWBJAxEG1lKxfzS4Xp+57h2xxTpdotGQ1fZwsihaIqow337YHQI3q0i
+SqV534Ms+j/tU7X8sq11xFJIeEVG8PASRCwmryUwghFKPlHETQ8jJ+Y8+1asRydi
+psP3B/5Mjhqv/uOK+Vy3zAyIpyDOMtIpOVfjSpCplVRdtSTFWBu9Em7j5I2HMn1w
+sJZnJgXKpybpibGiiTtmnFLOwibmprSu04rsnP4ncdC2XRD4wIjoyA+4PKgX3sCO
+klEzKryWYBmLkJOMDdo52LttP3279s7XrkLEE7ia0fXa2c12EQ0f0DQ1tGUvyVEW
+WmJVccm5bq25AQ0EUw5EzQEIANaPUY04/g7AmYkOMjaCZ6iTp9hB5Rsj/4ee/ln9
+wArzRO9+3eejLWh53FoN1rO+su7tiXJA5YAzVy6tuolrqjM8DBztPxdLBbEi4V+j
+2tK0dATdBQBHEh3OJApO2UBtcjaZBT31zrG9K55D+CrcgIVEHAKY8Cb4kLBkb5wM
+skn+DrASKU0BNIV1qRsxfiUdQHZfSqtp004nrql1lbFMLFEuiY8FZrkkQ9qduixo
+mTT6f34/oiY+Jam3zCK7RDN/OjuWheIPGj/Qbx9JuNiwgX6yRj7OE1tjUx6d8g9y
+0H1fmLJbb3WZZbuuGFnK6qrE3bGeY8+AWaJAZ37wpWh1p0cAEQEAAYkBHwQYAQIA
+CQUCUw5EzQIbDAAKCRBRhS2HNI/8TJntCAClU7TOO/X053eKF1jqNW4A1qpxctVc
+z8eTcY8Om5O4f6a/rfxfNFKn9Qyja/OG1xWNobETy7MiMXYjaa8uUx5iFy6kMVaP
+0BXJ59NLZjMARGw6lVTYDTIvzqqqwLxgliSDfSnqUhubGwvykANPO+93BBx89MRG
+unNoYGXtPlhNFrAsB1VR8+EyKLv2HQtGCPSFBhrjuzH3gxGibNDDdFQLxxuJWepJ
+EK1UbTS4ms0NgZ2Uknqn1WRU1Ki7rE4sTy68iZtWpKQXZEJa0IGnuI2sSINGcXCJ
+oEIgXTMyCILo34Fa/C6VCm2WBgz9zZO8/rHIiQm1J5zqz0DrDwKBUM9C
+=LYpS
+-----END PGP PUBLIC KEY BLOCK-----`
diff --git a/vendor/github.com/hashicorp/terraform/plugin/discovery/version.go b/vendor/github.com/hashicorp/terraform/plugin/discovery/version.go
new file mode 100644 (file)
index 0000000..8fad58d
--- /dev/null
@@ -0,0 +1,72 @@
+package discovery
+
+import (
+       "fmt"
+       "sort"
+
+       version "github.com/hashicorp/go-version"
+)
+
+const VersionZero = "0.0.0"
+
+// A VersionStr is a string containing a possibly-invalid representation
+// of a semver version number. Call Parse on it to obtain a real Version
+// object, or discover that it is invalid.
+type VersionStr string
+
+// Parse transforms a VersionStr into a Version if it is
+// syntactically valid. If it isn't then an error is returned instead.
+func (s VersionStr) Parse() (Version, error) {
+       raw, err := version.NewVersion(string(s))
+       if err != nil {
+               return Version{}, err
+       }
+       return Version{raw}, nil
+}
+
+// MustParse transforms a VersionStr into a Version if it is
+// syntactically valid. If it isn't then it panics.
+func (s VersionStr) MustParse() Version {
+       ret, err := s.Parse()
+       if err != nil {
+               panic(err)
+       }
+       return ret
+}
+
+// Version represents a version number that has been parsed from
+// a semver string and is known to be valid.
+type Version struct {
+       // We wrap this here just because it avoids a proliferation of
+       // direct go-version imports all over the place, and keeps the
+       // version-processing details within this package.
+       raw *version.Version
+}
+
+func (v Version) String() string {
+       return v.raw.String()
+}
+
+func (v Version) NewerThan(other Version) bool {
+       return v.raw.GreaterThan(other.raw)
+}
+
+func (v Version) Equal(other Version) bool {
+       return v.raw.Equal(other.raw)
+}
+
+// MinorUpgradeConstraintStr returns a ConstraintStr that would permit
+// minor upgrades relative to the receiving version.
+func (v Version) MinorUpgradeConstraintStr() ConstraintStr {
+       segments := v.raw.Segments()
+       return ConstraintStr(fmt.Sprintf("~> %d.%d", segments[0], segments[1]))
+}
+
+type Versions []Version
+
+// Sort sorts versions from newest to oldest.
+func (v Versions) Sort() {
+       sort.Slice(v, func(i, j int) bool {
+               return v[i].NewerThan(v[j])
+       })
+}
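A quick sketch of parsing hypothetical version strings and sorting them newest-first:

package main

import (
    "fmt"

    "github.com/hashicorp/terraform/plugin/discovery"
)

func main() {
    // Hypothetical version strings; MustParse panics on invalid input.
    vs := discovery.Versions{
        discovery.VersionStr("0.1.0").MustParse(),
        discovery.VersionStr("1.2.0").MustParse(),
        discovery.VersionStr("1.0.0").MustParse(),
    }

    vs.Sort()
    fmt.Println(vs[0]) // 1.2.0, the newest version, is first after sorting
}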
diff --git a/vendor/github.com/hashicorp/terraform/plugin/discovery/version_set.go b/vendor/github.com/hashicorp/terraform/plugin/discovery/version_set.go
new file mode 100644 (file)
index 0000000..0aefd75
--- /dev/null
@@ -0,0 +1,84 @@
+package discovery
+
+import (
+       "sort"
+
+       version "github.com/hashicorp/go-version"
+)
+
+// A ConstraintStr is a string containing a possibly-invalid representation
+// of a version constraint provided in configuration. Call Parse on it to
+// obtain a real Constraint object, or discover that it is invalid.
+type ConstraintStr string
+
+// Parse transforms a ConstraintStr into a Constraints if it is
+// syntactically valid. If it isn't then an error is returned instead.
+func (s ConstraintStr) Parse() (Constraints, error) {
+       raw, err := version.NewConstraint(string(s))
+       if err != nil {
+               return Constraints{}, err
+       }
+       return Constraints{raw}, nil
+}
+
+// MustParse is like Parse but it panics if the constraint string is invalid.
+func (s ConstraintStr) MustParse() Constraints {
+       ret, err := s.Parse()
+       if err != nil {
+               panic(err)
+       }
+       return ret
+}
+
+// Constraints represents a set of versions which any given Version is either
+// a member of or not.
+type Constraints struct {
+       raw version.Constraints
+}
+
+// AllVersions is a Constraints containing all versions
+var AllVersions Constraints
+
+func init() {
+       AllVersions = Constraints{
+               raw: make(version.Constraints, 0),
+       }
+}
+
+// Allows returns true if the given version is permitted by the receiving
+// constraints set.
+func (s Constraints) Allows(v Version) bool {
+       return s.raw.Check(v.raw)
+}
+
+// Append combines the receiving set with the given other set to produce
+// a set that is the intersection of both sets, which is to say that the
+// resulting constraints contain only the versions that are members of both.
+func (s Constraints) Append(other Constraints) Constraints {
+       raw := make(version.Constraints, 0, len(s.raw)+len(other.raw))
+
+       // Since "raw" is a list of constraints that remove versions from the set,
+       // "Intersection" is implemented by concatenating together those lists,
+       // thus leaving behind only the versions not removed by either list.
+       raw = append(raw, s.raw...)
+       raw = append(raw, other.raw...)
+
+       // while the set is unordered, we sort these lexically for consistent output
+       sort.Slice(raw, func(i, j int) bool {
+               return raw[i].String() < raw[j].String()
+       })
+
+       return Constraints{raw}
+}
+
+// String returns a string representation of the set members as a set
+// of range constraints.
+func (s Constraints) String() string {
+       return s.raw.String()
+}
+
+// Unconstrained returns true if and only if the receiver is an empty
+// constraint set.
+func (s Constraints) Unconstrained() bool {
+       return len(s.raw) == 0
+}
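A sketch of intersecting two hypothetical constraint sets and testing a version against the result:

package main

import (
    "fmt"

    "github.com/hashicorp/terraform/plugin/discovery"
)

func main() {
    // Hypothetical constraints: a minor-upgrade range intersected with an
    // exclusion of one release.
    c := discovery.ConstraintStr("~> 1.0").MustParse()
    c = c.Append(discovery.ConstraintStr("!= 1.3.0").MustParse())

    v := discovery.VersionStr("1.2.5").MustParse()
    fmt.Println(c.Allows(v))       // true
    fmt.Println(c.Unconstrained()) // false
}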
index 306128edfb40771368e17345885990956426a466..a814a85ddcad96d03b50f1dc754f9b7271649c92 100644 (file)
@@ -57,12 +57,17 @@ type ContextOpts struct {
        Parallelism        int
        State              *State
        StateFutureAllowed bool
-       Providers          map[string]ResourceProviderFactory
+       ProviderResolver   ResourceProviderResolver
        Provisioners       map[string]ResourceProvisionerFactory
        Shadow             bool
        Targets            []string
        Variables          map[string]interface{}
 
+       // If non-nil, will apply as additional constraints on the provider
+       // plugins that will be requested from the provider resolver.
+       ProviderSHA256s    map[string][]byte
+       SkipProviderVerify bool
+
        UIInput UIInput
 }
 
@@ -102,6 +107,7 @@ type Context struct {
        l                   sync.Mutex // Lock acquired during any task
        parallelSem         Semaphore
        providerInputConfig map[string]map[string]interface{}
+       providerSHA256s     map[string][]byte
        runLock             sync.Mutex
        runCond             *sync.Cond
        runContext          context.Context
@@ -166,7 +172,6 @@ func NewContext(opts *ContextOpts) (*Context, error) {
        //        set by environment variables if necessary. This includes
        //        values taken from -var-file in addition.
        variables := make(map[string]interface{})
-
        if opts.Module != nil {
                var err error
                variables, err = Variables(opts.Module, opts.Variables)
@@ -175,6 +180,23 @@ func NewContext(opts *ContextOpts) (*Context, error) {
                }
        }
 
+       // Bind available provider plugins to the constraints in config
+       var providers map[string]ResourceProviderFactory
+       if opts.ProviderResolver != nil {
+               var err error
+               deps := ModuleTreeDependencies(opts.Module, state)
+               reqd := deps.AllPluginRequirements()
+               if opts.ProviderSHA256s != nil && !opts.SkipProviderVerify {
+                       reqd.LockExecutables(opts.ProviderSHA256s)
+               }
+               providers, err = resourceProviderFactories(opts.ProviderResolver, reqd)
+               if err != nil {
+                       return nil, err
+               }
+       } else {
+               providers = make(map[string]ResourceProviderFactory)
+       }
+
        diff := opts.Diff
        if diff == nil {
                diff = &Diff{}
@@ -182,7 +204,7 @@ func NewContext(opts *ContextOpts) (*Context, error) {
 
        return &Context{
                components: &basicComponentFactory{
-                       providers:    opts.Providers,
+                       providers:    providers,
                        provisioners: opts.Provisioners,
                },
                destroy:   opts.Destroy,
@@ -198,6 +220,7 @@ func NewContext(opts *ContextOpts) (*Context, error) {
 
                parallelSem:         NewSemaphore(par),
                providerInputConfig: make(map[string]map[string]interface{}),
+               providerSHA256s:     opts.ProviderSHA256s,
                sh:                  sh,
        }, nil
 }
@@ -509,6 +532,9 @@ func (c *Context) Plan() (*Plan, error) {
                Vars:    c.variables,
                State:   c.state,
                Targets: c.targets,
+
+               TerraformVersion: VersionString(),
+               ProviderSHA256s:  c.providerSHA256s,
        }
 
        var operation walkOperation
index a9fae6c2c82248b54424e49b975478fc90a56ca4..fd1687e7ed6c622812f6c7bd2e031c5836da6f68 100644 (file)
@@ -28,7 +28,7 @@ const (
 // multiVal matches the index key to a flatmapped set, list or map
 var multiVal = regexp.MustCompile(`\.(#|%)$`)
 
-// Diff trackes the changes that are necessary to apply a configuration
+// Diff tracks the changes that are necessary to apply a configuration
 // to an existing infrastructure.
 type Diff struct {
        // Modules contains all the modules that have a diff
@@ -370,7 +370,7 @@ type InstanceDiff struct {
 
        // Meta is a simple K/V map that is stored in a diff and persisted to
        // plans but otherwise is completely ignored by Terraform core. It is
-       // mean to be used for additional data a resource may want to pass through.
+       // meant to be used for additional data a resource may want to pass through.
        // The value here must only contain Go primitives and collections.
        Meta map[string]interface{}
 }
@@ -551,7 +551,7 @@ func (d *InstanceDiff) SetDestroyDeposed(b bool) {
 }
 
 // These methods are properly locked, for use outside other InstanceDiff
-// methods but everywhere else within in the terraform package.
+// methods but everywhere else within the terraform package.
 // TODO refactor the locking scheme
 func (d *InstanceDiff) SetTainted(b bool) {
        d.mu.Lock()
index 6f09526a4c144cab5ef58332c41c186838ea41b0..c35f9083f89cefcdd162e17c05596ec31da720ce 100644 (file)
@@ -81,6 +81,12 @@ type EvalDiff struct {
        // Resource is needed to fetch the ignore_changes list so we can
        // filter user-requested ignored attributes from the diff.
        Resource *config.Resource
+
+       // Stub is used to flag the generated InstanceDiff as a stub. This is used to
+       // ensure that the node exists to perform interpolations and generate
+       // computed paths off of, but not as an actual diff where resources should be
+       // counted, and not as a diff that should be acted on.
+       Stub bool
 }
 
 // TODO: test
@@ -90,11 +96,13 @@ func (n *EvalDiff) Eval(ctx EvalContext) (interface{}, error) {
        provider := *n.Provider
 
        // Call pre-diff hook
-       err := ctx.Hook(func(h Hook) (HookAction, error) {
-               return h.PreDiff(n.Info, state)
-       })
-       if err != nil {
-               return nil, err
+       if !n.Stub {
+               err := ctx.Hook(func(h Hook) (HookAction, error) {
+                       return h.PreDiff(n.Info, state)
+               })
+               if err != nil {
+                       return nil, err
+               }
        }
 
        // The state for the diff must never be nil
@@ -158,15 +166,19 @@ func (n *EvalDiff) Eval(ctx EvalContext) (interface{}, error) {
        }
 
        // Call post-refresh hook
-       err = ctx.Hook(func(h Hook) (HookAction, error) {
-               return h.PostDiff(n.Info, diff)
-       })
-       if err != nil {
-               return nil, err
+       if !n.Stub {
+               err = ctx.Hook(func(h Hook) (HookAction, error) {
+                       return h.PostDiff(n.Info, diff)
+               })
+               if err != nil {
+                       return nil, err
+               }
        }
 
-       // Update our output
-       *n.OutputDiff = diff
+       // Update our output if we care
+       if n.OutputDiff != nil {
+               *n.OutputDiff = diff
+       }
 
        // Update the state if we care
        if n.OutputState != nil {
index a6a3a90d48607dca3ac5f9a14fd3135b07cd818a..4b29bbb4b8ba70c9811e6e8be209b83888deebbd 100644 (file)
@@ -117,7 +117,15 @@ func (b *PlanGraphBuilder) Steps() []GraphTransformer {
                &CountBoundaryTransformer{},
 
                // Target
-               &TargetsTransformer{Targets: b.Targets},
+               &TargetsTransformer{
+                       Targets: b.Targets,
+
+                       // Resource nodes from config have not yet been expanded for
+                       // "count", so we must apply targeting without indices. Exact
+                       // targeting will be dealt with later when these resources
+                       // DynamicExpand.
+                       IgnoreIndices: true,
+               },
 
                // Close opened plugin connections
                &CloseProviderTransformer{},
index 0634f9698d8fe7715f698af5b1c08dfc01a6c68c..3d3e968fae9ede5c518709f06877a0c247447090 100644 (file)
@@ -144,7 +144,15 @@ func (b *RefreshGraphBuilder) Steps() []GraphTransformer {
                &ReferenceTransformer{},
 
                // Target
-               &TargetsTransformer{Targets: b.Targets},
+               &TargetsTransformer{
+                       Targets: b.Targets,
+
+                       // Resource nodes from config have not yet been expanded for
+                       // "count", so we must apply targeting without indices. Exact
+                       // targeting will be dealt with later when these resources
+                       // DynamicExpand.
+                       IgnoreIndices: true,
+               },
 
                // Close opened plugin connections
                &CloseProviderTransformer{},
index 0def295fa94002361aca709b9d0a8316d5ab9a29..22ddce6c836b50518532d2646f9428b458b89a8f 100644 (file)
@@ -317,9 +317,13 @@ func (i *Interpolater) valueTerraformVar(
        n string,
        v *config.TerraformVariable,
        result map[string]ast.Variable) error {
-       if v.Field != "env" {
+
+       // "env" is supported for backward compatibility, but it's deprecated and
+       // so we won't advertise it as being allowed in the error message. It will
+       // be removed in a future version of Terraform.
+       if v.Field != "workspace" && v.Field != "env" {
                return fmt.Errorf(
-                       "%s: only supported key for 'terraform.X' interpolations is 'env'", n)
+                       "%s: only supported key for 'terraform.X' interpolations is 'workspace'", n)
        }
 
        if i.Meta == nil {
diff --git a/vendor/github.com/hashicorp/terraform/terraform/module_dependencies.go b/vendor/github.com/hashicorp/terraform/terraform/module_dependencies.go
new file mode 100644 (file)
index 0000000..b9f44a0
--- /dev/null
@@ -0,0 +1,156 @@
+package terraform
+
+import (
+       "github.com/hashicorp/terraform/config"
+       "github.com/hashicorp/terraform/config/module"
+       "github.com/hashicorp/terraform/moduledeps"
+       "github.com/hashicorp/terraform/plugin/discovery"
+)
+
+// ModuleTreeDependencies returns the dependencies of the tree of modules
+// described by the given configuration tree and state.
+//
+// Both configuration and state are required because there can be resources
+// implied by instances in the state that no longer exist in config.
+//
+// This function will panic if any invalid version constraint strings are
+// present in the configuration. This is guaranteed not to happen for any
+// configuration that has passed a call to Config.Validate().
+func ModuleTreeDependencies(root *module.Tree, state *State) *moduledeps.Module {
+
+       // First we walk the configuration tree to build the overall structure
+       // and capture the explicit/implicit/inherited provider dependencies.
+       deps := moduleTreeConfigDependencies(root, nil)
+
+       // Next we walk over the resources in the state to catch any additional
+       // dependencies created by existing resources that are no longer in config.
+       // Most things we find in state will already be present in 'deps', but
+       // we're interested in the rare thing that isn't.
+       moduleTreeMergeStateDependencies(deps, state)
+
+       return deps
+}
+
+func moduleTreeConfigDependencies(root *module.Tree, inheritProviders map[string]*config.ProviderConfig) *moduledeps.Module {
+       if root == nil {
+               // If no config is provided, we'll make a synthetic root.
+               // This isn't necessarily correct if we're called with a nil that
+               // *isn't* at the root, but in practice that can never happen.
+               return &moduledeps.Module{
+                       Name: "root",
+               }
+       }
+
+       ret := &moduledeps.Module{
+               Name: root.Name(),
+       }
+
+       cfg := root.Config()
+       providerConfigs := cfg.ProviderConfigsByFullName()
+
+       // Provider dependencies
+       {
+               providers := make(moduledeps.Providers, len(providerConfigs))
+
+               // Any providerConfigs elements are *explicit* provider dependencies,
+               // which is the only situation where the user might provide an actual
+               // version constraint. We'll take care of these first.
+               for fullName, pCfg := range providerConfigs {
+                       inst := moduledeps.ProviderInstance(fullName)
+                       versionSet := discovery.AllVersions
+                       if pCfg.Version != "" {
+                               versionSet = discovery.ConstraintStr(pCfg.Version).MustParse()
+                       }
+                       providers[inst] = moduledeps.ProviderDependency{
+                               Constraints: versionSet,
+                               Reason:      moduledeps.ProviderDependencyExplicit,
+                       }
+               }
+
+               // Each resource in the configuration creates an *implicit* provider
+               // dependency, though we'll only record it if there isn't already
+               // an explicit dependency on the same provider.
+               for _, rc := range cfg.Resources {
+                       fullName := rc.ProviderFullName()
+                       inst := moduledeps.ProviderInstance(fullName)
+                       if _, exists := providers[inst]; exists {
+                               // Explicit dependency already present
+                               continue
+                       }
+
+                       reason := moduledeps.ProviderDependencyImplicit
+                       if _, inherited := inheritProviders[fullName]; inherited {
+                               reason = moduledeps.ProviderDependencyInherited
+                       }
+
+                       providers[inst] = moduledeps.ProviderDependency{
+                               Constraints: discovery.AllVersions,
+                               Reason:      reason,
+                       }
+               }
+
+               ret.Providers = providers
+       }
+
+       childInherit := make(map[string]*config.ProviderConfig)
+       for k, v := range inheritProviders {
+               childInherit[k] = v
+       }
+       for k, v := range providerConfigs {
+               childInherit[k] = v
+       }
+       for _, c := range root.Children() {
+               ret.Children = append(ret.Children, moduleTreeConfigDependencies(c, childInherit))
+       }
+
+       return ret
+}
+
+func moduleTreeMergeStateDependencies(root *moduledeps.Module, state *State) {
+       if state == nil {
+               return
+       }
+
+       findModule := func(path []string) *moduledeps.Module {
+               module := root
+               for _, name := range path[1:] { // skip initial "root"
+                       var next *moduledeps.Module
+                       for _, cm := range module.Children {
+                               if cm.Name == name {
+                                       next = cm
+                                       break
+                               }
+                       }
+
+                       if next == nil {
+                               // If we didn't find a next node, we'll need to make one
+                               next = &moduledeps.Module{
+                                       Name: name,
+                               }
+                               module.Children = append(module.Children, next)
+                       }
+
+                       module = next
+               }
+               return module
+       }
+
+       for _, ms := range state.Modules {
+               module := findModule(ms.Path)
+
+               for _, is := range ms.Resources {
+                       fullName := config.ResourceProviderFullName(is.Type, is.Provider)
+                       inst := moduledeps.ProviderInstance(fullName)
+                       if _, exists := module.Providers[inst]; !exists {
+                               if module.Providers == nil {
+                                       module.Providers = make(moduledeps.Providers)
+                               }
+                               module.Providers[inst] = moduledeps.ProviderDependency{
+                                       Constraints: discovery.AllVersions,
+                                       Reason:      moduledeps.ProviderDependencyFromState,
+                               }
+                       }
+               }
+       }
+
+}
index 6ab9df7a26f8a2b6f2edb91487858ea475a11ce7..cd4fe9201ae4929a4863d028f1b9ef4035e8791b 100644 (file)
@@ -45,13 +45,6 @@ func (n *NodeRefreshableManagedResource) DynamicExpand(ctx EvalContext) (*Graph,
                        Addr:     n.ResourceAddr(),
                },
 
-               // Switch up any node missing state to a plannable resource. This helps
-               // catch cases where data sources depend on the counts from this resource
-               // during a scale out.
-               &ResourceRefreshPlannableTransformer{
-                       State: state,
-               },
-
                // Add the count orphans to make sure these resources are accounted for
                // during a scale in.
                &OrphanResourceCountTransformer{
@@ -100,6 +93,9 @@ func (n *NodeRefreshableManagedResourceInstance) EvalTree() EvalNode {
        // Eval info is different depending on what kind of resource this is
        switch mode := n.Addr.Mode; mode {
        case config.ManagedResourceMode:
+               if n.ResourceState == nil {
+                       return n.evalTreeManagedResourceNoState()
+               }
                return n.evalTreeManagedResource()
 
        case config.DataResourceMode:
@@ -176,3 +172,88 @@ func (n *NodeRefreshableManagedResourceInstance) evalTreeManagedResource() EvalN
                },
        }
 }
+
+// evalTreeManagedResourceNoState produces an EvalSequence for refresh resource
+// nodes that don't have state attached. An example of where this functionality
+// is useful is when a resource that already exists in state is being scaled
+// out, i.e. has its resource count increased. In this case, the scaled-out node
+// needs to be available to other nodes (namely data sources) that may depend
+// on it for proper interpolation, or confusing "index out of range" errors can
+// occur.
+//
+// The steps in this sequence are very similar to the steps carried out in
+// plan, but nothing is done with the diff after it is created - it is dropped,
+// and its changes are not counted in the UI.
+func (n *NodeRefreshableManagedResourceInstance) evalTreeManagedResourceNoState() EvalNode {
+       // Declare a bunch of variables that are used for state during
+       // evaluation. Most of these are written to by address below.
+       var provider ResourceProvider
+       var state *InstanceState
+       var resourceConfig *ResourceConfig
+
+       addr := n.NodeAbstractResource.Addr
+       stateID := addr.stateId()
+       info := &InstanceInfo{
+               Id:         stateID,
+               Type:       addr.Type,
+               ModulePath: normalizeModulePath(addr.Path),
+       }
+
+       // Build the resource for eval
+       resource := &Resource{
+               Name:       addr.Name,
+               Type:       addr.Type,
+               CountIndex: addr.Index,
+       }
+       if resource.CountIndex < 0 {
+               resource.CountIndex = 0
+       }
+
+       // Determine the dependencies for the state.
+       stateDeps := n.StateReferences()
+
+       return &EvalSequence{
+               Nodes: []EvalNode{
+                       &EvalInterpolate{
+                               Config:   n.Config.RawConfig.Copy(),
+                               Resource: resource,
+                               Output:   &resourceConfig,
+                       },
+                       &EvalGetProvider{
+                               Name:   n.ProvidedBy()[0],
+                               Output: &provider,
+                       },
+                       // Re-run validation to catch any errors we missed, e.g. type
+                       // mismatches on computed values.
+                       &EvalValidateResource{
+                               Provider:       &provider,
+                               Config:         &resourceConfig,
+                               ResourceName:   n.Config.Name,
+                               ResourceType:   n.Config.Type,
+                               ResourceMode:   n.Config.Mode,
+                               IgnoreWarnings: true,
+                       },
+                       &EvalReadState{
+                               Name:   stateID,
+                               Output: &state,
+                       },
+                       &EvalDiff{
+                               Name:        stateID,
+                               Info:        info,
+                               Config:      &resourceConfig,
+                               Resource:    n.Config,
+                               Provider:    &provider,
+                               State:       &state,
+                               OutputState: &state,
+                               Stub:        true,
+                       },
+                       &EvalWriteState{
+                               Name:         stateID,
+                               ResourceType: n.Config.Type,
+                               Provider:     n.Config.Provider,
+                               Dependencies: stateDeps,
+                               State:        &state,
+                       },
+               },
+       }
+}
index ea0884505a7874d4e276fb0f4f1ad95368284a4a..51d66529b435c72e04e22a5a4a77b5d53c25e14a 100644 (file)
@@ -6,6 +6,7 @@ import (
        "errors"
        "fmt"
        "io"
+       "log"
        "sync"
 
        "github.com/hashicorp/terraform/config/module"
@@ -31,6 +32,9 @@ type Plan struct {
        Vars    map[string]interface{}
        Targets []string
 
+       TerraformVersion string
+       ProviderSHA256s  map[string][]byte
+
        // Backend is the backend that this plan should use and store data with.
        Backend *BackendState
 
@@ -40,19 +44,58 @@ type Plan struct {
 // Context returns a Context with the data encapsulated in this plan.
 //
 // The following fields in opts are overridden by the plan: Config,
-// Diff, State, Variables.
+// Diff, Variables.
+//
+// If State is not provided, it is set from the plan. If it _is_ provided,
+// it must be Equal to the state stored in plan, but may have a newer
+// serial.
 func (p *Plan) Context(opts *ContextOpts) (*Context, error) {
+       var err error
+       opts, err = p.contextOpts(opts)
+       if err != nil {
+               return nil, err
+       }
+       return NewContext(opts)
+}
+
+// contextOpts mutates the given base ContextOpts in place to use input
+// objects obtained from the receiving plan.
+func (p *Plan) contextOpts(base *ContextOpts) (*ContextOpts, error) {
+       opts := base
+
        opts.Diff = p.Diff
        opts.Module = p.Module
-       opts.State = p.State
        opts.Targets = p.Targets
+       opts.ProviderSHA256s = p.ProviderSHA256s
+
+       if opts.State == nil {
+               opts.State = p.State
+       } else if !opts.State.Equal(p.State) {
+               // Even if we're overriding the state, it should be logically equal
+               // to what's in plan. The only valid change to have made by the time
+               // we get here is to have incremented the serial.
+               //
+               // Due to the fact that serialization may change the representation of
+               // the state, there is little chance that these aren't actually equal.
+               // Log the error condition for reference, but continue with the state
+               // we have.
+               log.Println("[WARNING] Plan state and ContextOpts state are not equal")
+       }
+
+       thisVersion := VersionString()
+       if p.TerraformVersion != "" && p.TerraformVersion != thisVersion {
+               return nil, fmt.Errorf(
+                       "plan was created with a different version of Terraform (created with %s, but running %s)",
+                       p.TerraformVersion, thisVersion,
+               )
+       }
 
        opts.Variables = make(map[string]interface{})
        for k, v := range p.Vars {
                opts.Variables[k] = v
        }
 
-       return NewContext(opts)
+       return opts, nil
 }
 
 func (p *Plan) String() string {
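
A rough usage sketch of the new behaviour (not part of the diff): a saved plan is read back and allowed to populate the ContextOpts; leaving State nil lets the plan supply it, an unequal State only logs a warning, and a differing TerraformVersion is now a hard error from Context. The plan file path is hypothetical; ReadPlan, ContextOpts and Plan.Context are the functions shown or referenced in this file.

package main

import (
    "log"
    "os"

    "github.com/hashicorp/terraform/terraform"
)

func main() {
    f, err := os.Open("tfplan") // hypothetical path written earlier by WritePlan
    if err != nil {
        log.Fatal(err)
    }
    defer f.Close()

    plan, err := terraform.ReadPlan(f)
    if err != nil {
        log.Fatal(err)
    }

    // State is left nil here, so the plan's own state is used. A plan
    // recorded with a different Terraform version makes Context fail.
    ctx, err := plan.Context(&terraform.ContextOpts{})
    if err != nil {
        log.Fatal(err)
    }
    _ = ctx
}
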
@@ -86,7 +129,7 @@ func (p *Plan) init() {
 // the ability in the future to change the file format if we want for any
 // reason.
 const planFormatMagic = "tfplan"
-const planFormatVersion byte = 1
+const planFormatVersion byte = 2
 
 // ReadPlan reads a plan structure out of a reader in the format that
 // was written by WritePlan.
index a8a0c95530fe7d256988dc196c0ded0845d6a201..8badca8053c68a21db115cc1da76593c9a420e86 100644 (file)
@@ -8,6 +8,7 @@ import (
        "strings"
 
        "github.com/hashicorp/terraform/config"
+       "github.com/hashicorp/terraform/config/module"
 )
 
 // ResourceAddress is a way of identifying an individual resource (or,
@@ -89,6 +90,51 @@ func (r *ResourceAddress) String() string {
        return strings.Join(result, ".")
 }
 
+// HasResourceSpec returns true if the address has a resource spec, as
+// defined in the documentation:
+//    https://www.terraform.io/docs/internals/resource-addressing.html
+// In particular, this returns false if the address contains only
+// a module path, thus addressing the entire module.
+func (r *ResourceAddress) HasResourceSpec() bool {
+       return r.Type != "" && r.Name != ""
+}
+
+// WholeModuleAddress returns the resource address that refers to all
+// resources in the same module as the receiver address.
+func (r *ResourceAddress) WholeModuleAddress() *ResourceAddress {
+       return &ResourceAddress{
+               Path:            r.Path,
+               Index:           -1,
+               InstanceTypeSet: false,
+       }
+}
+
+// MatchesConfig returns true if the receiver matches the given
+// configuration resource within the given configuration module.
+//
+// Since resource configuration blocks represent all of the instances of
+// a multi-instance resource, the index of the address (if any) is not
+// considered.
+func (r *ResourceAddress) MatchesConfig(mod *module.Tree, rc *config.Resource) bool {
+       if r.HasResourceSpec() {
+               if r.Mode != rc.Mode || r.Type != rc.Type || r.Name != rc.Name {
+                       return false
+               }
+       }
+
+       addrPath := r.Path
+       cfgPath := mod.Path()
+
+       // normalize
+       if len(addrPath) == 0 {
+               addrPath = nil
+       }
+       if len(cfgPath) == 0 {
+               cfgPath = nil
+       }
+       return reflect.DeepEqual(addrPath, cfgPath)
+}
+
 // stateId returns the ID that this resource should be entered with
 // in the state. This is also used for diffs. In the future, we'd like to
 // move away from this string field so I don't export this.
@@ -185,7 +231,10 @@ func ParseResourceAddress(s string) (*ResourceAddress, error) {
 
        // not allowed to say "data." without a type following
        if mode == config.DataResourceMode && matches["type"] == "" {
-               return nil, fmt.Errorf("must target specific data instance")
+               return nil, fmt.Errorf(
+                       "invalid resource address %q: must target specific data instance",
+                       s,
+               )
        }
 
        return &ResourceAddress{
@@ -199,6 +248,75 @@ func ParseResourceAddress(s string) (*ResourceAddress, error) {
        }, nil
 }
 
+// ParseResourceAddressForInstanceDiff creates a ResourceAddress for a
+// resource name as described in a module diff.
+//
+// For historical reasons a different addressing format is used in this
+// context. The internal format should not be shown in the UI and instead
+// this function should be used to translate to a ResourceAddress and
+// then, where appropriate, use the String method to produce a canonical
+// resource address string for display in the UI.
+//
+// The given path slice must be empty (or nil) for the root module, and
+// otherwise consist of a sequence of module names traversing down into
+// the module tree. If a non-nil path is provided, the caller must not
+// modify its underlying array after passing it to this function.
+func ParseResourceAddressForInstanceDiff(path []string, key string) (*ResourceAddress, error) {
+       addr, err := parseResourceAddressInternal(key)
+       if err != nil {
+               return nil, err
+       }
+       addr.Path = path
+       return addr, nil
+}
+
+// Contains returns true if and only if the given node is contained within
+// the receiver.
+//
+// Containment is defined in terms of the module and resource hierarchy:
+// a resource is contained within its module and any ancestor modules,
+// an indexed resource instance is contained within the unindexed resource, etc.
+func (addr *ResourceAddress) Contains(other *ResourceAddress) bool {
+       ourPath := addr.Path
+       givenPath := other.Path
+       if len(givenPath) < len(ourPath) {
+               return false
+       }
+       for i := range ourPath {
+               if ourPath[i] != givenPath[i] {
+                       return false
+               }
+       }
+
+       // If the receiver is a whole-module address then the path prefix
+       // matching is all we need.
+       if !addr.HasResourceSpec() {
+               return true
+       }
+
+       if addr.Type != other.Type || addr.Name != other.Name || addr.Mode != other.Mode {
+               return false
+       }
+
+       if addr.Index != -1 && addr.Index != other.Index {
+               return false
+       }
+
+       if addr.InstanceTypeSet && (addr.InstanceTypeSet != other.InstanceTypeSet || addr.InstanceType != other.InstanceType) {
+               return false
+       }
+
+       return true
+}
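
A small sketch of the containment rules described above (not part of the diff; the addresses are made up). It relies only on ParseResourceAddress and Contains as defined in this file.

package main

import (
    "fmt"

    "github.com/hashicorp/terraform/terraform"
)

func main() {
    module, _ := terraform.ParseResourceAddress("module.app")
    indexed, _ := terraform.ParseResourceAddress("module.app.aws_instance.web[3]")
    unindexed, _ := terraform.ParseResourceAddress("module.app.aws_instance.web")

    fmt.Println(module.Contains(indexed))    // true: the module path prefix is enough
    fmt.Println(unindexed.Contains(indexed)) // true: the receiver leaves the index unset
    fmt.Println(indexed.Contains(unindexed)) // false: an indexed instance does not contain the unindexed form
}
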
+
+// Equals returns true if the receiver matches the given address.
+//
+// The name of this method is a misnomer, since it doesn't test for exact
+// equality. Instead, it tests that the _specified_ parts of each
+// address match, treating any unspecified parts as wildcards.
+//
+// See also Contains, which takes a more hierarchical approach to comparing
+// addresses.
 func (addr *ResourceAddress) Equals(raw interface{}) bool {
        other, ok := raw.(*ResourceAddress)
        if !ok {
@@ -233,6 +351,58 @@ func (addr *ResourceAddress) Equals(raw interface{}) bool {
                modeMatch
 }
 
+// Less returns true if and only if the receiver should be sorted before
+// the given address when presenting a list of resource addresses to
+// an end-user.
+//
+// This sort uses lexicographic sorting for most components, but uses
+// numeric sort for indices, thus causing index 10 to sort after
+// index 9, rather than after index 1.
+func (addr *ResourceAddress) Less(other *ResourceAddress) bool {
+
+       switch {
+
+       case len(addr.Path) < len(other.Path):
+               return true
+
+       case !reflect.DeepEqual(addr.Path, other.Path):
+               // If the two paths are the same length but don't match, we'll just
+               // cheat and compare the string forms since it's easier than
+               // comparing all of the path segments in turn.
+               addrStr := addr.String()
+               otherStr := other.String()
+               return addrStr < otherStr
+
+       case addr.Mode == config.DataResourceMode && other.Mode != config.DataResourceMode:
+               return true
+
+       case addr.Type < other.Type:
+               return true
+
+       case addr.Name < other.Name:
+               return true
+
+       case addr.Index < other.Index:
+               // Since "Index" is -1 for an un-indexed address, this also conveniently
+               // sorts unindexed addresses before indexed ones, should they both
+               // appear for some reason.
+               return true
+
+       case other.InstanceTypeSet && !addr.InstanceTypeSet:
+               return true
+
+       case addr.InstanceType < other.InstanceType:
+               // InstanceType is actually an enum, so this is just an arbitrary
+               // sort based on the enum numeric values, and thus not particularly
+               // meaningful.
+               return true
+
+       default:
+               return false
+
+       }
+}
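
Less is intended to be plugged into the standard library's sort package; a minimal sketch (not part of the diff, addresses made up) showing the numeric index ordering the comment describes, with web[10] landing after web[9] rather than after web[1]:

package main

import (
    "fmt"
    "log"
    "sort"

    "github.com/hashicorp/terraform/terraform"
)

func main() {
    var addrs []*terraform.ResourceAddress
    for _, s := range []string{"aws_instance.web[10]", "aws_instance.web[9]", "aws_instance.web[1]"} {
        a, err := terraform.ParseResourceAddress(s)
        if err != nil {
            log.Fatal(err)
        }
        addrs = append(addrs, a)
    }

    sort.Slice(addrs, func(i, j int) bool { return addrs[i].Less(addrs[j]) })

    for _, a := range addrs {
        fmt.Println(a) // aws_instance.web[1], aws_instance.web[9], aws_instance.web[10]
    }
}
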
+
 func ParseResourceIndex(s string) (int, error) {
        if s == "" {
                return -1, nil
@@ -275,7 +445,7 @@ func tokenizeResourceAddress(s string) (map[string]string, error) {
        // string "aws_instance.web.tainted[1]"
        re := regexp.MustCompile(`\A` +
                // "module.foo.module.bar" (optional)
-               `(?P<path>(?:module\.[^.]+\.?)*)` +
+               `(?P<path>(?:module\.(?P<module_name>[^.]+)\.?)*)` +
                // possibly "data.", if targeting is a data resource
                `(?P<data_prefix>(?:data\.)?)` +
                // "aws_instance.web" (optional when module path specified)
@@ -289,7 +459,7 @@ func tokenizeResourceAddress(s string) (map[string]string, error) {
        groupNames := re.SubexpNames()
        rawMatches := re.FindAllStringSubmatch(s, -1)
        if len(rawMatches) != 1 {
-               return nil, fmt.Errorf("Problem parsing address: %q", s)
+               return nil, fmt.Errorf("invalid resource address %q", s)
        }
 
        matches := make(map[string]string)
index 1a68c8699c4255e288a7233e5e0b1923f2bb4ed1..7d78f67ef9288dccd09f16b5247371625829dc49 100644 (file)
@@ -1,5 +1,12 @@
 package terraform
 
+import (
+       "fmt"
+
+       multierror "github.com/hashicorp/go-multierror"
+       "github.com/hashicorp/terraform/plugin/discovery"
+)
+
 // ResourceProvider is an interface that must be implemented by any
 // resource provider: the thing that creates and manages the resources in
 // a Terraform configuration.
@@ -154,6 +161,18 @@ type ResourceProvider interface {
        ReadDataApply(*InstanceInfo, *InstanceDiff) (*InstanceState, error)
 }
 
+// ResourceProviderError may be returned when creating a Context if the
+// required providers cannot be satisfied. This error can then be used to
+// format a more useful message for the user.
+type ResourceProviderError struct {
+       Errors []error
+}
+
+func (e *ResourceProviderError) Error() string {
+       // use multierror to format the default output
+       return multierror.Append(nil, e.Errors...).Error()
+}
+
 // ResourceProviderCloser is an interface that providers that can close
 // connections that aren't needed anymore must implement.
 type ResourceProviderCloser interface {
@@ -171,6 +190,50 @@ type DataSource struct {
        Name string
 }
 
+// ResourceProviderResolver is an interface implemented by objects that are
+// able to resolve a given set of resource provider version constraints
+// into ResourceProviderFactory callbacks.
+type ResourceProviderResolver interface {
+       // Given a constraint map, return a ResourceProviderFactory for each
+       // requested provider. If some or all of the constraints cannot be
+       // satisfied, return a non-nil slice of errors describing the problems.
+       ResolveProviders(reqd discovery.PluginRequirements) (map[string]ResourceProviderFactory, []error)
+}
+
+// ResourceProviderResolverFunc wraps a callback function and turns it into
+// a ResourceProviderResolver implementation, for convenience in situations
+// where a function and its associated closure are sufficient as a resolver
+// implementation.
+type ResourceProviderResolverFunc func(reqd discovery.PluginRequirements) (map[string]ResourceProviderFactory, []error)
+
+// ResolveProviders implements ResourceProviderResolver by calling the
+// wrapped function.
+func (f ResourceProviderResolverFunc) ResolveProviders(reqd discovery.PluginRequirements) (map[string]ResourceProviderFactory, []error) {
+       return f(reqd)
+}
+
+// ResourceProviderResolverFixed returns a ResourceProviderResolver that
+// has a fixed set of provider factories provided by the caller. The returned
+// resolver ignores version constraints entirely and just returns the given
+// factory for each requested provider name.
+//
+// This function is primarily used in tests, to provide mock providers or
+// in-process providers under test.
+func ResourceProviderResolverFixed(factories map[string]ResourceProviderFactory) ResourceProviderResolver {
+       return ResourceProviderResolverFunc(func(reqd discovery.PluginRequirements) (map[string]ResourceProviderFactory, []error) {
+               ret := make(map[string]ResourceProviderFactory, len(reqd))
+               var errs []error
+               for name := range reqd {
+                       if factory, exists := factories[name]; exists {
+                               ret[name] = factory
+                       } else {
+                               errs = append(errs, fmt.Errorf("provider %q is not available", name))
+                       }
+               }
+               return ret, errs
+       })
+}
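
As the comment notes, the fixed resolver is mainly a test convenience. A hedged sketch of wiring one up (not part of the diff): the provider name is arbitrary, and an empty helper/schema.Provider is assumed here to stand in for a real or mocked ResourceProvider implementation.

package main

import (
    "fmt"

    "github.com/hashicorp/terraform/helper/schema"
    "github.com/hashicorp/terraform/plugin/discovery"
    "github.com/hashicorp/terraform/terraform"
)

func main() {
    // The fixed resolver ignores version constraints entirely and always
    // returns the factories it was given, one per requested name.
    resolver := terraform.ResourceProviderResolverFixed(
        map[string]terraform.ResourceProviderFactory{
            "null": func() (terraform.ResourceProvider, error) {
                return &schema.Provider{}, nil // stand-in provider for the sketch
            },
        },
    )

    factories, errs := resolver.ResolveProviders(discovery.PluginRequirements{})
    fmt.Println(len(factories), errs) // 0 []: nothing requested, nothing resolved
}
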
+
 // ResourceProviderFactory is a function type that creates a new instance
 // of a resource provider.
 type ResourceProviderFactory func() (ResourceProvider, error)
@@ -202,3 +265,21 @@ func ProviderHasDataSource(p ResourceProvider, n string) bool {
 
        return false
 }
+
+// resourceProviderFactories matches available plugins to the given version
+// requirements to produce a map of compatible provider plugins if possible,
+// or an error if the currently-available plugins are insufficient.
+//
+// This should be called only with configurations that have passed calls
+// to config.Validate(), which ensures that all of the given version
+// constraints are valid. It will panic if any invalid constraints are present.
+func resourceProviderFactories(resolver ResourceProviderResolver, reqd discovery.PluginRequirements) (map[string]ResourceProviderFactory, error) {
+       ret, errs := resolver.ResolveProviders(reqd)
+       if errs != nil {
+               return nil, &ResourceProviderError{
+                       Errors: errs,
+               }
+       }
+
+       return ret, nil
+}
index 074b68245466ada2fc214857e9e0341594dbf39c..0c46194d6fb5d9b9fe7b65a77dcc7b634d55e626 100644 (file)
@@ -533,6 +533,43 @@ func (s *State) equal(other *State) bool {
        return true
 }
 
+// MarshalEqual is similar to Equal but provides a stronger definition of
+// "equal", where two states are equal if and only if their serialized form
+// is byte-for-byte identical.
+//
+// This is primarily useful for callers that are trying to save snapshots
+// of state to persistent storage, allowing them to detect when a new
+// snapshot must be taken.
+//
+// Note that the serial number and lineage are included in the serialized form,
+// so it's the caller's responsibility to properly manage these attributes
+// so that this method is only called on two states that have the same
+// serial and lineage, unless detecting such differences is desired.
+func (s *State) MarshalEqual(other *State) bool {
+       if s == nil && other == nil {
+               return true
+       } else if s == nil || other == nil {
+               return false
+       }
+
+       recvBuf := &bytes.Buffer{}
+       otherBuf := &bytes.Buffer{}
+
+       err := WriteState(s, recvBuf)
+       if err != nil {
+               // should never happen, since we're writing to a buffer
+               panic(err)
+       }
+
+       err = WriteState(other, otherBuf)
+       if err != nil {
+               // should never happen, since we're writing to a buffer
+               panic(err)
+       }
+
+       return bytes.Equal(recvBuf.Bytes(), otherBuf.Bytes())
+}
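
A minimal sketch of the snapshot use case mentioned in the comment (not part of the diff; the persist callback is hypothetical):

package snapshot

import "github.com/hashicorp/terraform/terraform"

// maybeSnapshot writes a new snapshot only when the serialized form of the
// state has changed since the last saved copy. persist is a hypothetical
// callback supplied by the caller.
func maybeSnapshot(saved, current *terraform.State, persist func(*terraform.State) error) error {
    if saved.MarshalEqual(current) {
        return nil // byte-for-byte identical, nothing to write
    }
    return persist(current)
}
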
+
 type StateAgeComparison int
 
 const (
@@ -603,6 +640,10 @@ func (s *State) SameLineage(other *State) bool {
 // DeepCopy performs a deep copy of the state structure and returns
 // a new structure.
 func (s *State) DeepCopy() *State {
+       if s == nil {
+               return nil
+       }
+
        copy, err := copystructure.Config{Lock: true}.Copy(s)
        if err != nil {
                panic(err)
@@ -611,30 +652,6 @@ func (s *State) DeepCopy() *State {
        return copy.(*State)
 }
 
-// IncrementSerialMaybe increments the serial number of this state
-// if it different from the other state.
-func (s *State) IncrementSerialMaybe(other *State) {
-       if s == nil {
-               return
-       }
-       if other == nil {
-               return
-       }
-       s.Lock()
-       defer s.Unlock()
-
-       if s.Serial > other.Serial {
-               return
-       }
-       if other.TFVersion != s.TFVersion || !s.equal(other) {
-               if other.Serial > s.Serial {
-                       s.Serial = other.Serial
-               }
-
-               s.Serial++
-       }
-}
-
 // FromFutureTerraform checks if this state was written by a Terraform
 // version from the future.
 func (s *State) FromFutureTerraform() bool {
@@ -660,6 +677,7 @@ func (s *State) init() {
        if s.Version == 0 {
                s.Version = StateVersion
        }
+
        if s.moduleByPath(rootModulePath) == nil {
                s.addModule(rootModulePath)
        }
diff --git a/vendor/github.com/hashicorp/terraform/terraform/test_failure b/vendor/github.com/hashicorp/terraform/terraform/test_failure
new file mode 100644 (file)
index 0000000..5d3ad1a
--- /dev/null
@@ -0,0 +1,9 @@
+--- FAIL: TestContext2Plan_moduleProviderInherit (0.01s)
+       context_plan_test.go:552: bad: []string{"child"}
+map[string]dag.Vertex{}
+"module.middle.null"
+map[string]dag.Vertex{}
+"module.middle.module.inner.null"
+map[string]dag.Vertex{}
+"aws"
+FAIL
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_resource_refresh_plannable.go b/vendor/github.com/hashicorp/terraform/terraform/transform_resource_refresh_plannable.go
deleted file mode 100644 (file)
index 35358a3..0000000
+++ /dev/null
@@ -1,55 +0,0 @@
-package terraform
-
-import (
-       "fmt"
-       "log"
-)
-
-// ResourceRefreshPlannableTransformer is a GraphTransformer that replaces any
-// nodes that don't have state yet exist in config with
-// NodePlannableResourceInstance.
-//
-// This transformer is used when expanding count on managed resource nodes
-// during the refresh phase to ensure that data sources that have
-// interpolations that depend on resources existing in the graph can be walked
-// properly.
-type ResourceRefreshPlannableTransformer struct {
-       // The full global state.
-       State *State
-}
-
-// Transform implements GraphTransformer for
-// ResourceRefreshPlannableTransformer.
-func (t *ResourceRefreshPlannableTransformer) Transform(g *Graph) error {
-nextVertex:
-       for _, v := range g.Vertices() {
-               addr := v.(*NodeRefreshableManagedResourceInstance).Addr
-
-               // Find the state for this address, if there is one
-               filter := &StateFilter{State: t.State}
-               results, err := filter.Filter(addr.String())
-               if err != nil {
-                       return err
-               }
-
-               // Check to see if we have a state for this resource. If we do, skip this
-               // node.
-               for _, result := range results {
-                       if _, ok := result.Value.(*ResourceState); ok {
-                               continue nextVertex
-                       }
-               }
-               // If we don't, convert this resource to a NodePlannableResourceInstance node
-               // with all of the data we need to make it happen.
-               log.Printf("[TRACE] No state for %s, converting to NodePlannableResourceInstance", addr.String())
-               new := &NodePlannableResourceInstance{
-                       NodeAbstractResource: v.(*NodeRefreshableManagedResourceInstance).NodeAbstractResource,
-               }
-               // Replace the node in the graph
-               if !g.Replace(v, new) {
-                       return fmt.Errorf("ResourceRefreshPlannableTransformer: Could not replace node %#v with %#v", v, new)
-               }
-       }
-
-       return nil
-}
index 125f9e302155a5a8b817d0d3afbdc6b7aff574eb..4f117b4f732be059fa37439589f3edc5ec9a24fd 100644 (file)
@@ -41,6 +41,12 @@ type TargetsTransformer struct {
        // that already have the targets parsed
        ParsedTargets []ResourceAddress
 
+       // If set, the index portions of resource addresses will be ignored
+       // for comparison. This is used when transforming a graph where
+       // counted resources have not yet been expanded, since otherwise
+       // the unexpanded nodes (which never have indices) would not match.
+       IgnoreIndices bool
+
        // Set to true when we're in a `terraform destroy` or a
        // `terraform plan -destroy`
        Destroy bool
@@ -199,7 +205,12 @@ func (t *TargetsTransformer) nodeIsTarget(
 
        addr := r.ResourceAddr()
        for _, targetAddr := range addrs {
-               if targetAddr.Equals(addr) {
+               if t.IgnoreIndices {
+                       // targetAddr is not a pointer, so we can safely mutate it without
+                       // interfering with references elsewhere.
+                       targetAddr.Index = -1
+               }
+               if targetAddr.Contains(addr) {
                        return true
                }
        }
index f41f0d7d634aaf7b4bf0b96e3bb5d6306becad6f..752241af1ee438d53b82bd3e1cc8045b56fa808f 100644 (file)
@@ -2,7 +2,8 @@ package terraform
 
 import (
        "sort"
-       "strings"
+
+       "github.com/hashicorp/terraform/config"
 )
 
 // Semaphore is a wrapper around a channel to provide
@@ -47,21 +48,8 @@ func (s Semaphore) Release() {
        }
 }
 
-// resourceProvider returns the provider name for the given type.
-func resourceProvider(t, alias string) string {
-       if alias != "" {
-               return alias
-       }
-
-       idx := strings.IndexRune(t, '_')
-       if idx == -1 {
-               // If no underscores, the resource name is assumed to be
-               // also the provider name, e.g. if the provider exposes
-               // only a single resource of each type.
-               return t
-       }
-
-       return t[:idx]
+func resourceProvider(resourceType, explicitProvider string) string {
+       return config.ResourceProviderFullName(resourceType, explicitProvider)
 }
 
 // strSliceContains checks if a given string is contained in a slice
index cdfb8fb66571cb1be5f5bcf45411fe265a53fbf2..d61b11ea2fe36bab41584bcba992483cc84e1676 100644 (file)
@@ -7,12 +7,12 @@ import (
 )
 
 // The main version number that is being run at the moment.
-const Version = "0.9.8"
+const Version = "0.10.0"
 
 // A pre-release marker for the version. If this is "" (empty string)
 // then it means that it is a final release. Otherwise, this is a pre-release
 // such as "dev" (in development), "beta", "rc1", etc.
-var VersionPrerelease = ""
+var VersionPrerelease = "dev"
 
 // SemVersion is an instance of version.Version. This has the secondary
 // benefit of verifying during tests and init time that our version is a
diff --git a/vendor/golang.org/x/crypto/cast5/cast5.go b/vendor/golang.org/x/crypto/cast5/cast5.go
new file mode 100644 (file)
index 0000000..0b4af37
--- /dev/null
@@ -0,0 +1,526 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package cast5 implements CAST5, as defined in RFC 2144. CAST5 is a common
+// OpenPGP cipher.
+package cast5 // import "golang.org/x/crypto/cast5"
+
+import "errors"
+
+const BlockSize = 8
+const KeySize = 16
+
+type Cipher struct {
+       masking [16]uint32
+       rotate  [16]uint8
+}
+
+func NewCipher(key []byte) (c *Cipher, err error) {
+       if len(key) != KeySize {
+               return nil, errors.New("CAST5: keys must be 16 bytes")
+       }
+
+       c = new(Cipher)
+       c.keySchedule(key)
+       return
+}
+
+func (c *Cipher) BlockSize() int {
+       return BlockSize
+}
+
+func (c *Cipher) Encrypt(dst, src []byte) {
+       l := uint32(src[0])<<24 | uint32(src[1])<<16 | uint32(src[2])<<8 | uint32(src[3])
+       r := uint32(src[4])<<24 | uint32(src[5])<<16 | uint32(src[6])<<8 | uint32(src[7])
+
+       l, r = r, l^f1(r, c.masking[0], c.rotate[0])
+       l, r = r, l^f2(r, c.masking[1], c.rotate[1])
+       l, r = r, l^f3(r, c.masking[2], c.rotate[2])
+       l, r = r, l^f1(r, c.masking[3], c.rotate[3])
+
+       l, r = r, l^f2(r, c.masking[4], c.rotate[4])
+       l, r = r, l^f3(r, c.masking[5], c.rotate[5])
+       l, r = r, l^f1(r, c.masking[6], c.rotate[6])
+       l, r = r, l^f2(r, c.masking[7], c.rotate[7])
+
+       l, r = r, l^f3(r, c.masking[8], c.rotate[8])
+       l, r = r, l^f1(r, c.masking[9], c.rotate[9])
+       l, r = r, l^f2(r, c.masking[10], c.rotate[10])
+       l, r = r, l^f3(r, c.masking[11], c.rotate[11])
+
+       l, r = r, l^f1(r, c.masking[12], c.rotate[12])
+       l, r = r, l^f2(r, c.masking[13], c.rotate[13])
+       l, r = r, l^f3(r, c.masking[14], c.rotate[14])
+       l, r = r, l^f1(r, c.masking[15], c.rotate[15])
+
+       dst[0] = uint8(r >> 24)
+       dst[1] = uint8(r >> 16)
+       dst[2] = uint8(r >> 8)
+       dst[3] = uint8(r)
+       dst[4] = uint8(l >> 24)
+       dst[5] = uint8(l >> 16)
+       dst[6] = uint8(l >> 8)
+       dst[7] = uint8(l)
+}
+
+func (c *Cipher) Decrypt(dst, src []byte) {
+       l := uint32(src[0])<<24 | uint32(src[1])<<16 | uint32(src[2])<<8 | uint32(src[3])
+       r := uint32(src[4])<<24 | uint32(src[5])<<16 | uint32(src[6])<<8 | uint32(src[7])
+
+       l, r = r, l^f1(r, c.masking[15], c.rotate[15])
+       l, r = r, l^f3(r, c.masking[14], c.rotate[14])
+       l, r = r, l^f2(r, c.masking[13], c.rotate[13])
+       l, r = r, l^f1(r, c.masking[12], c.rotate[12])
+
+       l, r = r, l^f3(r, c.masking[11], c.rotate[11])
+       l, r = r, l^f2(r, c.masking[10], c.rotate[10])
+       l, r = r, l^f1(r, c.masking[9], c.rotate[9])
+       l, r = r, l^f3(r, c.masking[8], c.rotate[8])
+
+       l, r = r, l^f2(r, c.masking[7], c.rotate[7])
+       l, r = r, l^f1(r, c.masking[6], c.rotate[6])
+       l, r = r, l^f3(r, c.masking[5], c.rotate[5])
+       l, r = r, l^f2(r, c.masking[4], c.rotate[4])
+
+       l, r = r, l^f1(r, c.masking[3], c.rotate[3])
+       l, r = r, l^f3(r, c.masking[2], c.rotate[2])
+       l, r = r, l^f2(r, c.masking[1], c.rotate[1])
+       l, r = r, l^f1(r, c.masking[0], c.rotate[0])
+
+       dst[0] = uint8(r >> 24)
+       dst[1] = uint8(r >> 16)
+       dst[2] = uint8(r >> 8)
+       dst[3] = uint8(r)
+       dst[4] = uint8(l >> 24)
+       dst[5] = uint8(l >> 16)
+       dst[6] = uint8(l >> 8)
+       dst[7] = uint8(l)
+}
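
A short roundtrip sketch (not part of the diff) using only the exported API defined in this file; the key and plaintext bytes are arbitrary.

package main

import (
    "bytes"
    "fmt"
    "log"

    "golang.org/x/crypto/cast5"
)

func main() {
    key := []byte("0123456789abcdef") // cast5.KeySize == 16
    src := []byte("8bytes!!")         // cast5.BlockSize == 8

    c, err := cast5.NewCipher(key)
    if err != nil {
        log.Fatal(err)
    }

    ct := make([]byte, cast5.BlockSize)
    pt := make([]byte, cast5.BlockSize)
    c.Encrypt(ct, src)
    c.Decrypt(pt, ct)

    fmt.Println(bytes.Equal(src, pt)) // true: decrypting the ciphertext recovers the block
}
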
+
+type keyScheduleA [4][7]uint8
+type keyScheduleB [4][5]uint8
+
+// keyScheduleRound contains the magic values for a round of the key schedule.
+// The keyScheduleA deals with the lines like:
+//   z0z1z2z3 = x0x1x2x3 ^ S5[xD] ^ S6[xF] ^ S7[xC] ^ S8[xE] ^ S7[x8]
+// Conceptually, both x and z are in the same array, x first. The first
+// element describes which word of this array gets written to and the
+// second, which word gets read. So, for the line above, it's "4, 0", because
+// it's writing to the first word of z, which, being after x, is word 4, and
+// reading from the first word of x: word 0.
+//
+// Next are the indexes into the S-boxes. Now the array is treated as bytes. So
+// "xD" is 0xd. The first byte of z is written as "16 + 0", just to be clear
+// that it's z that we're indexing.
+//
+// keyScheduleB deals with lines like:
+//   K1 = S5[z8] ^ S6[z9] ^ S7[z7] ^ S8[z6] ^ S5[z2]
+// "K1" is ignored because key words are always written in order. So the five
+// elements are the S-box indexes. They use the same form as in keyScheduleA,
+// above.
+
+type keyScheduleRound struct{}
+type keySchedule []keyScheduleRound
+
+var schedule = []struct {
+       a keyScheduleA
+       b keyScheduleB
+}{
+       {
+               keyScheduleA{
+                       {4, 0, 0xd, 0xf, 0xc, 0xe, 0x8},
+                       {5, 2, 16 + 0, 16 + 2, 16 + 1, 16 + 3, 0xa},
+                       {6, 3, 16 + 7, 16 + 6, 16 + 5, 16 + 4, 9},
+                       {7, 1, 16 + 0xa, 16 + 9, 16 + 0xb, 16 + 8, 0xb},
+               },
+               keyScheduleB{
+                       {16 + 8, 16 + 9, 16 + 7, 16 + 6, 16 + 2},
+                       {16 + 0xa, 16 + 0xb, 16 + 5, 16 + 4, 16 + 6},
+                       {16 + 0xc, 16 + 0xd, 16 + 3, 16 + 2, 16 + 9},
+                       {16 + 0xe, 16 + 0xf, 16 + 1, 16 + 0, 16 + 0xc},
+               },
+       },
+       {
+               keyScheduleA{
+                       {0, 6, 16 + 5, 16 + 7, 16 + 4, 16 + 6, 16 + 0},
+                       {1, 4, 0, 2, 1, 3, 16 + 2},
+                       {2, 5, 7, 6, 5, 4, 16 + 1},
+                       {3, 7, 0xa, 9, 0xb, 8, 16 + 3},
+               },
+               keyScheduleB{
+                       {3, 2, 0xc, 0xd, 8},
+                       {1, 0, 0xe, 0xf, 0xd},
+                       {7, 6, 8, 9, 3},
+                       {5, 4, 0xa, 0xb, 7},
+               },
+       },
+       {
+               keyScheduleA{
+                       {4, 0, 0xd, 0xf, 0xc, 0xe, 8},
+                       {5, 2, 16 + 0, 16 + 2, 16 + 1, 16 + 3, 0xa},
+                       {6, 3, 16 + 7, 16 + 6, 16 + 5, 16 + 4, 9},
+                       {7, 1, 16 + 0xa, 16 + 9, 16 + 0xb, 16 + 8, 0xb},
+               },
+               keyScheduleB{
+                       {16 + 3, 16 + 2, 16 + 0xc, 16 + 0xd, 16 + 9},
+                       {16 + 1, 16 + 0, 16 + 0xe, 16 + 0xf, 16 + 0xc},
+                       {16 + 7, 16 + 6, 16 + 8, 16 + 9, 16 + 2},
+                       {16 + 5, 16 + 4, 16 + 0xa, 16 + 0xb, 16 + 6},
+               },
+       },
+       {
+               keyScheduleA{
+                       {0, 6, 16 + 5, 16 + 7, 16 + 4, 16 + 6, 16 + 0},
+                       {1, 4, 0, 2, 1, 3, 16 + 2},
+                       {2, 5, 7, 6, 5, 4, 16 + 1},
+                       {3, 7, 0xa, 9, 0xb, 8, 16 + 3},
+               },
+               keyScheduleB{
+                       {8, 9, 7, 6, 3},
+                       {0xa, 0xb, 5, 4, 7},
+                       {0xc, 0xd, 3, 2, 8},
+                       {0xe, 0xf, 1, 0, 0xd},
+               },
+       },
+}
+
+func (c *Cipher) keySchedule(in []byte) {
+       var t [8]uint32
+       var k [32]uint32
+
+       for i := 0; i < 4; i++ {
+               j := i * 4
+               t[i] = uint32(in[j])<<24 | uint32(in[j+1])<<16 | uint32(in[j+2])<<8 | uint32(in[j+3])
+       }
+
+       x := []byte{6, 7, 4, 5}
+       ki := 0
+
+       for half := 0; half < 2; half++ {
+               for _, round := range schedule {
+                       for j := 0; j < 4; j++ {
+                               var a [7]uint8
+                               copy(a[:], round.a[j][:])
+                               w := t[a[1]]
+                               w ^= sBox[4][(t[a[2]>>2]>>(24-8*(a[2]&3)))&0xff]
+                               w ^= sBox[5][(t[a[3]>>2]>>(24-8*(a[3]&3)))&0xff]
+                               w ^= sBox[6][(t[a[4]>>2]>>(24-8*(a[4]&3)))&0xff]
+                               w ^= sBox[7][(t[a[5]>>2]>>(24-8*(a[5]&3)))&0xff]
+                               w ^= sBox[x[j]][(t[a[6]>>2]>>(24-8*(a[6]&3)))&0xff]
+                               t[a[0]] = w
+                       }
+
+                       for j := 0; j < 4; j++ {
+                               var b [5]uint8
+                               copy(b[:], round.b[j][:])
+                               w := sBox[4][(t[b[0]>>2]>>(24-8*(b[0]&3)))&0xff]
+                               w ^= sBox[5][(t[b[1]>>2]>>(24-8*(b[1]&3)))&0xff]
+                               w ^= sBox[6][(t[b[2]>>2]>>(24-8*(b[2]&3)))&0xff]
+                               w ^= sBox[7][(t[b[3]>>2]>>(24-8*(b[3]&3)))&0xff]
+                               w ^= sBox[4+j][(t[b[4]>>2]>>(24-8*(b[4]&3)))&0xff]
+                               k[ki] = w
+                               ki++
+                       }
+               }
+       }
+
+       for i := 0; i < 16; i++ {
+               c.masking[i] = k[i]
+               c.rotate[i] = uint8(k[16+i] & 0x1f)
+       }
+}
+
+// These are the three 'f' functions. See RFC 2144, section 2.2.
+func f1(d, m uint32, r uint8) uint32 {
+       t := m + d
+       I := (t << r) | (t >> (32 - r))
+       return ((sBox[0][I>>24] ^ sBox[1][(I>>16)&0xff]) - sBox[2][(I>>8)&0xff]) + sBox[3][I&0xff]
+}
+
+func f2(d, m uint32, r uint8) uint32 {
+       t := m ^ d
+       I := (t << r) | (t >> (32 - r))
+       return ((sBox[0][I>>24] - sBox[1][(I>>16)&0xff]) + sBox[2][(I>>8)&0xff]) ^ sBox[3][I&0xff]
+}
+
+func f3(d, m uint32, r uint8) uint32 {
+       t := m - d
+       I := (t << r) | (t >> (32 - r))
+       return ((sBox[0][I>>24] + sBox[1][(I>>16)&0xff]) ^ sBox[2][(I>>8)&0xff]) - sBox[3][I&0xff]
+}
+
+var sBox = [8][256]uint32{
+       {
+               0x30fb40d4, 0x9fa0ff0b, 0x6beccd2f, 0x3f258c7a, 0x1e213f2f, 0x9c004dd3, 0x6003e540, 0xcf9fc949,
+               0xbfd4af27, 0x88bbbdb5, 0xe2034090, 0x98d09675, 0x6e63a0e0, 0x15c361d2, 0xc2e7661d, 0x22d4ff8e,
+               0x28683b6f, 0xc07fd059, 0xff2379c8, 0x775f50e2, 0x43c340d3, 0xdf2f8656, 0x887ca41a, 0xa2d2bd2d,
+               0xa1c9e0d6, 0x346c4819, 0x61b76d87, 0x22540f2f, 0x2abe32e1, 0xaa54166b, 0x22568e3a, 0xa2d341d0,
+               0x66db40c8, 0xa784392f, 0x004dff2f, 0x2db9d2de, 0x97943fac, 0x4a97c1d8, 0x527644b7, 0xb5f437a7,
+               0xb82cbaef, 0xd751d159, 0x6ff7f0ed, 0x5a097a1f, 0x827b68d0, 0x90ecf52e, 0x22b0c054, 0xbc8e5935,
+               0x4b6d2f7f, 0x50bb64a2, 0xd2664910, 0xbee5812d, 0xb7332290, 0xe93b159f, 0xb48ee411, 0x4bff345d,
+               0xfd45c240, 0xad31973f, 0xc4f6d02e, 0x55fc8165, 0xd5b1caad, 0xa1ac2dae, 0xa2d4b76d, 0xc19b0c50,
+               0x882240f2, 0x0c6e4f38, 0xa4e4bfd7, 0x4f5ba272, 0x564c1d2f, 0xc59c5319, 0xb949e354, 0xb04669fe,
+               0xb1b6ab8a, 0xc71358dd, 0x6385c545, 0x110f935d, 0x57538ad5, 0x6a390493, 0xe63d37e0, 0x2a54f6b3,
+               0x3a787d5f, 0x6276a0b5, 0x19a6fcdf, 0x7a42206a, 0x29f9d4d5, 0xf61b1891, 0xbb72275e, 0xaa508167,
+               0x38901091, 0xc6b505eb, 0x84c7cb8c, 0x2ad75a0f, 0x874a1427, 0xa2d1936b, 0x2ad286af, 0xaa56d291,
+               0xd7894360, 0x425c750d, 0x93b39e26, 0x187184c9, 0x6c00b32d, 0x73e2bb14, 0xa0bebc3c, 0x54623779,
+               0x64459eab, 0x3f328b82, 0x7718cf82, 0x59a2cea6, 0x04ee002e, 0x89fe78e6, 0x3fab0950, 0x325ff6c2,
+               0x81383f05, 0x6963c5c8, 0x76cb5ad6, 0xd49974c9, 0xca180dcf, 0x380782d5, 0xc7fa5cf6, 0x8ac31511,
+               0x35e79e13, 0x47da91d0, 0xf40f9086, 0xa7e2419e, 0x31366241, 0x051ef495, 0xaa573b04, 0x4a805d8d,
+               0x548300d0, 0x00322a3c, 0xbf64cddf, 0xba57a68e, 0x75c6372b, 0x50afd341, 0xa7c13275, 0x915a0bf5,
+               0x6b54bfab, 0x2b0b1426, 0xab4cc9d7, 0x449ccd82, 0xf7fbf265, 0xab85c5f3, 0x1b55db94, 0xaad4e324,
+               0xcfa4bd3f, 0x2deaa3e2, 0x9e204d02, 0xc8bd25ac, 0xeadf55b3, 0xd5bd9e98, 0xe31231b2, 0x2ad5ad6c,
+               0x954329de, 0xadbe4528, 0xd8710f69, 0xaa51c90f, 0xaa786bf6, 0x22513f1e, 0xaa51a79b, 0x2ad344cc,
+               0x7b5a41f0, 0xd37cfbad, 0x1b069505, 0x41ece491, 0xb4c332e6, 0x032268d4, 0xc9600acc, 0xce387e6d,
+               0xbf6bb16c, 0x6a70fb78, 0x0d03d9c9, 0xd4df39de, 0xe01063da, 0x4736f464, 0x5ad328d8, 0xb347cc96,
+               0x75bb0fc3, 0x98511bfb, 0x4ffbcc35, 0xb58bcf6a, 0xe11f0abc, 0xbfc5fe4a, 0xa70aec10, 0xac39570a,
+               0x3f04442f, 0x6188b153, 0xe0397a2e, 0x5727cb79, 0x9ceb418f, 0x1cacd68d, 0x2ad37c96, 0x0175cb9d,
+               0xc69dff09, 0xc75b65f0, 0xd9db40d8, 0xec0e7779, 0x4744ead4, 0xb11c3274, 0xdd24cb9e, 0x7e1c54bd,
+               0xf01144f9, 0xd2240eb1, 0x9675b3fd, 0xa3ac3755, 0xd47c27af, 0x51c85f4d, 0x56907596, 0xa5bb15e6,
+               0x580304f0, 0xca042cf1, 0x011a37ea, 0x8dbfaadb, 0x35ba3e4a, 0x3526ffa0, 0xc37b4d09, 0xbc306ed9,
+               0x98a52666, 0x5648f725, 0xff5e569d, 0x0ced63d0, 0x7c63b2cf, 0x700b45e1, 0xd5ea50f1, 0x85a92872,
+               0xaf1fbda7, 0xd4234870, 0xa7870bf3, 0x2d3b4d79, 0x42e04198, 0x0cd0ede7, 0x26470db8, 0xf881814c,
+               0x474d6ad7, 0x7c0c5e5c, 0xd1231959, 0x381b7298, 0xf5d2f4db, 0xab838653, 0x6e2f1e23, 0x83719c9e,
+               0xbd91e046, 0x9a56456e, 0xdc39200c, 0x20c8c571, 0x962bda1c, 0xe1e696ff, 0xb141ab08, 0x7cca89b9,
+               0x1a69e783, 0x02cc4843, 0xa2f7c579, 0x429ef47d, 0x427b169c, 0x5ac9f049, 0xdd8f0f00, 0x5c8165bf,
+       },
+       {
+               0x1f201094, 0xef0ba75b, 0x69e3cf7e, 0x393f4380, 0xfe61cf7a, 0xeec5207a, 0x55889c94, 0x72fc0651,
+               0xada7ef79, 0x4e1d7235, 0xd55a63ce, 0xde0436ba, 0x99c430ef, 0x5f0c0794, 0x18dcdb7d, 0xa1d6eff3,
+               0xa0b52f7b, 0x59e83605, 0xee15b094, 0xe9ffd909, 0xdc440086, 0xef944459, 0xba83ccb3, 0xe0c3cdfb,
+               0xd1da4181, 0x3b092ab1, 0xf997f1c1, 0xa5e6cf7b, 0x01420ddb, 0xe4e7ef5b, 0x25a1ff41, 0xe180f806,
+               0x1fc41080, 0x179bee7a, 0xd37ac6a9, 0xfe5830a4, 0x98de8b7f, 0x77e83f4e, 0x79929269, 0x24fa9f7b,
+               0xe113c85b, 0xacc40083, 0xd7503525, 0xf7ea615f, 0x62143154, 0x0d554b63, 0x5d681121, 0xc866c359,
+               0x3d63cf73, 0xcee234c0, 0xd4d87e87, 0x5c672b21, 0x071f6181, 0x39f7627f, 0x361e3084, 0xe4eb573b,
+               0x602f64a4, 0xd63acd9c, 0x1bbc4635, 0x9e81032d, 0x2701f50c, 0x99847ab4, 0xa0e3df79, 0xba6cf38c,
+               0x10843094, 0x2537a95e, 0xf46f6ffe, 0xa1ff3b1f, 0x208cfb6a, 0x8f458c74, 0xd9e0a227, 0x4ec73a34,
+               0xfc884f69, 0x3e4de8df, 0xef0e0088, 0x3559648d, 0x8a45388c, 0x1d804366, 0x721d9bfd, 0xa58684bb,
+               0xe8256333, 0x844e8212, 0x128d8098, 0xfed33fb4, 0xce280ae1, 0x27e19ba5, 0xd5a6c252, 0xe49754bd,
+               0xc5d655dd, 0xeb667064, 0x77840b4d, 0xa1b6a801, 0x84db26a9, 0xe0b56714, 0x21f043b7, 0xe5d05860,
+               0x54f03084, 0x066ff472, 0xa31aa153, 0xdadc4755, 0xb5625dbf, 0x68561be6, 0x83ca6b94, 0x2d6ed23b,
+               0xeccf01db, 0xa6d3d0ba, 0xb6803d5c, 0xaf77a709, 0x33b4a34c, 0x397bc8d6, 0x5ee22b95, 0x5f0e5304,
+               0x81ed6f61, 0x20e74364, 0xb45e1378, 0xde18639b, 0x881ca122, 0xb96726d1, 0x8049a7e8, 0x22b7da7b,
+               0x5e552d25, 0x5272d237, 0x79d2951c, 0xc60d894c, 0x488cb402, 0x1ba4fe5b, 0xa4b09f6b, 0x1ca815cf,
+               0xa20c3005, 0x8871df63, 0xb9de2fcb, 0x0cc6c9e9, 0x0beeff53, 0xe3214517, 0xb4542835, 0x9f63293c,
+               0xee41e729, 0x6e1d2d7c, 0x50045286, 0x1e6685f3, 0xf33401c6, 0x30a22c95, 0x31a70850, 0x60930f13,
+               0x73f98417, 0xa1269859, 0xec645c44, 0x52c877a9, 0xcdff33a6, 0xa02b1741, 0x7cbad9a2, 0x2180036f,
+               0x50d99c08, 0xcb3f4861, 0xc26bd765, 0x64a3f6ab, 0x80342676, 0x25a75e7b, 0xe4e6d1fc, 0x20c710e6,
+               0xcdf0b680, 0x17844d3b, 0x31eef84d, 0x7e0824e4, 0x2ccb49eb, 0x846a3bae, 0x8ff77888, 0xee5d60f6,
+               0x7af75673, 0x2fdd5cdb, 0xa11631c1, 0x30f66f43, 0xb3faec54, 0x157fd7fa, 0xef8579cc, 0xd152de58,
+               0xdb2ffd5e, 0x8f32ce19, 0x306af97a, 0x02f03ef8, 0x99319ad5, 0xc242fa0f, 0xa7e3ebb0, 0xc68e4906,
+               0xb8da230c, 0x80823028, 0xdcdef3c8, 0xd35fb171, 0x088a1bc8, 0xbec0c560, 0x61a3c9e8, 0xbca8f54d,
+               0xc72feffa, 0x22822e99, 0x82c570b4, 0xd8d94e89, 0x8b1c34bc, 0x301e16e6, 0x273be979, 0xb0ffeaa6,
+               0x61d9b8c6, 0x00b24869, 0xb7ffce3f, 0x08dc283b, 0x43daf65a, 0xf7e19798, 0x7619b72f, 0x8f1c9ba4,
+               0xdc8637a0, 0x16a7d3b1, 0x9fc393b7, 0xa7136eeb, 0xc6bcc63e, 0x1a513742, 0xef6828bc, 0x520365d6,
+               0x2d6a77ab, 0x3527ed4b, 0x821fd216, 0x095c6e2e, 0xdb92f2fb, 0x5eea29cb, 0x145892f5, 0x91584f7f,
+               0x5483697b, 0x2667a8cc, 0x85196048, 0x8c4bacea, 0x833860d4, 0x0d23e0f9, 0x6c387e8a, 0x0ae6d249,
+               0xb284600c, 0xd835731d, 0xdcb1c647, 0xac4c56ea, 0x3ebd81b3, 0x230eabb0, 0x6438bc87, 0xf0b5b1fa,
+               0x8f5ea2b3, 0xfc184642, 0x0a036b7a, 0x4fb089bd, 0x649da589, 0xa345415e, 0x5c038323, 0x3e5d3bb9,
+               0x43d79572, 0x7e6dd07c, 0x06dfdf1e, 0x6c6cc4ef, 0x7160a539, 0x73bfbe70, 0x83877605, 0x4523ecf1,
+       },
+       {
+               0x8defc240, 0x25fa5d9f, 0xeb903dbf, 0xe810c907, 0x47607fff, 0x369fe44b, 0x8c1fc644, 0xaececa90,
+               0xbeb1f9bf, 0xeefbcaea, 0xe8cf1950, 0x51df07ae, 0x920e8806, 0xf0ad0548, 0xe13c8d83, 0x927010d5,
+               0x11107d9f, 0x07647db9, 0xb2e3e4d4, 0x3d4f285e, 0xb9afa820, 0xfade82e0, 0xa067268b, 0x8272792e,
+               0x553fb2c0, 0x489ae22b, 0xd4ef9794, 0x125e3fbc, 0x21fffcee, 0x825b1bfd, 0x9255c5ed, 0x1257a240,
+               0x4e1a8302, 0xbae07fff, 0x528246e7, 0x8e57140e, 0x3373f7bf, 0x8c9f8188, 0xa6fc4ee8, 0xc982b5a5,
+               0xa8c01db7, 0x579fc264, 0x67094f31, 0xf2bd3f5f, 0x40fff7c1, 0x1fb78dfc, 0x8e6bd2c1, 0x437be59b,
+               0x99b03dbf, 0xb5dbc64b, 0x638dc0e6, 0x55819d99, 0xa197c81c, 0x4a012d6e, 0xc5884a28, 0xccc36f71,
+               0xb843c213, 0x6c0743f1, 0x8309893c, 0x0feddd5f, 0x2f7fe850, 0xd7c07f7e, 0x02507fbf, 0x5afb9a04,
+               0xa747d2d0, 0x1651192e, 0xaf70bf3e, 0x58c31380, 0x5f98302e, 0x727cc3c4, 0x0a0fb402, 0x0f7fef82,
+               0x8c96fdad, 0x5d2c2aae, 0x8ee99a49, 0x50da88b8, 0x8427f4a0, 0x1eac5790, 0x796fb449, 0x8252dc15,
+               0xefbd7d9b, 0xa672597d, 0xada840d8, 0x45f54504, 0xfa5d7403, 0xe83ec305, 0x4f91751a, 0x925669c2,
+               0x23efe941, 0xa903f12e, 0x60270df2, 0x0276e4b6, 0x94fd6574, 0x927985b2, 0x8276dbcb, 0x02778176,
+               0xf8af918d, 0x4e48f79e, 0x8f616ddf, 0xe29d840e, 0x842f7d83, 0x340ce5c8, 0x96bbb682, 0x93b4b148,
+               0xef303cab, 0x984faf28, 0x779faf9b, 0x92dc560d, 0x224d1e20, 0x8437aa88, 0x7d29dc96, 0x2756d3dc,
+               0x8b907cee, 0xb51fd240, 0xe7c07ce3, 0xe566b4a1, 0xc3e9615e, 0x3cf8209d, 0x6094d1e3, 0xcd9ca341,
+               0x5c76460e, 0x00ea983b, 0xd4d67881, 0xfd47572c, 0xf76cedd9, 0xbda8229c, 0x127dadaa, 0x438a074e,
+               0x1f97c090, 0x081bdb8a, 0x93a07ebe, 0xb938ca15, 0x97b03cff, 0x3dc2c0f8, 0x8d1ab2ec, 0x64380e51,
+               0x68cc7bfb, 0xd90f2788, 0x12490181, 0x5de5ffd4, 0xdd7ef86a, 0x76a2e214, 0xb9a40368, 0x925d958f,
+               0x4b39fffa, 0xba39aee9, 0xa4ffd30b, 0xfaf7933b, 0x6d498623, 0x193cbcfa, 0x27627545, 0x825cf47a,
+               0x61bd8ba0, 0xd11e42d1, 0xcead04f4, 0x127ea392, 0x10428db7, 0x8272a972, 0x9270c4a8, 0x127de50b,
+               0x285ba1c8, 0x3c62f44f, 0x35c0eaa5, 0xe805d231, 0x428929fb, 0xb4fcdf82, 0x4fb66a53, 0x0e7dc15b,
+               0x1f081fab, 0x108618ae, 0xfcfd086d, 0xf9ff2889, 0x694bcc11, 0x236a5cae, 0x12deca4d, 0x2c3f8cc5,
+               0xd2d02dfe, 0xf8ef5896, 0xe4cf52da, 0x95155b67, 0x494a488c, 0xb9b6a80c, 0x5c8f82bc, 0x89d36b45,
+               0x3a609437, 0xec00c9a9, 0x44715253, 0x0a874b49, 0xd773bc40, 0x7c34671c, 0x02717ef6, 0x4feb5536,
+               0xa2d02fff, 0xd2bf60c4, 0xd43f03c0, 0x50b4ef6d, 0x07478cd1, 0x006e1888, 0xa2e53f55, 0xb9e6d4bc,
+               0xa2048016, 0x97573833, 0xd7207d67, 0xde0f8f3d, 0x72f87b33, 0xabcc4f33, 0x7688c55d, 0x7b00a6b0,
+               0x947b0001, 0x570075d2, 0xf9bb88f8, 0x8942019e, 0x4264a5ff, 0x856302e0, 0x72dbd92b, 0xee971b69,
+               0x6ea22fde, 0x5f08ae2b, 0xaf7a616d, 0xe5c98767, 0xcf1febd2, 0x61efc8c2, 0xf1ac2571, 0xcc8239c2,
+               0x67214cb8, 0xb1e583d1, 0xb7dc3e62, 0x7f10bdce, 0xf90a5c38, 0x0ff0443d, 0x606e6dc6, 0x60543a49,
+               0x5727c148, 0x2be98a1d, 0x8ab41738, 0x20e1be24, 0xaf96da0f, 0x68458425, 0x99833be5, 0x600d457d,
+               0x282f9350, 0x8334b362, 0xd91d1120, 0x2b6d8da0, 0x642b1e31, 0x9c305a00, 0x52bce688, 0x1b03588a,
+               0xf7baefd5, 0x4142ed9c, 0xa4315c11, 0x83323ec5, 0xdfef4636, 0xa133c501, 0xe9d3531c, 0xee353783,
+       },
+       {
+               0x9db30420, 0x1fb6e9de, 0xa7be7bef, 0xd273a298, 0x4a4f7bdb, 0x64ad8c57, 0x85510443, 0xfa020ed1,
+               0x7e287aff, 0xe60fb663, 0x095f35a1, 0x79ebf120, 0xfd059d43, 0x6497b7b1, 0xf3641f63, 0x241e4adf,
+               0x28147f5f, 0x4fa2b8cd, 0xc9430040, 0x0cc32220, 0xfdd30b30, 0xc0a5374f, 0x1d2d00d9, 0x24147b15,
+               0xee4d111a, 0x0fca5167, 0x71ff904c, 0x2d195ffe, 0x1a05645f, 0x0c13fefe, 0x081b08ca, 0x05170121,
+               0x80530100, 0xe83e5efe, 0xac9af4f8, 0x7fe72701, 0xd2b8ee5f, 0x06df4261, 0xbb9e9b8a, 0x7293ea25,
+               0xce84ffdf, 0xf5718801, 0x3dd64b04, 0xa26f263b, 0x7ed48400, 0x547eebe6, 0x446d4ca0, 0x6cf3d6f5,
+               0x2649abdf, 0xaea0c7f5, 0x36338cc1, 0x503f7e93, 0xd3772061, 0x11b638e1, 0x72500e03, 0xf80eb2bb,
+               0xabe0502e, 0xec8d77de, 0x57971e81, 0xe14f6746, 0xc9335400, 0x6920318f, 0x081dbb99, 0xffc304a5,
+               0x4d351805, 0x7f3d5ce3, 0xa6c866c6, 0x5d5bcca9, 0xdaec6fea, 0x9f926f91, 0x9f46222f, 0x3991467d,
+               0xa5bf6d8e, 0x1143c44f, 0x43958302, 0xd0214eeb, 0x022083b8, 0x3fb6180c, 0x18f8931e, 0x281658e6,
+               0x26486e3e, 0x8bd78a70, 0x7477e4c1, 0xb506e07c, 0xf32d0a25, 0x79098b02, 0xe4eabb81, 0x28123b23,
+               0x69dead38, 0x1574ca16, 0xdf871b62, 0x211c40b7, 0xa51a9ef9, 0x0014377b, 0x041e8ac8, 0x09114003,
+               0xbd59e4d2, 0xe3d156d5, 0x4fe876d5, 0x2f91a340, 0x557be8de, 0x00eae4a7, 0x0ce5c2ec, 0x4db4bba6,
+               0xe756bdff, 0xdd3369ac, 0xec17b035, 0x06572327, 0x99afc8b0, 0x56c8c391, 0x6b65811c, 0x5e146119,
+               0x6e85cb75, 0xbe07c002, 0xc2325577, 0x893ff4ec, 0x5bbfc92d, 0xd0ec3b25, 0xb7801ab7, 0x8d6d3b24,
+               0x20c763ef, 0xc366a5fc, 0x9c382880, 0x0ace3205, 0xaac9548a, 0xeca1d7c7, 0x041afa32, 0x1d16625a,
+               0x6701902c, 0x9b757a54, 0x31d477f7, 0x9126b031, 0x36cc6fdb, 0xc70b8b46, 0xd9e66a48, 0x56e55a79,
+               0x026a4ceb, 0x52437eff, 0x2f8f76b4, 0x0df980a5, 0x8674cde3, 0xedda04eb, 0x17a9be04, 0x2c18f4df,
+               0xb7747f9d, 0xab2af7b4, 0xefc34d20, 0x2e096b7c, 0x1741a254, 0xe5b6a035, 0x213d42f6, 0x2c1c7c26,
+               0x61c2f50f, 0x6552daf9, 0xd2c231f8, 0x25130f69, 0xd8167fa2, 0x0418f2c8, 0x001a96a6, 0x0d1526ab,
+               0x63315c21, 0x5e0a72ec, 0x49bafefd, 0x187908d9, 0x8d0dbd86, 0x311170a7, 0x3e9b640c, 0xcc3e10d7,
+               0xd5cad3b6, 0x0caec388, 0xf73001e1, 0x6c728aff, 0x71eae2a1, 0x1f9af36e, 0xcfcbd12f, 0xc1de8417,
+               0xac07be6b, 0xcb44a1d8, 0x8b9b0f56, 0x013988c3, 0xb1c52fca, 0xb4be31cd, 0xd8782806, 0x12a3a4e2,
+               0x6f7de532, 0x58fd7eb6, 0xd01ee900, 0x24adffc2, 0xf4990fc5, 0x9711aac5, 0x001d7b95, 0x82e5e7d2,
+               0x109873f6, 0x00613096, 0xc32d9521, 0xada121ff, 0x29908415, 0x7fbb977f, 0xaf9eb3db, 0x29c9ed2a,
+               0x5ce2a465, 0xa730f32c, 0xd0aa3fe8, 0x8a5cc091, 0xd49e2ce7, 0x0ce454a9, 0xd60acd86, 0x015f1919,
+               0x77079103, 0xdea03af6, 0x78a8565e, 0xdee356df, 0x21f05cbe, 0x8b75e387, 0xb3c50651, 0xb8a5c3ef,
+               0xd8eeb6d2, 0xe523be77, 0xc2154529, 0x2f69efdf, 0xafe67afb, 0xf470c4b2, 0xf3e0eb5b, 0xd6cc9876,
+               0x39e4460c, 0x1fda8538, 0x1987832f, 0xca007367, 0xa99144f8, 0x296b299e, 0x492fc295, 0x9266beab,
+               0xb5676e69, 0x9bd3ddda, 0xdf7e052f, 0xdb25701c, 0x1b5e51ee, 0xf65324e6, 0x6afce36c, 0x0316cc04,
+               0x8644213e, 0xb7dc59d0, 0x7965291f, 0xccd6fd43, 0x41823979, 0x932bcdf6, 0xb657c34d, 0x4edfd282,
+               0x7ae5290c, 0x3cb9536b, 0x851e20fe, 0x9833557e, 0x13ecf0b0, 0xd3ffb372, 0x3f85c5c1, 0x0aef7ed2,
+       },
+       {
+               0x7ec90c04, 0x2c6e74b9, 0x9b0e66df, 0xa6337911, 0xb86a7fff, 0x1dd358f5, 0x44dd9d44, 0x1731167f,
+               0x08fbf1fa, 0xe7f511cc, 0xd2051b00, 0x735aba00, 0x2ab722d8, 0x386381cb, 0xacf6243a, 0x69befd7a,
+               0xe6a2e77f, 0xf0c720cd, 0xc4494816, 0xccf5c180, 0x38851640, 0x15b0a848, 0xe68b18cb, 0x4caadeff,
+               0x5f480a01, 0x0412b2aa, 0x259814fc, 0x41d0efe2, 0x4e40b48d, 0x248eb6fb, 0x8dba1cfe, 0x41a99b02,
+               0x1a550a04, 0xba8f65cb, 0x7251f4e7, 0x95a51725, 0xc106ecd7, 0x97a5980a, 0xc539b9aa, 0x4d79fe6a,
+               0xf2f3f763, 0x68af8040, 0xed0c9e56, 0x11b4958b, 0xe1eb5a88, 0x8709e6b0, 0xd7e07156, 0x4e29fea7,
+               0x6366e52d, 0x02d1c000, 0xc4ac8e05, 0x9377f571, 0x0c05372a, 0x578535f2, 0x2261be02, 0xd642a0c9,
+               0xdf13a280, 0x74b55bd2, 0x682199c0, 0xd421e5ec, 0x53fb3ce8, 0xc8adedb3, 0x28a87fc9, 0x3d959981,
+               0x5c1ff900, 0xfe38d399, 0x0c4eff0b, 0x062407ea, 0xaa2f4fb1, 0x4fb96976, 0x90c79505, 0xb0a8a774,
+               0xef55a1ff, 0xe59ca2c2, 0xa6b62d27, 0xe66a4263, 0xdf65001f, 0x0ec50966, 0xdfdd55bc, 0x29de0655,
+               0x911e739a, 0x17af8975, 0x32c7911c, 0x89f89468, 0x0d01e980, 0x524755f4, 0x03b63cc9, 0x0cc844b2,
+               0xbcf3f0aa, 0x87ac36e9, 0xe53a7426, 0x01b3d82b, 0x1a9e7449, 0x64ee2d7e, 0xcddbb1da, 0x01c94910,
+               0xb868bf80, 0x0d26f3fd, 0x9342ede7, 0x04a5c284, 0x636737b6, 0x50f5b616, 0xf24766e3, 0x8eca36c1,
+               0x136e05db, 0xfef18391, 0xfb887a37, 0xd6e7f7d4, 0xc7fb7dc9, 0x3063fcdf, 0xb6f589de, 0xec2941da,
+               0x26e46695, 0xb7566419, 0xf654efc5, 0xd08d58b7, 0x48925401, 0xc1bacb7f, 0xe5ff550f, 0xb6083049,
+               0x5bb5d0e8, 0x87d72e5a, 0xab6a6ee1, 0x223a66ce, 0xc62bf3cd, 0x9e0885f9, 0x68cb3e47, 0x086c010f,
+               0xa21de820, 0xd18b69de, 0xf3f65777, 0xfa02c3f6, 0x407edac3, 0xcbb3d550, 0x1793084d, 0xb0d70eba,
+               0x0ab378d5, 0xd951fb0c, 0xded7da56, 0x4124bbe4, 0x94ca0b56, 0x0f5755d1, 0xe0e1e56e, 0x6184b5be,
+               0x580a249f, 0x94f74bc0, 0xe327888e, 0x9f7b5561, 0xc3dc0280, 0x05687715, 0x646c6bd7, 0x44904db3,
+               0x66b4f0a3, 0xc0f1648a, 0x697ed5af, 0x49e92ff6, 0x309e374f, 0x2cb6356a, 0x85808573, 0x4991f840,
+               0x76f0ae02, 0x083be84d, 0x28421c9a, 0x44489406, 0x736e4cb8, 0xc1092910, 0x8bc95fc6, 0x7d869cf4,
+               0x134f616f, 0x2e77118d, 0xb31b2be1, 0xaa90b472, 0x3ca5d717, 0x7d161bba, 0x9cad9010, 0xaf462ba2,
+               0x9fe459d2, 0x45d34559, 0xd9f2da13, 0xdbc65487, 0xf3e4f94e, 0x176d486f, 0x097c13ea, 0x631da5c7,
+               0x445f7382, 0x175683f4, 0xcdc66a97, 0x70be0288, 0xb3cdcf72, 0x6e5dd2f3, 0x20936079, 0x459b80a5,
+               0xbe60e2db, 0xa9c23101, 0xeba5315c, 0x224e42f2, 0x1c5c1572, 0xf6721b2c, 0x1ad2fff3, 0x8c25404e,
+               0x324ed72f, 0x4067b7fd, 0x0523138e, 0x5ca3bc78, 0xdc0fd66e, 0x75922283, 0x784d6b17, 0x58ebb16e,
+               0x44094f85, 0x3f481d87, 0xfcfeae7b, 0x77b5ff76, 0x8c2302bf, 0xaaf47556, 0x5f46b02a, 0x2b092801,
+               0x3d38f5f7, 0x0ca81f36, 0x52af4a8a, 0x66d5e7c0, 0xdf3b0874, 0x95055110, 0x1b5ad7a8, 0xf61ed5ad,
+               0x6cf6e479, 0x20758184, 0xd0cefa65, 0x88f7be58, 0x4a046826, 0x0ff6f8f3, 0xa09c7f70, 0x5346aba0,
+               0x5ce96c28, 0xe176eda3, 0x6bac307f, 0x376829d2, 0x85360fa9, 0x17e3fe2a, 0x24b79767, 0xf5a96b20,
+               0xd6cd2595, 0x68ff1ebf, 0x7555442c, 0xf19f06be, 0xf9e0659a, 0xeeb9491d, 0x34010718, 0xbb30cab8,
+               0xe822fe15, 0x88570983, 0x750e6249, 0xda627e55, 0x5e76ffa8, 0xb1534546, 0x6d47de08, 0xefe9e7d4,
+       },
+       {
+               0xf6fa8f9d, 0x2cac6ce1, 0x4ca34867, 0xe2337f7c, 0x95db08e7, 0x016843b4, 0xeced5cbc, 0x325553ac,
+               0xbf9f0960, 0xdfa1e2ed, 0x83f0579d, 0x63ed86b9, 0x1ab6a6b8, 0xde5ebe39, 0xf38ff732, 0x8989b138,
+               0x33f14961, 0xc01937bd, 0xf506c6da, 0xe4625e7e, 0xa308ea99, 0x4e23e33c, 0x79cbd7cc, 0x48a14367,
+               0xa3149619, 0xfec94bd5, 0xa114174a, 0xeaa01866, 0xa084db2d, 0x09a8486f, 0xa888614a, 0x2900af98,
+               0x01665991, 0xe1992863, 0xc8f30c60, 0x2e78ef3c, 0xd0d51932, 0xcf0fec14, 0xf7ca07d2, 0xd0a82072,
+               0xfd41197e, 0x9305a6b0, 0xe86be3da, 0x74bed3cd, 0x372da53c, 0x4c7f4448, 0xdab5d440, 0x6dba0ec3,
+               0x083919a7, 0x9fbaeed9, 0x49dbcfb0, 0x4e670c53, 0x5c3d9c01, 0x64bdb941, 0x2c0e636a, 0xba7dd9cd,
+               0xea6f7388, 0xe70bc762, 0x35f29adb, 0x5c4cdd8d, 0xf0d48d8c, 0xb88153e2, 0x08a19866, 0x1ae2eac8,
+               0x284caf89, 0xaa928223, 0x9334be53, 0x3b3a21bf, 0x16434be3, 0x9aea3906, 0xefe8c36e, 0xf890cdd9,
+               0x80226dae, 0xc340a4a3, 0xdf7e9c09, 0xa694a807, 0x5b7c5ecc, 0x221db3a6, 0x9a69a02f, 0x68818a54,
+               0xceb2296f, 0x53c0843a, 0xfe893655, 0x25bfe68a, 0xb4628abc, 0xcf222ebf, 0x25ac6f48, 0xa9a99387,
+               0x53bddb65, 0xe76ffbe7, 0xe967fd78, 0x0ba93563, 0x8e342bc1, 0xe8a11be9, 0x4980740d, 0xc8087dfc,
+               0x8de4bf99, 0xa11101a0, 0x7fd37975, 0xda5a26c0, 0xe81f994f, 0x9528cd89, 0xfd339fed, 0xb87834bf,
+               0x5f04456d, 0x22258698, 0xc9c4c83b, 0x2dc156be, 0x4f628daa, 0x57f55ec5, 0xe2220abe, 0xd2916ebf,
+               0x4ec75b95, 0x24f2c3c0, 0x42d15d99, 0xcd0d7fa0, 0x7b6e27ff, 0xa8dc8af0, 0x7345c106, 0xf41e232f,
+               0x35162386, 0xe6ea8926, 0x3333b094, 0x157ec6f2, 0x372b74af, 0x692573e4, 0xe9a9d848, 0xf3160289,
+               0x3a62ef1d, 0xa787e238, 0xf3a5f676, 0x74364853, 0x20951063, 0x4576698d, 0xb6fad407, 0x592af950,
+               0x36f73523, 0x4cfb6e87, 0x7da4cec0, 0x6c152daa, 0xcb0396a8, 0xc50dfe5d, 0xfcd707ab, 0x0921c42f,
+               0x89dff0bb, 0x5fe2be78, 0x448f4f33, 0x754613c9, 0x2b05d08d, 0x48b9d585, 0xdc049441, 0xc8098f9b,
+               0x7dede786, 0xc39a3373, 0x42410005, 0x6a091751, 0x0ef3c8a6, 0x890072d6, 0x28207682, 0xa9a9f7be,
+               0xbf32679d, 0xd45b5b75, 0xb353fd00, 0xcbb0e358, 0x830f220a, 0x1f8fb214, 0xd372cf08, 0xcc3c4a13,
+               0x8cf63166, 0x061c87be, 0x88c98f88, 0x6062e397, 0x47cf8e7a, 0xb6c85283, 0x3cc2acfb, 0x3fc06976,
+               0x4e8f0252, 0x64d8314d, 0xda3870e3, 0x1e665459, 0xc10908f0, 0x513021a5, 0x6c5b68b7, 0x822f8aa0,
+               0x3007cd3e, 0x74719eef, 0xdc872681, 0x073340d4, 0x7e432fd9, 0x0c5ec241, 0x8809286c, 0xf592d891,
+               0x08a930f6, 0x957ef305, 0xb7fbffbd, 0xc266e96f, 0x6fe4ac98, 0xb173ecc0, 0xbc60b42a, 0x953498da,
+               0xfba1ae12, 0x2d4bd736, 0x0f25faab, 0xa4f3fceb, 0xe2969123, 0x257f0c3d, 0x9348af49, 0x361400bc,
+               0xe8816f4a, 0x3814f200, 0xa3f94043, 0x9c7a54c2, 0xbc704f57, 0xda41e7f9, 0xc25ad33a, 0x54f4a084,
+               0xb17f5505, 0x59357cbe, 0xedbd15c8, 0x7f97c5ab, 0xba5ac7b5, 0xb6f6deaf, 0x3a479c3a, 0x5302da25,
+               0x653d7e6a, 0x54268d49, 0x51a477ea, 0x5017d55b, 0xd7d25d88, 0x44136c76, 0x0404a8c8, 0xb8e5a121,
+               0xb81a928a, 0x60ed5869, 0x97c55b96, 0xeaec991b, 0x29935913, 0x01fdb7f1, 0x088e8dfa, 0x9ab6f6f5,
+               0x3b4cbf9f, 0x4a5de3ab, 0xe6051d35, 0xa0e1d855, 0xd36b4cf1, 0xf544edeb, 0xb0e93524, 0xbebb8fbd,
+               0xa2d762cf, 0x49c92f54, 0x38b5f331, 0x7128a454, 0x48392905, 0xa65b1db8, 0x851c97bd, 0xd675cf2f,
+       },
+       {
+               0x85e04019, 0x332bf567, 0x662dbfff, 0xcfc65693, 0x2a8d7f6f, 0xab9bc912, 0xde6008a1, 0x2028da1f,
+               0x0227bce7, 0x4d642916, 0x18fac300, 0x50f18b82, 0x2cb2cb11, 0xb232e75c, 0x4b3695f2, 0xb28707de,
+               0xa05fbcf6, 0xcd4181e9, 0xe150210c, 0xe24ef1bd, 0xb168c381, 0xfde4e789, 0x5c79b0d8, 0x1e8bfd43,
+               0x4d495001, 0x38be4341, 0x913cee1d, 0x92a79c3f, 0x089766be, 0xbaeeadf4, 0x1286becf, 0xb6eacb19,
+               0x2660c200, 0x7565bde4, 0x64241f7a, 0x8248dca9, 0xc3b3ad66, 0x28136086, 0x0bd8dfa8, 0x356d1cf2,
+               0x107789be, 0xb3b2e9ce, 0x0502aa8f, 0x0bc0351e, 0x166bf52a, 0xeb12ff82, 0xe3486911, 0xd34d7516,
+               0x4e7b3aff, 0x5f43671b, 0x9cf6e037, 0x4981ac83, 0x334266ce, 0x8c9341b7, 0xd0d854c0, 0xcb3a6c88,
+               0x47bc2829, 0x4725ba37, 0xa66ad22b, 0x7ad61f1e, 0x0c5cbafa, 0x4437f107, 0xb6e79962, 0x42d2d816,
+               0x0a961288, 0xe1a5c06e, 0x13749e67, 0x72fc081a, 0xb1d139f7, 0xf9583745, 0xcf19df58, 0xbec3f756,
+               0xc06eba30, 0x07211b24, 0x45c28829, 0xc95e317f, 0xbc8ec511, 0x38bc46e9, 0xc6e6fa14, 0xbae8584a,
+               0xad4ebc46, 0x468f508b, 0x7829435f, 0xf124183b, 0x821dba9f, 0xaff60ff4, 0xea2c4e6d, 0x16e39264,
+               0x92544a8b, 0x009b4fc3, 0xaba68ced, 0x9ac96f78, 0x06a5b79a, 0xb2856e6e, 0x1aec3ca9, 0xbe838688,
+               0x0e0804e9, 0x55f1be56, 0xe7e5363b, 0xb3a1f25d, 0xf7debb85, 0x61fe033c, 0x16746233, 0x3c034c28,
+               0xda6d0c74, 0x79aac56c, 0x3ce4e1ad, 0x51f0c802, 0x98f8f35a, 0x1626a49f, 0xeed82b29, 0x1d382fe3,
+               0x0c4fb99a, 0xbb325778, 0x3ec6d97b, 0x6e77a6a9, 0xcb658b5c, 0xd45230c7, 0x2bd1408b, 0x60c03eb7,
+               0xb9068d78, 0xa33754f4, 0xf430c87d, 0xc8a71302, 0xb96d8c32, 0xebd4e7be, 0xbe8b9d2d, 0x7979fb06,
+               0xe7225308, 0x8b75cf77, 0x11ef8da4, 0xe083c858, 0x8d6b786f, 0x5a6317a6, 0xfa5cf7a0, 0x5dda0033,
+               0xf28ebfb0, 0xf5b9c310, 0xa0eac280, 0x08b9767a, 0xa3d9d2b0, 0x79d34217, 0x021a718d, 0x9ac6336a,
+               0x2711fd60, 0x438050e3, 0x069908a8, 0x3d7fedc4, 0x826d2bef, 0x4eeb8476, 0x488dcf25, 0x36c9d566,
+               0x28e74e41, 0xc2610aca, 0x3d49a9cf, 0xbae3b9df, 0xb65f8de6, 0x92aeaf64, 0x3ac7d5e6, 0x9ea80509,
+               0xf22b017d, 0xa4173f70, 0xdd1e16c3, 0x15e0d7f9, 0x50b1b887, 0x2b9f4fd5, 0x625aba82, 0x6a017962,
+               0x2ec01b9c, 0x15488aa9, 0xd716e740, 0x40055a2c, 0x93d29a22, 0xe32dbf9a, 0x058745b9, 0x3453dc1e,
+               0xd699296e, 0x496cff6f, 0x1c9f4986, 0xdfe2ed07, 0xb87242d1, 0x19de7eae, 0x053e561a, 0x15ad6f8c,
+               0x66626c1c, 0x7154c24c, 0xea082b2a, 0x93eb2939, 0x17dcb0f0, 0x58d4f2ae, 0x9ea294fb, 0x52cf564c,
+               0x9883fe66, 0x2ec40581, 0x763953c3, 0x01d6692e, 0xd3a0c108, 0xa1e7160e, 0xe4f2dfa6, 0x693ed285,
+               0x74904698, 0x4c2b0edd, 0x4f757656, 0x5d393378, 0xa132234f, 0x3d321c5d, 0xc3f5e194, 0x4b269301,
+               0xc79f022f, 0x3c997e7e, 0x5e4f9504, 0x3ffafbbd, 0x76f7ad0e, 0x296693f4, 0x3d1fce6f, 0xc61e45be,
+               0xd3b5ab34, 0xf72bf9b7, 0x1b0434c0, 0x4e72b567, 0x5592a33d, 0xb5229301, 0xcfd2a87f, 0x60aeb767,
+               0x1814386b, 0x30bcc33d, 0x38a0c07d, 0xfd1606f2, 0xc363519b, 0x589dd390, 0x5479f8e6, 0x1cb8d647,
+               0x97fd61a9, 0xea7759f4, 0x2d57539d, 0x569a58cf, 0xe84e63ad, 0x462e1b78, 0x6580f87e, 0xf3817914,
+               0x91da55f4, 0x40a230f3, 0xd1988f35, 0xb6e318d2, 0x3ffa50bc, 0x3d40f021, 0xc3c0bdae, 0x4958c24c,
+               0x518f36b2, 0x84b1d370, 0x0fedce83, 0x878ddada, 0xf2a279c7, 0x94e01be8, 0x90716f4b, 0x954b8aa3,
+       },
+       {
+               0xe216300d, 0xbbddfffc, 0xa7ebdabd, 0x35648095, 0x7789f8b7, 0xe6c1121b, 0x0e241600, 0x052ce8b5,
+               0x11a9cfb0, 0xe5952f11, 0xece7990a, 0x9386d174, 0x2a42931c, 0x76e38111, 0xb12def3a, 0x37ddddfc,
+               0xde9adeb1, 0x0a0cc32c, 0xbe197029, 0x84a00940, 0xbb243a0f, 0xb4d137cf, 0xb44e79f0, 0x049eedfd,
+               0x0b15a15d, 0x480d3168, 0x8bbbde5a, 0x669ded42, 0xc7ece831, 0x3f8f95e7, 0x72df191b, 0x7580330d,
+               0x94074251, 0x5c7dcdfa, 0xabbe6d63, 0xaa402164, 0xb301d40a, 0x02e7d1ca, 0x53571dae, 0x7a3182a2,
+               0x12a8ddec, 0xfdaa335d, 0x176f43e8, 0x71fb46d4, 0x38129022, 0xce949ad4, 0xb84769ad, 0x965bd862,
+               0x82f3d055, 0x66fb9767, 0x15b80b4e, 0x1d5b47a0, 0x4cfde06f, 0xc28ec4b8, 0x57e8726e, 0x647a78fc,
+               0x99865d44, 0x608bd593, 0x6c200e03, 0x39dc5ff6, 0x5d0b00a3, 0xae63aff2, 0x7e8bd632, 0x70108c0c,
+               0xbbd35049, 0x2998df04, 0x980cf42a, 0x9b6df491, 0x9e7edd53, 0x06918548, 0x58cb7e07, 0x3b74ef2e,
+               0x522fffb1, 0xd24708cc, 0x1c7e27cd, 0xa4eb215b, 0x3cf1d2e2, 0x19b47a38, 0x424f7618, 0x35856039,
+               0x9d17dee7, 0x27eb35e6, 0xc9aff67b, 0x36baf5b8, 0x09c467cd, 0xc18910b1, 0xe11dbf7b, 0x06cd1af8,
+               0x7170c608, 0x2d5e3354, 0xd4de495a, 0x64c6d006, 0xbcc0c62c, 0x3dd00db3, 0x708f8f34, 0x77d51b42,
+               0x264f620f, 0x24b8d2bf, 0x15c1b79e, 0x46a52564, 0xf8d7e54e, 0x3e378160, 0x7895cda5, 0x859c15a5,
+               0xe6459788, 0xc37bc75f, 0xdb07ba0c, 0x0676a3ab, 0x7f229b1e, 0x31842e7b, 0x24259fd7, 0xf8bef472,
+               0x835ffcb8, 0x6df4c1f2, 0x96f5b195, 0xfd0af0fc, 0xb0fe134c, 0xe2506d3d, 0x4f9b12ea, 0xf215f225,
+               0xa223736f, 0x9fb4c428, 0x25d04979, 0x34c713f8, 0xc4618187, 0xea7a6e98, 0x7cd16efc, 0x1436876c,
+               0xf1544107, 0xbedeee14, 0x56e9af27, 0xa04aa441, 0x3cf7c899, 0x92ecbae6, 0xdd67016d, 0x151682eb,
+               0xa842eedf, 0xfdba60b4, 0xf1907b75, 0x20e3030f, 0x24d8c29e, 0xe139673b, 0xefa63fb8, 0x71873054,
+               0xb6f2cf3b, 0x9f326442, 0xcb15a4cc, 0xb01a4504, 0xf1e47d8d, 0x844a1be5, 0xbae7dfdc, 0x42cbda70,
+               0xcd7dae0a, 0x57e85b7a, 0xd53f5af6, 0x20cf4d8c, 0xcea4d428, 0x79d130a4, 0x3486ebfb, 0x33d3cddc,
+               0x77853b53, 0x37effcb5, 0xc5068778, 0xe580b3e6, 0x4e68b8f4, 0xc5c8b37e, 0x0d809ea2, 0x398feb7c,
+               0x132a4f94, 0x43b7950e, 0x2fee7d1c, 0x223613bd, 0xdd06caa2, 0x37df932b, 0xc4248289, 0xacf3ebc3,
+               0x5715f6b7, 0xef3478dd, 0xf267616f, 0xc148cbe4, 0x9052815e, 0x5e410fab, 0xb48a2465, 0x2eda7fa4,
+               0xe87b40e4, 0xe98ea084, 0x5889e9e1, 0xefd390fc, 0xdd07d35b, 0xdb485694, 0x38d7e5b2, 0x57720101,
+               0x730edebc, 0x5b643113, 0x94917e4f, 0x503c2fba, 0x646f1282, 0x7523d24a, 0xe0779695, 0xf9c17a8f,
+               0x7a5b2121, 0xd187b896, 0x29263a4d, 0xba510cdf, 0x81f47c9f, 0xad1163ed, 0xea7b5965, 0x1a00726e,
+               0x11403092, 0x00da6d77, 0x4a0cdd61, 0xad1f4603, 0x605bdfb0, 0x9eedc364, 0x22ebe6a8, 0xcee7d28a,
+               0xa0e736a0, 0x5564a6b9, 0x10853209, 0xc7eb8f37, 0x2de705ca, 0x8951570f, 0xdf09822b, 0xbd691a6c,
+               0xaa12e4f2, 0x87451c0f, 0xe0f6a27a, 0x3ada4819, 0x4cf1764f, 0x0d771c2b, 0x67cdb156, 0x350d8384,
+               0x5938fa0f, 0x42399ef3, 0x36997b07, 0x0e84093d, 0x4aa93e61, 0x8360d87b, 0x1fa98b0c, 0x1149382c,
+               0xe97625a5, 0x0614d1b7, 0x0e25244b, 0x0c768347, 0x589e8d82, 0x0d2059d1, 0xa466bb1e, 0xf8da0a82,
+               0x04f19130, 0xba6e4ec0, 0x99265164, 0x1ee7230d, 0x50b2ad80, 0xeaee6801, 0x8db2a283, 0xea8bf59e,
+       },
+}
diff --git a/vendor/golang.org/x/crypto/openpgp/armor/armor.go b/vendor/golang.org/x/crypto/openpgp/armor/armor.go
new file mode 100644 (file)
index 0000000..592d186
--- /dev/null
@@ -0,0 +1,219 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package armor implements OpenPGP ASCII Armor, see RFC 4880. OpenPGP Armor is
+// very similar to PEM except that it has an additional CRC checksum.
+package armor // import "golang.org/x/crypto/openpgp/armor"
+
+import (
+       "bufio"
+       "bytes"
+       "encoding/base64"
+       "golang.org/x/crypto/openpgp/errors"
+       "io"
+)
+
+// A Block represents an OpenPGP armored structure.
+//
+// The encoded form is:
+//    -----BEGIN Type-----
+//    Headers
+//
+//    base64-encoded Bytes
+//    '=' base64 encoded checksum
+//    -----END Type-----
+// where Headers is a possibly empty sequence of Key: Value lines.
+//
+// Since the armored data can be very large, this package presents a streaming
+// interface.
+type Block struct {
+       Type    string            // The type, taken from the preamble (e.g. "PGP SIGNATURE").
+       Header  map[string]string // Optional headers.
+       Body    io.Reader         // A Reader from which the contents can be read
+       lReader lineReader
+       oReader openpgpReader
+}
+
+var ArmorCorrupt error = errors.StructuralError("armor invalid")
+
+const crc24Init = 0xb704ce
+const crc24Poly = 0x1864cfb
+const crc24Mask = 0xffffff
+
+// crc24 calculates the OpenPGP checksum as specified in RFC 4880, section 6.1
+func crc24(crc uint32, d []byte) uint32 {
+       for _, b := range d {
+               crc ^= uint32(b) << 16
+               for i := 0; i < 8; i++ {
+                       crc <<= 1
+                       if crc&0x1000000 != 0 {
+                               crc ^= crc24Poly
+                       }
+               }
+       }
+       return crc
+}
+
+var armorStart = []byte("-----BEGIN ")
+var armorEnd = []byte("-----END ")
+var armorEndOfLine = []byte("-----")
+
+// lineReader wraps a line-based reader. It watches for the end of an armor
+// block and records the expected CRC value.
+type lineReader struct {
+       in  *bufio.Reader
+       buf []byte
+       eof bool
+       crc uint32
+}
+
+func (l *lineReader) Read(p []byte) (n int, err error) {
+       if l.eof {
+               return 0, io.EOF
+       }
+
+       if len(l.buf) > 0 {
+               n = copy(p, l.buf)
+               l.buf = l.buf[n:]
+               return
+       }
+
+       line, isPrefix, err := l.in.ReadLine()
+       if err != nil {
+               return
+       }
+       if isPrefix {
+               return 0, ArmorCorrupt
+       }
+
+       if len(line) == 5 && line[0] == '=' {
+               // This is the checksum line
+               var expectedBytes [3]byte
+               var m int
+               m, err = base64.StdEncoding.Decode(expectedBytes[0:], line[1:])
+               if m != 3 || err != nil {
+                       return
+               }
+               l.crc = uint32(expectedBytes[0])<<16 |
+                       uint32(expectedBytes[1])<<8 |
+                       uint32(expectedBytes[2])
+
+               line, _, err = l.in.ReadLine()
+               if err != nil && err != io.EOF {
+                       return
+               }
+               if !bytes.HasPrefix(line, armorEnd) {
+                       return 0, ArmorCorrupt
+               }
+
+               l.eof = true
+               return 0, io.EOF
+       }
+
+       if len(line) > 96 {
+               return 0, ArmorCorrupt
+       }
+
+       n = copy(p, line)
+       bytesToSave := len(line) - n
+       if bytesToSave > 0 {
+               if cap(l.buf) < bytesToSave {
+                       l.buf = make([]byte, 0, bytesToSave)
+               }
+               l.buf = l.buf[0:bytesToSave]
+               copy(l.buf, line[n:])
+       }
+
+       return
+}
+
+// openpgpReader passes Read calls to the underlying base64 decoder, but keeps
+// a running CRC of the resulting data and checks the CRC against the value
+// found by the lineReader at EOF.
+type openpgpReader struct {
+       lReader    *lineReader
+       b64Reader  io.Reader
+       currentCRC uint32
+}
+
+func (r *openpgpReader) Read(p []byte) (n int, err error) {
+       n, err = r.b64Reader.Read(p)
+       r.currentCRC = crc24(r.currentCRC, p[:n])
+
+       if err == io.EOF {
+               if r.lReader.crc != uint32(r.currentCRC&crc24Mask) {
+                       return 0, ArmorCorrupt
+               }
+       }
+
+       return
+}
+
+// Decode reads a PGP armored block from the given Reader. It will ignore
+// leading garbage. If it doesn't find a block, it will return nil, io.EOF. The
+// given Reader is not usable after calling this function: an arbitrary amount
+// of data may have been read past the end of the block.
+func Decode(in io.Reader) (p *Block, err error) {
+       r := bufio.NewReaderSize(in, 100)
+       var line []byte
+       ignoreNext := false
+
+TryNextBlock:
+       p = nil
+
+       // Skip leading garbage
+       for {
+               ignoreThis := ignoreNext
+               line, ignoreNext, err = r.ReadLine()
+               if err != nil {
+                       return
+               }
+               if ignoreNext || ignoreThis {
+                       continue
+               }
+               line = bytes.TrimSpace(line)
+               if len(line) > len(armorStart)+len(armorEndOfLine) && bytes.HasPrefix(line, armorStart) {
+                       break
+               }
+       }
+
+       p = new(Block)
+       p.Type = string(line[len(armorStart) : len(line)-len(armorEndOfLine)])
+       p.Header = make(map[string]string)
+       nextIsContinuation := false
+       var lastKey string
+
+       // Read headers
+       for {
+               isContinuation := nextIsContinuation
+               line, nextIsContinuation, err = r.ReadLine()
+               if err != nil {
+                       p = nil
+                       return
+               }
+               if isContinuation {
+                       p.Header[lastKey] += string(line)
+                       continue
+               }
+               line = bytes.TrimSpace(line)
+               if len(line) == 0 {
+                       break
+               }
+
+               i := bytes.Index(line, []byte(": "))
+               if i == -1 {
+                       goto TryNextBlock
+               }
+               lastKey = string(line[:i])
+               p.Header[lastKey] = string(line[i+2:])
+       }
+
+       p.lReader.in = r
+       p.oReader.currentCRC = crc24Init
+       p.oReader.lReader = &p.lReader
+       p.oReader.b64Reader = base64.NewDecoder(base64.StdEncoding, &p.lReader)
+       p.Body = &p.oReader
+
+       return
+}
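
A minimal usage sketch for the Decode API above, assuming the package is imported from its canonical golang.org/x/crypto path; reading Block.Body to EOF is what triggers the CRC-24 check performed by openpgpReader:

package main

import (
        "fmt"
        "io/ioutil"
        "os"

        "golang.org/x/crypto/openpgp/armor"
)

func main() {
        // Decode skips leading garbage and returns once the BEGIN line and
        // headers are parsed; the body is base64-decoded lazily as it is read.
        block, err := armor.Decode(os.Stdin)
        if err != nil {
                fmt.Fprintln(os.Stderr, "armor decode:", err)
                os.Exit(1)
        }
        fmt.Println("type:", block.Type)
        for k, v := range block.Header {
                fmt.Printf("header %s: %s\n", k, v)
        }
        // Reading to EOF also checks the trailing CRC-24; a mismatch is
        // reported as ArmorCorrupt.
        body, err := ioutil.ReadAll(block.Body)
        if err != nil {
                fmt.Fprintln(os.Stderr, "read body:", err)
                os.Exit(1)
        }
        fmt.Printf("decoded %d bytes\n", len(body))
}
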
diff --git a/vendor/golang.org/x/crypto/openpgp/armor/encode.go b/vendor/golang.org/x/crypto/openpgp/armor/encode.go
new file mode 100644 (file)
index 0000000..6f07582
--- /dev/null
@@ -0,0 +1,160 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package armor
+
+import (
+       "encoding/base64"
+       "io"
+)
+
+var armorHeaderSep = []byte(": ")
+var blockEnd = []byte("\n=")
+var newline = []byte("\n")
+var armorEndOfLineOut = []byte("-----\n")
+
+// writeSlices writes its arguments to the given Writer.
+func writeSlices(out io.Writer, slices ...[]byte) (err error) {
+       for _, s := range slices {
+               _, err = out.Write(s)
+               if err != nil {
+                       return err
+               }
+       }
+       return
+}
+
+// lineBreaker breaks data across several lines, all of the same byte length
+// (except possibly the last). Lines are broken with a single '\n'.
+type lineBreaker struct {
+       lineLength  int
+       line        []byte
+       used        int
+       out         io.Writer
+       haveWritten bool
+}
+
+func newLineBreaker(out io.Writer, lineLength int) *lineBreaker {
+       return &lineBreaker{
+               lineLength: lineLength,
+               line:       make([]byte, lineLength),
+               used:       0,
+               out:        out,
+       }
+}
+
+func (l *lineBreaker) Write(b []byte) (n int, err error) {
+       n = len(b)
+
+       if n == 0 {
+               return
+       }
+
+       if l.used == 0 && l.haveWritten {
+               _, err = l.out.Write([]byte{'\n'})
+               if err != nil {
+                       return
+               }
+       }
+
+       if l.used+len(b) < l.lineLength {
+               l.used += copy(l.line[l.used:], b)
+               return
+       }
+
+       l.haveWritten = true
+       _, err = l.out.Write(l.line[0:l.used])
+       if err != nil {
+               return
+       }
+       excess := l.lineLength - l.used
+       l.used = 0
+
+       _, err = l.out.Write(b[0:excess])
+       if err != nil {
+               return
+       }
+
+       _, err = l.Write(b[excess:])
+       return
+}
+
+func (l *lineBreaker) Close() (err error) {
+       if l.used > 0 {
+               _, err = l.out.Write(l.line[0:l.used])
+               if err != nil {
+                       return
+               }
+       }
+
+       return
+}
+
+// encoding keeps track of a running CRC24 over the data which has been written
+// to it and outputs an OpenPGP checksum when closed, followed by an armor
+// trailer.
+//
+// It's built into a stack of io.Writers:
+//    encoding -> base64 encoder -> lineBreaker -> out
+type encoding struct {
+       out       io.Writer
+       breaker   *lineBreaker
+       b64       io.WriteCloser
+       crc       uint32
+       blockType []byte
+}
+
+func (e *encoding) Write(data []byte) (n int, err error) {
+       e.crc = crc24(e.crc, data)
+       return e.b64.Write(data)
+}
+
+func (e *encoding) Close() (err error) {
+       err = e.b64.Close()
+       if err != nil {
+               return
+       }
+       e.breaker.Close()
+
+       var checksumBytes [3]byte
+       checksumBytes[0] = byte(e.crc >> 16)
+       checksumBytes[1] = byte(e.crc >> 8)
+       checksumBytes[2] = byte(e.crc)
+
+       var b64ChecksumBytes [4]byte
+       base64.StdEncoding.Encode(b64ChecksumBytes[:], checksumBytes[:])
+
+       return writeSlices(e.out, blockEnd, b64ChecksumBytes[:], newline, armorEnd, e.blockType, armorEndOfLine)
+}
+
+// Encode returns a WriteCloser which will encode the data written to it in
+// OpenPGP armor.
+func Encode(out io.Writer, blockType string, headers map[string]string) (w io.WriteCloser, err error) {
+       bType := []byte(blockType)
+       err = writeSlices(out, armorStart, bType, armorEndOfLineOut)
+       if err != nil {
+               return
+       }
+
+       for k, v := range headers {
+               err = writeSlices(out, []byte(k), armorHeaderSep, []byte(v), newline)
+               if err != nil {
+                       return
+               }
+       }
+
+       _, err = out.Write(newline)
+       if err != nil {
+               return
+       }
+
+       e := &encoding{
+               out:       out,
+               breaker:   newLineBreaker(out, 64),
+               crc:       crc24Init,
+               blockType: bType,
+       }
+       e.b64 = base64.NewEncoder(base64.StdEncoding, e.breaker)
+       return e, nil
+}
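
A small roundtrip sketch tying Encode and Decode together; the block type and header value are arbitrary examples, not prescribed by the package. Bytes written to the returned WriteCloser are base64-encoded and wrapped at 64 columns by lineBreaker, and the CRC-24 line plus END trailer are only written by Close.

package main

import (
        "bytes"
        "fmt"
        "io/ioutil"
        "log"

        "golang.org/x/crypto/openpgp/armor"
)

func main() {
        var buf bytes.Buffer

        w, err := armor.Encode(&buf, "PGP MESSAGE", map[string]string{"Comment": "example"})
        if err != nil {
                log.Fatal(err)
        }
        if _, err := w.Write([]byte("hello, armor")); err != nil {
                log.Fatal(err)
        }
        // Close flushes the base64 encoder and appends the checksum line and
        // the "-----END PGP MESSAGE-----" trailer.
        if err := w.Close(); err != nil {
                log.Fatal(err)
        }

        block, err := armor.Decode(&buf)
        if err != nil {
                log.Fatal(err)
        }
        body, err := ioutil.ReadAll(block.Body) // verifies the checksum as a side effect
        if err != nil {
                log.Fatal(err)
        }
        fmt.Printf("%s: %q\n", block.Type, body)
}
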
diff --git a/vendor/golang.org/x/crypto/openpgp/canonical_text.go b/vendor/golang.org/x/crypto/openpgp/canonical_text.go
new file mode 100644 (file)
index 0000000..e601e38
--- /dev/null
@@ -0,0 +1,59 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package openpgp
+
+import "hash"
+
+// NewCanonicalTextHash reformats text written to it into the canonical
+// form and then applies the hash h.  See RFC 4880, section 5.2.1.
+func NewCanonicalTextHash(h hash.Hash) hash.Hash {
+       return &canonicalTextHash{h, 0}
+}
+
+type canonicalTextHash struct {
+       h hash.Hash
+       s int
+}
+
+var newline = []byte{'\r', '\n'}
+
+func (cth *canonicalTextHash) Write(buf []byte) (int, error) {
+       start := 0
+
+       for i, c := range buf {
+               switch cth.s {
+               case 0:
+                       if c == '\r' {
+                               cth.s = 1
+                       } else if c == '\n' {
+                               cth.h.Write(buf[start:i])
+                               cth.h.Write(newline)
+                               start = i + 1
+                       }
+               case 1:
+                       cth.s = 0
+               }
+       }
+
+       cth.h.Write(buf[start:])
+       return len(buf), nil
+}
+
+func (cth *canonicalTextHash) Sum(in []byte) []byte {
+       return cth.h.Sum(in)
+}
+
+func (cth *canonicalTextHash) Reset() {
+       cth.h.Reset()
+       cth.s = 0
+}
+
+func (cth *canonicalTextHash) Size() int {
+       return cth.h.Size()
+}
+
+func (cth *canonicalTextHash) BlockSize() int {
+       return cth.h.BlockSize()
+}
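
A short sketch of the canonicalisation behaviour, assuming SHA-256 as the wrapped hash: bare LF line endings are rewritten to CR LF before hashing, so the two inputs below should produce identical digests.

package main

import (
        "crypto/sha256"
        "fmt"

        "golang.org/x/crypto/openpgp"
)

func main() {
        lf := openpgp.NewCanonicalTextHash(sha256.New())
        lf.Write([]byte("line one\nline two\n")) // LF-only line endings

        crlf := openpgp.NewCanonicalTextHash(sha256.New())
        crlf.Write([]byte("line one\r\nline two\r\n")) // already canonical CR LF

        // Both digests are computed over the canonical form "line one\r\nline two\r\n".
        fmt.Printf("%x\n%x\n", lf.Sum(nil), crlf.Sum(nil))
}
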
diff --git a/vendor/golang.org/x/crypto/openpgp/elgamal/elgamal.go b/vendor/golang.org/x/crypto/openpgp/elgamal/elgamal.go
new file mode 100644 (file)
index 0000000..73f4fe3
--- /dev/null
@@ -0,0 +1,122 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package elgamal implements ElGamal encryption, suitable for OpenPGP,
+// as specified in "A Public-Key Cryptosystem and a Signature Scheme Based on
+// Discrete Logarithms," IEEE Transactions on Information Theory, v. IT-31,
+// n. 4, 1985, pp. 469-472.
+//
+// This form of ElGamal embeds PKCS#1 v1.5 padding, which may make it
+// unsuitable for other protocols. RSA should be used in preference in any
+// case.
+package elgamal // import "golang.org/x/crypto/openpgp/elgamal"
+
+import (
+       "crypto/rand"
+       "crypto/subtle"
+       "errors"
+       "io"
+       "math/big"
+)
+
+// PublicKey represents an ElGamal public key.
+type PublicKey struct {
+       G, P, Y *big.Int
+}
+
+// PrivateKey represents an ElGamal private key.
+type PrivateKey struct {
+       PublicKey
+       X *big.Int
+}
+
+// Encrypt encrypts the given message to the given public key. The result is a
+// pair of integers. Errors can result from reading from the random source, or
+// because msg is too large to be encrypted to the public key.
+func Encrypt(random io.Reader, pub *PublicKey, msg []byte) (c1, c2 *big.Int, err error) {
+       pLen := (pub.P.BitLen() + 7) / 8
+       if len(msg) > pLen-11 {
+               err = errors.New("elgamal: message too long")
+               return
+       }
+
+       // EM = 0x02 || PS || 0x00 || M
+       em := make([]byte, pLen-1)
+       em[0] = 2
+       ps, mm := em[1:len(em)-len(msg)-1], em[len(em)-len(msg):]
+       err = nonZeroRandomBytes(ps, random)
+       if err != nil {
+               return
+       }
+       em[len(em)-len(msg)-1] = 0
+       copy(mm, msg)
+
+       m := new(big.Int).SetBytes(em)
+
+       k, err := rand.Int(random, pub.P)
+       if err != nil {
+               return
+       }
+
+       c1 = new(big.Int).Exp(pub.G, k, pub.P)
+       s := new(big.Int).Exp(pub.Y, k, pub.P)
+       c2 = s.Mul(s, m)
+       c2.Mod(c2, pub.P)
+
+       return
+}
+
+// Decrypt takes two integers, resulting from an ElGamal encryption, and
+// returns the plaintext of the message. An error can result only if the
+// ciphertext is invalid. Users should keep in mind that this is a padding
+// oracle and thus, if exposed to an adaptive chosen ciphertext attack, can
+// be used to break the cryptosystem.  See ``Chosen Ciphertext Attacks
+// Against Protocols Based on the RSA Encryption Standard PKCS #1'', Daniel
+// Bleichenbacher, Advances in Cryptology (Crypto '98).
+func Decrypt(priv *PrivateKey, c1, c2 *big.Int) (msg []byte, err error) {
+       s := new(big.Int).Exp(c1, priv.X, priv.P)
+       s.ModInverse(s, priv.P)
+       s.Mul(s, c2)
+       s.Mod(s, priv.P)
+       em := s.Bytes()
+
+       firstByteIsTwo := subtle.ConstantTimeByteEq(em[0], 2)
+
+       // The remainder of the plaintext must be a string of non-zero random
+       // octets, followed by a 0, followed by the message.
+       //   lookingForIndex: 1 iff we are still looking for the zero.
+       //   index: the offset of the first zero byte.
+       var lookingForIndex, index int
+       lookingForIndex = 1
+
+       for i := 1; i < len(em); i++ {
+               equals0 := subtle.ConstantTimeByteEq(em[i], 0)
+               index = subtle.ConstantTimeSelect(lookingForIndex&equals0, i, index)
+               lookingForIndex = subtle.ConstantTimeSelect(equals0, 0, lookingForIndex)
+       }
+
+       if firstByteIsTwo != 1 || lookingForIndex != 0 || index < 9 {
+               return nil, errors.New("elgamal: decryption error")
+       }
+       return em[index+1:], nil
+}
+
+// nonZeroRandomBytes fills the given slice with non-zero random octets.
+func nonZeroRandomBytes(s []byte, rand io.Reader) (err error) {
+       _, err = io.ReadFull(rand, s)
+       if err != nil {
+               return
+       }
+
+       for i := 0; i < len(s); i++ {
+               for s[i] == 0 {
+                       _, err = io.ReadFull(rand, s[i:i+1])
+                       if err != nil {
+                               return
+                       }
+               }
+       }
+
+       return
+}
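
A toy roundtrip sketch for Encrypt and Decrypt. The parameters below are generated on the fly purely to keep the example self-contained: a 256-bit random prime with base 2 is far too weak for real use and 2 is not a vetted group generator, but the algebra still roundtrips.

package main

import (
        "crypto/rand"
        "fmt"
        "log"
        "math/big"

        "golang.org/x/crypto/openpgp/elgamal"
)

func main() {
        // Toy, insecure parameters; real deployments use a large safe prime
        // and a proper generator.
        p, err := rand.Prime(rand.Reader, 256)
        if err != nil {
                log.Fatal(err)
        }
        g := big.NewInt(2)

        x, err := rand.Int(rand.Reader, new(big.Int).Sub(p, big.NewInt(2)))
        if err != nil {
                log.Fatal(err)
        }
        priv := &elgamal.PrivateKey{
                PublicKey: elgamal.PublicKey{G: g, P: p, Y: new(big.Int).Exp(g, x, p)},
                X:         x,
        }

        // The message must fit in pLen-11 bytes, i.e. 21 bytes for a 256-bit P.
        c1, c2, err := elgamal.Encrypt(rand.Reader, &priv.PublicKey, []byte("hello elgamal"))
        if err != nil {
                log.Fatal(err)
        }
        msg, err := elgamal.Decrypt(priv, c1, c2)
        if err != nil {
                log.Fatal(err)
        }
        fmt.Printf("%s\n", msg)
}
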
diff --git a/vendor/golang.org/x/crypto/openpgp/errors/errors.go b/vendor/golang.org/x/crypto/openpgp/errors/errors.go
new file mode 100644 (file)
index 0000000..eb0550b
--- /dev/null
@@ -0,0 +1,72 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package errors contains common error types for the OpenPGP packages.
+package errors // import "golang.org/x/crypto/openpgp/errors"
+
+import (
+       "strconv"
+)
+
+// A StructuralError is returned when OpenPGP data is found to be syntactically
+// invalid.
+type StructuralError string
+
+func (s StructuralError) Error() string {
+       return "openpgp: invalid data: " + string(s)
+}
+
+// UnsupportedError indicates that, although the OpenPGP data is valid, it
+// makes use of currently unimplemented features.
+type UnsupportedError string
+
+func (s UnsupportedError) Error() string {
+       return "openpgp: unsupported feature: " + string(s)
+}
+
+// InvalidArgumentError indicates that the caller is in error and passed an
+// incorrect value.
+type InvalidArgumentError string
+
+func (i InvalidArgumentError) Error() string {
+       return "openpgp: invalid argument: " + string(i)
+}
+
+// SignatureError indicates that a syntactically valid signature failed to
+// validate.
+type SignatureError string
+
+func (b SignatureError) Error() string {
+       return "openpgp: invalid signature: " + string(b)
+}
+
+type keyIncorrectError int
+
+func (ki keyIncorrectError) Error() string {
+       return "openpgp: incorrect key"
+}
+
+var ErrKeyIncorrect error = keyIncorrectError(0)
+
+type unknownIssuerError int
+
+func (unknownIssuerError) Error() string {
+       return "openpgp: signature made by unknown entity"
+}
+
+var ErrUnknownIssuer error = unknownIssuerError(0)
+
+type keyRevokedError int
+
+func (keyRevokedError) Error() string {
+       return "openpgp: signature made by revoked key"
+}
+
+var ErrKeyRevoked error = keyRevokedError(0)
+
+type UnknownPacketTypeError uint8
+
+func (upte UnknownPacketTypeError) Error() string {
+       return "openpgp: unknown packet type: " + strconv.Itoa(int(upte))
+}
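
Because this package is itself named errors, callers usually import it under an alias. A small sketch of telling the exported error categories apart; the describe helper and its labels are illustrative only.

package main

import (
        "fmt"

        pgperrors "golang.org/x/crypto/openpgp/errors" // aliased to avoid clashing with the standard library errors package
)

// describe maps the package's exported error types to short labels.
func describe(err error) string {
        switch err.(type) {
        case pgperrors.StructuralError:
                return "malformed OpenPGP data"
        case pgperrors.UnsupportedError:
                return "valid but unsupported feature"
        case pgperrors.SignatureError:
                return "signature failed to validate"
        case pgperrors.UnknownPacketTypeError:
                return "unknown packet type"
        default:
                return "other error" // sentinels like ErrKeyIncorrect use unexported types
        }
}

func main() {
        fmt.Println(describe(pgperrors.UnsupportedError("hash function")))
        fmt.Println(describe(pgperrors.ErrKeyIncorrect))
}
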
diff --git a/vendor/golang.org/x/crypto/openpgp/keys.go b/vendor/golang.org/x/crypto/openpgp/keys.go
new file mode 100644 (file)
index 0000000..68b14c6
--- /dev/null
@@ -0,0 +1,637 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package openpgp
+
+import (
+       "crypto/rsa"
+       "io"
+       "time"
+
+       "golang.org/x/crypto/openpgp/armor"
+       "golang.org/x/crypto/openpgp/errors"
+       "golang.org/x/crypto/openpgp/packet"
+)
+
+// PublicKeyType is the armor type for a PGP public key.
+var PublicKeyType = "PGP PUBLIC KEY BLOCK"
+
+// PrivateKeyType is the armor type for a PGP private key.
+var PrivateKeyType = "PGP PRIVATE KEY BLOCK"
+
+// An Entity represents the components of an OpenPGP key: a primary public key
+// (which must be a signing key), one or more identities claimed by that key,
+// and zero or more subkeys, which may be encryption keys.
+type Entity struct {
+       PrimaryKey  *packet.PublicKey
+       PrivateKey  *packet.PrivateKey
+       Identities  map[string]*Identity // indexed by Identity.Name
+       Revocations []*packet.Signature
+       Subkeys     []Subkey
+}
+
+// An Identity represents an identity claimed by an Entity and zero or more
+// assertions by other entities about that claim.
+type Identity struct {
+       Name          string // by convention, has the form "Full Name (comment) <email@example.com>"
+       UserId        *packet.UserId
+       SelfSignature *packet.Signature
+       Signatures    []*packet.Signature
+}
+
+// A Subkey is an additional public key in an Entity. Subkeys can be used for
+// encryption.
+type Subkey struct {
+       PublicKey  *packet.PublicKey
+       PrivateKey *packet.PrivateKey
+       Sig        *packet.Signature
+}
+
+// A Key identifies a specific public key in an Entity. This is either the
+// Entity's primary key or a subkey.
+type Key struct {
+       Entity        *Entity
+       PublicKey     *packet.PublicKey
+       PrivateKey    *packet.PrivateKey
+       SelfSignature *packet.Signature
+}
+
+// A KeyRing provides access to public and private keys.
+type KeyRing interface {
+       // KeysById returns the set of keys that have the given key id.
+       KeysById(id uint64) []Key
+       // KeysByIdUsage returns the set of keys with the given id
+       // that also meet the key usage given by requiredUsage.
+       // The requiredUsage is expressed as the bitwise-OR of
+       // packet.KeyFlag* values.
+       KeysByIdUsage(id uint64, requiredUsage byte) []Key
+       // DecryptionKeys returns all private keys that are valid for
+       // decryption.
+       DecryptionKeys() []Key
+}
+
+// primaryIdentity returns the Identity marked as primary or the first identity
+// if none are so marked.
+func (e *Entity) primaryIdentity() *Identity {
+       var firstIdentity *Identity
+       for _, ident := range e.Identities {
+               if firstIdentity == nil {
+                       firstIdentity = ident
+               }
+               if ident.SelfSignature.IsPrimaryId != nil && *ident.SelfSignature.IsPrimaryId {
+                       return ident
+               }
+       }
+       return firstIdentity
+}
+
+// encryptionKey returns the best candidate Key for encrypting a message to the
+// given Entity.
+func (e *Entity) encryptionKey(now time.Time) (Key, bool) {
+       candidateSubkey := -1
+
+       // Iterate the keys to find the newest key
+       var maxTime time.Time
+       for i, subkey := range e.Subkeys {
+               if subkey.Sig.FlagsValid &&
+                       subkey.Sig.FlagEncryptCommunications &&
+                       subkey.PublicKey.PubKeyAlgo.CanEncrypt() &&
+                       !subkey.Sig.KeyExpired(now) &&
+                       (maxTime.IsZero() || subkey.Sig.CreationTime.After(maxTime)) {
+                       candidateSubkey = i
+                       maxTime = subkey.Sig.CreationTime
+               }
+       }
+
+       if candidateSubkey != -1 {
+               subkey := e.Subkeys[candidateSubkey]
+               return Key{e, subkey.PublicKey, subkey.PrivateKey, subkey.Sig}, true
+       }
+
+       // If we don't have any candidate subkeys for encryption and
+       // the primary key doesn't have any usage metadata then we
+       // assume that the primary key is ok. Or, if the primary key is
+       // marked as ok to encrypt to, then we can obviously use it.
+       i := e.primaryIdentity()
+       if !i.SelfSignature.FlagsValid || i.SelfSignature.FlagEncryptCommunications &&
+               e.PrimaryKey.PubKeyAlgo.CanEncrypt() &&
+               !i.SelfSignature.KeyExpired(now) {
+               return Key{e, e.PrimaryKey, e.PrivateKey, i.SelfSignature}, true
+       }
+
+       // This Entity appears to be signing only.
+       return Key{}, false
+}
+
+// signingKey returns the best candidate Key for signing a message with this
+// Entity.
+func (e *Entity) signingKey(now time.Time) (Key, bool) {
+       candidateSubkey := -1
+
+       for i, subkey := range e.Subkeys {
+               if subkey.Sig.FlagsValid &&
+                       subkey.Sig.FlagSign &&
+                       subkey.PublicKey.PubKeyAlgo.CanSign() &&
+                       !subkey.Sig.KeyExpired(now) {
+                       candidateSubkey = i
+                       break
+               }
+       }
+
+       if candidateSubkey != -1 {
+               subkey := e.Subkeys[candidateSubkey]
+               return Key{e, subkey.PublicKey, subkey.PrivateKey, subkey.Sig}, true
+       }
+
+       // If we have no candidate subkey then we assume that it's ok to sign
+       // with the primary key.
+       i := e.primaryIdentity()
+       if !i.SelfSignature.FlagsValid || i.SelfSignature.FlagSign &&
+               !i.SelfSignature.KeyExpired(now) {
+               return Key{e, e.PrimaryKey, e.PrivateKey, i.SelfSignature}, true
+       }
+
+       return Key{}, false
+}
+
+// An EntityList contains one or more Entities.
+type EntityList []*Entity
+
+// KeysById returns the set of keys that have the given key id.
+func (el EntityList) KeysById(id uint64) (keys []Key) {
+       for _, e := range el {
+               if e.PrimaryKey.KeyId == id {
+                       var selfSig *packet.Signature
+                       for _, ident := range e.Identities {
+                               if selfSig == nil {
+                                       selfSig = ident.SelfSignature
+                               } else if ident.SelfSignature.IsPrimaryId != nil && *ident.SelfSignature.IsPrimaryId {
+                                       selfSig = ident.SelfSignature
+                                       break
+                               }
+                       }
+                       keys = append(keys, Key{e, e.PrimaryKey, e.PrivateKey, selfSig})
+               }
+
+               for _, subKey := range e.Subkeys {
+                       if subKey.PublicKey.KeyId == id {
+                               keys = append(keys, Key{e, subKey.PublicKey, subKey.PrivateKey, subKey.Sig})
+                       }
+               }
+       }
+       return
+}
+
+// KeysByIdUsage returns the set of keys with the given id that also meet
+// the key usage given by requiredUsage.  The requiredUsage is expressed as
+// the bitwise-OR of packet.KeyFlag* values.
+func (el EntityList) KeysByIdUsage(id uint64, requiredUsage byte) (keys []Key) {
+       for _, key := range el.KeysById(id) {
+               if len(key.Entity.Revocations) > 0 {
+                       continue
+               }
+
+               if key.SelfSignature.RevocationReason != nil {
+                       continue
+               }
+
+               if key.SelfSignature.FlagsValid && requiredUsage != 0 {
+                       var usage byte
+                       if key.SelfSignature.FlagCertify {
+                               usage |= packet.KeyFlagCertify
+                       }
+                       if key.SelfSignature.FlagSign {
+                               usage |= packet.KeyFlagSign
+                       }
+                       if key.SelfSignature.FlagEncryptCommunications {
+                               usage |= packet.KeyFlagEncryptCommunications
+                       }
+                       if key.SelfSignature.FlagEncryptStorage {
+                               usage |= packet.KeyFlagEncryptStorage
+                       }
+                       if usage&requiredUsage != requiredUsage {
+                               continue
+                       }
+               }
+
+               keys = append(keys, key)
+       }
+       return
+}
+
+// DecryptionKeys returns all private keys that are valid for decryption.
+func (el EntityList) DecryptionKeys() (keys []Key) {
+       for _, e := range el {
+               for _, subKey := range e.Subkeys {
+                       if subKey.PrivateKey != nil && (!subKey.Sig.FlagsValid || subKey.Sig.FlagEncryptStorage || subKey.Sig.FlagEncryptCommunications) {
+                               keys = append(keys, Key{e, subKey.PublicKey, subKey.PrivateKey, subKey.Sig})
+                       }
+               }
+       }
+       return
+}
+
+// ReadArmoredKeyRing reads one or more public/private keys from an armored keyring.
+func ReadArmoredKeyRing(r io.Reader) (EntityList, error) {
+       block, err := armor.Decode(r)
+       if err == io.EOF {
+               return nil, errors.InvalidArgumentError("no armored data found")
+       }
+       if err != nil {
+               return nil, err
+       }
+       if block.Type != PublicKeyType && block.Type != PrivateKeyType {
+               return nil, errors.InvalidArgumentError("expected public or private key block, got: " + block.Type)
+       }
+
+       return ReadKeyRing(block.Body)
+}
+
+// ReadKeyRing reads one or more public/private keys. Unsupported keys are
+// ignored as long as at least a single valid key is found.
+func ReadKeyRing(r io.Reader) (el EntityList, err error) {
+       packets := packet.NewReader(r)
+       var lastUnsupportedError error
+
+       for {
+               var e *Entity
+               e, err = ReadEntity(packets)
+               if err != nil {
+                       // TODO: warn about skipped unsupported/unreadable keys
+                       if _, ok := err.(errors.UnsupportedError); ok {
+                               lastUnsupportedError = err
+                               err = readToNextPublicKey(packets)
+                       } else if _, ok := err.(errors.StructuralError); ok {
+                               // Skip unreadable, badly-formatted keys
+                               lastUnsupportedError = err
+                               err = readToNextPublicKey(packets)
+                       }
+                       if err == io.EOF {
+                               err = nil
+                               break
+                       }
+                       if err != nil {
+                               el = nil
+                               break
+                       }
+               } else {
+                       el = append(el, e)
+               }
+       }
+
+       if len(el) == 0 && err == nil {
+               err = lastUnsupportedError
+       }
+       return
+}
+
+// readToNextPublicKey reads packets until the start of the next entity and
+// leaves its first packet in the Reader.
+func readToNextPublicKey(packets *packet.Reader) (err error) {
+       var p packet.Packet
+       for {
+               p, err = packets.Next()
+               if err == io.EOF {
+                       return
+               } else if err != nil {
+                       if _, ok := err.(errors.UnsupportedError); ok {
+                               err = nil
+                               continue
+                       }
+                       return
+               }
+
+               if pk, ok := p.(*packet.PublicKey); ok && !pk.IsSubkey {
+                       packets.Unread(p)
+                       return
+               }
+       }
+}
+
+// ReadEntity reads an entity (public key, identities, subkeys, etc.) from the
+// given Reader.
+func ReadEntity(packets *packet.Reader) (*Entity, error) {
+       e := new(Entity)
+       e.Identities = make(map[string]*Identity)
+
+       p, err := packets.Next()
+       if err != nil {
+               return nil, err
+       }
+
+       var ok bool
+       if e.PrimaryKey, ok = p.(*packet.PublicKey); !ok {
+               if e.PrivateKey, ok = p.(*packet.PrivateKey); !ok {
+                       packets.Unread(p)
+                       return nil, errors.StructuralError("first packet was not a public/private key")
+               } else {
+                       e.PrimaryKey = &e.PrivateKey.PublicKey
+               }
+       }
+
+       if !e.PrimaryKey.PubKeyAlgo.CanSign() {
+               return nil, errors.StructuralError("primary key cannot be used for signatures")
+       }
+
+       var current *Identity
+       var revocations []*packet.Signature
+EachPacket:
+       for {
+               p, err := packets.Next()
+               if err == io.EOF {
+                       break
+               } else if err != nil {
+                       return nil, err
+               }
+
+               switch pkt := p.(type) {
+               case *packet.UserId:
+                       current = new(Identity)
+                       current.Name = pkt.Id
+                       current.UserId = pkt
+                       e.Identities[pkt.Id] = current
+
+                       for {
+                               p, err = packets.Next()
+                               if err == io.EOF {
+                                       return nil, io.ErrUnexpectedEOF
+                               } else if err != nil {
+                                       return nil, err
+                               }
+
+                               sig, ok := p.(*packet.Signature)
+                               if !ok {
+                                       return nil, errors.StructuralError("user ID packet not followed by self-signature")
+                               }
+
+                               if (sig.SigType == packet.SigTypePositiveCert || sig.SigType == packet.SigTypeGenericCert) && sig.IssuerKeyId != nil && *sig.IssuerKeyId == e.PrimaryKey.KeyId {
+                                       if err = e.PrimaryKey.VerifyUserIdSignature(pkt.Id, e.PrimaryKey, sig); err != nil {
+                                               return nil, errors.StructuralError("user ID self-signature invalid: " + err.Error())
+                                       }
+                                       current.SelfSignature = sig
+                                       break
+                               }
+                               current.Signatures = append(current.Signatures, sig)
+                       }
+               case *packet.Signature:
+                       if pkt.SigType == packet.SigTypeKeyRevocation {
+                               revocations = append(revocations, pkt)
+                       } else if pkt.SigType == packet.SigTypeDirectSignature {
+                               // TODO: RFC4880 5.2.1 permits signatures
+                               // directly on keys (eg. to bind additional
+                               // revocation keys).
+                       } else if current == nil {
+                               return nil, errors.StructuralError("signature packet found before user id packet")
+                       } else {
+                               current.Signatures = append(current.Signatures, pkt)
+                       }
+               case *packet.PrivateKey:
+                       if !pkt.IsSubkey {
+                               packets.Unread(p)
+                               break EachPacket
+                       }
+                       err = addSubkey(e, packets, &pkt.PublicKey, pkt)
+                       if err != nil {
+                               return nil, err
+                       }
+               case *packet.PublicKey:
+                       if !pkt.IsSubkey {
+                               packets.Unread(p)
+                               break EachPacket
+                       }
+                       err = addSubkey(e, packets, pkt, nil)
+                       if err != nil {
+                               return nil, err
+                       }
+               default:
+                       // we ignore unknown packets
+               }
+       }
+
+       if len(e.Identities) == 0 {
+               return nil, errors.StructuralError("entity without any identities")
+       }
+
+       for _, revocation := range revocations {
+               err = e.PrimaryKey.VerifyRevocationSignature(revocation)
+               if err == nil {
+                       e.Revocations = append(e.Revocations, revocation)
+               } else {
+                       // TODO: RFC 4880 5.2.3.15 defines revocation keys.
+                       return nil, errors.StructuralError("revocation signature signed by alternate key")
+               }
+       }
+
+       return e, nil
+}
+
+func addSubkey(e *Entity, packets *packet.Reader, pub *packet.PublicKey, priv *packet.PrivateKey) error {
+       var subKey Subkey
+       subKey.PublicKey = pub
+       subKey.PrivateKey = priv
+       p, err := packets.Next()
+       if err == io.EOF {
+               return io.ErrUnexpectedEOF
+       }
+       if err != nil {
+               return errors.StructuralError("subkey signature invalid: " + err.Error())
+       }
+       var ok bool
+       subKey.Sig, ok = p.(*packet.Signature)
+       if !ok {
+               return errors.StructuralError("subkey packet not followed by signature")
+       }
+       if subKey.Sig.SigType != packet.SigTypeSubkeyBinding && subKey.Sig.SigType != packet.SigTypeSubkeyRevocation {
+               return errors.StructuralError("subkey signature with wrong type")
+       }
+       err = e.PrimaryKey.VerifyKeySignature(subKey.PublicKey, subKey.Sig)
+       if err != nil {
+               return errors.StructuralError("subkey signature invalid: " + err.Error())
+       }
+       e.Subkeys = append(e.Subkeys, subKey)
+       return nil
+}
+
+const defaultRSAKeyBits = 2048
+
+// NewEntity returns an Entity that contains a fresh RSA/RSA keypair with a
+// single identity composed of the given full name, comment and email, any of
+// which may be empty but must not contain any of "()<>\x00".
+// If config is nil, sensible defaults will be used.
+func NewEntity(name, comment, email string, config *packet.Config) (*Entity, error) {
+       currentTime := config.Now()
+
+       bits := defaultRSAKeyBits
+       if config != nil && config.RSABits != 0 {
+               bits = config.RSABits
+       }
+
+       uid := packet.NewUserId(name, comment, email)
+       if uid == nil {
+               return nil, errors.InvalidArgumentError("user id field contained invalid characters")
+       }
+       signingPriv, err := rsa.GenerateKey(config.Random(), bits)
+       if err != nil {
+               return nil, err
+       }
+       encryptingPriv, err := rsa.GenerateKey(config.Random(), bits)
+       if err != nil {
+               return nil, err
+       }
+
+       e := &Entity{
+               PrimaryKey: packet.NewRSAPublicKey(currentTime, &signingPriv.PublicKey),
+               PrivateKey: packet.NewRSAPrivateKey(currentTime, signingPriv),
+               Identities: make(map[string]*Identity),
+       }
+       isPrimaryId := true
+       e.Identities[uid.Id] = &Identity{
+               Name:   uid.Name,
+               UserId: uid,
+               SelfSignature: &packet.Signature{
+                       CreationTime: currentTime,
+                       SigType:      packet.SigTypePositiveCert,
+                       PubKeyAlgo:   packet.PubKeyAlgoRSA,
+                       Hash:         config.Hash(),
+                       IsPrimaryId:  &isPrimaryId,
+                       FlagsValid:   true,
+                       FlagSign:     true,
+                       FlagCertify:  true,
+                       IssuerKeyId:  &e.PrimaryKey.KeyId,
+               },
+       }
+
+       // If the user passes in a DefaultHash via packet.Config,
+       // set the PreferredHash for the SelfSignature.
+       if config != nil && config.DefaultHash != 0 {
+               e.Identities[uid.Id].SelfSignature.PreferredHash = []uint8{hashToHashId(config.DefaultHash)}
+       }
+
+       e.Subkeys = make([]Subkey, 1)
+       e.Subkeys[0] = Subkey{
+               PublicKey:  packet.NewRSAPublicKey(currentTime, &encryptingPriv.PublicKey),
+               PrivateKey: packet.NewRSAPrivateKey(currentTime, encryptingPriv),
+               Sig: &packet.Signature{
+                       CreationTime:              currentTime,
+                       SigType:                   packet.SigTypeSubkeyBinding,
+                       PubKeyAlgo:                packet.PubKeyAlgoRSA,
+                       Hash:                      config.Hash(),
+                       FlagsValid:                true,
+                       FlagEncryptStorage:        true,
+                       FlagEncryptCommunications: true,
+                       IssuerKeyId:               &e.PrimaryKey.KeyId,
+               },
+       }
+       e.Subkeys[0].PublicKey.IsSubkey = true
+       e.Subkeys[0].PrivateKey.IsSubkey = true
+
+       return e, nil
+}
+
+// SerializePrivate serializes an Entity, including private key material, to
+// the given Writer. For now, it must only be used on an Entity returned from
+// NewEntity.
+// If config is nil, sensible defaults will be used.
+func (e *Entity) SerializePrivate(w io.Writer, config *packet.Config) (err error) {
+       err = e.PrivateKey.Serialize(w)
+       if err != nil {
+               return
+       }
+       for _, ident := range e.Identities {
+               err = ident.UserId.Serialize(w)
+               if err != nil {
+                       return
+               }
+               err = ident.SelfSignature.SignUserId(ident.UserId.Id, e.PrimaryKey, e.PrivateKey, config)
+               if err != nil {
+                       return
+               }
+               err = ident.SelfSignature.Serialize(w)
+               if err != nil {
+                       return
+               }
+       }
+       for _, subkey := range e.Subkeys {
+               err = subkey.PrivateKey.Serialize(w)
+               if err != nil {
+                       return
+               }
+               err = subkey.Sig.SignKey(subkey.PublicKey, e.PrivateKey, config)
+               if err != nil {
+                       return
+               }
+               err = subkey.Sig.Serialize(w)
+               if err != nil {
+                       return
+               }
+       }
+       return nil
+}
+
+// Serialize writes the public part of the given Entity to w. (No private
+// key material will be output).
+func (e *Entity) Serialize(w io.Writer) error {
+       err := e.PrimaryKey.Serialize(w)
+       if err != nil {
+               return err
+       }
+       for _, ident := range e.Identities {
+               err = ident.UserId.Serialize(w)
+               if err != nil {
+                       return err
+               }
+               err = ident.SelfSignature.Serialize(w)
+               if err != nil {
+                       return err
+               }
+               for _, sig := range ident.Signatures {
+                       err = sig.Serialize(w)
+                       if err != nil {
+                               return err
+                       }
+               }
+       }
+       for _, subkey := range e.Subkeys {
+               err = subkey.PublicKey.Serialize(w)
+               if err != nil {
+                       return err
+               }
+               err = subkey.Sig.Serialize(w)
+               if err != nil {
+                       return err
+               }
+       }
+       return nil
+}
+
+// SignIdentity adds a signature to e, from signer, attesting that identity is
+// associated with e. The provided identity must already be an element of
+// e.Identities and the private key of signer must have been decrypted if
+// necessary.
+// If config is nil, sensible defaults will be used.
+func (e *Entity) SignIdentity(identity string, signer *Entity, config *packet.Config) error {
+       if signer.PrivateKey == nil {
+               return errors.InvalidArgumentError("signing Entity must have a private key")
+       }
+       if signer.PrivateKey.Encrypted {
+               return errors.InvalidArgumentError("signing Entity's private key must be decrypted")
+       }
+       ident, ok := e.Identities[identity]
+       if !ok {
+               return errors.InvalidArgumentError("given identity string not found in Entity")
+       }
+
+       sig := &packet.Signature{
+               SigType:      packet.SigTypeGenericCert,
+               PubKeyAlgo:   signer.PrivateKey.PubKeyAlgo,
+               Hash:         config.Hash(),
+               CreationTime: config.Now(),
+               IssuerKeyId:  &signer.PrivateKey.KeyId,
+       }
+       if err := sig.SignUserId(identity, e.PrimaryKey, signer.PrivateKey, config); err != nil {
+               return err
+       }
+       ident.Signatures = append(ident.Signatures, sig)
+       return nil
+}
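
Taken together, NewEntity, SerializePrivate and Serialize above form a key-generation round trip. A minimal sketch, assuming the identity strings, buffer names and the 2048-bit key size are placeholders; the blank crypto/sha256 import registers the default self-signature hash:

    package main

    import (
        "bytes"
        _ "crypto/sha256" // registers SHA-256, the default self-signature hash
        "fmt"
        "log"

        "golang.org/x/crypto/openpgp"
        "golang.org/x/crypto/openpgp/packet"
    )

    func main() {
        // Generate a primary signing key plus an encryption subkey.
        cfg := &packet.Config{RSABits: 2048}
        entity, err := openpgp.NewEntity("Alice Example", "test key", "alice@example.com", cfg)
        if err != nil {
            log.Fatal(err)
        }

        // SerializePrivate signs the self-certifications as it writes, so it
        // must run before Serialize on an entity fresh from NewEntity.
        var priv, pub bytes.Buffer
        if err := entity.SerializePrivate(&priv, cfg); err != nil {
            log.Fatal(err)
        }
        if err := entity.Serialize(&pub); err != nil {
            log.Fatal(err)
        }
        fmt.Printf("private: %d bytes, public: %d bytes\n", priv.Len(), pub.Len())
    }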
diff --git a/vendor/golang.org/x/crypto/openpgp/packet/compressed.go b/vendor/golang.org/x/crypto/openpgp/packet/compressed.go
new file mode 100644 (file)
index 0000000..e8f0b5c
--- /dev/null
@@ -0,0 +1,123 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+       "compress/bzip2"
+       "compress/flate"
+       "compress/zlib"
+       "golang.org/x/crypto/openpgp/errors"
+       "io"
+       "strconv"
+)
+
+// Compressed represents a compressed OpenPGP packet. The decompressed contents
+// will contain more OpenPGP packets. See RFC 4880, section 5.6.
+type Compressed struct {
+       Body io.Reader
+}
+
+const (
+       NoCompression      = flate.NoCompression
+       BestSpeed          = flate.BestSpeed
+       BestCompression    = flate.BestCompression
+       DefaultCompression = flate.DefaultCompression
+)
+
+// CompressionConfig contains compressor configuration settings.
+type CompressionConfig struct {
+       // Level is the compression level to use. It must be set to
+       // between -1 and 9, with -1 causing the compressor to use the
+       // default compression level, 0 causing the compressor to use
+       // no compression and 1 to 9 representing increasing (better,
+       // slower) compression levels. If Level is less than -1 or
+       // more than 9, a non-nil error will be returned during
+       // encryption. See the constants above for convenient common
+       // settings for Level.
+       Level int
+}
+
+func (c *Compressed) parse(r io.Reader) error {
+       var buf [1]byte
+       _, err := readFull(r, buf[:])
+       if err != nil {
+               return err
+       }
+
+       switch buf[0] {
+       case 1:
+               c.Body = flate.NewReader(r)
+       case 2:
+               c.Body, err = zlib.NewReader(r)
+       case 3:
+               c.Body = bzip2.NewReader(r)
+       default:
+               err = errors.UnsupportedError("unknown compression algorithm: " + strconv.Itoa(int(buf[0])))
+       }
+
+       return err
+}
+
+// compressedWriteCloser represents the serialized compression stream
+// header and the compressor. Its Close() method ensures that both the
+// compressor and serialized stream header are closed. Its Write()
+// method writes to the compressor.
+type compressedWriteCloser struct {
+       sh io.Closer      // Stream Header
+       c  io.WriteCloser // Compressor
+}
+
+func (cwc compressedWriteCloser) Write(p []byte) (int, error) {
+       return cwc.c.Write(p)
+}
+
+func (cwc compressedWriteCloser) Close() (err error) {
+       err = cwc.c.Close()
+       if err != nil {
+               return err
+       }
+
+       return cwc.sh.Close()
+}
+
+// SerializeCompressed serializes a compressed data packet to w and
+// returns a WriteCloser to which the literal data packets themselves
+// can be written and which MUST be closed on completion. If cc is
+// nil, sensible defaults will be used to configure the compression
+// algorithm.
+func SerializeCompressed(w io.WriteCloser, algo CompressionAlgo, cc *CompressionConfig) (literaldata io.WriteCloser, err error) {
+       compressed, err := serializeStreamHeader(w, packetTypeCompressed)
+       if err != nil {
+               return
+       }
+
+       _, err = compressed.Write([]byte{uint8(algo)})
+       if err != nil {
+               return
+       }
+
+       level := DefaultCompression
+       if cc != nil {
+               level = cc.Level
+       }
+
+       var compressor io.WriteCloser
+       switch algo {
+       case CompressionZIP:
+               compressor, err = flate.NewWriter(compressed, level)
+       case CompressionZLIB:
+               compressor, err = zlib.NewWriterLevel(compressed, level)
+       default:
+               s := strconv.Itoa(int(algo))
+               err = errors.UnsupportedError("Unsupported compression algorithm: " + s)
+       }
+       if err != nil {
+               return
+       }
+
+       literaldata = compressedWriteCloser{compressed, compressor}
+
+       return
+}
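
A short usage sketch for SerializeCompressed; the output path is arbitrary, and note that closing the returned WriteCloser also closes the underlying writer via the stream header:

    package main

    import (
        "log"
        "os"

        "golang.org/x/crypto/openpgp/packet"
    )

    func main() {
        out, err := os.Create("compressed.pgp") // placeholder output path
        if err != nil {
            log.Fatal(err)
        }
        // Wrap the file in a compressed-data packet using ZLIB at the default level.
        body, err := packet.SerializeCompressed(out, packet.CompressionZLIB, nil)
        if err != nil {
            log.Fatal(err)
        }
        if _, err := body.Write([]byte("inner OpenPGP packets go here")); err != nil {
            log.Fatal(err)
        }
        // Close flushes the compressor, terminates the partial-length stream
        // and closes the underlying file.
        if err := body.Close(); err != nil {
            log.Fatal(err)
        }
    }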
diff --git a/vendor/golang.org/x/crypto/openpgp/packet/config.go b/vendor/golang.org/x/crypto/openpgp/packet/config.go
new file mode 100644 (file)
index 0000000..c76eecc
--- /dev/null
@@ -0,0 +1,91 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+       "crypto"
+       "crypto/rand"
+       "io"
+       "time"
+)
+
+// Config collects a number of parameters along with sensible defaults.
+// A nil *Config is valid and results in all default values.
+type Config struct {
+       // Rand provides the source of entropy.
+       // If nil, the crypto/rand Reader is used.
+       Rand io.Reader
+       // DefaultHash is the default hash function to be used.
+       // If zero, SHA-256 is used.
+       DefaultHash crypto.Hash
+       // DefaultCipher is the cipher to be used.
+       // If zero, AES-128 is used.
+       DefaultCipher CipherFunction
+       // Time returns the current time as the number of seconds since the
+       // epoch. If Time is nil, time.Now is used.
+       Time func() time.Time
+       // DefaultCompressionAlgo is the compression algorithm to be
+       // applied to the plaintext before encryption. If zero, no
+       // compression is done.
+       DefaultCompressionAlgo CompressionAlgo
+       // CompressionConfig configures the compression settings.
+       CompressionConfig *CompressionConfig
+       // S2KCount is only used for symmetric encryption. It
+       // determines the strength of the passphrase stretching when
+       // the said passphrase is hashed to produce a key. S2KCount
+       // should be between 1024 and 65011712, inclusive. If Config
+       // is nil or S2KCount is 0, the value 65536 is used. Not all
+       // values in the above range can be represented. S2KCount will
+       // be rounded up to the next representable value if it cannot
+       // be encoded exactly. When set, it is strongly encouraged to
+       // use a value that is at least 65536. See RFC 4880 Section
+       // 3.7.1.3.
+       S2KCount int
+       // RSABits is the number of bits in new RSA keys made with NewEntity.
+       // If zero, then 2048 bit keys are created.
+       RSABits int
+}
+
+func (c *Config) Random() io.Reader {
+       if c == nil || c.Rand == nil {
+               return rand.Reader
+       }
+       return c.Rand
+}
+
+func (c *Config) Hash() crypto.Hash {
+       if c == nil || uint(c.DefaultHash) == 0 {
+               return crypto.SHA256
+       }
+       return c.DefaultHash
+}
+
+func (c *Config) Cipher() CipherFunction {
+       if c == nil || uint8(c.DefaultCipher) == 0 {
+               return CipherAES128
+       }
+       return c.DefaultCipher
+}
+
+func (c *Config) Now() time.Time {
+       if c == nil || c.Time == nil {
+               return time.Now()
+       }
+       return c.Time()
+}
+
+func (c *Config) Compression() CompressionAlgo {
+       if c == nil {
+               return CompressionNone
+       }
+       return c.DefaultCompressionAlgo
+}
+
+func (c *Config) PasswordHashIterations() int {
+       if c == nil || c.S2KCount == 0 {
+               return 0
+       }
+       return c.S2KCount
+}
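
Because every accessor checks for a nil receiver, callers can pass a nil *Config throughout and still get the documented defaults; a quick sketch:

    package main

    import (
        "crypto"
        "fmt"

        "golang.org/x/crypto/openpgp/packet"
    )

    func main() {
        var cfg *packet.Config // nil is valid: all defaults apply
        fmt.Println(cfg.Hash() == crypto.SHA256)                 // true
        fmt.Println(cfg.Cipher() == packet.CipherAES128)         // true
        fmt.Println(cfg.Compression() == packet.CompressionNone) // true

        // Setting a field overrides only that default.
        cfg = &packet.Config{DefaultHash: crypto.SHA512, RSABits: 4096}
        fmt.Println(cfg.Hash() == crypto.SHA512) // true
    }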
diff --git a/vendor/golang.org/x/crypto/openpgp/packet/encrypted_key.go b/vendor/golang.org/x/crypto/openpgp/packet/encrypted_key.go
new file mode 100644 (file)
index 0000000..266840d
--- /dev/null
@@ -0,0 +1,199 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+       "crypto/rsa"
+       "encoding/binary"
+       "io"
+       "math/big"
+       "strconv"
+
+       "golang.org/x/crypto/openpgp/elgamal"
+       "golang.org/x/crypto/openpgp/errors"
+)
+
+const encryptedKeyVersion = 3
+
+// EncryptedKey represents a public-key encrypted session key. See RFC 4880,
+// section 5.1.
+type EncryptedKey struct {
+       KeyId      uint64
+       Algo       PublicKeyAlgorithm
+       CipherFunc CipherFunction // only valid after a successful Decrypt
+       Key        []byte         // only valid after a successful Decrypt
+
+       encryptedMPI1, encryptedMPI2 parsedMPI
+}
+
+func (e *EncryptedKey) parse(r io.Reader) (err error) {
+       var buf [10]byte
+       _, err = readFull(r, buf[:])
+       if err != nil {
+               return
+       }
+       if buf[0] != encryptedKeyVersion {
+               return errors.UnsupportedError("unknown EncryptedKey version " + strconv.Itoa(int(buf[0])))
+       }
+       e.KeyId = binary.BigEndian.Uint64(buf[1:9])
+       e.Algo = PublicKeyAlgorithm(buf[9])
+       switch e.Algo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly:
+               e.encryptedMPI1.bytes, e.encryptedMPI1.bitLength, err = readMPI(r)
+       case PubKeyAlgoElGamal:
+               e.encryptedMPI1.bytes, e.encryptedMPI1.bitLength, err = readMPI(r)
+               if err != nil {
+                       return
+               }
+               e.encryptedMPI2.bytes, e.encryptedMPI2.bitLength, err = readMPI(r)
+       }
+       _, err = consumeAll(r)
+       return
+}
+
+func checksumKeyMaterial(key []byte) uint16 {
+       var checksum uint16
+       for _, v := range key {
+               checksum += uint16(v)
+       }
+       return checksum
+}
+
+// Decrypt decrypts an encrypted session key with the given private key. The
+// private key must have been decrypted first.
+// If config is nil, sensible defaults will be used.
+func (e *EncryptedKey) Decrypt(priv *PrivateKey, config *Config) error {
+       var err error
+       var b []byte
+
+       // TODO(agl): use session key decryption routines here to avoid
+       // padding oracle attacks.
+       switch priv.PubKeyAlgo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly:
+               b, err = rsa.DecryptPKCS1v15(config.Random(), priv.PrivateKey.(*rsa.PrivateKey), e.encryptedMPI1.bytes)
+       case PubKeyAlgoElGamal:
+               c1 := new(big.Int).SetBytes(e.encryptedMPI1.bytes)
+               c2 := new(big.Int).SetBytes(e.encryptedMPI2.bytes)
+               b, err = elgamal.Decrypt(priv.PrivateKey.(*elgamal.PrivateKey), c1, c2)
+       default:
+               err = errors.InvalidArgumentError("cannot decrypt encrypted session key with private key of type " + strconv.Itoa(int(priv.PubKeyAlgo)))
+       }
+
+       if err != nil {
+               return err
+       }
+
+       e.CipherFunc = CipherFunction(b[0])
+       e.Key = b[1 : len(b)-2]
+       expectedChecksum := uint16(b[len(b)-2])<<8 | uint16(b[len(b)-1])
+       checksum := checksumKeyMaterial(e.Key)
+       if checksum != expectedChecksum {
+               return errors.StructuralError("EncryptedKey checksum incorrect")
+       }
+
+       return nil
+}
+
+// Serialize writes the encrypted key packet, e, to w.
+func (e *EncryptedKey) Serialize(w io.Writer) error {
+       var mpiLen int
+       switch e.Algo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly:
+               mpiLen = 2 + len(e.encryptedMPI1.bytes)
+       case PubKeyAlgoElGamal:
+               mpiLen = 2 + len(e.encryptedMPI1.bytes) + 2 + len(e.encryptedMPI2.bytes)
+       default:
+               return errors.InvalidArgumentError("don't know how to serialize encrypted key type " + strconv.Itoa(int(e.Algo)))
+       }
+
+       serializeHeader(w, packetTypeEncryptedKey, 1 /* version */ +8 /* key id */ +1 /* algo */ +mpiLen)
+
+       w.Write([]byte{encryptedKeyVersion})
+       binary.Write(w, binary.BigEndian, e.KeyId)
+       w.Write([]byte{byte(e.Algo)})
+
+       switch e.Algo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly:
+               writeMPIs(w, e.encryptedMPI1)
+       case PubKeyAlgoElGamal:
+               writeMPIs(w, e.encryptedMPI1, e.encryptedMPI2)
+       default:
+               panic("internal error")
+       }
+
+       return nil
+}
+
+// SerializeEncryptedKey serializes an encrypted key packet to w that contains
+// key, encrypted to pub.
+// If config is nil, sensible defaults will be used.
+func SerializeEncryptedKey(w io.Writer, pub *PublicKey, cipherFunc CipherFunction, key []byte, config *Config) error {
+       var buf [10]byte
+       buf[0] = encryptedKeyVersion
+       binary.BigEndian.PutUint64(buf[1:9], pub.KeyId)
+       buf[9] = byte(pub.PubKeyAlgo)
+
+       keyBlock := make([]byte, 1 /* cipher type */ +len(key)+2 /* checksum */)
+       keyBlock[0] = byte(cipherFunc)
+       copy(keyBlock[1:], key)
+       checksum := checksumKeyMaterial(key)
+       keyBlock[1+len(key)] = byte(checksum >> 8)
+       keyBlock[1+len(key)+1] = byte(checksum)
+
+       switch pub.PubKeyAlgo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly:
+               return serializeEncryptedKeyRSA(w, config.Random(), buf, pub.PublicKey.(*rsa.PublicKey), keyBlock)
+       case PubKeyAlgoElGamal:
+               return serializeEncryptedKeyElGamal(w, config.Random(), buf, pub.PublicKey.(*elgamal.PublicKey), keyBlock)
+       case PubKeyAlgoDSA, PubKeyAlgoRSASignOnly:
+               return errors.InvalidArgumentError("cannot encrypt to public key of type " + strconv.Itoa(int(pub.PubKeyAlgo)))
+       }
+
+       return errors.UnsupportedError("encrypting a key to public key of type " + strconv.Itoa(int(pub.PubKeyAlgo)))
+}
+
+func serializeEncryptedKeyRSA(w io.Writer, rand io.Reader, header [10]byte, pub *rsa.PublicKey, keyBlock []byte) error {
+       cipherText, err := rsa.EncryptPKCS1v15(rand, pub, keyBlock)
+       if err != nil {
+               return errors.InvalidArgumentError("RSA encryption failed: " + err.Error())
+       }
+
+       packetLen := 10 /* header length */ + 2 /* mpi size */ + len(cipherText)
+
+       err = serializeHeader(w, packetTypeEncryptedKey, packetLen)
+       if err != nil {
+               return err
+       }
+       _, err = w.Write(header[:])
+       if err != nil {
+               return err
+       }
+       return writeMPI(w, 8*uint16(len(cipherText)), cipherText)
+}
+
+func serializeEncryptedKeyElGamal(w io.Writer, rand io.Reader, header [10]byte, pub *elgamal.PublicKey, keyBlock []byte) error {
+       c1, c2, err := elgamal.Encrypt(rand, pub, keyBlock)
+       if err != nil {
+               return errors.InvalidArgumentError("ElGamal encryption failed: " + err.Error())
+       }
+
+       packetLen := 10 /* header length */
+       packetLen += 2 /* mpi size */ + (c1.BitLen()+7)/8
+       packetLen += 2 /* mpi size */ + (c2.BitLen()+7)/8
+
+       err = serializeHeader(w, packetTypeEncryptedKey, packetLen)
+       if err != nil {
+               return err
+       }
+       _, err = w.Write(header[:])
+       if err != nil {
+               return err
+       }
+       err = writeBig(w, c1)
+       if err != nil {
+               return err
+       }
+       return writeBig(w, c2)
+}
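
A sketch of the session-key round trip: SerializeEncryptedKey encrypts a random AES-128 key to the encryption subkey of a throwaway entity, and EncryptedKey.Decrypt recovers it. The identity strings are placeholders.

    package main

    import (
        "bytes"
        "crypto/rand"
        "fmt"
        "log"

        "golang.org/x/crypto/openpgp"
        "golang.org/x/crypto/openpgp/packet"
    )

    func main() {
        entity, err := openpgp.NewEntity("Bob Example", "", "bob@example.com", nil)
        if err != nil {
            log.Fatal(err)
        }
        sub := entity.Subkeys[0] // the encryption subkey

        sessionKey := make([]byte, packet.CipherAES128.KeySize())
        if _, err := rand.Read(sessionKey); err != nil {
            log.Fatal(err)
        }

        var buf bytes.Buffer
        if err := packet.SerializeEncryptedKey(&buf, sub.PublicKey, packet.CipherAES128, sessionKey, nil); err != nil {
            log.Fatal(err)
        }

        p, err := packet.Read(&buf)
        if err != nil {
            log.Fatal(err)
        }
        ek := p.(*packet.EncryptedKey)
        if err := ek.Decrypt(sub.PrivateKey, nil); err != nil {
            log.Fatal(err)
        }
        fmt.Println("session key recovered:", bytes.Equal(ek.Key, sessionKey))
    }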
diff --git a/vendor/golang.org/x/crypto/openpgp/packet/literal.go b/vendor/golang.org/x/crypto/openpgp/packet/literal.go
new file mode 100644 (file)
index 0000000..1a9ec6e
--- /dev/null
@@ -0,0 +1,89 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+       "encoding/binary"
+       "io"
+)
+
+// LiteralData represents the contents of a file as a literal data packet. See RFC 4880, section 5.9.
+type LiteralData struct {
+       IsBinary bool
+       FileName string
+       Time     uint32 // Unix epoch time. Either creation time or modification time. 0 means undefined.
+       Body     io.Reader
+}
+
+// ForEyesOnly returns whether the contents of the LiteralData have been marked
+// as especially sensitive.
+func (l *LiteralData) ForEyesOnly() bool {
+       return l.FileName == "_CONSOLE"
+}
+
+func (l *LiteralData) parse(r io.Reader) (err error) {
+       var buf [256]byte
+
+       _, err = readFull(r, buf[:2])
+       if err != nil {
+               return
+       }
+
+       l.IsBinary = buf[0] == 'b'
+       fileNameLen := int(buf[1])
+
+       _, err = readFull(r, buf[:fileNameLen])
+       if err != nil {
+               return
+       }
+
+       l.FileName = string(buf[:fileNameLen])
+
+       _, err = readFull(r, buf[:4])
+       if err != nil {
+               return
+       }
+
+       l.Time = binary.BigEndian.Uint32(buf[:4])
+       l.Body = r
+       return
+}
+
+// SerializeLiteral serializes a literal data packet to w and returns a
+// WriteCloser to which the data itself can be written and which MUST be closed
+// on completion. The fileName is truncated to 255 bytes.
+func SerializeLiteral(w io.WriteCloser, isBinary bool, fileName string, time uint32) (plaintext io.WriteCloser, err error) {
+       var buf [4]byte
+       buf[0] = 't'
+       if isBinary {
+               buf[0] = 'b'
+       }
+       if len(fileName) > 255 {
+               fileName = fileName[:255]
+       }
+       buf[1] = byte(len(fileName))
+
+       inner, err := serializeStreamHeader(w, packetTypeLiteralData)
+       if err != nil {
+               return
+       }
+
+       _, err = inner.Write(buf[:2])
+       if err != nil {
+               return
+       }
+       _, err = inner.Write([]byte(fileName))
+       if err != nil {
+               return
+       }
+       binary.BigEndian.PutUint32(buf[:], time)
+       _, err = inner.Write(buf[:])
+       if err != nil {
+               return
+       }
+
+       plaintext = inner
+       return
+}
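
SerializeLiteral wants an io.WriteCloser, so an in-memory buffer needs a small adapter. A round-trip sketch under that assumption; the file name and contents are arbitrary:

    package main

    import (
        "bytes"
        "fmt"
        "io/ioutil"
        "log"
        "time"

        "golang.org/x/crypto/openpgp/packet"
    )

    // nopCloser adapts a *bytes.Buffer to the io.WriteCloser that
    // SerializeLiteral expects; Close is a no-op.
    type nopCloser struct{ *bytes.Buffer }

    func (nopCloser) Close() error { return nil }

    func main() {
        var buf bytes.Buffer
        w, err := packet.SerializeLiteral(nopCloser{&buf}, true, "hello.txt", uint32(time.Now().Unix()))
        if err != nil {
            log.Fatal(err)
        }
        if _, err := w.Write([]byte("hello, world\n")); err != nil {
            log.Fatal(err)
        }
        if err := w.Close(); err != nil { // terminates the partial-length stream
            log.Fatal(err)
        }

        // Read the packet back and recover the literal data.
        p, err := packet.Read(&buf)
        if err != nil {
            log.Fatal(err)
        }
        lit := p.(*packet.LiteralData)
        body, err := ioutil.ReadAll(lit.Body)
        if err != nil {
            log.Fatal(err)
        }
        fmt.Printf("%q from %q\n", body, lit.FileName)
    }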
diff --git a/vendor/golang.org/x/crypto/openpgp/packet/ocfb.go b/vendor/golang.org/x/crypto/openpgp/packet/ocfb.go
new file mode 100644 (file)
index 0000000..ce2a33a
--- /dev/null
@@ -0,0 +1,143 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// OpenPGP CFB Mode. http://tools.ietf.org/html/rfc4880#section-13.9
+
+package packet
+
+import (
+       "crypto/cipher"
+)
+
+type ocfbEncrypter struct {
+       b       cipher.Block
+       fre     []byte
+       outUsed int
+}
+
+// An OCFBResyncOption determines if the "resynchronization step" of OCFB is
+// performed.
+type OCFBResyncOption bool
+
+const (
+       OCFBResync   OCFBResyncOption = true
+       OCFBNoResync OCFBResyncOption = false
+)
+
+// NewOCFBEncrypter returns a cipher.Stream which encrypts data with OpenPGP's
+// cipher feedback mode using the given cipher.Block, and an initial amount of
+// ciphertext.  randData must be random bytes and be the same length as the
+// cipher.Block's block size. Resync determines if the "resynchronization step"
+// from RFC 4880, 13.9 step 7 is performed. Different parts of OpenPGP vary on
+// this point.
+func NewOCFBEncrypter(block cipher.Block, randData []byte, resync OCFBResyncOption) (cipher.Stream, []byte) {
+       blockSize := block.BlockSize()
+       if len(randData) != blockSize {
+               return nil, nil
+       }
+
+       x := &ocfbEncrypter{
+               b:       block,
+               fre:     make([]byte, blockSize),
+               outUsed: 0,
+       }
+       prefix := make([]byte, blockSize+2)
+
+       block.Encrypt(x.fre, x.fre)
+       for i := 0; i < blockSize; i++ {
+               prefix[i] = randData[i] ^ x.fre[i]
+       }
+
+       block.Encrypt(x.fre, prefix[:blockSize])
+       prefix[blockSize] = x.fre[0] ^ randData[blockSize-2]
+       prefix[blockSize+1] = x.fre[1] ^ randData[blockSize-1]
+
+       if resync {
+               block.Encrypt(x.fre, prefix[2:])
+       } else {
+               x.fre[0] = prefix[blockSize]
+               x.fre[1] = prefix[blockSize+1]
+               x.outUsed = 2
+       }
+       return x, prefix
+}
+
+func (x *ocfbEncrypter) XORKeyStream(dst, src []byte) {
+       for i := 0; i < len(src); i++ {
+               if x.outUsed == len(x.fre) {
+                       x.b.Encrypt(x.fre, x.fre)
+                       x.outUsed = 0
+               }
+
+               x.fre[x.outUsed] ^= src[i]
+               dst[i] = x.fre[x.outUsed]
+               x.outUsed++
+       }
+}
+
+type ocfbDecrypter struct {
+       b       cipher.Block
+       fre     []byte
+       outUsed int
+}
+
+// NewOCFBDecrypter returns a cipher.Stream which decrypts data with OpenPGP's
+// cipher feedback mode using the given cipher.Block. Prefix must be the first
+// blockSize + 2 bytes of the ciphertext, where blockSize is the cipher.Block's
+// block size. If an incorrect key is detected then nil is returned. On
+// successful exit, blockSize+2 bytes of decrypted data are written into
+// prefix. Resync determines if the "resynchronization step" from RFC 4880,
+// 13.9 step 7 is performed. Different parts of OpenPGP vary on this point.
+func NewOCFBDecrypter(block cipher.Block, prefix []byte, resync OCFBResyncOption) cipher.Stream {
+       blockSize := block.BlockSize()
+       if len(prefix) != blockSize+2 {
+               return nil
+       }
+
+       x := &ocfbDecrypter{
+               b:       block,
+               fre:     make([]byte, blockSize),
+               outUsed: 0,
+       }
+       prefixCopy := make([]byte, len(prefix))
+       copy(prefixCopy, prefix)
+
+       block.Encrypt(x.fre, x.fre)
+       for i := 0; i < blockSize; i++ {
+               prefixCopy[i] ^= x.fre[i]
+       }
+
+       block.Encrypt(x.fre, prefix[:blockSize])
+       prefixCopy[blockSize] ^= x.fre[0]
+       prefixCopy[blockSize+1] ^= x.fre[1]
+
+       if prefixCopy[blockSize-2] != prefixCopy[blockSize] ||
+               prefixCopy[blockSize-1] != prefixCopy[blockSize+1] {
+               return nil
+       }
+
+       if resync {
+               block.Encrypt(x.fre, prefix[2:])
+       } else {
+               x.fre[0] = prefix[blockSize]
+               x.fre[1] = prefix[blockSize+1]
+               x.outUsed = 2
+       }
+       copy(prefix, prefixCopy)
+       return x
+}
+
+func (x *ocfbDecrypter) XORKeyStream(dst, src []byte) {
+       for i := 0; i < len(src); i++ {
+               if x.outUsed == len(x.fre) {
+                       x.b.Encrypt(x.fre, x.fre)
+                       x.outUsed = 0
+               }
+
+               c := src[i]
+               dst[i] = x.fre[x.outUsed] ^ src[i]
+               x.fre[x.outUsed] = c
+               x.outUsed++
+       }
+}
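
The two constructors pair up into an encrypt/decrypt round trip. A sketch with AES-128 and the resync step enabled; key and randData come from crypto/rand here, as they should in practice:

    package main

    import (
        "bytes"
        "crypto/aes"
        "crypto/rand"
        "fmt"
        "log"

        "golang.org/x/crypto/openpgp/packet"
    )

    func main() {
        key := make([]byte, 16)
        randData := make([]byte, aes.BlockSize)
        if _, err := rand.Read(key); err != nil {
            log.Fatal(err)
        }
        if _, err := rand.Read(randData); err != nil {
            log.Fatal(err)
        }
        block, err := aes.NewCipher(key)
        if err != nil {
            log.Fatal(err)
        }

        plaintext := []byte("attack at dawn")
        enc, prefix := packet.NewOCFBEncrypter(block, randData, packet.OCFBResync)
        ciphertext := make([]byte, len(plaintext))
        enc.XORKeyStream(ciphertext, plaintext)

        // The decrypter consumes the blockSize+2 byte prefix and checks the
        // duplicated bytes; a nil return signals a wrong key.
        dec := packet.NewOCFBDecrypter(block, prefix, packet.OCFBResync)
        if dec == nil {
            log.Fatal("key check failed")
        }
        recovered := make([]byte, len(ciphertext))
        dec.XORKeyStream(recovered, ciphertext)
        fmt.Println(bytes.Equal(recovered, plaintext)) // true
    }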
diff --git a/vendor/golang.org/x/crypto/openpgp/packet/one_pass_signature.go b/vendor/golang.org/x/crypto/openpgp/packet/one_pass_signature.go
new file mode 100644 (file)
index 0000000..1713503
--- /dev/null
@@ -0,0 +1,73 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+       "crypto"
+       "encoding/binary"
+       "golang.org/x/crypto/openpgp/errors"
+       "golang.org/x/crypto/openpgp/s2k"
+       "io"
+       "strconv"
+)
+
+// OnePassSignature represents a one-pass signature packet. See RFC 4880,
+// section 5.4.
+type OnePassSignature struct {
+       SigType    SignatureType
+       Hash       crypto.Hash
+       PubKeyAlgo PublicKeyAlgorithm
+       KeyId      uint64
+       IsLast     bool
+}
+
+const onePassSignatureVersion = 3
+
+func (ops *OnePassSignature) parse(r io.Reader) (err error) {
+       var buf [13]byte
+
+       _, err = readFull(r, buf[:])
+       if err != nil {
+               return
+       }
+       if buf[0] != onePassSignatureVersion {
+               err = errors.UnsupportedError("one-pass-signature packet version " + strconv.Itoa(int(buf[0])))
+       }
+
+       var ok bool
+       ops.Hash, ok = s2k.HashIdToHash(buf[2])
+       if !ok {
+               return errors.UnsupportedError("hash function: " + strconv.Itoa(int(buf[2])))
+       }
+
+       ops.SigType = SignatureType(buf[1])
+       ops.PubKeyAlgo = PublicKeyAlgorithm(buf[3])
+       ops.KeyId = binary.BigEndian.Uint64(buf[4:12])
+       ops.IsLast = buf[12] != 0
+       return
+}
+
+// Serialize marshals the given OnePassSignature to w.
+func (ops *OnePassSignature) Serialize(w io.Writer) error {
+       var buf [13]byte
+       buf[0] = onePassSignatureVersion
+       buf[1] = uint8(ops.SigType)
+       var ok bool
+       buf[2], ok = s2k.HashToHashId(ops.Hash)
+       if !ok {
+               return errors.UnsupportedError("hash type: " + strconv.Itoa(int(ops.Hash)))
+       }
+       buf[3] = uint8(ops.PubKeyAlgo)
+       binary.BigEndian.PutUint64(buf[4:12], ops.KeyId)
+       if ops.IsLast {
+               buf[12] = 1
+       }
+
+       if err := serializeHeader(w, packetTypeOnePassSignature, len(buf)); err != nil {
+               return err
+       }
+       _, err := w.Write(buf[:])
+       return err
+}
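
A sketch of serializing a one-pass signature announcement and reading it back through packet.Read; the key ID is a dummy value, and the blank crypto/sha256 import lets the parser map the hash id back to crypto.SHA256:

    package main

    import (
        "bytes"
        "crypto"
        _ "crypto/sha256"
        "fmt"
        "log"

        "golang.org/x/crypto/openpgp/packet"
    )

    func main() {
        // Announce an upcoming binary signature made with an RSA key and SHA-256.
        ops := &packet.OnePassSignature{
            SigType:    packet.SigTypeBinary,
            Hash:       crypto.SHA256,
            PubKeyAlgo: packet.PubKeyAlgoRSA,
            KeyId:      0x1234567890ABCDEF, // placeholder key id
            IsLast:     true,
        }

        var buf bytes.Buffer
        if err := ops.Serialize(&buf); err != nil {
            log.Fatal(err)
        }

        p, err := packet.Read(&buf)
        if err != nil {
            log.Fatal(err)
        }
        back := p.(*packet.OnePassSignature)
        fmt.Printf("key %X, last=%v\n", back.KeyId, back.IsLast)
    }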
diff --git a/vendor/golang.org/x/crypto/openpgp/packet/opaque.go b/vendor/golang.org/x/crypto/openpgp/packet/opaque.go
new file mode 100644 (file)
index 0000000..456d807
--- /dev/null
@@ -0,0 +1,162 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+       "bytes"
+       "io"
+       "io/ioutil"
+
+       "golang.org/x/crypto/openpgp/errors"
+)
+
+// OpaquePacket represents an OpenPGP packet as raw, unparsed data. This is
+// useful for splitting and storing the original packet contents separately,
+// handling unsupported packet types or accessing parts of the packet not yet
+// implemented by this package.
+type OpaquePacket struct {
+       // Packet type
+       Tag uint8
+       // Reason why the packet was parsed opaquely
+       Reason error
+       // Binary contents of the packet data
+       Contents []byte
+}
+
+func (op *OpaquePacket) parse(r io.Reader) (err error) {
+       op.Contents, err = ioutil.ReadAll(r)
+       return
+}
+
+// Serialize marshals the packet to a writer in its original form, including
+// the packet header.
+func (op *OpaquePacket) Serialize(w io.Writer) (err error) {
+       err = serializeHeader(w, packetType(op.Tag), len(op.Contents))
+       if err == nil {
+               _, err = w.Write(op.Contents)
+       }
+       return
+}
+
+// Parse attempts to parse the opaque contents into a structure supported by
+// this package. If the packet is not known then the result will be another
+// OpaquePacket.
+func (op *OpaquePacket) Parse() (p Packet, err error) {
+       hdr := bytes.NewBuffer(nil)
+       err = serializeHeader(hdr, packetType(op.Tag), len(op.Contents))
+       if err != nil {
+               op.Reason = err
+               return op, err
+       }
+       p, err = Read(io.MultiReader(hdr, bytes.NewBuffer(op.Contents)))
+       if err != nil {
+               op.Reason = err
+               p = op
+       }
+       return
+}
+
+// OpaqueReader reads OpaquePackets from an io.Reader.
+type OpaqueReader struct {
+       r io.Reader
+}
+
+func NewOpaqueReader(r io.Reader) *OpaqueReader {
+       return &OpaqueReader{r: r}
+}
+
+// Next reads the next OpaquePacket.
+func (or *OpaqueReader) Next() (op *OpaquePacket, err error) {
+       tag, _, contents, err := readHeader(or.r)
+       if err != nil {
+               return
+       }
+       op = &OpaquePacket{Tag: uint8(tag), Reason: err}
+       err = op.parse(contents)
+       if err != nil {
+               consumeAll(contents)
+       }
+       return
+}
+
+// OpaqueSubpacket represents an unparsed OpenPGP subpacket,
+// as found in signature and user attribute packets.
+type OpaqueSubpacket struct {
+       SubType  uint8
+       Contents []byte
+}
+
+// OpaqueSubpackets extracts opaque, unparsed OpenPGP subpackets from
+// their byte representation.
+func OpaqueSubpackets(contents []byte) (result []*OpaqueSubpacket, err error) {
+       var (
+               subHeaderLen int
+               subPacket    *OpaqueSubpacket
+       )
+       for len(contents) > 0 {
+               subHeaderLen, subPacket, err = nextSubpacket(contents)
+               if err != nil {
+                       break
+               }
+               result = append(result, subPacket)
+               contents = contents[subHeaderLen+len(subPacket.Contents):]
+       }
+       return
+}
+
+func nextSubpacket(contents []byte) (subHeaderLen int, subPacket *OpaqueSubpacket, err error) {
+       // RFC 4880, section 5.2.3.1
+       var subLen uint32
+       if len(contents) < 1 {
+               goto Truncated
+       }
+       subPacket = &OpaqueSubpacket{}
+       switch {
+       case contents[0] < 192:
+               subHeaderLen = 2 // 1 length byte, 1 subtype byte
+               if len(contents) < subHeaderLen {
+                       goto Truncated
+               }
+               subLen = uint32(contents[0])
+               contents = contents[1:]
+       case contents[0] < 255:
+               subHeaderLen = 3 // 2 length bytes, 1 subtype
+               if len(contents) < subHeaderLen {
+                       goto Truncated
+               }
+               subLen = uint32(contents[0]-192)<<8 + uint32(contents[1]) + 192
+               contents = contents[2:]
+       default:
+               subHeaderLen = 6 // 5 length bytes, 1 subtype
+               if len(contents) < subHeaderLen {
+                       goto Truncated
+               }
+               subLen = uint32(contents[1])<<24 |
+                       uint32(contents[2])<<16 |
+                       uint32(contents[3])<<8 |
+                       uint32(contents[4])
+               contents = contents[5:]
+       }
+       if subLen > uint32(len(contents)) || subLen == 0 {
+               goto Truncated
+       }
+       subPacket.SubType = contents[0]
+       subPacket.Contents = contents[1:subLen]
+       return
+Truncated:
+       err = errors.StructuralError("subpacket truncated")
+       return
+}
+
+func (osp *OpaqueSubpacket) Serialize(w io.Writer) (err error) {
+       buf := make([]byte, 6)
+       n := serializeSubpacketLength(buf, len(osp.Contents)+1)
+       buf[n] = osp.SubType
+       if _, err = w.Write(buf[:n+1]); err != nil {
+               return
+       }
+       _, err = w.Write(osp.Contents)
+       return
+}
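
OpaqueReader is handy for walking a packet stream without committing to full parsing. A sketch that serializes a throwaway key and then inspects its raw packets; the identity strings are placeholders:

    package main

    import (
        "bytes"
        _ "crypto/sha256" // needed so SerializePrivate can make SHA-256 self-signatures
        "fmt"
        "log"

        "golang.org/x/crypto/openpgp"
        "golang.org/x/crypto/openpgp/packet"
    )

    func main() {
        entity, err := openpgp.NewEntity("Carol Example", "", "carol@example.com", nil)
        if err != nil {
            log.Fatal(err)
        }
        var buf bytes.Buffer
        if err := entity.SerializePrivate(&buf, nil); err != nil {
            log.Fatal(err)
        }

        // Walk the raw packets, then try to upgrade each one with Parse.
        or := packet.NewOpaqueReader(&buf)
        for {
            op, err := or.Next()
            if err != nil {
                break // io.EOF once the stream is exhausted
            }
            parsed, _ := op.Parse()
            fmt.Printf("tag %d, %d bytes, parsed as %T\n", op.Tag, len(op.Contents), parsed)
        }
    }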
diff --git a/vendor/golang.org/x/crypto/openpgp/packet/packet.go b/vendor/golang.org/x/crypto/openpgp/packet/packet.go
new file mode 100644 (file)
index 0000000..3eded93
--- /dev/null
@@ -0,0 +1,537 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package packet implements parsing and serialization of OpenPGP packets, as
+// specified in RFC 4880.
+package packet // import "golang.org/x/crypto/openpgp/packet"
+
+import (
+       "bufio"
+       "crypto/aes"
+       "crypto/cipher"
+       "crypto/des"
+       "golang.org/x/crypto/cast5"
+       "golang.org/x/crypto/openpgp/errors"
+       "io"
+       "math/big"
+)
+
+// readFull is the same as io.ReadFull except that reading zero bytes returns
+// ErrUnexpectedEOF rather than EOF.
+func readFull(r io.Reader, buf []byte) (n int, err error) {
+       n, err = io.ReadFull(r, buf)
+       if err == io.EOF {
+               err = io.ErrUnexpectedEOF
+       }
+       return
+}
+
+// readLength reads an OpenPGP length from r. See RFC 4880, section 4.2.2.
+func readLength(r io.Reader) (length int64, isPartial bool, err error) {
+       var buf [4]byte
+       _, err = readFull(r, buf[:1])
+       if err != nil {
+               return
+       }
+       switch {
+       case buf[0] < 192:
+               length = int64(buf[0])
+       case buf[0] < 224:
+               length = int64(buf[0]-192) << 8
+               _, err = readFull(r, buf[0:1])
+               if err != nil {
+                       return
+               }
+               length += int64(buf[0]) + 192
+       case buf[0] < 255:
+               length = int64(1) << (buf[0] & 0x1f)
+               isPartial = true
+       default:
+               _, err = readFull(r, buf[0:4])
+               if err != nil {
+                       return
+               }
+               length = int64(buf[0])<<24 |
+                       int64(buf[1])<<16 |
+                       int64(buf[2])<<8 |
+                       int64(buf[3])
+       }
+       return
+}
+
+// partialLengthReader wraps an io.Reader and handles OpenPGP partial lengths.
+// The continuation lengths are parsed and removed from the stream and EOF is
+// returned at the end of the packet. See RFC 4880, section 4.2.2.4.
+type partialLengthReader struct {
+       r         io.Reader
+       remaining int64
+       isPartial bool
+}
+
+func (r *partialLengthReader) Read(p []byte) (n int, err error) {
+       for r.remaining == 0 {
+               if !r.isPartial {
+                       return 0, io.EOF
+               }
+               r.remaining, r.isPartial, err = readLength(r.r)
+               if err != nil {
+                       return 0, err
+               }
+       }
+
+       toRead := int64(len(p))
+       if toRead > r.remaining {
+               toRead = r.remaining
+       }
+
+       n, err = r.r.Read(p[:int(toRead)])
+       r.remaining -= int64(n)
+       if n < int(toRead) && err == io.EOF {
+               err = io.ErrUnexpectedEOF
+       }
+       return
+}
+
+// partialLengthWriter writes a stream of data using OpenPGP partial lengths.
+// See RFC 4880, section 4.2.2.4.
+type partialLengthWriter struct {
+       w          io.WriteCloser
+       lengthByte [1]byte
+}
+
+func (w *partialLengthWriter) Write(p []byte) (n int, err error) {
+       for len(p) > 0 {
+               for power := uint(14); power < 32; power-- {
+                       l := 1 << power
+                       if len(p) >= l {
+                               w.lengthByte[0] = 224 + uint8(power)
+                               _, err = w.w.Write(w.lengthByte[:])
+                               if err != nil {
+                                       return
+                               }
+                               var m int
+                               m, err = w.w.Write(p[:l])
+                               n += m
+                               if err != nil {
+                                       return
+                               }
+                               p = p[l:]
+                               break
+                       }
+               }
+       }
+       return
+}
+
+func (w *partialLengthWriter) Close() error {
+       w.lengthByte[0] = 0
+       _, err := w.w.Write(w.lengthByte[:])
+       if err != nil {
+               return err
+       }
+       return w.w.Close()
+}
+
+// A spanReader is an io.LimitReader, but it returns ErrUnexpectedEOF if the
+// underlying Reader returns EOF before the limit has been reached.
+type spanReader struct {
+       r io.Reader
+       n int64
+}
+
+func (l *spanReader) Read(p []byte) (n int, err error) {
+       if l.n <= 0 {
+               return 0, io.EOF
+       }
+       if int64(len(p)) > l.n {
+               p = p[0:l.n]
+       }
+       n, err = l.r.Read(p)
+       l.n -= int64(n)
+       if l.n > 0 && err == io.EOF {
+               err = io.ErrUnexpectedEOF
+       }
+       return
+}
+
+// readHeader parses a packet header and returns an io.Reader which will return
+// the contents of the packet. See RFC 4880, section 4.2.
+func readHeader(r io.Reader) (tag packetType, length int64, contents io.Reader, err error) {
+       var buf [4]byte
+       _, err = io.ReadFull(r, buf[:1])
+       if err != nil {
+               return
+       }
+       if buf[0]&0x80 == 0 {
+               err = errors.StructuralError("tag byte does not have MSB set")
+               return
+       }
+       if buf[0]&0x40 == 0 {
+               // Old format packet
+               tag = packetType((buf[0] & 0x3f) >> 2)
+               lengthType := buf[0] & 3
+               if lengthType == 3 {
+                       length = -1
+                       contents = r
+                       return
+               }
+               lengthBytes := 1 << lengthType
+               _, err = readFull(r, buf[0:lengthBytes])
+               if err != nil {
+                       return
+               }
+               for i := 0; i < lengthBytes; i++ {
+                       length <<= 8
+                       length |= int64(buf[i])
+               }
+               contents = &spanReader{r, length}
+               return
+       }
+
+       // New format packet
+       tag = packetType(buf[0] & 0x3f)
+       length, isPartial, err := readLength(r)
+       if err != nil {
+               return
+       }
+       if isPartial {
+               contents = &partialLengthReader{
+                       remaining: length,
+                       isPartial: true,
+                       r:         r,
+               }
+               length = -1
+       } else {
+               contents = &spanReader{r, length}
+       }
+       return
+}
+
+// serializeHeader writes an OpenPGP packet header to w. See RFC 4880, section
+// 4.2.
+func serializeHeader(w io.Writer, ptype packetType, length int) (err error) {
+       var buf [6]byte
+       var n int
+
+       buf[0] = 0x80 | 0x40 | byte(ptype)
+       if length < 192 {
+               buf[1] = byte(length)
+               n = 2
+       } else if length < 8384 {
+               length -= 192
+               buf[1] = 192 + byte(length>>8)
+               buf[2] = byte(length)
+               n = 3
+       } else {
+               buf[1] = 255
+               buf[2] = byte(length >> 24)
+               buf[3] = byte(length >> 16)
+               buf[4] = byte(length >> 8)
+               buf[5] = byte(length)
+               n = 6
+       }
+
+       _, err = w.Write(buf[:n])
+       return
+}
+
+// serializeStreamHeader writes an OpenPGP packet header to w where the
+// length of the packet is unknown. It returns an io.WriteCloser which can be
+// used to write the contents of the packet. See RFC 4880, section 4.2.
+func serializeStreamHeader(w io.WriteCloser, ptype packetType) (out io.WriteCloser, err error) {
+       var buf [1]byte
+       buf[0] = 0x80 | 0x40 | byte(ptype)
+       _, err = w.Write(buf[:])
+       if err != nil {
+               return
+       }
+       out = &partialLengthWriter{w: w}
+       return
+}
+
+// Packet represents an OpenPGP packet. Users are expected to try casting
+// instances of this interface to specific packet types.
+type Packet interface {
+       parse(io.Reader) error
+}
+
+// consumeAll reads from the given Reader until error, returning the number of
+// bytes read.
+func consumeAll(r io.Reader) (n int64, err error) {
+       var m int
+       var buf [1024]byte
+
+       for {
+               m, err = r.Read(buf[:])
+               n += int64(m)
+               if err == io.EOF {
+                       err = nil
+                       return
+               }
+               if err != nil {
+                       return
+               }
+       }
+}
+
+// packetType represents the numeric ids of the different OpenPGP packet types. See
+// http://www.iana.org/assignments/pgp-parameters/pgp-parameters.xhtml#pgp-parameters-2
+type packetType uint8
+
+const (
+       packetTypeEncryptedKey              packetType = 1
+       packetTypeSignature                 packetType = 2
+       packetTypeSymmetricKeyEncrypted     packetType = 3
+       packetTypeOnePassSignature          packetType = 4
+       packetTypePrivateKey                packetType = 5
+       packetTypePublicKey                 packetType = 6
+       packetTypePrivateSubkey             packetType = 7
+       packetTypeCompressed                packetType = 8
+       packetTypeSymmetricallyEncrypted    packetType = 9
+       packetTypeLiteralData               packetType = 11
+       packetTypeUserId                    packetType = 13
+       packetTypePublicSubkey              packetType = 14
+       packetTypeUserAttribute             packetType = 17
+       packetTypeSymmetricallyEncryptedMDC packetType = 18
+)
+
+// peekVersion detects the version of a public key packet about to
+// be read. A bufio.Reader at the original position of the io.Reader
+// is returned.
+func peekVersion(r io.Reader) (bufr *bufio.Reader, ver byte, err error) {
+       bufr = bufio.NewReader(r)
+       var verBuf []byte
+       if verBuf, err = bufr.Peek(1); err != nil {
+               return
+       }
+       ver = verBuf[0]
+       return
+}
+
+// Read reads a single OpenPGP packet from the given io.Reader. If there is an
+// error parsing a packet, the whole packet is consumed from the input.
+func Read(r io.Reader) (p Packet, err error) {
+       tag, _, contents, err := readHeader(r)
+       if err != nil {
+               return
+       }
+
+       switch tag {
+       case packetTypeEncryptedKey:
+               p = new(EncryptedKey)
+       case packetTypeSignature:
+               var version byte
+               // Detect signature version
+               if contents, version, err = peekVersion(contents); err != nil {
+                       return
+               }
+               if version < 4 {
+                       p = new(SignatureV3)
+               } else {
+                       p = new(Signature)
+               }
+       case packetTypeSymmetricKeyEncrypted:
+               p = new(SymmetricKeyEncrypted)
+       case packetTypeOnePassSignature:
+               p = new(OnePassSignature)
+       case packetTypePrivateKey, packetTypePrivateSubkey:
+               pk := new(PrivateKey)
+               if tag == packetTypePrivateSubkey {
+                       pk.IsSubkey = true
+               }
+               p = pk
+       case packetTypePublicKey, packetTypePublicSubkey:
+               var version byte
+               if contents, version, err = peekVersion(contents); err != nil {
+                       return
+               }
+               isSubkey := tag == packetTypePublicSubkey
+               if version < 4 {
+                       p = &PublicKeyV3{IsSubkey: isSubkey}
+               } else {
+                       p = &PublicKey{IsSubkey: isSubkey}
+               }
+       case packetTypeCompressed:
+               p = new(Compressed)
+       case packetTypeSymmetricallyEncrypted:
+               p = new(SymmetricallyEncrypted)
+       case packetTypeLiteralData:
+               p = new(LiteralData)
+       case packetTypeUserId:
+               p = new(UserId)
+       case packetTypeUserAttribute:
+               p = new(UserAttribute)
+       case packetTypeSymmetricallyEncryptedMDC:
+               se := new(SymmetricallyEncrypted)
+               se.MDC = true
+               p = se
+       default:
+               err = errors.UnknownPacketTypeError(tag)
+       }
+       if p != nil {
+               err = p.parse(contents)
+       }
+       if err != nil {
+               consumeAll(contents)
+       }
+       return
+}
+
+// SignatureType represents the different semantic meanings of an OpenPGP
+// signature. See RFC 4880, section 5.2.1.
+type SignatureType uint8
+
+const (
+       SigTypeBinary            SignatureType = 0
+       SigTypeText                            = 1
+       SigTypeGenericCert                     = 0x10
+       SigTypePersonaCert                     = 0x11
+       SigTypeCasualCert                      = 0x12
+       SigTypePositiveCert                    = 0x13
+       SigTypeSubkeyBinding                   = 0x18
+       SigTypePrimaryKeyBinding               = 0x19
+       SigTypeDirectSignature                 = 0x1F
+       SigTypeKeyRevocation                   = 0x20
+       SigTypeSubkeyRevocation                = 0x28
+)
+
+// PublicKeyAlgorithm represents the different public key system specified for
+// OpenPGP. See
+// http://www.iana.org/assignments/pgp-parameters/pgp-parameters.xhtml#pgp-parameters-12
+type PublicKeyAlgorithm uint8
+
+const (
+       PubKeyAlgoRSA            PublicKeyAlgorithm = 1
+       PubKeyAlgoRSAEncryptOnly PublicKeyAlgorithm = 2
+       PubKeyAlgoRSASignOnly    PublicKeyAlgorithm = 3
+       PubKeyAlgoElGamal        PublicKeyAlgorithm = 16
+       PubKeyAlgoDSA            PublicKeyAlgorithm = 17
+       // RFC 6637, Section 5.
+       PubKeyAlgoECDH  PublicKeyAlgorithm = 18
+       PubKeyAlgoECDSA PublicKeyAlgorithm = 19
+)
+
+// CanEncrypt returns true if it's possible to encrypt a message to a public
+// key of the given type.
+func (pka PublicKeyAlgorithm) CanEncrypt() bool {
+       switch pka {
+       case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly, PubKeyAlgoElGamal:
+               return true
+       }
+       return false
+}
+
+// CanSign returns true if it's possible for a public key of the given type to
+// sign a message.
+func (pka PublicKeyAlgorithm) CanSign() bool {
+       switch pka {
+       case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly, PubKeyAlgoDSA, PubKeyAlgoECDSA:
+               return true
+       }
+       return false
+}
+
+// CipherFunction represents the different block ciphers specified for OpenPGP. See
+// http://www.iana.org/assignments/pgp-parameters/pgp-parameters.xhtml#pgp-parameters-13
+type CipherFunction uint8
+
+const (
+       Cipher3DES   CipherFunction = 2
+       CipherCAST5  CipherFunction = 3
+       CipherAES128 CipherFunction = 7
+       CipherAES192 CipherFunction = 8
+       CipherAES256 CipherFunction = 9
+)
+
+// KeySize returns the key size, in bytes, of cipher.
+func (cipher CipherFunction) KeySize() int {
+       switch cipher {
+       case Cipher3DES:
+               return 24
+       case CipherCAST5:
+               return cast5.KeySize
+       case CipherAES128:
+               return 16
+       case CipherAES192:
+               return 24
+       case CipherAES256:
+               return 32
+       }
+       return 0
+}
+
+// blockSize returns the block size, in bytes, of cipher.
+func (cipher CipherFunction) blockSize() int {
+       switch cipher {
+       case Cipher3DES:
+               return des.BlockSize
+       case CipherCAST5:
+               return 8
+       case CipherAES128, CipherAES192, CipherAES256:
+               return 16
+       }
+       return 0
+}
+
+// new returns a fresh instance of the given cipher.
+func (cipher CipherFunction) new(key []byte) (block cipher.Block) {
+       switch cipher {
+       case Cipher3DES:
+               block, _ = des.NewTripleDESCipher(key)
+       case CipherCAST5:
+               block, _ = cast5.NewCipher(key)
+       case CipherAES128, CipherAES192, CipherAES256:
+               block, _ = aes.NewCipher(key)
+       }
+       return
+}
+
+// readMPI reads a big integer from r. The bit length returned is the bit
+// length that was specified in r. This is preserved so that the integer can be
+// reserialized exactly.
+func readMPI(r io.Reader) (mpi []byte, bitLength uint16, err error) {
+       var buf [2]byte
+       _, err = readFull(r, buf[0:])
+       if err != nil {
+               return
+       }
+       bitLength = uint16(buf[0])<<8 | uint16(buf[1])
+       numBytes := (int(bitLength) + 7) / 8
+       mpi = make([]byte, numBytes)
+       _, err = readFull(r, mpi)
+       return
+}
+
+// mpiLength returns the length of the given *big.Int when serialized as an
+// MPI.
+func mpiLength(n *big.Int) (mpiLengthInBytes int) {
+       mpiLengthInBytes = 2 /* MPI length */
+       mpiLengthInBytes += (n.BitLen() + 7) / 8
+       return
+}
+
+// writeMPI serializes a big integer to w.
+func writeMPI(w io.Writer, bitLength uint16, mpiBytes []byte) (err error) {
+       _, err = w.Write([]byte{byte(bitLength >> 8), byte(bitLength)})
+       if err == nil {
+               _, err = w.Write(mpiBytes)
+       }
+       return
+}
+
+// writeBig serializes a *big.Int to w.
+func writeBig(w io.Writer, i *big.Int) error {
+       return writeMPI(w, uint16(i.BitLen()), i.Bytes())
+}
+
+// CompressionAlgo represents the different compression algorithms
+// supported by OpenPGP (except for BZIP2, which is not currently
+// supported). See Section 9.3 of RFC 4880.
+type CompressionAlgo uint8
+
+const (
+       CompressionNone CompressionAlgo = 0
+       CompressionZIP  CompressionAlgo = 1
+       CompressionZLIB CompressionAlgo = 2
+)
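
Read is the generic dispatcher over the packet types above. A sketch that round-trips a raw user ID packet (tag 13, plain UTF-8 body) through it, then prints the key sizes of a few cipher ids; the identity string is a placeholder:

    package main

    import (
        "bytes"
        "fmt"
        "log"

        "golang.org/x/crypto/openpgp/packet"
    )

    func main() {
        raw := &packet.OpaquePacket{Tag: 13, Contents: []byte("Dana <dana@example.com>")}
        var buf bytes.Buffer
        if err := raw.Serialize(&buf); err != nil {
            log.Fatal(err)
        }
        p, err := packet.Read(&buf)
        if err != nil {
            log.Fatal(err)
        }
        uid := p.(*packet.UserId)
        fmt.Println(uid.Name, uid.Email)

        // KeySize reports the symmetric key length in bytes for each cipher id.
        for _, c := range []packet.CipherFunction{packet.Cipher3DES, packet.CipherAES128, packet.CipherAES256} {
            fmt.Println(c.KeySize())
        }
    }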
diff --git a/vendor/golang.org/x/crypto/openpgp/packet/private_key.go b/vendor/golang.org/x/crypto/openpgp/packet/private_key.go
new file mode 100644 (file)
index 0000000..34734cc
--- /dev/null
@@ -0,0 +1,380 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+       "bytes"
+       "crypto"
+       "crypto/cipher"
+       "crypto/dsa"
+       "crypto/ecdsa"
+       "crypto/rsa"
+       "crypto/sha1"
+       "io"
+       "io/ioutil"
+       "math/big"
+       "strconv"
+       "time"
+
+       "golang.org/x/crypto/openpgp/elgamal"
+       "golang.org/x/crypto/openpgp/errors"
+       "golang.org/x/crypto/openpgp/s2k"
+)
+
+// PrivateKey represents a possibly encrypted private key. See RFC 4880,
+// section 5.5.3.
+type PrivateKey struct {
+       PublicKey
+       Encrypted     bool // if true then the private key is unavailable until Decrypt has been called.
+       encryptedData []byte
+       cipher        CipherFunction
+       s2k           func(out, in []byte)
+       PrivateKey    interface{} // An *{rsa|dsa|ecdsa}.PrivateKey or a crypto.Signer.
+       sha1Checksum  bool
+       iv            []byte
+}
+
+func NewRSAPrivateKey(currentTime time.Time, priv *rsa.PrivateKey) *PrivateKey {
+       pk := new(PrivateKey)
+       pk.PublicKey = *NewRSAPublicKey(currentTime, &priv.PublicKey)
+       pk.PrivateKey = priv
+       return pk
+}
+
+func NewDSAPrivateKey(currentTime time.Time, priv *dsa.PrivateKey) *PrivateKey {
+       pk := new(PrivateKey)
+       pk.PublicKey = *NewDSAPublicKey(currentTime, &priv.PublicKey)
+       pk.PrivateKey = priv
+       return pk
+}
+
+func NewElGamalPrivateKey(currentTime time.Time, priv *elgamal.PrivateKey) *PrivateKey {
+       pk := new(PrivateKey)
+       pk.PublicKey = *NewElGamalPublicKey(currentTime, &priv.PublicKey)
+       pk.PrivateKey = priv
+       return pk
+}
+
+func NewECDSAPrivateKey(currentTime time.Time, priv *ecdsa.PrivateKey) *PrivateKey {
+       pk := new(PrivateKey)
+       pk.PublicKey = *NewECDSAPublicKey(currentTime, &priv.PublicKey)
+       pk.PrivateKey = priv
+       return pk
+}
+
+// NewSignerPrivateKey creates a sign-only PrivateKey from a crypto.Signer that
+// implements RSA or ECDSA.
+func NewSignerPrivateKey(currentTime time.Time, signer crypto.Signer) *PrivateKey {
+       pk := new(PrivateKey)
+       switch pubkey := signer.Public().(type) {
+       case rsa.PublicKey:
+               pk.PublicKey = *NewRSAPublicKey(currentTime, &pubkey)
+               pk.PubKeyAlgo = PubKeyAlgoRSASignOnly
+       case ecdsa.PublicKey:
+               pk.PublicKey = *NewECDSAPublicKey(currentTime, &pubkey)
+       default:
+               panic("openpgp: unknown crypto.Signer type in NewSignerPrivateKey")
+       }
+       pk.PrivateKey = signer
+       return pk
+}
+
+func (pk *PrivateKey) parse(r io.Reader) (err error) {
+       err = (&pk.PublicKey).parse(r)
+       if err != nil {
+               return
+       }
+       var buf [1]byte
+       _, err = readFull(r, buf[:])
+       if err != nil {
+               return
+       }
+
+       s2kType := buf[0]
+
+       switch s2kType {
+       case 0:
+               pk.s2k = nil
+               pk.Encrypted = false
+       case 254, 255:
+               _, err = readFull(r, buf[:])
+               if err != nil {
+                       return
+               }
+               pk.cipher = CipherFunction(buf[0])
+               pk.Encrypted = true
+               pk.s2k, err = s2k.Parse(r)
+               if err != nil {
+                       return
+               }
+               if s2kType == 254 {
+                       pk.sha1Checksum = true
+               }
+       default:
+               return errors.UnsupportedError("deprecated s2k function in private key")
+       }
+
+       if pk.Encrypted {
+               blockSize := pk.cipher.blockSize()
+               if blockSize == 0 {
+                       return errors.UnsupportedError("unsupported cipher in private key: " + strconv.Itoa(int(pk.cipher)))
+               }
+               pk.iv = make([]byte, blockSize)
+               _, err = readFull(r, pk.iv)
+               if err != nil {
+                       return
+               }
+       }
+
+       pk.encryptedData, err = ioutil.ReadAll(r)
+       if err != nil {
+               return
+       }
+
+       if !pk.Encrypted {
+               return pk.parsePrivateKey(pk.encryptedData)
+       }
+
+       return
+}
+
+func mod64kHash(d []byte) uint16 {
+       var h uint16
+       for _, b := range d {
+               h += uint16(b)
+       }
+       return h
+}
+
+func (pk *PrivateKey) Serialize(w io.Writer) (err error) {
+       // TODO(agl): support encrypted private keys
+       buf := bytes.NewBuffer(nil)
+       err = pk.PublicKey.serializeWithoutHeaders(buf)
+       if err != nil {
+               return
+       }
+       buf.WriteByte(0 /* no encryption */)
+
+       privateKeyBuf := bytes.NewBuffer(nil)
+
+       switch priv := pk.PrivateKey.(type) {
+       case *rsa.PrivateKey:
+               err = serializeRSAPrivateKey(privateKeyBuf, priv)
+       case *dsa.PrivateKey:
+               err = serializeDSAPrivateKey(privateKeyBuf, priv)
+       case *elgamal.PrivateKey:
+               err = serializeElGamalPrivateKey(privateKeyBuf, priv)
+       case *ecdsa.PrivateKey:
+               err = serializeECDSAPrivateKey(privateKeyBuf, priv)
+       default:
+               err = errors.InvalidArgumentError("unknown private key type")
+       }
+       if err != nil {
+               return
+       }
+
+       ptype := packetTypePrivateKey
+       contents := buf.Bytes()
+       privateKeyBytes := privateKeyBuf.Bytes()
+       if pk.IsSubkey {
+               ptype = packetTypePrivateSubkey
+       }
+       err = serializeHeader(w, ptype, len(contents)+len(privateKeyBytes)+2)
+       if err != nil {
+               return
+       }
+       _, err = w.Write(contents)
+       if err != nil {
+               return
+       }
+       _, err = w.Write(privateKeyBytes)
+       if err != nil {
+               return
+       }
+
+       checksum := mod64kHash(privateKeyBytes)
+       var checksumBytes [2]byte
+       checksumBytes[0] = byte(checksum >> 8)
+       checksumBytes[1] = byte(checksum)
+       _, err = w.Write(checksumBytes[:])
+
+       return
+}
+
+func serializeRSAPrivateKey(w io.Writer, priv *rsa.PrivateKey) error {
+       err := writeBig(w, priv.D)
+       if err != nil {
+               return err
+       }
+       err = writeBig(w, priv.Primes[1])
+       if err != nil {
+               return err
+       }
+       err = writeBig(w, priv.Primes[0])
+       if err != nil {
+               return err
+       }
+       return writeBig(w, priv.Precomputed.Qinv)
+}
+
+func serializeDSAPrivateKey(w io.Writer, priv *dsa.PrivateKey) error {
+       return writeBig(w, priv.X)
+}
+
+func serializeElGamalPrivateKey(w io.Writer, priv *elgamal.PrivateKey) error {
+       return writeBig(w, priv.X)
+}
+
+func serializeECDSAPrivateKey(w io.Writer, priv *ecdsa.PrivateKey) error {
+       return writeBig(w, priv.D)
+}
+
+// Decrypt decrypts an encrypted private key using a passphrase.
+func (pk *PrivateKey) Decrypt(passphrase []byte) error {
+       if !pk.Encrypted {
+               return nil
+       }
+
+       key := make([]byte, pk.cipher.KeySize())
+       pk.s2k(key, passphrase)
+       block := pk.cipher.new(key)
+       cfb := cipher.NewCFBDecrypter(block, pk.iv)
+
+       data := make([]byte, len(pk.encryptedData))
+       cfb.XORKeyStream(data, pk.encryptedData)
+
+       if pk.sha1Checksum {
+               if len(data) < sha1.Size {
+                       return errors.StructuralError("truncated private key data")
+               }
+               h := sha1.New()
+               h.Write(data[:len(data)-sha1.Size])
+               sum := h.Sum(nil)
+               if !bytes.Equal(sum, data[len(data)-sha1.Size:]) {
+                       return errors.StructuralError("private key checksum failure")
+               }
+               data = data[:len(data)-sha1.Size]
+       } else {
+               if len(data) < 2 {
+                       return errors.StructuralError("truncated private key data")
+               }
+               var sum uint16
+               for i := 0; i < len(data)-2; i++ {
+                       sum += uint16(data[i])
+               }
+               if data[len(data)-2] != uint8(sum>>8) ||
+                       data[len(data)-1] != uint8(sum) {
+                       return errors.StructuralError("private key checksum failure")
+               }
+               data = data[:len(data)-2]
+       }
+
+       return pk.parsePrivateKey(data)
+}
+
+func (pk *PrivateKey) parsePrivateKey(data []byte) (err error) {
+       switch pk.PublicKey.PubKeyAlgo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly, PubKeyAlgoRSAEncryptOnly:
+               return pk.parseRSAPrivateKey(data)
+       case PubKeyAlgoDSA:
+               return pk.parseDSAPrivateKey(data)
+       case PubKeyAlgoElGamal:
+               return pk.parseElGamalPrivateKey(data)
+       case PubKeyAlgoECDSA:
+               return pk.parseECDSAPrivateKey(data)
+       }
+       panic("impossible")
+}
+
+func (pk *PrivateKey) parseRSAPrivateKey(data []byte) (err error) {
+       rsaPub := pk.PublicKey.PublicKey.(*rsa.PublicKey)
+       rsaPriv := new(rsa.PrivateKey)
+       rsaPriv.PublicKey = *rsaPub
+
+       buf := bytes.NewBuffer(data)
+       d, _, err := readMPI(buf)
+       if err != nil {
+               return
+       }
+       p, _, err := readMPI(buf)
+       if err != nil {
+               return
+       }
+       q, _, err := readMPI(buf)
+       if err != nil {
+               return
+       }
+
+       rsaPriv.D = new(big.Int).SetBytes(d)
+       rsaPriv.Primes = make([]*big.Int, 2)
+       rsaPriv.Primes[0] = new(big.Int).SetBytes(p)
+       rsaPriv.Primes[1] = new(big.Int).SetBytes(q)
+       if err := rsaPriv.Validate(); err != nil {
+               return err
+       }
+       rsaPriv.Precompute()
+       pk.PrivateKey = rsaPriv
+       pk.Encrypted = false
+       pk.encryptedData = nil
+
+       return nil
+}
+
+func (pk *PrivateKey) parseDSAPrivateKey(data []byte) (err error) {
+       dsaPub := pk.PublicKey.PublicKey.(*dsa.PublicKey)
+       dsaPriv := new(dsa.PrivateKey)
+       dsaPriv.PublicKey = *dsaPub
+
+       buf := bytes.NewBuffer(data)
+       x, _, err := readMPI(buf)
+       if err != nil {
+               return
+       }
+
+       dsaPriv.X = new(big.Int).SetBytes(x)
+       pk.PrivateKey = dsaPriv
+       pk.Encrypted = false
+       pk.encryptedData = nil
+
+       return nil
+}
+
+func (pk *PrivateKey) parseElGamalPrivateKey(data []byte) (err error) {
+       pub := pk.PublicKey.PublicKey.(*elgamal.PublicKey)
+       priv := new(elgamal.PrivateKey)
+       priv.PublicKey = *pub
+
+       buf := bytes.NewBuffer(data)
+       x, _, err := readMPI(buf)
+       if err != nil {
+               return
+       }
+
+       priv.X = new(big.Int).SetBytes(x)
+       pk.PrivateKey = priv
+       pk.Encrypted = false
+       pk.encryptedData = nil
+
+       return nil
+}
+
+func (pk *PrivateKey) parseECDSAPrivateKey(data []byte) (err error) {
+       ecdsaPub := pk.PublicKey.PublicKey.(*ecdsa.PublicKey)
+
+       buf := bytes.NewBuffer(data)
+       d, _, err := readMPI(buf)
+       if err != nil {
+               return
+       }
+
+       pk.PrivateKey = &ecdsa.PrivateKey{
+               PublicKey: *ecdsaPub,
+               D:         new(big.Int).SetBytes(d),
+       }
+       pk.Encrypted = false
+       pk.encryptedData = nil
+
+       return nil
+}
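
Taken together, parse and Decrypt above give the usual consumption pattern for this packet type. The following is a minimal sketch, not part of the vendored file; the helper name decryptPrivateKey and its error text are illustrative only.

package example

import (
	"fmt"
	"io"

	"golang.org/x/crypto/openpgp/packet"
)

// decryptPrivateKey reads one private-key packet from r and, if it is
// passphrase-protected, decrypts it in place via (*PrivateKey).Decrypt.
func decryptPrivateKey(r io.Reader, passphrase []byte) (*packet.PrivateKey, error) {
	p, err := packet.Read(r)
	if err != nil {
		return nil, err
	}
	pk, ok := p.(*packet.PrivateKey)
	if !ok {
		return nil, fmt.Errorf("expected a private-key packet, got %T", p)
	}
	if pk.Encrypted {
		// Decrypt checks the SHA-1 or 16-bit checksum and then parses the
		// algorithm-specific key material.
		if err := pk.Decrypt(passphrase); err != nil {
			return nil, err
		}
	}
	// pk.PrivateKey now holds *rsa.PrivateKey, *dsa.PrivateKey,
	// *elgamal.PrivateKey or *ecdsa.PrivateKey.
	return pk, nil
}
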
diff --git a/vendor/golang.org/x/crypto/openpgp/packet/public_key.go b/vendor/golang.org/x/crypto/openpgp/packet/public_key.go
new file mode 100644 (file)
index 0000000..ead2623
--- /dev/null
@@ -0,0 +1,748 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+       "bytes"
+       "crypto"
+       "crypto/dsa"
+       "crypto/ecdsa"
+       "crypto/elliptic"
+       "crypto/rsa"
+       "crypto/sha1"
+       _ "crypto/sha256"
+       _ "crypto/sha512"
+       "encoding/binary"
+       "fmt"
+       "hash"
+       "io"
+       "math/big"
+       "strconv"
+       "time"
+
+       "golang.org/x/crypto/openpgp/elgamal"
+       "golang.org/x/crypto/openpgp/errors"
+)
+
+var (
+       // NIST curve P-256
+       oidCurveP256 []byte = []byte{0x2A, 0x86, 0x48, 0xCE, 0x3D, 0x03, 0x01, 0x07}
+       // NIST curve P-384
+       oidCurveP384 []byte = []byte{0x2B, 0x81, 0x04, 0x00, 0x22}
+       // NIST curve P-521
+       oidCurveP521 []byte = []byte{0x2B, 0x81, 0x04, 0x00, 0x23}
+)
+
+const maxOIDLength = 8
+
+// ecdsaKey stores the algorithm-specific fields for ECDSA keys,
+// as defined in RFC 6637, Section 9.
+type ecdsaKey struct {
+       // oid contains the OID byte sequence identifying the elliptic curve used
+       oid []byte
+       // p contains the elliptic curve point that represents the public key
+       p parsedMPI
+}
+
+// parseOID reads the OID for the curve as defined in RFC 6637, Section 9.
+func parseOID(r io.Reader) (oid []byte, err error) {
+       buf := make([]byte, maxOIDLength)
+       if _, err = readFull(r, buf[:1]); err != nil {
+               return
+       }
+       oidLen := buf[0]
+       if int(oidLen) > len(buf) {
+               err = errors.UnsupportedError("invalid oid length: " + strconv.Itoa(int(oidLen)))
+               return
+       }
+       oid = buf[:oidLen]
+       _, err = readFull(r, oid)
+       return
+}
+
+func (f *ecdsaKey) parse(r io.Reader) (err error) {
+       if f.oid, err = parseOID(r); err != nil {
+               return err
+       }
+       f.p.bytes, f.p.bitLength, err = readMPI(r)
+       return
+}
+
+func (f *ecdsaKey) serialize(w io.Writer) (err error) {
+       buf := make([]byte, maxOIDLength+1)
+       buf[0] = byte(len(f.oid))
+       copy(buf[1:], f.oid)
+       if _, err = w.Write(buf[:len(f.oid)+1]); err != nil {
+               return
+       }
+       return writeMPIs(w, f.p)
+}
+
+func (f *ecdsaKey) newECDSA() (*ecdsa.PublicKey, error) {
+       var c elliptic.Curve
+       if bytes.Equal(f.oid, oidCurveP256) {
+               c = elliptic.P256()
+       } else if bytes.Equal(f.oid, oidCurveP384) {
+               c = elliptic.P384()
+       } else if bytes.Equal(f.oid, oidCurveP521) {
+               c = elliptic.P521()
+       } else {
+               return nil, errors.UnsupportedError(fmt.Sprintf("unsupported oid: %x", f.oid))
+       }
+       x, y := elliptic.Unmarshal(c, f.p.bytes)
+       if x == nil {
+               return nil, errors.UnsupportedError("failed to parse EC point")
+       }
+       return &ecdsa.PublicKey{Curve: c, X: x, Y: y}, nil
+}
+
+func (f *ecdsaKey) byteLen() int {
+       return 1 + len(f.oid) + 2 + len(f.p.bytes)
+}
+
+type kdfHashFunction byte
+type kdfAlgorithm byte
+
+// ecdhKdf stores key derivation function parameters
+// used for ECDH encryption. See RFC 6637, Section 9.
+type ecdhKdf struct {
+       KdfHash kdfHashFunction
+       KdfAlgo kdfAlgorithm
+}
+
+func (f *ecdhKdf) parse(r io.Reader) (err error) {
+       buf := make([]byte, 1)
+       if _, err = readFull(r, buf); err != nil {
+               return
+       }
+       kdfLen := int(buf[0])
+       if kdfLen < 3 {
+               return errors.UnsupportedError("Unsupported ECDH KDF length: " + strconv.Itoa(kdfLen))
+       }
+       buf = make([]byte, kdfLen)
+       if _, err = readFull(r, buf); err != nil {
+               return
+       }
+       reserved := int(buf[0])
+       f.KdfHash = kdfHashFunction(buf[1])
+       f.KdfAlgo = kdfAlgorithm(buf[2])
+       if reserved != 0x01 {
+               return errors.UnsupportedError("Unsupported KDF reserved field: " + strconv.Itoa(reserved))
+       }
+       return
+}
+
+func (f *ecdhKdf) serialize(w io.Writer) (err error) {
+       buf := make([]byte, 4)
+       // See RFC 6637, Section 9, Algorithm-Specific Fields for ECDH keys.
+       buf[0] = byte(0x03) // Length of the following fields
+       buf[1] = byte(0x01) // Reserved for future extensions, must be 1 for now
+       buf[2] = byte(f.KdfHash)
+       buf[3] = byte(f.KdfAlgo)
+       _, err = w.Write(buf[:])
+       return
+}
+
+func (f *ecdhKdf) byteLen() int {
+       return 4
+}
+
+// PublicKey represents an OpenPGP public key. See RFC 4880, section 5.5.2.
+type PublicKey struct {
+       CreationTime time.Time
+       PubKeyAlgo   PublicKeyAlgorithm
+       PublicKey    interface{} // *rsa.PublicKey, *dsa.PublicKey, *elgamal.PublicKey or *ecdsa.PublicKey
+       Fingerprint  [20]byte
+       KeyId        uint64
+       IsSubkey     bool
+
+       n, e, p, q, g, y parsedMPI
+
+       // RFC 6637 fields
+       ec   *ecdsaKey
+       ecdh *ecdhKdf
+}
+
+// signingKey provides a convenient abstraction over signature verification
+// for v3 and v4 public keys.
+type signingKey interface {
+       SerializeSignaturePrefix(io.Writer)
+       serializeWithoutHeaders(io.Writer) error
+}
+
+func fromBig(n *big.Int) parsedMPI {
+       return parsedMPI{
+               bytes:     n.Bytes(),
+               bitLength: uint16(n.BitLen()),
+       }
+}
+
+// NewRSAPublicKey returns a PublicKey that wraps the given rsa.PublicKey.
+func NewRSAPublicKey(creationTime time.Time, pub *rsa.PublicKey) *PublicKey {
+       pk := &PublicKey{
+               CreationTime: creationTime,
+               PubKeyAlgo:   PubKeyAlgoRSA,
+               PublicKey:    pub,
+               n:            fromBig(pub.N),
+               e:            fromBig(big.NewInt(int64(pub.E))),
+       }
+
+       pk.setFingerPrintAndKeyId()
+       return pk
+}
+
+// NewDSAPublicKey returns a PublicKey that wraps the given dsa.PublicKey.
+func NewDSAPublicKey(creationTime time.Time, pub *dsa.PublicKey) *PublicKey {
+       pk := &PublicKey{
+               CreationTime: creationTime,
+               PubKeyAlgo:   PubKeyAlgoDSA,
+               PublicKey:    pub,
+               p:            fromBig(pub.P),
+               q:            fromBig(pub.Q),
+               g:            fromBig(pub.G),
+               y:            fromBig(pub.Y),
+       }
+
+       pk.setFingerPrintAndKeyId()
+       return pk
+}
+
+// NewElGamalPublicKey returns a PublicKey that wraps the given elgamal.PublicKey.
+func NewElGamalPublicKey(creationTime time.Time, pub *elgamal.PublicKey) *PublicKey {
+       pk := &PublicKey{
+               CreationTime: creationTime,
+               PubKeyAlgo:   PubKeyAlgoElGamal,
+               PublicKey:    pub,
+               p:            fromBig(pub.P),
+               g:            fromBig(pub.G),
+               y:            fromBig(pub.Y),
+       }
+
+       pk.setFingerPrintAndKeyId()
+       return pk
+}
+
+// NewECDSAPublicKey returns a PublicKey that wraps the given ecdsa.PublicKey.
+// The curve must be P-256, P-384 or P-521; other curves cause a panic.
+func NewECDSAPublicKey(creationTime time.Time, pub *ecdsa.PublicKey) *PublicKey {
+       pk := &PublicKey{
+               CreationTime: creationTime,
+               PubKeyAlgo:   PubKeyAlgoECDSA,
+               PublicKey:    pub,
+               ec:           new(ecdsaKey),
+       }
+
+       switch pub.Curve {
+       case elliptic.P256():
+               pk.ec.oid = oidCurveP256
+       case elliptic.P384():
+               pk.ec.oid = oidCurveP384
+       case elliptic.P521():
+               pk.ec.oid = oidCurveP521
+       default:
+               panic("unknown elliptic curve")
+       }
+
+       pk.ec.p.bytes = elliptic.Marshal(pub.Curve, pub.X, pub.Y)
+       pk.ec.p.bitLength = uint16(8 * len(pk.ec.p.bytes))
+
+       pk.setFingerPrintAndKeyId()
+       return pk
+}
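
For context, a minimal sketch of how these constructors are typically used; this is not part of the vendored file and newWrappedECDSAKey is a hypothetical helper name.

package example

import (
	"crypto/ecdsa"
	"crypto/elliptic"
	"crypto/rand"
	"time"

	"golang.org/x/crypto/openpgp/packet"
)

// newWrappedECDSAKey wraps a freshly generated P-256 public key in an OpenPGP
// packet.PublicKey; the constructor computes the fingerprint and key ID.
func newWrappedECDSAKey() (*packet.PublicKey, error) {
	priv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
	if err != nil {
		return nil, err
	}
	pub := packet.NewECDSAPublicKey(time.Now(), &priv.PublicKey)
	_ = pub.KeyIdString() // 16 hex digits derived from the SHA-1 fingerprint
	return pub, nil
}
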
+
+func (pk *PublicKey) parse(r io.Reader) (err error) {
+       // RFC 4880, section 5.5.2
+       var buf [6]byte
+       _, err = readFull(r, buf[:])
+       if err != nil {
+               return
+       }
+       if buf[0] != 4 {
+               return errors.UnsupportedError("public key version")
+       }
+       pk.CreationTime = time.Unix(int64(uint32(buf[1])<<24|uint32(buf[2])<<16|uint32(buf[3])<<8|uint32(buf[4])), 0)
+       pk.PubKeyAlgo = PublicKeyAlgorithm(buf[5])
+       switch pk.PubKeyAlgo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly, PubKeyAlgoRSASignOnly:
+               err = pk.parseRSA(r)
+       case PubKeyAlgoDSA:
+               err = pk.parseDSA(r)
+       case PubKeyAlgoElGamal:
+               err = pk.parseElGamal(r)
+       case PubKeyAlgoECDSA:
+               pk.ec = new(ecdsaKey)
+               if err = pk.ec.parse(r); err != nil {
+                       return err
+               }
+               pk.PublicKey, err = pk.ec.newECDSA()
+       case PubKeyAlgoECDH:
+               pk.ec = new(ecdsaKey)
+               if err = pk.ec.parse(r); err != nil {
+                       return
+               }
+               pk.ecdh = new(ecdhKdf)
+               if err = pk.ecdh.parse(r); err != nil {
+                       return
+               }
+               // The ECDH key is stored in an ecdsa.PublicKey for convenience.
+               pk.PublicKey, err = pk.ec.newECDSA()
+       default:
+               err = errors.UnsupportedError("public key type: " + strconv.Itoa(int(pk.PubKeyAlgo)))
+       }
+       if err != nil {
+               return
+       }
+
+       pk.setFingerPrintAndKeyId()
+       return
+}
+
+func (pk *PublicKey) setFingerPrintAndKeyId() {
+       // RFC 4880, section 12.2
+       fingerPrint := sha1.New()
+       pk.SerializeSignaturePrefix(fingerPrint)
+       pk.serializeWithoutHeaders(fingerPrint)
+       copy(pk.Fingerprint[:], fingerPrint.Sum(nil))
+       pk.KeyId = binary.BigEndian.Uint64(pk.Fingerprint[12:20])
+}
+
+// parseRSA parses RSA public key material from the given Reader. See RFC 4880,
+// section 5.5.2.
+func (pk *PublicKey) parseRSA(r io.Reader) (err error) {
+       pk.n.bytes, pk.n.bitLength, err = readMPI(r)
+       if err != nil {
+               return
+       }
+       pk.e.bytes, pk.e.bitLength, err = readMPI(r)
+       if err != nil {
+               return
+       }
+
+       if len(pk.e.bytes) > 3 {
+               err = errors.UnsupportedError("large public exponent")
+               return
+       }
+       rsa := &rsa.PublicKey{
+               N: new(big.Int).SetBytes(pk.n.bytes),
+               E: 0,
+       }
+       for i := 0; i < len(pk.e.bytes); i++ {
+               rsa.E <<= 8
+               rsa.E |= int(pk.e.bytes[i])
+       }
+       pk.PublicKey = rsa
+       return
+}
+
+// parseDSA parses DSA public key material from the given Reader. See RFC 4880,
+// section 5.5.2.
+func (pk *PublicKey) parseDSA(r io.Reader) (err error) {
+       pk.p.bytes, pk.p.bitLength, err = readMPI(r)
+       if err != nil {
+               return
+       }
+       pk.q.bytes, pk.q.bitLength, err = readMPI(r)
+       if err != nil {
+               return
+       }
+       pk.g.bytes, pk.g.bitLength, err = readMPI(r)
+       if err != nil {
+               return
+       }
+       pk.y.bytes, pk.y.bitLength, err = readMPI(r)
+       if err != nil {
+               return
+       }
+
+       dsa := new(dsa.PublicKey)
+       dsa.P = new(big.Int).SetBytes(pk.p.bytes)
+       dsa.Q = new(big.Int).SetBytes(pk.q.bytes)
+       dsa.G = new(big.Int).SetBytes(pk.g.bytes)
+       dsa.Y = new(big.Int).SetBytes(pk.y.bytes)
+       pk.PublicKey = dsa
+       return
+}
+
+// parseElGamal parses ElGamal public key material from the given Reader. See
+// RFC 4880, section 5.5.2.
+func (pk *PublicKey) parseElGamal(r io.Reader) (err error) {
+       pk.p.bytes, pk.p.bitLength, err = readMPI(r)
+       if err != nil {
+               return
+       }
+       pk.g.bytes, pk.g.bitLength, err = readMPI(r)
+       if err != nil {
+               return
+       }
+       pk.y.bytes, pk.y.bitLength, err = readMPI(r)
+       if err != nil {
+               return
+       }
+
+       elgamal := new(elgamal.PublicKey)
+       elgamal.P = new(big.Int).SetBytes(pk.p.bytes)
+       elgamal.G = new(big.Int).SetBytes(pk.g.bytes)
+       elgamal.Y = new(big.Int).SetBytes(pk.y.bytes)
+       pk.PublicKey = elgamal
+       return
+}
+
+// SerializeSignaturePrefix writes the prefix for this public key to the given Writer.
+// The prefix is used when calculating a signature over this public key. See
+// RFC 4880, section 5.2.4.
+func (pk *PublicKey) SerializeSignaturePrefix(h io.Writer) {
+       var pLength uint16
+       switch pk.PubKeyAlgo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly, PubKeyAlgoRSASignOnly:
+               pLength += 2 + uint16(len(pk.n.bytes))
+               pLength += 2 + uint16(len(pk.e.bytes))
+       case PubKeyAlgoDSA:
+               pLength += 2 + uint16(len(pk.p.bytes))
+               pLength += 2 + uint16(len(pk.q.bytes))
+               pLength += 2 + uint16(len(pk.g.bytes))
+               pLength += 2 + uint16(len(pk.y.bytes))
+       case PubKeyAlgoElGamal:
+               pLength += 2 + uint16(len(pk.p.bytes))
+               pLength += 2 + uint16(len(pk.g.bytes))
+               pLength += 2 + uint16(len(pk.y.bytes))
+       case PubKeyAlgoECDSA:
+               pLength += uint16(pk.ec.byteLen())
+       case PubKeyAlgoECDH:
+               pLength += uint16(pk.ec.byteLen())
+               pLength += uint16(pk.ecdh.byteLen())
+       default:
+               panic("unknown public key algorithm")
+       }
+       pLength += 6
+       h.Write([]byte{0x99, byte(pLength >> 8), byte(pLength)})
+       return
+}
+
+// Serialize marshals the PublicKey to w as an OpenPGP public key packet,
+// including the packet header.
+func (pk *PublicKey) Serialize(w io.Writer) (err error) {
+       length := 6 // 6 byte header
+
+       switch pk.PubKeyAlgo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly, PubKeyAlgoRSASignOnly:
+               length += 2 + len(pk.n.bytes)
+               length += 2 + len(pk.e.bytes)
+       case PubKeyAlgoDSA:
+               length += 2 + len(pk.p.bytes)
+               length += 2 + len(pk.q.bytes)
+               length += 2 + len(pk.g.bytes)
+               length += 2 + len(pk.y.bytes)
+       case PubKeyAlgoElGamal:
+               length += 2 + len(pk.p.bytes)
+               length += 2 + len(pk.g.bytes)
+               length += 2 + len(pk.y.bytes)
+       case PubKeyAlgoECDSA:
+               length += pk.ec.byteLen()
+       case PubKeyAlgoECDH:
+               length += pk.ec.byteLen()
+               length += pk.ecdh.byteLen()
+       default:
+               panic("unknown public key algorithm")
+       }
+
+       packetType := packetTypePublicKey
+       if pk.IsSubkey {
+               packetType = packetTypePublicSubkey
+       }
+       err = serializeHeader(w, packetType, length)
+       if err != nil {
+               return
+       }
+       return pk.serializeWithoutHeaders(w)
+}
+
+// serializeWithoutHeaders marshals the PublicKey to w in the form of an
+// OpenPGP public key packet, not including the packet header.
+func (pk *PublicKey) serializeWithoutHeaders(w io.Writer) (err error) {
+       var buf [6]byte
+       buf[0] = 4
+       t := uint32(pk.CreationTime.Unix())
+       buf[1] = byte(t >> 24)
+       buf[2] = byte(t >> 16)
+       buf[3] = byte(t >> 8)
+       buf[4] = byte(t)
+       buf[5] = byte(pk.PubKeyAlgo)
+
+       _, err = w.Write(buf[:])
+       if err != nil {
+               return
+       }
+
+       switch pk.PubKeyAlgo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly, PubKeyAlgoRSASignOnly:
+               return writeMPIs(w, pk.n, pk.e)
+       case PubKeyAlgoDSA:
+               return writeMPIs(w, pk.p, pk.q, pk.g, pk.y)
+       case PubKeyAlgoElGamal:
+               return writeMPIs(w, pk.p, pk.g, pk.y)
+       case PubKeyAlgoECDSA:
+               return pk.ec.serialize(w)
+       case PubKeyAlgoECDH:
+               if err = pk.ec.serialize(w); err != nil {
+                       return
+               }
+               return pk.ecdh.serialize(w)
+       }
+       return errors.InvalidArgumentError("bad public-key algorithm")
+}
+
+// CanSign returns true iff this public key can generate signatures
+func (pk *PublicKey) CanSign() bool {
+       return pk.PubKeyAlgo != PubKeyAlgoRSAEncryptOnly && pk.PubKeyAlgo != PubKeyAlgoElGamal
+}
+
+// VerifySignature returns nil iff sig is a valid signature, made by this
+// public key, of the data hashed into signed. signed is mutated by this call.
+func (pk *PublicKey) VerifySignature(signed hash.Hash, sig *Signature) (err error) {
+       if !pk.CanSign() {
+               return errors.InvalidArgumentError("public key cannot generate signatures")
+       }
+
+       signed.Write(sig.HashSuffix)
+       hashBytes := signed.Sum(nil)
+
+       if hashBytes[0] != sig.HashTag[0] || hashBytes[1] != sig.HashTag[1] {
+               return errors.SignatureError("hash tag doesn't match")
+       }
+
+       if pk.PubKeyAlgo != sig.PubKeyAlgo {
+               return errors.InvalidArgumentError("public key and signature use different algorithms")
+       }
+
+       switch pk.PubKeyAlgo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly:
+               rsaPublicKey, _ := pk.PublicKey.(*rsa.PublicKey)
+               err = rsa.VerifyPKCS1v15(rsaPublicKey, sig.Hash, hashBytes, sig.RSASignature.bytes)
+               if err != nil {
+                       return errors.SignatureError("RSA verification failure")
+               }
+               return nil
+       case PubKeyAlgoDSA:
+               dsaPublicKey, _ := pk.PublicKey.(*dsa.PublicKey)
+               // Need to truncate hashBytes to match FIPS 186-3 section 4.6.
+               subgroupSize := (dsaPublicKey.Q.BitLen() + 7) / 8
+               if len(hashBytes) > subgroupSize {
+                       hashBytes = hashBytes[:subgroupSize]
+               }
+               if !dsa.Verify(dsaPublicKey, hashBytes, new(big.Int).SetBytes(sig.DSASigR.bytes), new(big.Int).SetBytes(sig.DSASigS.bytes)) {
+                       return errors.SignatureError("DSA verification failure")
+               }
+               return nil
+       case PubKeyAlgoECDSA:
+               ecdsaPublicKey := pk.PublicKey.(*ecdsa.PublicKey)
+               if !ecdsa.Verify(ecdsaPublicKey, hashBytes, new(big.Int).SetBytes(sig.ECDSASigR.bytes), new(big.Int).SetBytes(sig.ECDSASigS.bytes)) {
+                       return errors.SignatureError("ECDSA verification failure")
+               }
+               return nil
+       default:
+               return errors.SignatureError("Unsupported public key algorithm used in signature")
+       }
+}
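
As a usage illustration (a sketch, not from the vendored source; verifyBinary is a hypothetical helper): the caller hashes only the signed data, and VerifySignature itself appends sig.HashSuffix before comparing.

package example

import (
	_ "crypto/sha256" // the digest named by sig.Hash must be linked into the binary

	"golang.org/x/crypto/openpgp/packet"
)

// verifyBinary checks sig over data using pk. pk and sig are assumed to have
// been parsed already (for example via packet.Read).
func verifyBinary(pk *packet.PublicKey, sig *packet.Signature, data []byte) error {
	h := sig.Hash.New()
	h.Write(data)
	return pk.VerifySignature(h, sig)
}
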
+
+// VerifySignatureV3 returns nil iff sig is a valid signature, made by this
+// public key, of the data hashed into signed. signed is mutated by this call.
+func (pk *PublicKey) VerifySignatureV3(signed hash.Hash, sig *SignatureV3) (err error) {
+       if !pk.CanSign() {
+               return errors.InvalidArgumentError("public key cannot generate signatures")
+       }
+
+       suffix := make([]byte, 5)
+       suffix[0] = byte(sig.SigType)
+       binary.BigEndian.PutUint32(suffix[1:], uint32(sig.CreationTime.Unix()))
+       signed.Write(suffix)
+       hashBytes := signed.Sum(nil)
+
+       if hashBytes[0] != sig.HashTag[0] || hashBytes[1] != sig.HashTag[1] {
+               return errors.SignatureError("hash tag doesn't match")
+       }
+
+       if pk.PubKeyAlgo != sig.PubKeyAlgo {
+               return errors.InvalidArgumentError("public key and signature use different algorithms")
+       }
+
+       switch pk.PubKeyAlgo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly:
+               rsaPublicKey := pk.PublicKey.(*rsa.PublicKey)
+               if err = rsa.VerifyPKCS1v15(rsaPublicKey, sig.Hash, hashBytes, sig.RSASignature.bytes); err != nil {
+                       return errors.SignatureError("RSA verification failure")
+               }
+               return
+       case PubKeyAlgoDSA:
+               dsaPublicKey := pk.PublicKey.(*dsa.PublicKey)
+               // Need to truncate hashBytes to match FIPS 186-3 section 4.6.
+               subgroupSize := (dsaPublicKey.Q.BitLen() + 7) / 8
+               if len(hashBytes) > subgroupSize {
+                       hashBytes = hashBytes[:subgroupSize]
+               }
+               if !dsa.Verify(dsaPublicKey, hashBytes, new(big.Int).SetBytes(sig.DSASigR.bytes), new(big.Int).SetBytes(sig.DSASigS.bytes)) {
+                       return errors.SignatureError("DSA verification failure")
+               }
+               return nil
+       default:
+               panic("shouldn't happen")
+       }
+}
+
+// keySignatureHash returns a Hash of the message that needs to be signed for
+// pk to assert a subkey relationship to signed.
+func keySignatureHash(pk, signed signingKey, hashFunc crypto.Hash) (h hash.Hash, err error) {
+       if !hashFunc.Available() {
+               return nil, errors.UnsupportedError("hash function")
+       }
+       h = hashFunc.New()
+
+       // RFC 4880, section 5.2.4
+       pk.SerializeSignaturePrefix(h)
+       pk.serializeWithoutHeaders(h)
+       signed.SerializeSignaturePrefix(h)
+       signed.serializeWithoutHeaders(h)
+       return
+}
+
+// VerifyKeySignature returns nil iff sig is a valid signature, made by this
+// public key, of signed.
+func (pk *PublicKey) VerifyKeySignature(signed *PublicKey, sig *Signature) error {
+       h, err := keySignatureHash(pk, signed, sig.Hash)
+       if err != nil {
+               return err
+       }
+       if err = pk.VerifySignature(h, sig); err != nil {
+               return err
+       }
+
+       if sig.FlagSign {
+               // Signing subkeys must be cross-signed. See
+               // https://www.gnupg.org/faq/subkey-cross-certify.html.
+               if sig.EmbeddedSignature == nil {
+                       return errors.StructuralError("signing subkey is missing cross-signature")
+               }
+               // Verify the cross-signature. This is calculated over the same
+               // data as the main signature, so we cannot just recursively
+               // call signed.VerifyKeySignature(...)
+               if h, err = keySignatureHash(pk, signed, sig.EmbeddedSignature.Hash); err != nil {
+                       return errors.StructuralError("error while hashing for cross-signature: " + err.Error())
+               }
+               if err := signed.VerifySignature(h, sig.EmbeddedSignature); err != nil {
+                       return errors.StructuralError("error while verifying cross-signature: " + err.Error())
+               }
+       }
+
+       return nil
+}
+
+func keyRevocationHash(pk signingKey, hashFunc crypto.Hash) (h hash.Hash, err error) {
+       if !hashFunc.Available() {
+               return nil, errors.UnsupportedError("hash function")
+       }
+       h = hashFunc.New()
+
+       // RFC 4880, section 5.2.4
+       pk.SerializeSignaturePrefix(h)
+       pk.serializeWithoutHeaders(h)
+
+       return
+}
+
+// VerifyRevocationSignature returns nil iff sig is a valid signature, made by this
+// public key.
+func (pk *PublicKey) VerifyRevocationSignature(sig *Signature) (err error) {
+       h, err := keyRevocationHash(pk, sig.Hash)
+       if err != nil {
+               return err
+       }
+       return pk.VerifySignature(h, sig)
+}
+
+// userIdSignatureHash returns a Hash of the message that needs to be signed
+// to assert that pk is a valid key for id.
+func userIdSignatureHash(id string, pk *PublicKey, hashFunc crypto.Hash) (h hash.Hash, err error) {
+       if !hashFunc.Available() {
+               return nil, errors.UnsupportedError("hash function")
+       }
+       h = hashFunc.New()
+
+       // RFC 4880, section 5.2.4
+       pk.SerializeSignaturePrefix(h)
+       pk.serializeWithoutHeaders(h)
+
+       var buf [5]byte
+       buf[0] = 0xb4
+       buf[1] = byte(len(id) >> 24)
+       buf[2] = byte(len(id) >> 16)
+       buf[3] = byte(len(id) >> 8)
+       buf[4] = byte(len(id))
+       h.Write(buf[:])
+       h.Write([]byte(id))
+
+       return
+}
+
+// VerifyUserIdSignature returns nil iff sig is a valid signature, made by this
+// public key, that id is the identity of pub.
+func (pk *PublicKey) VerifyUserIdSignature(id string, pub *PublicKey, sig *Signature) (err error) {
+       h, err := userIdSignatureHash(id, pub, sig.Hash)
+       if err != nil {
+               return err
+       }
+       return pk.VerifySignature(h, sig)
+}
+
+// VerifyUserIdSignatureV3 returns nil iff sig is a valid signature, made by this
+// public key, that id is the identity of pub.
+func (pk *PublicKey) VerifyUserIdSignatureV3(id string, pub *PublicKey, sig *SignatureV3) (err error) {
+       h, err := userIdSignatureV3Hash(id, pub, sig.Hash)
+       if err != nil {
+               return err
+       }
+       return pk.VerifySignatureV3(h, sig)
+}
+
+// KeyIdString returns the public key's 64-bit key ID (the low eight bytes of
+// the fingerprint) in capital hex (e.g. "6C7EE1B8621CC013").
+func (pk *PublicKey) KeyIdString() string {
+       return fmt.Sprintf("%X", pk.Fingerprint[12:20])
+}
+
+// KeyIdShortString returns the short (32-bit) form of the public key's key ID
+// in capital hex, as shown by gpg --list-keys (e.g. "621CC013").
+func (pk *PublicKey) KeyIdShortString() string {
+       return fmt.Sprintf("%X", pk.Fingerprint[16:20])
+}
+
+// A parsedMPI is used to store the contents of a big integer, along with the
+// bit length that was specified in the original input. This allows the MPI to
+// be reserialized exactly.
+type parsedMPI struct {
+       bytes     []byte
+       bitLength uint16
+}
+
+// writeMPIs is a utility function for serializing several big integers to the
+// given Writer.
+func writeMPIs(w io.Writer, mpis ...parsedMPI) (err error) {
+       for _, mpi := range mpis {
+               err = writeMPI(w, mpi.bitLength, mpi.bytes)
+               if err != nil {
+                       return
+               }
+       }
+       return
+}
+
+// BitLength returns the bit length for the given public key.
+func (pk *PublicKey) BitLength() (bitLength uint16, err error) {
+       switch pk.PubKeyAlgo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly, PubKeyAlgoRSASignOnly:
+               bitLength = pk.n.bitLength
+       case PubKeyAlgoDSA:
+               bitLength = pk.p.bitLength
+       case PubKeyAlgoElGamal:
+               bitLength = pk.p.bitLength
+       default:
+               err = errors.InvalidArgumentError("bad public-key algorithm")
+       }
+       return
+}
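
To tie Serialize and parse together, here is a round-trip sketch; it is not part of the vendored file and roundTripPublicKey is a hypothetical helper.

package example

import (
	"bytes"
	"crypto/rand"
	"crypto/rsa"
	"fmt"
	"time"

	"golang.org/x/crypto/openpgp/packet"
)

// roundTripPublicKey serializes a public-key packet and parses it back,
// checking that the fingerprint survives the trip.
func roundTripPublicKey() error {
	rsaKey, err := rsa.GenerateKey(rand.Reader, 2048)
	if err != nil {
		return err
	}
	pk := packet.NewRSAPublicKey(time.Now(), &rsaKey.PublicKey)

	var buf bytes.Buffer
	if err := pk.Serialize(&buf); err != nil { // header + algorithm-specific fields
		return err
	}
	p, err := packet.Read(&buf)
	if err != nil {
		return err
	}
	parsed, ok := p.(*packet.PublicKey)
	if !ok || parsed.Fingerprint != pk.Fingerprint {
		return fmt.Errorf("round trip mismatch")
	}
	return nil
}
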
diff --git a/vendor/golang.org/x/crypto/openpgp/packet/public_key_v3.go b/vendor/golang.org/x/crypto/openpgp/packet/public_key_v3.go
new file mode 100644 (file)
index 0000000..5daf7b6
--- /dev/null
@@ -0,0 +1,279 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+       "crypto"
+       "crypto/md5"
+       "crypto/rsa"
+       "encoding/binary"
+       "fmt"
+       "hash"
+       "io"
+       "math/big"
+       "strconv"
+       "time"
+
+       "golang.org/x/crypto/openpgp/errors"
+)
+
+// PublicKeyV3 represents older, version 3 public keys. These keys are less secure and
+// should not be used for signing or encrypting. They are supported here only for
+// parsing version 3 key material and validating signatures.
+// See RFC 4880, section 5.5.2.
+type PublicKeyV3 struct {
+       CreationTime time.Time
+       DaysToExpire uint16
+       PubKeyAlgo   PublicKeyAlgorithm
+       PublicKey    *rsa.PublicKey
+       Fingerprint  [16]byte
+       KeyId        uint64
+       IsSubkey     bool
+
+       n, e parsedMPI
+}
+
+// newRSAPublicKeyV3 returns a PublicKeyV3 that wraps the given rsa.PublicKey.
+// Included here for testing purposes only. RFC 4880, section 5.5.2:
+// "an implementation MUST NOT generate a V3 key, but MAY accept it."
+func newRSAPublicKeyV3(creationTime time.Time, pub *rsa.PublicKey) *PublicKeyV3 {
+       pk := &PublicKeyV3{
+               CreationTime: creationTime,
+               PublicKey:    pub,
+               n:            fromBig(pub.N),
+               e:            fromBig(big.NewInt(int64(pub.E))),
+       }
+
+       pk.setFingerPrintAndKeyId()
+       return pk
+}
+
+func (pk *PublicKeyV3) parse(r io.Reader) (err error) {
+       // RFC 4880, section 5.5.2
+       var buf [8]byte
+       if _, err = readFull(r, buf[:]); err != nil {
+               return
+       }
+       if buf[0] < 2 || buf[0] > 3 {
+               return errors.UnsupportedError("public key version")
+       }
+       pk.CreationTime = time.Unix(int64(uint32(buf[1])<<24|uint32(buf[2])<<16|uint32(buf[3])<<8|uint32(buf[4])), 0)
+       pk.DaysToExpire = binary.BigEndian.Uint16(buf[5:7])
+       pk.PubKeyAlgo = PublicKeyAlgorithm(buf[7])
+       switch pk.PubKeyAlgo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly, PubKeyAlgoRSASignOnly:
+               err = pk.parseRSA(r)
+       default:
+               err = errors.UnsupportedError("public key type: " + strconv.Itoa(int(pk.PubKeyAlgo)))
+       }
+       if err != nil {
+               return
+       }
+
+       pk.setFingerPrintAndKeyId()
+       return
+}
+
+func (pk *PublicKeyV3) setFingerPrintAndKeyId() {
+       // RFC 4880, section 12.2
+       fingerPrint := md5.New()
+       fingerPrint.Write(pk.n.bytes)
+       fingerPrint.Write(pk.e.bytes)
+       fingerPrint.Sum(pk.Fingerprint[:0])
+       pk.KeyId = binary.BigEndian.Uint64(pk.n.bytes[len(pk.n.bytes)-8:])
+}
+
+// parseRSA parses RSA public key material from the given Reader. See RFC 4880,
+// section 5.5.2.
+func (pk *PublicKeyV3) parseRSA(r io.Reader) (err error) {
+       if pk.n.bytes, pk.n.bitLength, err = readMPI(r); err != nil {
+               return
+       }
+       if pk.e.bytes, pk.e.bitLength, err = readMPI(r); err != nil {
+               return
+       }
+
+       // RFC 4880 Section 12.2 requires the low 8 bytes of the
+       // modulus to form the key id.
+       if len(pk.n.bytes) < 8 {
+               return errors.StructuralError("v3 public key modulus is too short")
+       }
+       if len(pk.e.bytes) > 3 {
+               err = errors.UnsupportedError("large public exponent")
+               return
+       }
+       rsa := &rsa.PublicKey{N: new(big.Int).SetBytes(pk.n.bytes)}
+       for i := 0; i < len(pk.e.bytes); i++ {
+               rsa.E <<= 8
+               rsa.E |= int(pk.e.bytes[i])
+       }
+       pk.PublicKey = rsa
+       return
+}
+
+// SerializeSignaturePrefix writes the prefix for this public key to the given Writer.
+// The prefix is used when calculating a signature over this public key. See
+// RFC 4880, section 5.2.4.
+func (pk *PublicKeyV3) SerializeSignaturePrefix(w io.Writer) {
+       var pLength uint16
+       switch pk.PubKeyAlgo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly, PubKeyAlgoRSASignOnly:
+               pLength += 2 + uint16(len(pk.n.bytes))
+               pLength += 2 + uint16(len(pk.e.bytes))
+       default:
+               panic("unknown public key algorithm")
+       }
+       pLength += 6
+       w.Write([]byte{0x99, byte(pLength >> 8), byte(pLength)})
+       return
+}
+
+// Serialize marshals the PublicKeyV3 to w as an OpenPGP public key packet,
+// including the packet header.
+func (pk *PublicKeyV3) Serialize(w io.Writer) (err error) {
+       length := 8 // 8 byte header
+
+       switch pk.PubKeyAlgo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly, PubKeyAlgoRSASignOnly:
+               length += 2 + len(pk.n.bytes)
+               length += 2 + len(pk.e.bytes)
+       default:
+               panic("unknown public key algorithm")
+       }
+
+       packetType := packetTypePublicKey
+       if pk.IsSubkey {
+               packetType = packetTypePublicSubkey
+       }
+       if err = serializeHeader(w, packetType, length); err != nil {
+               return
+       }
+       return pk.serializeWithoutHeaders(w)
+}
+
+// serializeWithoutHeaders marshals the PublicKeyV3 to w in the form of an
+// OpenPGP public key packet, not including the packet header.
+func (pk *PublicKeyV3) serializeWithoutHeaders(w io.Writer) (err error) {
+       var buf [8]byte
+       // Version 3
+       buf[0] = 3
+       // Creation time
+       t := uint32(pk.CreationTime.Unix())
+       buf[1] = byte(t >> 24)
+       buf[2] = byte(t >> 16)
+       buf[3] = byte(t >> 8)
+       buf[4] = byte(t)
+       // Days to expire
+       buf[5] = byte(pk.DaysToExpire >> 8)
+       buf[6] = byte(pk.DaysToExpire)
+       // Public key algorithm
+       buf[7] = byte(pk.PubKeyAlgo)
+
+       if _, err = w.Write(buf[:]); err != nil {
+               return
+       }
+
+       switch pk.PubKeyAlgo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly, PubKeyAlgoRSASignOnly:
+               return writeMPIs(w, pk.n, pk.e)
+       }
+       return errors.InvalidArgumentError("bad public-key algorithm")
+}
+
+// CanSign returns true iff this public key can generate signatures
+func (pk *PublicKeyV3) CanSign() bool {
+       return pk.PubKeyAlgo != PubKeyAlgoRSAEncryptOnly
+}
+
+// VerifySignatureV3 returns nil iff sig is a valid signature, made by this
+// public key, of the data hashed into signed. signed is mutated by this call.
+func (pk *PublicKeyV3) VerifySignatureV3(signed hash.Hash, sig *SignatureV3) (err error) {
+       if !pk.CanSign() {
+               return errors.InvalidArgumentError("public key cannot generate signatures")
+       }
+
+       suffix := make([]byte, 5)
+       suffix[0] = byte(sig.SigType)
+       binary.BigEndian.PutUint32(suffix[1:], uint32(sig.CreationTime.Unix()))
+       signed.Write(suffix)
+       hashBytes := signed.Sum(nil)
+
+       if hashBytes[0] != sig.HashTag[0] || hashBytes[1] != sig.HashTag[1] {
+               return errors.SignatureError("hash tag doesn't match")
+       }
+
+       if pk.PubKeyAlgo != sig.PubKeyAlgo {
+               return errors.InvalidArgumentError("public key and signature use different algorithms")
+       }
+
+       switch pk.PubKeyAlgo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly:
+               if err = rsa.VerifyPKCS1v15(pk.PublicKey, sig.Hash, hashBytes, sig.RSASignature.bytes); err != nil {
+                       return errors.SignatureError("RSA verification failure")
+               }
+               return
+       default:
+               // V3 public keys only support RSA.
+               panic("shouldn't happen")
+       }
+}
+
+// VerifyUserIdSignatureV3 returns nil iff sig is a valid signature, made by this
+// public key, that id is the identity of pub.
+func (pk *PublicKeyV3) VerifyUserIdSignatureV3(id string, pub *PublicKeyV3, sig *SignatureV3) (err error) {
+       h, err := userIdSignatureV3Hash(id, pk, sig.Hash)
+       if err != nil {
+               return err
+       }
+       return pk.VerifySignatureV3(h, sig)
+}
+
+// VerifyKeySignatureV3 returns nil iff sig is a valid signature, made by this
+// public key, of signed.
+func (pk *PublicKeyV3) VerifyKeySignatureV3(signed *PublicKeyV3, sig *SignatureV3) (err error) {
+       h, err := keySignatureHash(pk, signed, sig.Hash)
+       if err != nil {
+               return err
+       }
+       return pk.VerifySignatureV3(h, sig)
+}
+
+// userIdSignatureV3Hash returns a Hash of the message that needs to be signed
+// to assert that pk is a valid key for id.
+func userIdSignatureV3Hash(id string, pk signingKey, hfn crypto.Hash) (h hash.Hash, err error) {
+       if !hfn.Available() {
+               return nil, errors.UnsupportedError("hash function")
+       }
+       h = hfn.New()
+
+       // RFC 4880, section 5.2.4
+       pk.SerializeSignaturePrefix(h)
+       pk.serializeWithoutHeaders(h)
+
+       h.Write([]byte(id))
+
+       return
+}
+
+// KeyIdString returns the public key's 64-bit key ID in capital hex
+// (e.g. "6C7EE1B8621CC013").
+func (pk *PublicKeyV3) KeyIdString() string {
+       return fmt.Sprintf("%X", pk.KeyId)
+}
+
+// KeyIdShortString returns the short (32-bit) form of the public key's key ID
+// in capital hex, as shown by gpg --list-keys (e.g. "621CC013").
+func (pk *PublicKeyV3) KeyIdShortString() string {
+       return fmt.Sprintf("%X", pk.KeyId&0xFFFFFFFF)
+}
+
+// BitLength returns the bit length for the given public key.
+func (pk *PublicKeyV3) BitLength() (bitLength uint16, err error) {
+       switch pk.PubKeyAlgo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly, PubKeyAlgoRSASignOnly:
+               bitLength = pk.n.bitLength
+       default:
+               err = errors.InvalidArgumentError("bad public-key algorithm")
+       }
+       return
+}
diff --git a/vendor/golang.org/x/crypto/openpgp/packet/reader.go b/vendor/golang.org/x/crypto/openpgp/packet/reader.go
new file mode 100644 (file)
index 0000000..34bc7c6
--- /dev/null
@@ -0,0 +1,76 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+       "golang.org/x/crypto/openpgp/errors"
+       "io"
+)
+
+// Reader reads packets from an io.Reader and allows packets to be 'unread' so
+// that they are returned by the next call to Next.
+type Reader struct {
+       q       []Packet
+       readers []io.Reader
+}
+
+// New io.Readers are pushed when a compressed or encrypted packet is processed
+// and recursively treated as a new source of packets. However, a carefully
+// crafted packet can trigger an infinite recursive sequence of packets. See
+// http://mumble.net/~campbell/misc/pgp-quine
+// https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2013-4402
+// This constant limits the number of recursive packets that may be pushed.
+const maxReaders = 32
+
+// Next returns the most recently unread Packet, or reads another packet from
+// the top-most io.Reader. Unknown packet types are skipped.
+func (r *Reader) Next() (p Packet, err error) {
+       if len(r.q) > 0 {
+               p = r.q[len(r.q)-1]
+               r.q = r.q[:len(r.q)-1]
+               return
+       }
+
+       for len(r.readers) > 0 {
+               p, err = Read(r.readers[len(r.readers)-1])
+               if err == nil {
+                       return
+               }
+               if err == io.EOF {
+                       r.readers = r.readers[:len(r.readers)-1]
+                       continue
+               }
+               if _, ok := err.(errors.UnknownPacketTypeError); !ok {
+                       return nil, err
+               }
+       }
+
+       return nil, io.EOF
+}
+
+// Push causes the Reader to start reading from a new io.Reader. When an EOF
+// error is seen from the new io.Reader, it is popped and the Reader continues
+// to read from the next most recent io.Reader. Push returns a StructuralError
+// if pushing the reader would exceed the maximum recursion level, otherwise it
+// returns nil.
+func (r *Reader) Push(reader io.Reader) (err error) {
+       if len(r.readers) >= maxReaders {
+               return errors.StructuralError("too many layers of packets")
+       }
+       r.readers = append(r.readers, reader)
+       return nil
+}
+
+// Unread causes the given Packet to be returned from the next call to Next.
+func (r *Reader) Unread(p Packet) {
+       r.q = append(r.q, p)
+}
+
+// NewReader returns a Reader that reads packets from r.
+func NewReader(r io.Reader) *Reader {
+       return &Reader{
+               q:       nil,
+               readers: []io.Reader{r},
+       }
+}
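
A short sketch of the intended usage (not part of the vendored file; listPackets is a hypothetical helper): iterate packets until io.EOF, relying on Next to skip unknown packet types and to pop Pushed readers as they are exhausted.

package example

import (
	"io"

	"golang.org/x/crypto/openpgp/packet"
)

// listPackets collects every packet readable from r.
func listPackets(r io.Reader) ([]packet.Packet, error) {
	var out []packet.Packet
	pr := packet.NewReader(r)
	for {
		p, err := pr.Next()
		if err == io.EOF {
			return out, nil
		}
		if err != nil {
			return nil, err
		}
		out = append(out, p)
	}
}
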
diff --git a/vendor/golang.org/x/crypto/openpgp/packet/signature.go b/vendor/golang.org/x/crypto/openpgp/packet/signature.go
new file mode 100644 (file)
index 0000000..6ce0cbe
--- /dev/null
@@ -0,0 +1,731 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+       "bytes"
+       "crypto"
+       "crypto/dsa"
+       "crypto/ecdsa"
+       "encoding/asn1"
+       "encoding/binary"
+       "hash"
+       "io"
+       "math/big"
+       "strconv"
+       "time"
+
+       "golang.org/x/crypto/openpgp/errors"
+       "golang.org/x/crypto/openpgp/s2k"
+)
+
+const (
+       // See RFC 4880, section 5.2.3.21 for details.
+       KeyFlagCertify = 1 << iota
+       KeyFlagSign
+       KeyFlagEncryptCommunications
+       KeyFlagEncryptStorage
+)
+
+// Signature represents a signature. See RFC 4880, section 5.2.
+type Signature struct {
+       SigType    SignatureType
+       PubKeyAlgo PublicKeyAlgorithm
+       Hash       crypto.Hash
+
+       // HashSuffix is extra data that is hashed in after the signed data.
+       HashSuffix []byte
+       // HashTag contains the first two bytes of the hash for fast rejection
+       // of bad signed data.
+       HashTag      [2]byte
+       CreationTime time.Time
+
+       RSASignature         parsedMPI
+       DSASigR, DSASigS     parsedMPI
+       ECDSASigR, ECDSASigS parsedMPI
+
+       // rawSubpackets contains the unparsed subpackets, in order.
+       rawSubpackets []outputSubpacket
+
+       // The following are optional so are nil when not included in the
+       // signature.
+
+       SigLifetimeSecs, KeyLifetimeSecs                        *uint32
+       PreferredSymmetric, PreferredHash, PreferredCompression []uint8
+       IssuerKeyId                                             *uint64
+       IsPrimaryId                                             *bool
+
+       // FlagsValid is set if any flags were given. See RFC 4880, section
+       // 5.2.3.21 for details.
+       FlagsValid                                                           bool
+       FlagCertify, FlagSign, FlagEncryptCommunications, FlagEncryptStorage bool
+
+       // RevocationReason is set if this signature has been revoked.
+       // See RFC 4880, section 5.2.3.23 for details.
+       RevocationReason     *uint8
+       RevocationReasonText string
+
+       // MDC is set if this signature has a feature packet that indicates
+       // support for MDC subpackets.
+       MDC bool
+
+       // EmbeddedSignature, if non-nil, is a signature of the parent key, by
+       // this key. This prevents an attacker from claiming another's signing
+       // subkey as their own.
+       EmbeddedSignature *Signature
+
+       outSubpackets []outputSubpacket
+}
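
The optional fields above follow a pointer-means-present convention: a nil pointer simply means the corresponding subpacket was absent. A small reading sketch follows; it is not part of the vendored file and describeSignature is a hypothetical helper.

package example

import (
	"fmt"

	"golang.org/x/crypto/openpgp/packet"
)

// describeSignature prints the metadata most callers inspect on a parsed
// signature, guarding every optional (pointer) field against nil.
func describeSignature(sig *packet.Signature) {
	fmt.Printf("type=%d algo=%d created=%s\n", sig.SigType, sig.PubKeyAlgo, sig.CreationTime)
	if sig.IssuerKeyId != nil {
		fmt.Printf("issuer key ID %016X\n", *sig.IssuerKeyId)
	}
	if sig.FlagsValid {
		fmt.Printf("flags: certify=%v sign=%v encrypt=%v\n",
			sig.FlagCertify, sig.FlagSign, sig.FlagEncryptCommunications)
	}
	if sig.KeyLifetimeSecs != nil {
		fmt.Printf("key expires %d seconds after creation\n", *sig.KeyLifetimeSecs)
	}
}
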
+
+func (sig *Signature) parse(r io.Reader) (err error) {
+       // RFC 4880, section 5.2.3
+       var buf [5]byte
+       _, err = readFull(r, buf[:1])
+       if err != nil {
+               return
+       }
+       if buf[0] != 4 {
+               err = errors.UnsupportedError("signature packet version " + strconv.Itoa(int(buf[0])))
+               return
+       }
+
+       _, err = readFull(r, buf[:5])
+       if err != nil {
+               return
+       }
+       sig.SigType = SignatureType(buf[0])
+       sig.PubKeyAlgo = PublicKeyAlgorithm(buf[1])
+       switch sig.PubKeyAlgo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly, PubKeyAlgoDSA, PubKeyAlgoECDSA:
+       default:
+               err = errors.UnsupportedError("public key algorithm " + strconv.Itoa(int(sig.PubKeyAlgo)))
+               return
+       }
+
+       var ok bool
+       sig.Hash, ok = s2k.HashIdToHash(buf[2])
+       if !ok {
+               return errors.UnsupportedError("hash function " + strconv.Itoa(int(buf[2])))
+       }
+
+       hashedSubpacketsLength := int(buf[3])<<8 | int(buf[4])
+       l := 6 + hashedSubpacketsLength
+       sig.HashSuffix = make([]byte, l+6)
+       sig.HashSuffix[0] = 4
+       copy(sig.HashSuffix[1:], buf[:5])
+       hashedSubpackets := sig.HashSuffix[6:l]
+       _, err = readFull(r, hashedSubpackets)
+       if err != nil {
+               return
+       }
+       // See RFC 4880, section 5.2.4
+       trailer := sig.HashSuffix[l:]
+       trailer[0] = 4
+       trailer[1] = 0xff
+       trailer[2] = uint8(l >> 24)
+       trailer[3] = uint8(l >> 16)
+       trailer[4] = uint8(l >> 8)
+       trailer[5] = uint8(l)
+
+       err = parseSignatureSubpackets(sig, hashedSubpackets, true)
+       if err != nil {
+               return
+       }
+
+       _, err = readFull(r, buf[:2])
+       if err != nil {
+               return
+       }
+       unhashedSubpacketsLength := int(buf[0])<<8 | int(buf[1])
+       unhashedSubpackets := make([]byte, unhashedSubpacketsLength)
+       _, err = readFull(r, unhashedSubpackets)
+       if err != nil {
+               return
+       }
+       err = parseSignatureSubpackets(sig, unhashedSubpackets, false)
+       if err != nil {
+               return
+       }
+
+       _, err = readFull(r, sig.HashTag[:2])
+       if err != nil {
+               return
+       }
+
+       switch sig.PubKeyAlgo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly:
+               sig.RSASignature.bytes, sig.RSASignature.bitLength, err = readMPI(r)
+       case PubKeyAlgoDSA:
+               sig.DSASigR.bytes, sig.DSASigR.bitLength, err = readMPI(r)
+               if err == nil {
+                       sig.DSASigS.bytes, sig.DSASigS.bitLength, err = readMPI(r)
+               }
+       case PubKeyAlgoECDSA:
+               sig.ECDSASigR.bytes, sig.ECDSASigR.bitLength, err = readMPI(r)
+               if err == nil {
+                       sig.ECDSASigS.bytes, sig.ECDSASigS.bitLength, err = readMPI(r)
+               }
+       default:
+               panic("unreachable")
+       }
+       return
+}
+
+// parseSignatureSubpackets parses subpackets of the main signature packet. See
+// RFC 4880, section 5.2.3.1.
+func parseSignatureSubpackets(sig *Signature, subpackets []byte, isHashed bool) (err error) {
+       for len(subpackets) > 0 {
+               subpackets, err = parseSignatureSubpacket(sig, subpackets, isHashed)
+               if err != nil {
+                       return
+               }
+       }
+
+       if sig.CreationTime.IsZero() {
+               err = errors.StructuralError("no creation time in signature")
+       }
+
+       return
+}
+
+type signatureSubpacketType uint8
+
+const (
+       creationTimeSubpacket        signatureSubpacketType = 2
+       signatureExpirationSubpacket signatureSubpacketType = 3
+       keyExpirationSubpacket       signatureSubpacketType = 9
+       prefSymmetricAlgosSubpacket  signatureSubpacketType = 11
+       issuerSubpacket              signatureSubpacketType = 16
+       prefHashAlgosSubpacket       signatureSubpacketType = 21
+       prefCompressionSubpacket     signatureSubpacketType = 22
+       primaryUserIdSubpacket       signatureSubpacketType = 25
+       keyFlagsSubpacket            signatureSubpacketType = 27
+       reasonForRevocationSubpacket signatureSubpacketType = 29
+       featuresSubpacket            signatureSubpacketType = 30
+       embeddedSignatureSubpacket   signatureSubpacketType = 32
+)
+
+// parseSignatureSubpacket parses a single subpacket. len(subpacket) is >= 1.
+func parseSignatureSubpacket(sig *Signature, subpacket []byte, isHashed bool) (rest []byte, err error) {
+       // RFC 4880, section 5.2.3.1
+       var (
+               length     uint32
+               packetType signatureSubpacketType
+               isCritical bool
+       )
+       switch {
+       case subpacket[0] < 192:
+               length = uint32(subpacket[0])
+               subpacket = subpacket[1:]
+       case subpacket[0] < 255:
+               if len(subpacket) < 2 {
+                       goto Truncated
+               }
+               length = uint32(subpacket[0]-192)<<8 + uint32(subpacket[1]) + 192
+               subpacket = subpacket[2:]
+       default:
+               if len(subpacket) < 5 {
+                       goto Truncated
+               }
+               length = uint32(subpacket[1])<<24 |
+                       uint32(subpacket[2])<<16 |
+                       uint32(subpacket[3])<<8 |
+                       uint32(subpacket[4])
+               subpacket = subpacket[5:]
+       }
+       if length > uint32(len(subpacket)) {
+               goto Truncated
+       }
+       rest = subpacket[length:]
+       subpacket = subpacket[:length]
+       if len(subpacket) == 0 {
+               err = errors.StructuralError("zero length signature subpacket")
+               return
+       }
+       packetType = signatureSubpacketType(subpacket[0] & 0x7f)
+       isCritical = subpacket[0]&0x80 == 0x80
+       subpacket = subpacket[1:]
+       sig.rawSubpackets = append(sig.rawSubpackets, outputSubpacket{isHashed, packetType, isCritical, subpacket})
+       switch packetType {
+       case creationTimeSubpacket:
+               if !isHashed {
+                       err = errors.StructuralError("signature creation time in non-hashed area")
+                       return
+               }
+               if len(subpacket) != 4 {
+                       err = errors.StructuralError("signature creation time not four bytes")
+                       return
+               }
+               t := binary.BigEndian.Uint32(subpacket)
+               sig.CreationTime = time.Unix(int64(t), 0)
+       case signatureExpirationSubpacket:
+               // Signature expiration time, section 5.2.3.10
+               if !isHashed {
+                       return
+               }
+               if len(subpacket) != 4 {
+                       err = errors.StructuralError("expiration subpacket with bad length")
+                       return
+               }
+               sig.SigLifetimeSecs = new(uint32)
+               *sig.SigLifetimeSecs = binary.BigEndian.Uint32(subpacket)
+       case keyExpirationSubpacket:
+               // Key expiration time, section 5.2.3.6
+               if !isHashed {
+                       return
+               }
+               if len(subpacket) != 4 {
+                       err = errors.StructuralError("key expiration subpacket with bad length")
+                       return
+               }
+               sig.KeyLifetimeSecs = new(uint32)
+               *sig.KeyLifetimeSecs = binary.BigEndian.Uint32(subpacket)
+       case prefSymmetricAlgosSubpacket:
+               // Preferred symmetric algorithms, section 5.2.3.7
+               if !isHashed {
+                       return
+               }
+               sig.PreferredSymmetric = make([]byte, len(subpacket))
+               copy(sig.PreferredSymmetric, subpacket)
+       case issuerSubpacket:
+               // Issuer, section 5.2.3.5
+               if len(subpacket) != 8 {
+                       err = errors.StructuralError("issuer subpacket with bad length")
+                       return
+               }
+               sig.IssuerKeyId = new(uint64)
+               *sig.IssuerKeyId = binary.BigEndian.Uint64(subpacket)
+       case prefHashAlgosSubpacket:
+               // Preferred hash algorithms, section 5.2.3.8
+               if !isHashed {
+                       return
+               }
+               sig.PreferredHash = make([]byte, len(subpacket))
+               copy(sig.PreferredHash, subpacket)
+       case prefCompressionSubpacket:
+               // Preferred compression algorithms, section 5.2.3.9
+               if !isHashed {
+                       return
+               }
+               sig.PreferredCompression = make([]byte, len(subpacket))
+               copy(sig.PreferredCompression, subpacket)
+       case primaryUserIdSubpacket:
+               // Primary User ID, section 5.2.3.19
+               if !isHashed {
+                       return
+               }
+               if len(subpacket) != 1 {
+                       err = errors.StructuralError("primary user id subpacket with bad length")
+                       return
+               }
+               sig.IsPrimaryId = new(bool)
+               if subpacket[0] > 0 {
+                       *sig.IsPrimaryId = true
+               }
+       case keyFlagsSubpacket:
+               // Key flags, section 5.2.3.21
+               if !isHashed {
+                       return
+               }
+               if len(subpacket) == 0 {
+                       err = errors.StructuralError("empty key flags subpacket")
+                       return
+               }
+               sig.FlagsValid = true
+               if subpacket[0]&KeyFlagCertify != 0 {
+                       sig.FlagCertify = true
+               }
+               if subpacket[0]&KeyFlagSign != 0 {
+                       sig.FlagSign = true
+               }
+               if subpacket[0]&KeyFlagEncryptCommunications != 0 {
+                       sig.FlagEncryptCommunications = true
+               }
+               if subpacket[0]&KeyFlagEncryptStorage != 0 {
+                       sig.FlagEncryptStorage = true
+               }
+       case reasonForRevocationSubpacket:
+               // Reason For Revocation, section 5.2.3.23
+               if !isHashed {
+                       return
+               }
+               if len(subpacket) == 0 {
+                       err = errors.StructuralError("empty revocation reason subpacket")
+                       return
+               }
+               sig.RevocationReason = new(uint8)
+               *sig.RevocationReason = subpacket[0]
+               sig.RevocationReasonText = string(subpacket[1:])
+       case featuresSubpacket:
+               // Features subpacket, section 5.2.3.24 specifies a very general
+               // mechanism for OpenPGP implementations to signal support for new
+               // features. In practice, the subpacket is used exclusively to
+               // indicate support for MDC-protected encryption.
+               sig.MDC = len(subpacket) >= 1 && subpacket[0]&1 == 1
+       case embeddedSignatureSubpacket:
+               // The only usage is in signatures that cross-certify
+               // signing subkeys. Section 5.2.3.26 describes the
+               // format and section 11.1 describes its usage.
+               if sig.EmbeddedSignature != nil {
+                       err = errors.StructuralError("Cannot have multiple embedded signatures")
+                       return
+               }
+               sig.EmbeddedSignature = new(Signature)
+               // Embedded signatures are required to be v4 signatures; see
+               // section 12.1. However, we only parse v4 signatures in this
+               // file anyway.
+               if err := sig.EmbeddedSignature.parse(bytes.NewBuffer(subpacket)); err != nil {
+                       return nil, err
+               }
+               if sigType := sig.EmbeddedSignature.SigType; sigType != SigTypePrimaryKeyBinding {
+                       return nil, errors.StructuralError("cross-signature has unexpected type " + strconv.Itoa(int(sigType)))
+               }
+       default:
+               if isCritical {
+                       err = errors.UnsupportedError("unknown critical signature subpacket type " + strconv.Itoa(int(packetType)))
+                       return
+               }
+       }
+       return
+
+Truncated:
+       err = errors.StructuralError("signature subpacket truncated")
+       return
+}
+
+// subpacketLengthLength returns the length, in bytes, of an encoded length value.
+func subpacketLengthLength(length int) int {
+       if length < 192 {
+               return 1
+       }
+       if length < 16320 {
+               return 2
+       }
+       return 5
+}
+
+// serializeSubpacketLength marshals the given length into to.
+func serializeSubpacketLength(to []byte, length int) int {
+       // RFC 4880, Section 4.2.2.
+       if length < 192 {
+               to[0] = byte(length)
+               return 1
+       }
+       if length < 16320 {
+               length -= 192
+               to[0] = byte((length >> 8) + 192)
+               to[1] = byte(length)
+               return 2
+       }
+       to[0] = 255
+       to[1] = byte(length >> 24)
+       to[2] = byte(length >> 16)
+       to[3] = byte(length >> 8)
+       to[4] = byte(length)
+       return 5
+}
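As a worked example of the length encoding above (a sketch, not part of this diff, and only usable inside the package since the function is unexported): a subpacket length of 1000 falls in the two-octet range, so it is written as 0xC3 0x28, because 1000-192 = 808, (808>>8)+192 = 0xC3 and 808&0xFF = 0x28; the parser reverses this per RFC 4880, Section 4.2.2 as ((0xC3-192)<<8) + 0x28 + 192 = 1000.

        var buf [5]byte
        n := serializeSubpacketLength(buf[:], 1000)
        // n == 2 and buf[:n] == []byte{0xc3, 0x28}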
+
+// subpacketsLength returns the serialized length, in bytes, of the given
+// subpackets.
+func subpacketsLength(subpackets []outputSubpacket, hashed bool) (length int) {
+       for _, subpacket := range subpackets {
+               if subpacket.hashed == hashed {
+                       length += subpacketLengthLength(len(subpacket.contents) + 1)
+                       length += 1 // type byte
+                       length += len(subpacket.contents)
+               }
+       }
+       return
+}
+
+// serializeSubpackets marshals the given subpackets into to.
+func serializeSubpackets(to []byte, subpackets []outputSubpacket, hashed bool) {
+       for _, subpacket := range subpackets {
+               if subpacket.hashed == hashed {
+                       n := serializeSubpacketLength(to, len(subpacket.contents)+1)
+                       to[n] = byte(subpacket.subpacketType)
+                       to = to[1+n:]
+                       n = copy(to, subpacket.contents)
+                       to = to[n:]
+               }
+       }
+       return
+}
+
+// KeyExpired returns whether sig is a self-signature of a key that has
+// expired.
+func (sig *Signature) KeyExpired(currentTime time.Time) bool {
+       if sig.KeyLifetimeSecs == nil {
+               return false
+       }
+       expiry := sig.CreationTime.Add(time.Duration(*sig.KeyLifetimeSecs) * time.Second)
+       return currentTime.After(expiry)
+}
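A brief usage sketch of KeyExpired (not part of this diff; selfSig is assumed to be a *packet.Signature self-signature obtained from key material parsed elsewhere):

        if selfSig.KeyExpired(time.Now()) {
                // The key covered by this self-signature is past its expiry
                // and should not be used for new operations.
        }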
+
+// buildHashSuffix constructs the HashSuffix member of sig in preparation for signing.
+func (sig *Signature) buildHashSuffix() (err error) {
+       hashedSubpacketsLen := subpacketsLength(sig.outSubpackets, true)
+
+       var ok bool
+       l := 6 + hashedSubpacketsLen
+       sig.HashSuffix = make([]byte, l+6)
+       sig.HashSuffix[0] = 4
+       sig.HashSuffix[1] = uint8(sig.SigType)
+       sig.HashSuffix[2] = uint8(sig.PubKeyAlgo)
+       sig.HashSuffix[3], ok = s2k.HashToHashId(sig.Hash)
+       if !ok {
+               sig.HashSuffix = nil
+               return errors.InvalidArgumentError("hash cannot be represented in OpenPGP: " + strconv.Itoa(int(sig.Hash)))
+       }
+       sig.HashSuffix[4] = byte(hashedSubpacketsLen >> 8)
+       sig.HashSuffix[5] = byte(hashedSubpacketsLen)
+       serializeSubpackets(sig.HashSuffix[6:l], sig.outSubpackets, true)
+       trailer := sig.HashSuffix[l:]
+       trailer[0] = 4
+       trailer[1] = 0xff
+       trailer[2] = byte(l >> 24)
+       trailer[3] = byte(l >> 16)
+       trailer[4] = byte(l >> 8)
+       trailer[5] = byte(l)
+       return
+}
+
+func (sig *Signature) signPrepareHash(h hash.Hash) (digest []byte, err error) {
+       err = sig.buildHashSuffix()
+       if err != nil {
+               return
+       }
+
+       h.Write(sig.HashSuffix)
+       digest = h.Sum(nil)
+       copy(sig.HashTag[:], digest)
+       return
+}
+
+// Sign signs a message with a private key. The hash, h, must contain
+// the hash of the message to be signed and will be mutated by this function.
+// On success, the signature is stored in sig. Call Serialize to write it out.
+// If config is nil, sensible defaults will be used.
+func (sig *Signature) Sign(h hash.Hash, priv *PrivateKey, config *Config) (err error) {
+       sig.outSubpackets = sig.buildSubpackets()
+       digest, err := sig.signPrepareHash(h)
+       if err != nil {
+               return
+       }
+
+       switch priv.PubKeyAlgo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly:
+               // supports both *rsa.PrivateKey and crypto.Signer
+               sig.RSASignature.bytes, err = priv.PrivateKey.(crypto.Signer).Sign(config.Random(), digest, sig.Hash)
+               sig.RSASignature.bitLength = uint16(8 * len(sig.RSASignature.bytes))
+       case PubKeyAlgoDSA:
+               dsaPriv := priv.PrivateKey.(*dsa.PrivateKey)
+
+               // Need to truncate the digest to match FIPS 186-3 section 4.6.
+               subgroupSize := (dsaPriv.Q.BitLen() + 7) / 8
+               if len(digest) > subgroupSize {
+                       digest = digest[:subgroupSize]
+               }
+               r, s, err := dsa.Sign(config.Random(), dsaPriv, digest)
+               if err == nil {
+                       sig.DSASigR.bytes = r.Bytes()
+                       sig.DSASigR.bitLength = uint16(8 * len(sig.DSASigR.bytes))
+                       sig.DSASigS.bytes = s.Bytes()
+                       sig.DSASigS.bitLength = uint16(8 * len(sig.DSASigS.bytes))
+               }
+       case PubKeyAlgoECDSA:
+               var r, s *big.Int
+               if pk, ok := priv.PrivateKey.(*ecdsa.PrivateKey); ok {
+                       // direct support, avoid asn1 wrapping/unwrapping
+                       r, s, err = ecdsa.Sign(config.Random(), pk, digest)
+               } else {
+                       var b []byte
+                       b, err = priv.PrivateKey.(crypto.Signer).Sign(config.Random(), digest, nil)
+                       if err == nil {
+                               r, s, err = unwrapECDSASig(b)
+                       }
+               }
+               if err == nil {
+                       sig.ECDSASigR = fromBig(r)
+                       sig.ECDSASigS = fromBig(s)
+               }
+       default:
+               err = errors.UnsupportedError("public key algorithm: " + strconv.Itoa(int(sig.PubKeyAlgo)))
+       }
+
+       return
+}
+
+// unwrapECDSASig parses the two integer components of an ASN.1-encoded ECDSA
+// signature.
+func unwrapECDSASig(b []byte) (r, s *big.Int, err error) {
+       var ecdsaSig struct {
+               R, S *big.Int
+       }
+       _, err = asn1.Unmarshal(b, &ecdsaSig)
+       if err != nil {
+               return
+       }
+       return ecdsaSig.R, ecdsaSig.S, nil
+}
+
+// SignUserId computes a signature from priv, asserting that pub is a valid
+// key for the identity id.  On success, the signature is stored in sig. Call
+// Serialize to write it out.
+// If config is nil, sensible defaults will be used.
+func (sig *Signature) SignUserId(id string, pub *PublicKey, priv *PrivateKey, config *Config) error {
+       h, err := userIdSignatureHash(id, pub, sig.Hash)
+       if err != nil {
+               return err
+       }
+       return sig.Sign(h, priv, config)
+}
+
+// SignKey computes a signature from priv, asserting that pub is a subkey. On
+// success, the signature is stored in sig. Call Serialize to write it out.
+// If config is nil, sensible defaults will be used.
+func (sig *Signature) SignKey(pub *PublicKey, priv *PrivateKey, config *Config) error {
+       h, err := keySignatureHash(&priv.PublicKey, pub, sig.Hash)
+       if err != nil {
+               return err
+       }
+       return sig.Sign(h, priv, config)
+}
+
+// Serialize marshals sig to w. Sign, SignUserId or SignKey must have been
+// called first.
+func (sig *Signature) Serialize(w io.Writer) (err error) {
+       if len(sig.outSubpackets) == 0 {
+               sig.outSubpackets = sig.rawSubpackets
+       }
+       if sig.RSASignature.bytes == nil && sig.DSASigR.bytes == nil && sig.ECDSASigR.bytes == nil {
+               return errors.InvalidArgumentError("Signature: need to call Sign, SignUserId or SignKey before Serialize")
+       }
+
+       sigLength := 0
+       switch sig.PubKeyAlgo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly:
+               sigLength = 2 + len(sig.RSASignature.bytes)
+       case PubKeyAlgoDSA:
+               sigLength = 2 + len(sig.DSASigR.bytes)
+               sigLength += 2 + len(sig.DSASigS.bytes)
+       case PubKeyAlgoECDSA:
+               sigLength = 2 + len(sig.ECDSASigR.bytes)
+               sigLength += 2 + len(sig.ECDSASigS.bytes)
+       default:
+               panic("impossible")
+       }
+
+       unhashedSubpacketsLen := subpacketsLength(sig.outSubpackets, false)
+       length := len(sig.HashSuffix) - 6 /* trailer not included */ +
+               2 /* length of unhashed subpackets */ + unhashedSubpacketsLen +
+               2 /* hash tag */ + sigLength
+       err = serializeHeader(w, packetTypeSignature, length)
+       if err != nil {
+               return
+       }
+
+       _, err = w.Write(sig.HashSuffix[:len(sig.HashSuffix)-6])
+       if err != nil {
+               return
+       }
+
+       unhashedSubpackets := make([]byte, 2+unhashedSubpacketsLen)
+       unhashedSubpackets[0] = byte(unhashedSubpacketsLen >> 8)
+       unhashedSubpackets[1] = byte(unhashedSubpacketsLen)
+       serializeSubpackets(unhashedSubpackets[2:], sig.outSubpackets, false)
+
+       _, err = w.Write(unhashedSubpackets)
+       if err != nil {
+               return
+       }
+       _, err = w.Write(sig.HashTag[:])
+       if err != nil {
+               return
+       }
+
+       switch sig.PubKeyAlgo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly:
+               err = writeMPIs(w, sig.RSASignature)
+       case PubKeyAlgoDSA:
+               err = writeMPIs(w, sig.DSASigR, sig.DSASigS)
+       case PubKeyAlgoECDSA:
+               err = writeMPIs(w, sig.ECDSASigR, sig.ECDSASigS)
+       default:
+               panic("impossible")
+       }
+       return
+}
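Taken together, Sign and Serialize are used roughly as in the sketch below (not part of this diff). It assumes the caller already has a *packet.PrivateKey, imports crypto, io and time, and links in a SHA-256 implementation (as read.go in this change does via a blank crypto/sha256 import).

        // signDetached writes a detached binary signature over message to w.
        func signDetached(w io.Writer, message []byte, priv *packet.PrivateKey) error {
                sig := &packet.Signature{
                        SigType:      packet.SigTypeBinary,
                        PubKeyAlgo:   priv.PubKeyAlgo,
                        Hash:         crypto.SHA256,
                        CreationTime: time.Now(),
                }
                h := sig.Hash.New()
                h.Write(message) // Sign expects h to already contain the message hash state.
                if err := sig.Sign(h, priv, nil); err != nil { // nil config: sensible defaults
                        return err
                }
                return sig.Serialize(w)
        }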
+
+// outputSubpacket represents a subpacket to be marshaled.
+type outputSubpacket struct {
+       hashed        bool // true if this subpacket is in the hashed area.
+       subpacketType signatureSubpacketType
+       isCritical    bool
+       contents      []byte
+}
+
+func (sig *Signature) buildSubpackets() (subpackets []outputSubpacket) {
+       creationTime := make([]byte, 4)
+       binary.BigEndian.PutUint32(creationTime, uint32(sig.CreationTime.Unix()))
+       subpackets = append(subpackets, outputSubpacket{true, creationTimeSubpacket, false, creationTime})
+
+       if sig.IssuerKeyId != nil {
+               keyId := make([]byte, 8)
+               binary.BigEndian.PutUint64(keyId, *sig.IssuerKeyId)
+               subpackets = append(subpackets, outputSubpacket{true, issuerSubpacket, false, keyId})
+       }
+
+       if sig.SigLifetimeSecs != nil && *sig.SigLifetimeSecs != 0 {
+               sigLifetime := make([]byte, 4)
+               binary.BigEndian.PutUint32(sigLifetime, *sig.SigLifetimeSecs)
+               subpackets = append(subpackets, outputSubpacket{true, signatureExpirationSubpacket, true, sigLifetime})
+       }
+
+       // Key flags may only appear in self-signatures or certification signatures.
+
+       if sig.FlagsValid {
+               var flags byte
+               if sig.FlagCertify {
+                       flags |= KeyFlagCertify
+               }
+               if sig.FlagSign {
+                       flags |= KeyFlagSign
+               }
+               if sig.FlagEncryptCommunications {
+                       flags |= KeyFlagEncryptCommunications
+               }
+               if sig.FlagEncryptStorage {
+                       flags |= KeyFlagEncryptStorage
+               }
+               subpackets = append(subpackets, outputSubpacket{true, keyFlagsSubpacket, false, []byte{flags}})
+       }
+
+       // The following subpackets may only appear in self-signatures
+
+       if sig.KeyLifetimeSecs != nil && *sig.KeyLifetimeSecs != 0 {
+               keyLifetime := make([]byte, 4)
+               binary.BigEndian.PutUint32(keyLifetime, *sig.KeyLifetimeSecs)
+               subpackets = append(subpackets, outputSubpacket{true, keyExpirationSubpacket, true, keyLifetime})
+       }
+
+       if sig.IsPrimaryId != nil && *sig.IsPrimaryId {
+               subpackets = append(subpackets, outputSubpacket{true, primaryUserIdSubpacket, false, []byte{1}})
+       }
+
+       if len(sig.PreferredSymmetric) > 0 {
+               subpackets = append(subpackets, outputSubpacket{true, prefSymmetricAlgosSubpacket, false, sig.PreferredSymmetric})
+       }
+
+       if len(sig.PreferredHash) > 0 {
+               subpackets = append(subpackets, outputSubpacket{true, prefHashAlgosSubpacket, false, sig.PreferredHash})
+       }
+
+       if len(sig.PreferredCompression) > 0 {
+               subpackets = append(subpackets, outputSubpacket{true, prefCompressionSubpacket, false, sig.PreferredCompression})
+       }
+
+       return
+}
diff --git a/vendor/golang.org/x/crypto/openpgp/packet/signature_v3.go b/vendor/golang.org/x/crypto/openpgp/packet/signature_v3.go
new file mode 100644 (file)
index 0000000..6edff88
--- /dev/null
@@ -0,0 +1,146 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+       "crypto"
+       "encoding/binary"
+       "fmt"
+       "io"
+       "strconv"
+       "time"
+
+       "golang.org/x/crypto/openpgp/errors"
+       "golang.org/x/crypto/openpgp/s2k"
+)
+
+// SignatureV3 represents older version 3 signatures. These signatures are less secure
+// than version 4 and should not be used to create new signatures. They are included
+// here for backwards compatibility, to read and validate signatures made with older key material.
+// See RFC 4880, section 5.2.2.
+type SignatureV3 struct {
+       SigType      SignatureType
+       CreationTime time.Time
+       IssuerKeyId  uint64
+       PubKeyAlgo   PublicKeyAlgorithm
+       Hash         crypto.Hash
+       HashTag      [2]byte
+
+       RSASignature     parsedMPI
+       DSASigR, DSASigS parsedMPI
+}
+
+func (sig *SignatureV3) parse(r io.Reader) (err error) {
+       // RFC 4880, section 5.2.2
+       var buf [8]byte
+       if _, err = readFull(r, buf[:1]); err != nil {
+               return
+       }
+       if buf[0] < 2 || buf[0] > 3 {
+               err = errors.UnsupportedError("signature packet version " + strconv.Itoa(int(buf[0])))
+               return
+       }
+       if _, err = readFull(r, buf[:1]); err != nil {
+               return
+       }
+       if buf[0] != 5 {
+               err = errors.UnsupportedError(
+                       "invalid hashed material length " + strconv.Itoa(int(buf[0])))
+               return
+       }
+
+       // Read hashed material: signature type + creation time
+       if _, err = readFull(r, buf[:5]); err != nil {
+               return
+       }
+       sig.SigType = SignatureType(buf[0])
+       t := binary.BigEndian.Uint32(buf[1:5])
+       sig.CreationTime = time.Unix(int64(t), 0)
+
+       // Eight-octet Key ID of signer.
+       if _, err = readFull(r, buf[:8]); err != nil {
+               return
+       }
+       sig.IssuerKeyId = binary.BigEndian.Uint64(buf[:])
+
+       // Public-key and hash algorithm
+       if _, err = readFull(r, buf[:2]); err != nil {
+               return
+       }
+       sig.PubKeyAlgo = PublicKeyAlgorithm(buf[0])
+       switch sig.PubKeyAlgo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly, PubKeyAlgoDSA:
+       default:
+               err = errors.UnsupportedError("public key algorithm " + strconv.Itoa(int(sig.PubKeyAlgo)))
+               return
+       }
+       var ok bool
+       if sig.Hash, ok = s2k.HashIdToHash(buf[1]); !ok {
+               return errors.UnsupportedError("hash function " + strconv.Itoa(int(buf[1])))
+       }
+
+       // Two-octet field holding left 16 bits of signed hash value.
+       if _, err = readFull(r, sig.HashTag[:2]); err != nil {
+               return
+       }
+
+       switch sig.PubKeyAlgo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly:
+               sig.RSASignature.bytes, sig.RSASignature.bitLength, err = readMPI(r)
+       case PubKeyAlgoDSA:
+               if sig.DSASigR.bytes, sig.DSASigR.bitLength, err = readMPI(r); err != nil {
+                       return
+               }
+               sig.DSASigS.bytes, sig.DSASigS.bitLength, err = readMPI(r)
+       default:
+               panic("unreachable")
+       }
+       return
+}
+
+// Serialize marshals sig to w. Sign, SignUserId or SignKey must have been
+// called first.
+func (sig *SignatureV3) Serialize(w io.Writer) (err error) {
+       buf := make([]byte, 8)
+
+       // Write the sig type and creation time
+       buf[0] = byte(sig.SigType)
+       binary.BigEndian.PutUint32(buf[1:5], uint32(sig.CreationTime.Unix()))
+       if _, err = w.Write(buf[:5]); err != nil {
+               return
+       }
+
+       // Write the issuer long key ID
+       binary.BigEndian.PutUint64(buf[:8], sig.IssuerKeyId)
+       if _, err = w.Write(buf[:8]); err != nil {
+               return
+       }
+
+       // Write public key algorithm, hash ID, and hash value
+       buf[0] = byte(sig.PubKeyAlgo)
+       hashId, ok := s2k.HashToHashId(sig.Hash)
+       if !ok {
+               return errors.UnsupportedError(fmt.Sprintf("hash function %v", sig.Hash))
+       }
+       buf[1] = hashId
+       copy(buf[2:4], sig.HashTag[:])
+       if _, err = w.Write(buf[:4]); err != nil {
+               return
+       }
+
+       if sig.RSASignature.bytes == nil && sig.DSASigR.bytes == nil {
+               return errors.InvalidArgumentError("Signature: need to call Sign, SignUserId or SignKey before Serialize")
+       }
+
+       switch sig.PubKeyAlgo {
+       case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly:
+               err = writeMPIs(w, sig.RSASignature)
+       case PubKeyAlgoDSA:
+               err = writeMPIs(w, sig.DSASigR, sig.DSASigS)
+       default:
+               panic("impossible")
+       }
+       return
+}
diff --git a/vendor/golang.org/x/crypto/openpgp/packet/symmetric_key_encrypted.go b/vendor/golang.org/x/crypto/openpgp/packet/symmetric_key_encrypted.go
new file mode 100644 (file)
index 0000000..744c2d2
--- /dev/null
@@ -0,0 +1,155 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+       "bytes"
+       "crypto/cipher"
+       "io"
+       "strconv"
+
+       "golang.org/x/crypto/openpgp/errors"
+       "golang.org/x/crypto/openpgp/s2k"
+)
+
+// This is the largest session key that we'll support. Since no 512-bit cipher
+// has ever been seriously used, this is comfortably large.
+const maxSessionKeySizeInBytes = 64
+
+// SymmetricKeyEncrypted represents a passphrase protected session key. See RFC
+// 4880, section 5.3.
+type SymmetricKeyEncrypted struct {
+       CipherFunc   CipherFunction
+       s2k          func(out, in []byte)
+       encryptedKey []byte
+}
+
+const symmetricKeyEncryptedVersion = 4
+
+func (ske *SymmetricKeyEncrypted) parse(r io.Reader) error {
+       // RFC 4880, section 5.3.
+       var buf [2]byte
+       if _, err := readFull(r, buf[:]); err != nil {
+               return err
+       }
+       if buf[0] != symmetricKeyEncryptedVersion {
+               return errors.UnsupportedError("SymmetricKeyEncrypted version")
+       }
+       ske.CipherFunc = CipherFunction(buf[1])
+
+       if ske.CipherFunc.KeySize() == 0 {
+               return errors.UnsupportedError("unknown cipher: " + strconv.Itoa(int(buf[1])))
+       }
+
+       var err error
+       ske.s2k, err = s2k.Parse(r)
+       if err != nil {
+               return err
+       }
+
+       encryptedKey := make([]byte, maxSessionKeySizeInBytes)
+       // The session key may follow. We just have to try and read to find
+       // out. If it exists then we limit it to maxSessionKeySizeInBytes.
+       n, err := readFull(r, encryptedKey)
+       if err != nil && err != io.ErrUnexpectedEOF {
+               return err
+       }
+
+       if n != 0 {
+               if n == maxSessionKeySizeInBytes {
+                       return errors.UnsupportedError("oversized encrypted session key")
+               }
+               ske.encryptedKey = encryptedKey[:n]
+       }
+
+       return nil
+}
+
+// Decrypt attempts to decrypt an encrypted session key and returns the key and
+// the cipher to use when decrypting a subsequent Symmetrically Encrypted Data
+// packet.
+func (ske *SymmetricKeyEncrypted) Decrypt(passphrase []byte) ([]byte, CipherFunction, error) {
+       key := make([]byte, ske.CipherFunc.KeySize())
+       ske.s2k(key, passphrase)
+
+       if len(ske.encryptedKey) == 0 {
+               return key, ske.CipherFunc, nil
+       }
+
+       // the IV is all zeros
+       iv := make([]byte, ske.CipherFunc.blockSize())
+       c := cipher.NewCFBDecrypter(ske.CipherFunc.new(key), iv)
+       plaintextKey := make([]byte, len(ske.encryptedKey))
+       c.XORKeyStream(plaintextKey, ske.encryptedKey)
+       cipherFunc := CipherFunction(plaintextKey[0])
+       if cipherFunc.blockSize() == 0 {
+               return nil, ske.CipherFunc, errors.UnsupportedError("unknown cipher: " + strconv.Itoa(int(cipherFunc)))
+       }
+       plaintextKey = plaintextKey[1:]
+       if l, cipherKeySize := len(plaintextKey), cipherFunc.KeySize(); l != cipherKeySize {
+               return nil, cipherFunc, errors.StructuralError("length of decrypted key (" + strconv.Itoa(l) + ") " +
+                       "not equal to cipher keysize (" + strconv.Itoa(cipherKeySize) + ")")
+       }
+       return plaintextKey, cipherFunc, nil
+}
+
+// SerializeSymmetricKeyEncrypted serializes a symmetric key packet to w. The
+// packet contains a random session key, encrypted by a key derived from the
+// given passphrase. The session key is returned and must be passed to
+// SerializeSymmetricallyEncrypted.
+// If config is nil, sensible defaults will be used.
+func SerializeSymmetricKeyEncrypted(w io.Writer, passphrase []byte, config *Config) (key []byte, err error) {
+       cipherFunc := config.Cipher()
+       keySize := cipherFunc.KeySize()
+       if keySize == 0 {
+               return nil, errors.UnsupportedError("unknown cipher: " + strconv.Itoa(int(cipherFunc)))
+       }
+
+       s2kBuf := new(bytes.Buffer)
+       keyEncryptingKey := make([]byte, keySize)
+       // s2k.Serialize salts and stretches the passphrase, and writes the
+       // resulting key to keyEncryptingKey and the s2k descriptor to s2kBuf.
+       err = s2k.Serialize(s2kBuf, keyEncryptingKey, config.Random(), passphrase, &s2k.Config{Hash: config.Hash(), S2KCount: config.PasswordHashIterations()})
+       if err != nil {
+               return
+       }
+       s2kBytes := s2kBuf.Bytes()
+
+       packetLength := 2 /* header */ + len(s2kBytes) + 1 /* cipher type */ + keySize
+       err = serializeHeader(w, packetTypeSymmetricKeyEncrypted, packetLength)
+       if err != nil {
+               return
+       }
+
+       var buf [2]byte
+       buf[0] = symmetricKeyEncryptedVersion
+       buf[1] = byte(cipherFunc)
+       _, err = w.Write(buf[:])
+       if err != nil {
+               return
+       }
+       _, err = w.Write(s2kBytes)
+       if err != nil {
+               return
+       }
+
+       sessionKey := make([]byte, keySize)
+       _, err = io.ReadFull(config.Random(), sessionKey)
+       if err != nil {
+               return
+       }
+       iv := make([]byte, cipherFunc.blockSize())
+       c := cipher.NewCFBEncrypter(cipherFunc.new(keyEncryptingKey), iv)
+       encryptedCipherAndKey := make([]byte, keySize+1)
+       c.XORKeyStream(encryptedCipherAndKey, buf[1:])
+       c.XORKeyStream(encryptedCipherAndKey[1:], sessionKey)
+       _, err = w.Write(encryptedCipherAndKey)
+       if err != nil {
+               return
+       }
+
+       key = sessionKey
+       return
+}
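On the writing side, the doc comment above pairs this function with SerializeSymmetricallyEncrypted, added later in this change. A hedged sketch of that pairing follows (not part of this diff); it assumes the package's Config.DefaultCipher field and CipherAES128 constant, which live elsewhere in the vendored package.

        // encryptWithPassphrase writes a passphrase-protected session key followed by
        // an MDC-protected container; the returned WriteCloser receives the inner
        // OpenPGP packets and must be closed to emit the trailing MDC packet.
        func encryptWithPassphrase(w io.Writer, passphrase []byte) (io.WriteCloser, error) {
                cfg := &packet.Config{DefaultCipher: packet.CipherAES128}
                key, err := packet.SerializeSymmetricKeyEncrypted(w, passphrase, cfg)
                if err != nil {
                        return nil, err
                }
                return packet.SerializeSymmetricallyEncrypted(w, cfg.Cipher(), key, cfg)
        }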
diff --git a/vendor/golang.org/x/crypto/openpgp/packet/symmetrically_encrypted.go b/vendor/golang.org/x/crypto/openpgp/packet/symmetrically_encrypted.go
new file mode 100644 (file)
index 0000000..6126030
--- /dev/null
@@ -0,0 +1,290 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+       "crypto/cipher"
+       "crypto/sha1"
+       "crypto/subtle"
+       "golang.org/x/crypto/openpgp/errors"
+       "hash"
+       "io"
+       "strconv"
+)
+
+// SymmetricallyEncrypted represents a symmetrically encrypted byte string. The
+// encrypted contents will consist of more OpenPGP packets. See RFC 4880,
+// sections 5.7 and 5.13.
+type SymmetricallyEncrypted struct {
+       MDC      bool // true iff this is a type 18 packet and thus has an embedded MAC.
+       contents io.Reader
+       prefix   []byte
+}
+
+const symmetricallyEncryptedVersion = 1
+
+func (se *SymmetricallyEncrypted) parse(r io.Reader) error {
+       if se.MDC {
+               // See RFC 4880, section 5.13.
+               var buf [1]byte
+               _, err := readFull(r, buf[:])
+               if err != nil {
+                       return err
+               }
+               if buf[0] != symmetricallyEncryptedVersion {
+                       return errors.UnsupportedError("unknown SymmetricallyEncrypted version")
+               }
+       }
+       se.contents = r
+       return nil
+}
+
+// Decrypt returns a ReadCloser, from which the decrypted contents of the
+// packet can be read. An incorrect key can, with high probability, be detected
+// immediately and this will result in a KeyIncorrect error being returned.
+func (se *SymmetricallyEncrypted) Decrypt(c CipherFunction, key []byte) (io.ReadCloser, error) {
+       keySize := c.KeySize()
+       if keySize == 0 {
+               return nil, errors.UnsupportedError("unknown cipher: " + strconv.Itoa(int(c)))
+       }
+       if len(key) != keySize {
+               return nil, errors.InvalidArgumentError("SymmetricallyEncrypted: incorrect key length")
+       }
+
+       if se.prefix == nil {
+               se.prefix = make([]byte, c.blockSize()+2)
+               _, err := readFull(se.contents, se.prefix)
+               if err != nil {
+                       return nil, err
+               }
+       } else if len(se.prefix) != c.blockSize()+2 {
+               return nil, errors.InvalidArgumentError("can't try ciphers with different block lengths")
+       }
+
+       ocfbResync := OCFBResync
+       if se.MDC {
+               // MDC packets use a different form of OCFB mode.
+               ocfbResync = OCFBNoResync
+       }
+
+       s := NewOCFBDecrypter(c.new(key), se.prefix, ocfbResync)
+       if s == nil {
+               return nil, errors.ErrKeyIncorrect
+       }
+
+       plaintext := cipher.StreamReader{S: s, R: se.contents}
+
+       if se.MDC {
+               // MDC packets have an embedded hash that we need to check.
+               h := sha1.New()
+               h.Write(se.prefix)
+               return &seMDCReader{in: plaintext, h: h}, nil
+       }
+
+       // Otherwise, we just need to wrap plaintext so that it's a valid ReadCloser.
+       return seReader{plaintext}, nil
+}
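This Decrypt composes with SymmetricKeyEncrypted.Decrypt, added earlier in this change, as in the sketch below (not part of this diff; the usual io and packet imports are assumed): the passphrase recovers the session key, which in turn unlocks the Symmetrically Encrypted Data packet. ske and se are assumed to be packets already pulled off the message stream.

        func decryptWithPassphrase(ske *packet.SymmetricKeyEncrypted, se *packet.SymmetricallyEncrypted, passphrase []byte) (io.ReadCloser, error) {
                key, cipherFunc, err := ske.Decrypt(passphrase)
                if err != nil {
                        return nil, err
                }
                // The recovered session key and cipher decrypt the actual contents.
                return se.Decrypt(cipherFunc, key)
        }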
+
+// seReader wraps an io.Reader with a no-op Close method.
+type seReader struct {
+       in io.Reader
+}
+
+func (ser seReader) Read(buf []byte) (int, error) {
+       return ser.in.Read(buf)
+}
+
+func (ser seReader) Close() error {
+       return nil
+}
+
+const mdcTrailerSize = 1 /* tag byte */ + 1 /* length byte */ + sha1.Size
+
+// An seMDCReader wraps an io.Reader, maintains a running hash and keeps hold
+// of the most recent 22 bytes (mdcTrailerSize). Upon EOF, those bytes form an
+// MDC packet containing a hash of the previous contents which is checked
+// against the running hash. See RFC 4880, section 5.13.
+type seMDCReader struct {
+       in          io.Reader
+       h           hash.Hash
+       trailer     [mdcTrailerSize]byte
+       scratch     [mdcTrailerSize]byte
+       trailerUsed int
+       error       bool
+       eof         bool
+}
+
+func (ser *seMDCReader) Read(buf []byte) (n int, err error) {
+       if ser.error {
+               err = io.ErrUnexpectedEOF
+               return
+       }
+       if ser.eof {
+               err = io.EOF
+               return
+       }
+
+       // If we haven't yet filled the trailer buffer then we must do that
+       // first.
+       for ser.trailerUsed < mdcTrailerSize {
+               n, err = ser.in.Read(ser.trailer[ser.trailerUsed:])
+               ser.trailerUsed += n
+               if err == io.EOF {
+                       if ser.trailerUsed != mdcTrailerSize {
+                               n = 0
+                               err = io.ErrUnexpectedEOF
+                               ser.error = true
+                               return
+                       }
+                       ser.eof = true
+                       n = 0
+                       return
+               }
+
+               if err != nil {
+                       n = 0
+                       return
+               }
+       }
+
+       // If it's a short read then we read into a temporary buffer and shift
+       // the data into the caller's buffer.
+       if len(buf) <= mdcTrailerSize {
+               n, err = readFull(ser.in, ser.scratch[:len(buf)])
+               copy(buf, ser.trailer[:n])
+               ser.h.Write(buf[:n])
+               copy(ser.trailer[:], ser.trailer[n:])
+               copy(ser.trailer[mdcTrailerSize-n:], ser.scratch[:])
+               if n < len(buf) {
+                       ser.eof = true
+                       err = io.EOF
+               }
+               return
+       }
+
+       n, err = ser.in.Read(buf[mdcTrailerSize:])
+       copy(buf, ser.trailer[:])
+       ser.h.Write(buf[:n])
+       copy(ser.trailer[:], buf[n:])
+
+       if err == io.EOF {
+               ser.eof = true
+       }
+       return
+}
+
+// This is a new-format packet tag byte for a type 19 (MDC) packet.
+const mdcPacketTagByte = byte(0x80) | 0x40 | 19
+
+func (ser *seMDCReader) Close() error {
+       if ser.error {
+               return errors.SignatureError("error during reading")
+       }
+
+       for !ser.eof {
+               // We haven't seen EOF so we need to read to the end
+               var buf [1024]byte
+               _, err := ser.Read(buf[:])
+               if err == io.EOF {
+                       break
+               }
+               if err != nil {
+                       return errors.SignatureError("error during reading")
+               }
+       }
+
+       if ser.trailer[0] != mdcPacketTagByte || ser.trailer[1] != sha1.Size {
+               return errors.SignatureError("MDC packet not found")
+       }
+       ser.h.Write(ser.trailer[:2])
+
+       final := ser.h.Sum(nil)
+       if subtle.ConstantTimeCompare(final, ser.trailer[2:]) != 1 {
+               return errors.SignatureError("hash mismatch")
+       }
+       return nil
+}
+
+// An seMDCWriter writes through to an io.WriteCloser while maintaining a
+// running hash of the data written. On close, it emits an MDC packet
+// containing the running hash.
+type seMDCWriter struct {
+       w io.WriteCloser
+       h hash.Hash
+}
+
+func (w *seMDCWriter) Write(buf []byte) (n int, err error) {
+       w.h.Write(buf)
+       return w.w.Write(buf)
+}
+
+func (w *seMDCWriter) Close() (err error) {
+       var buf [mdcTrailerSize]byte
+
+       buf[0] = mdcPacketTagByte
+       buf[1] = sha1.Size
+       w.h.Write(buf[:2])
+       digest := w.h.Sum(nil)
+       copy(buf[2:], digest)
+
+       _, err = w.w.Write(buf[:])
+       if err != nil {
+               return
+       }
+       return w.w.Close()
+}
+
+// noOpCloser is like an ioutil.NopCloser, but for an io.Writer.
+type noOpCloser struct {
+       w io.Writer
+}
+
+func (c noOpCloser) Write(data []byte) (n int, err error) {
+       return c.w.Write(data)
+}
+
+func (c noOpCloser) Close() error {
+       return nil
+}
+
+// SerializeSymmetricallyEncrypted serializes a symmetrically encrypted packet
+// to w and returns a WriteCloser to which the to-be-encrypted packets can be
+// written.
+// If config is nil, sensible defaults will be used.
+func SerializeSymmetricallyEncrypted(w io.Writer, c CipherFunction, key []byte, config *Config) (contents io.WriteCloser, err error) {
+       if c.KeySize() != len(key) {
+               return nil, errors.InvalidArgumentError("SymmetricallyEncrypted.Serialize: bad key length")
+       }
+       writeCloser := noOpCloser{w}
+       ciphertext, err := serializeStreamHeader(writeCloser, packetTypeSymmetricallyEncryptedMDC)
+       if err != nil {
+               return
+       }
+
+       _, err = ciphertext.Write([]byte{symmetricallyEncryptedVersion})
+       if err != nil {
+               return
+       }
+
+       block := c.new(key)
+       blockSize := block.BlockSize()
+       iv := make([]byte, blockSize)
+       _, err = config.Random().Read(iv)
+       if err != nil {
+               return
+       }
+       s, prefix := NewOCFBEncrypter(block, iv, OCFBNoResync)
+       _, err = ciphertext.Write(prefix)
+       if err != nil {
+               return
+       }
+       plaintext := cipher.StreamWriter{S: s, W: ciphertext}
+
+       h := sha1.New()
+       h.Write(iv)
+       h.Write(iv[blockSize-2:])
+       contents = &seMDCWriter{w: plaintext, h: h}
+       return
+}
diff --git a/vendor/golang.org/x/crypto/openpgp/packet/userattribute.go b/vendor/golang.org/x/crypto/openpgp/packet/userattribute.go
new file mode 100644 (file)
index 0000000..96a2b38
--- /dev/null
@@ -0,0 +1,91 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+       "bytes"
+       "image"
+       "image/jpeg"
+       "io"
+       "io/ioutil"
+)
+
+const UserAttrImageSubpacket = 1
+
+// UserAttribute is capable of storing other types of data about a user
+// beyond name, email and a text comment. In practice, user attributes are typically used
+// to store a signed thumbnail photo JPEG image of the user.
+// See RFC 4880, section 5.12.
+type UserAttribute struct {
+       Contents []*OpaqueSubpacket
+}
+
+// NewUserAttributePhoto creates a user attribute packet
+// containing the given images.
+func NewUserAttributePhoto(photos ...image.Image) (uat *UserAttribute, err error) {
+       uat = new(UserAttribute)
+       for _, photo := range photos {
+               var buf bytes.Buffer
+               // RFC 4880, Section 5.12.1.
+               data := []byte{
+                       0x10, 0x00, // Little-endian image header length (16 bytes)
+                       0x01,       // Image header version 1
+                       0x01,       // JPEG
+                       0, 0, 0, 0, // 12 reserved octets, must be all zero.
+                       0, 0, 0, 0,
+                       0, 0, 0, 0}
+               if _, err = buf.Write(data); err != nil {
+                       return
+               }
+               if err = jpeg.Encode(&buf, photo, nil); err != nil {
+                       return
+               }
+               uat.Contents = append(uat.Contents, &OpaqueSubpacket{
+                       SubType:  UserAttrImageSubpacket,
+                       Contents: buf.Bytes()})
+       }
+       return
+}
+
+// NewUserAttribute creates a new user attribute packet containing the given subpackets.
+func NewUserAttribute(contents ...*OpaqueSubpacket) *UserAttribute {
+       return &UserAttribute{Contents: contents}
+}
+
+func (uat *UserAttribute) parse(r io.Reader) (err error) {
+       // RFC 4880, section 5.12
+       b, err := ioutil.ReadAll(r)
+       if err != nil {
+               return
+       }
+       uat.Contents, err = OpaqueSubpackets(b)
+       return
+}
+
+// Serialize marshals the user attribute to w in the form of an OpenPGP packet, including
+// header.
+func (uat *UserAttribute) Serialize(w io.Writer) (err error) {
+       var buf bytes.Buffer
+       for _, sp := range uat.Contents {
+               sp.Serialize(&buf)
+       }
+       if err = serializeHeader(w, packetTypeUserAttribute, buf.Len()); err != nil {
+               return err
+       }
+       _, err = w.Write(buf.Bytes())
+       return
+}
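A usage sketch for the photo helpers above (not part of this diff; img is assumed to be an image.Image the caller decoded elsewhere, with io, image and packet imported):

        func writePhotoAttribute(w io.Writer, img image.Image) error {
                uat, err := packet.NewUserAttributePhoto(img)
                if err != nil {
                        return err
                }
                return uat.Serialize(w)
        }

On the reading side, ImageData below returns the raw JPEG bytes of each photo with the 16-byte image header stripped.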
+
+// ImageData returns zero or more byte slices, each containing JPEG File
+// Interchange Format (JFIF) data, one for each photo in the user attribute
+// packet.
+func (uat *UserAttribute) ImageData() (imageData [][]byte) {
+       for _, sp := range uat.Contents {
+               if sp.SubType == UserAttrImageSubpacket && len(sp.Contents) > 16 {
+                       imageData = append(imageData, sp.Contents[16:])
+               }
+       }
+       return
+}
diff --git a/vendor/golang.org/x/crypto/openpgp/packet/userid.go b/vendor/golang.org/x/crypto/openpgp/packet/userid.go
new file mode 100644 (file)
index 0000000..d6bea7d
--- /dev/null
@@ -0,0 +1,160 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+       "io"
+       "io/ioutil"
+       "strings"
+)
+
+// UserId contains text that is intended to represent the name and email
+// address of the key holder. See RFC 4880, section 5.11. By convention, this
+// takes the form "Full Name (Comment) <email@example.com>"
+type UserId struct {
+       Id string // By convention, this takes the form "Full Name (Comment) <email@example.com>" which is split out in the fields below.
+
+       Name, Comment, Email string
+}
+
+func hasInvalidCharacters(s string) bool {
+       for _, c := range s {
+               switch c {
+               case '(', ')', '<', '>', 0:
+                       return true
+               }
+       }
+       return false
+}
+
+// NewUserId returns a UserId or nil if any of the arguments contain invalid
+// characters. The invalid characters are '\x00', '(', ')', '<' and '>'.
+func NewUserId(name, comment, email string) *UserId {
+       // RFC 4880 doesn't deal with the structure of userid strings; the
+       // name, comment and email form is just a convention. However, there's
+       // no convention about escaping the metacharacters and GPG just refuses
+       // to create user ids where, say, the name contains a '('. We mirror
+       // this behaviour.
+
+       if hasInvalidCharacters(name) || hasInvalidCharacters(comment) || hasInvalidCharacters(email) {
+               return nil
+       }
+
+       uid := new(UserId)
+       uid.Name, uid.Comment, uid.Email = name, comment, email
+       uid.Id = name
+       if len(comment) > 0 {
+               if len(uid.Id) > 0 {
+                       uid.Id += " "
+               }
+               uid.Id += "("
+               uid.Id += comment
+               uid.Id += ")"
+       }
+       if len(email) > 0 {
+               if len(uid.Id) > 0 {
+                       uid.Id += " "
+               }
+               uid.Id += "<"
+               uid.Id += email
+               uid.Id += ">"
+       }
+       return uid
+}
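For example (a sketch, not part of this diff), NewUserId assembles the conventional form and rejects metacharacters, while parseUserId below splits it back apart:

        uid := packet.NewUserId("Alice Example", "work", "alice@example.com")
        // uid.Id == "Alice Example (work) <alice@example.com>"
        // packet.NewUserId("Ali(ce", "", "") returns nil because '(' is rejected.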
+
+func (uid *UserId) parse(r io.Reader) (err error) {
+       // RFC 4880, section 5.11
+       b, err := ioutil.ReadAll(r)
+       if err != nil {
+               return
+       }
+       uid.Id = string(b)
+       uid.Name, uid.Comment, uid.Email = parseUserId(uid.Id)
+       return
+}
+
+// Serialize marshals uid to w in the form of an OpenPGP packet, including
+// header.
+func (uid *UserId) Serialize(w io.Writer) error {
+       err := serializeHeader(w, packetTypeUserId, len(uid.Id))
+       if err != nil {
+               return err
+       }
+       _, err = w.Write([]byte(uid.Id))
+       return err
+}
+
+// parseUserId extracts the name, comment and email from a user id string that
+// is formatted as "Full Name (Comment) <email@example.com>".
+func parseUserId(id string) (name, comment, email string) {
+       var n, c, e struct {
+               start, end int
+       }
+       var state int
+
+       for offset, rune := range id {
+               switch state {
+               case 0:
+                       // Entering name
+                       n.start = offset
+                       state = 1
+                       fallthrough
+               case 1:
+                       // In name
+                       if rune == '(' {
+                               state = 2
+                               n.end = offset
+                       } else if rune == '<' {
+                               state = 5
+                               n.end = offset
+                       }
+               case 2:
+                       // Entering comment
+                       c.start = offset
+                       state = 3
+                       fallthrough
+               case 3:
+                       // In comment
+                       if rune == ')' {
+                               state = 4
+                               c.end = offset
+                       }
+               case 4:
+                       // Between comment and email
+                       if rune == '<' {
+                               state = 5
+                       }
+               case 5:
+                       // Entering email
+                       e.start = offset
+                       state = 6
+                       fallthrough
+               case 6:
+                       // In email
+                       if rune == '>' {
+                               state = 7
+                               e.end = offset
+                       }
+               default:
+                       // After email
+               }
+       }
+       switch state {
+       case 1:
+               // ended in the name
+               n.end = len(id)
+       case 3:
+               // ended in comment
+               c.end = len(id)
+       case 6:
+               // ended in email
+               e.end = len(id)
+       }
+
+       name = strings.TrimSpace(id[n.start:n.end])
+       comment = strings.TrimSpace(id[c.start:c.end])
+       email = strings.TrimSpace(id[e.start:e.end])
+       return
+}
diff --git a/vendor/golang.org/x/crypto/openpgp/read.go b/vendor/golang.org/x/crypto/openpgp/read.go
new file mode 100644 (file)
index 0000000..6ec664f
--- /dev/null
@@ -0,0 +1,442 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package openpgp implements high level operations on OpenPGP messages.
+package openpgp // import "golang.org/x/crypto/openpgp"
+
+import (
+       "crypto"
+       _ "crypto/sha256"
+       "hash"
+       "io"
+       "strconv"
+
+       "golang.org/x/crypto/openpgp/armor"
+       "golang.org/x/crypto/openpgp/errors"
+       "golang.org/x/crypto/openpgp/packet"
+)
+
+// SignatureType is the armor type for a PGP signature.
+var SignatureType = "PGP SIGNATURE"
+
+// readArmored reads an armored block with the given type.
+func readArmored(r io.Reader, expectedType string) (body io.Reader, err error) {
+       block, err := armor.Decode(r)
+       if err != nil {
+               return
+       }
+
+       if block.Type != expectedType {
+               return nil, errors.InvalidArgumentError("expected '" + expectedType + "', got: " + block.Type)
+       }
+
+       return block.Body, nil
+}
+
+// MessageDetails contains the result of parsing an OpenPGP encrypted and/or
+// signed message.
+type MessageDetails struct {
+       IsEncrypted              bool                // true if the message was encrypted.
+       EncryptedToKeyIds        []uint64            // the list of recipient key ids.
+       IsSymmetricallyEncrypted bool                // true if a passphrase could have decrypted the message.
+       DecryptedWith            Key                 // the private key used to decrypt the message, if any.
+       IsSigned                 bool                // true if the message is signed.
+       SignedByKeyId            uint64              // the key id of the signer, if any.
+       SignedBy                 *Key                // the key of the signer, if available.
+       LiteralData              *packet.LiteralData // the metadata of the contents
+       UnverifiedBody           io.Reader           // the contents of the message.
+
+       // If IsSigned is true and SignedBy is non-zero then the signature will
+       // be verified as UnverifiedBody is read. The signature cannot be
+       // checked until the whole of UnverifiedBody is read so UnverifiedBody
+       // must be consumed until EOF before the data can be trusted. Even if a
+       // message isn't signed (or the signer is unknown) the data may contain
+       // an authentication code that is only checked once UnverifiedBody has
+       // been consumed. Once EOF has been seen, the following fields are
+       // valid. (An authentication code failure is reported as a
+       // SignatureError error when reading from UnverifiedBody.)
+       SignatureError error               // nil if the signature is good.
+       Signature      *packet.Signature   // the signature packet itself, if v4 (default)
+       SignatureV3    *packet.SignatureV3 // the signature packet if it is a v2 or v3 signature
+
+       decrypted io.ReadCloser
+}
+
+// A PromptFunction is used as a callback by functions that may need to decrypt
+// a private key, or prompt for a passphrase. It is called with a list of
+// acceptable, encrypted private keys and a boolean that indicates whether a
+// passphrase is usable. It should either decrypt a private key or return a
+// passphrase to try. If the decrypted private key or given passphrase isn't
+// correct, the function will be called again, forever. Any error returned will
+// be passed up.
+type PromptFunction func(keys []Key, symmetric bool) ([]byte, error)
+
+// A keyEnvelopePair is used to store a private key with the envelope that
+// contains a symmetric key, encrypted with that key.
+type keyEnvelopePair struct {
+       key          Key
+       encryptedKey *packet.EncryptedKey
+}
+
+// ReadMessage parses an OpenPGP message that may be signed and/or encrypted.
+// The given KeyRing should contain both public keys (for signature
+// verification) and, possibly encrypted, private keys for decrypting.
+// If config is nil, sensible defaults will be used.
+func ReadMessage(r io.Reader, keyring KeyRing, prompt PromptFunction, config *packet.Config) (md *MessageDetails, err error) {
+       var p packet.Packet
+
+       var symKeys []*packet.SymmetricKeyEncrypted
+       var pubKeys []keyEnvelopePair
+       var se *packet.SymmetricallyEncrypted
+
+       packets := packet.NewReader(r)
+       md = new(MessageDetails)
+       md.IsEncrypted = true
+
+       // The message, if encrypted, starts with a number of packets
+       // containing an encrypted decryption key. The decryption key is either
+       // encrypted to a public key, or with a passphrase. This loop
+       // collects these packets.
+ParsePackets:
+       for {
+               p, err = packets.Next()
+               if err != nil {
+                       return nil, err
+               }
+               switch p := p.(type) {
+               case *packet.SymmetricKeyEncrypted:
+                       // This packet contains the decryption key encrypted with a passphrase.
+                       md.IsSymmetricallyEncrypted = true
+                       symKeys = append(symKeys, p)
+               case *packet.EncryptedKey:
+                       // This packet contains the decryption key encrypted to a public key.
+                       md.EncryptedToKeyIds = append(md.EncryptedToKeyIds, p.KeyId)
+                       switch p.Algo {
+                       case packet.PubKeyAlgoRSA, packet.PubKeyAlgoRSAEncryptOnly, packet.PubKeyAlgoElGamal:
+                               break
+                       default:
+                               continue
+                       }
+                       var keys []Key
+                       if p.KeyId == 0 {
+                               keys = keyring.DecryptionKeys()
+                       } else {
+                               keys = keyring.KeysById(p.KeyId)
+                       }
+                       for _, k := range keys {
+                               pubKeys = append(pubKeys, keyEnvelopePair{k, p})
+                       }
+               case *packet.SymmetricallyEncrypted:
+                       se = p
+                       break ParsePackets
+               case *packet.Compressed, *packet.LiteralData, *packet.OnePassSignature:
+                       // This message isn't encrypted.
+                       if len(symKeys) != 0 || len(pubKeys) != 0 {
+                               return nil, errors.StructuralError("key material not followed by encrypted message")
+                       }
+                       packets.Unread(p)
+                       return readSignedMessage(packets, nil, keyring)
+               }
+       }
+
+       var candidates []Key
+       var decrypted io.ReadCloser
+
+       // Now that we have the list of encrypted keys we need to decrypt at
+       // least one of them or, if we cannot, we need to call the prompt
+       // function so that it can decrypt a key or give us a passphrase.
+FindKey:
+       for {
+               // See if any of the keys already have a private key available
+               candidates = candidates[:0]
+               candidateFingerprints := make(map[string]bool)
+
+               for _, pk := range pubKeys {
+                       if pk.key.PrivateKey == nil {
+                               continue
+                       }
+                       if !pk.key.PrivateKey.Encrypted {
+                               if len(pk.encryptedKey.Key) == 0 {
+                                       pk.encryptedKey.Decrypt(pk.key.PrivateKey, config)
+                               }
+                               if len(pk.encryptedKey.Key) == 0 {
+                                       continue
+                               }
+                               decrypted, err = se.Decrypt(pk.encryptedKey.CipherFunc, pk.encryptedKey.Key)
+                               if err != nil && err != errors.ErrKeyIncorrect {
+                                       return nil, err
+                               }
+                               if decrypted != nil {
+                                       md.DecryptedWith = pk.key
+                                       break FindKey
+                               }
+                       } else {
+                               fpr := string(pk.key.PublicKey.Fingerprint[:])
+                               if v := candidateFingerprints[fpr]; v {
+                                       continue
+                               }
+                               candidates = append(candidates, pk.key)
+                               candidateFingerprints[fpr] = true
+                       }
+               }
+
+               if len(candidates) == 0 && len(symKeys) == 0 {
+                       return nil, errors.ErrKeyIncorrect
+               }
+
+               if prompt == nil {
+                       return nil, errors.ErrKeyIncorrect
+               }
+
+               passphrase, err := prompt(candidates, len(symKeys) != 0)
+               if err != nil {
+                       return nil, err
+               }
+
+               // Try the symmetric passphrase first
+               if len(symKeys) != 0 && passphrase != nil {
+                       for _, s := range symKeys {
+                               key, cipherFunc, err := s.Decrypt(passphrase)
+                               if err == nil {
+                                       decrypted, err = se.Decrypt(cipherFunc, key)
+                                       if err != nil && err != errors.ErrKeyIncorrect {
+                                               return nil, err
+                                       }
+                                       if decrypted != nil {
+                                               break FindKey
+                                       }
+                               }
+
+                       }
+               }
+       }
+
+       md.decrypted = decrypted
+       if err := packets.Push(decrypted); err != nil {
+               return nil, err
+       }
+       return readSignedMessage(packets, md, keyring)
+}
+
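+// Illustrative sketch (editorial note, not part of the upstream source): a
+// caller whose private keys are themselves passphrase-protected typically
+// supplies a prompt function that unlocks a key in place; returning a nil
+// passphrase then lets the FindKey loop above retry with the now-decrypted
+// key. The variable names below are hypothetical.
+//
+//	prompt := func(keys []openpgp.Key, symmetric bool) ([]byte, error) {
+//		for _, k := range keys {
+//			if k.PrivateKey.Decrypt(passphrase) == nil {
+//				return nil, nil // key unlocked; retry the FindKey loop
+//			}
+//		}
+//		return passphrase, nil // otherwise try it as a symmetric passphrase
+//	}
+//	md, err := openpgp.ReadMessage(msg, keyring, prompt, nil)
+//	// on success, read the plaintext from md.UnverifiedBody
+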
+// readSignedMessage reads a possibly signed message. If mdin is non-nil then
+// that structure is updated and returned. Otherwise a fresh MessageDetails is
+// used.
+func readSignedMessage(packets *packet.Reader, mdin *MessageDetails, keyring KeyRing) (md *MessageDetails, err error) {
+       if mdin == nil {
+               mdin = new(MessageDetails)
+       }
+       md = mdin
+
+       var p packet.Packet
+       var h hash.Hash
+       var wrappedHash hash.Hash
+FindLiteralData:
+       for {
+               p, err = packets.Next()
+               if err != nil {
+                       return nil, err
+               }
+               switch p := p.(type) {
+               case *packet.Compressed:
+                       if err := packets.Push(p.Body); err != nil {
+                               return nil, err
+                       }
+               case *packet.OnePassSignature:
+                       if !p.IsLast {
+                               return nil, errors.UnsupportedError("nested signatures")
+                       }
+
+                       h, wrappedHash, err = hashForSignature(p.Hash, p.SigType)
+                       if err != nil {
+                               md = nil
+                               return
+                       }
+
+                       md.IsSigned = true
+                       md.SignedByKeyId = p.KeyId
+                       keys := keyring.KeysByIdUsage(p.KeyId, packet.KeyFlagSign)
+                       if len(keys) > 0 {
+                               md.SignedBy = &keys[0]
+                       }
+               case *packet.LiteralData:
+                       md.LiteralData = p
+                       break FindLiteralData
+               }
+       }
+
+       if md.SignedBy != nil {
+               md.UnverifiedBody = &signatureCheckReader{packets, h, wrappedHash, md}
+       } else if md.decrypted != nil {
+               md.UnverifiedBody = checkReader{md}
+       } else {
+               md.UnverifiedBody = md.LiteralData.Body
+       }
+
+       return md, nil
+}
+
+// hashForSignature returns a pair of hashes that can be used to verify a
+// signature. The signature may specify that the contents of the signed message
+// should be preprocessed (i.e. to normalize line endings). Thus this function
+// returns two hashes. The second should be used to hash the message itself and
+// performs any needed preprocessing.
+func hashForSignature(hashId crypto.Hash, sigType packet.SignatureType) (hash.Hash, hash.Hash, error) {
+       if !hashId.Available() {
+               return nil, nil, errors.UnsupportedError("hash not available: " + strconv.Itoa(int(hashId)))
+       }
+       h := hashId.New()
+
+       switch sigType {
+       case packet.SigTypeBinary:
+               return h, h, nil
+       case packet.SigTypeText:
+               return h, NewCanonicalTextHash(h), nil
+       }
+
+       return nil, nil, errors.UnsupportedError("unsupported signature type: " + strconv.Itoa(int(sigType)))
+}
+
+// checkReader wraps an io.Reader from a LiteralData packet. When it sees EOF
+// it closes the ReadCloser from any SymmetricallyEncrypted packet to trigger
+// MDC checks.
+type checkReader struct {
+       md *MessageDetails
+}
+
+func (cr checkReader) Read(buf []byte) (n int, err error) {
+       n, err = cr.md.LiteralData.Body.Read(buf)
+       if err == io.EOF {
+               mdcErr := cr.md.decrypted.Close()
+               if mdcErr != nil {
+                       err = mdcErr
+               }
+       }
+       return
+}
+
+// signatureCheckReader wraps an io.Reader from a LiteralData packet and hashes
+// the data as it is read. When it sees an EOF from the underlying io.Reader
+// it parses and checks a trailing Signature packet and triggers any MDC checks.
+type signatureCheckReader struct {
+       packets        *packet.Reader
+       h, wrappedHash hash.Hash
+       md             *MessageDetails
+}
+
+func (scr *signatureCheckReader) Read(buf []byte) (n int, err error) {
+       n, err = scr.md.LiteralData.Body.Read(buf)
+       scr.wrappedHash.Write(buf[:n])
+       if err == io.EOF {
+               var p packet.Packet
+               p, scr.md.SignatureError = scr.packets.Next()
+               if scr.md.SignatureError != nil {
+                       return
+               }
+
+               var ok bool
+               if scr.md.Signature, ok = p.(*packet.Signature); ok {
+                       scr.md.SignatureError = scr.md.SignedBy.PublicKey.VerifySignature(scr.h, scr.md.Signature)
+               } else if scr.md.SignatureV3, ok = p.(*packet.SignatureV3); ok {
+                       scr.md.SignatureError = scr.md.SignedBy.PublicKey.VerifySignatureV3(scr.h, scr.md.SignatureV3)
+               } else {
+                       scr.md.SignatureError = errors.StructuralError("LiteralData not followed by Signature")
+                       return
+               }
+
+               // The SymmetricallyEncrypted packet, if any, might have an
+               // unsigned hash of its own. In order to check this we need to
+               // close that Reader.
+               if scr.md.decrypted != nil {
+                       mdcErr := scr.md.decrypted.Close()
+                       if mdcErr != nil {
+                               err = mdcErr
+                       }
+               }
+       }
+       return
+}
+
+// CheckDetachedSignature takes a signed file and a detached signature and
+// returns the signer if the signature is valid. If the signer isn't known,
+// ErrUnknownIssuer is returned.
+func CheckDetachedSignature(keyring KeyRing, signed, signature io.Reader) (signer *Entity, err error) {
+       var issuerKeyId uint64
+       var hashFunc crypto.Hash
+       var sigType packet.SignatureType
+       var keys []Key
+       var p packet.Packet
+
+       packets := packet.NewReader(signature)
+       for {
+               p, err = packets.Next()
+               if err == io.EOF {
+                       return nil, errors.ErrUnknownIssuer
+               }
+               if err != nil {
+                       return nil, err
+               }
+
+               switch sig := p.(type) {
+               case *packet.Signature:
+                       if sig.IssuerKeyId == nil {
+                               return nil, errors.StructuralError("signature doesn't have an issuer")
+                       }
+                       issuerKeyId = *sig.IssuerKeyId
+                       hashFunc = sig.Hash
+                       sigType = sig.SigType
+               case *packet.SignatureV3:
+                       issuerKeyId = sig.IssuerKeyId
+                       hashFunc = sig.Hash
+                       sigType = sig.SigType
+               default:
+                       return nil, errors.StructuralError("non signature packet found")
+               }
+
+               keys = keyring.KeysByIdUsage(issuerKeyId, packet.KeyFlagSign)
+               if len(keys) > 0 {
+                       break
+               }
+       }
+
+       if len(keys) == 0 {
+               panic("unreachable")
+       }
+
+       h, wrappedHash, err := hashForSignature(hashFunc, sigType)
+       if err != nil {
+               return nil, err
+       }
+
+       if _, err := io.Copy(wrappedHash, signed); err != nil && err != io.EOF {
+               return nil, err
+       }
+
+       for _, key := range keys {
+               switch sig := p.(type) {
+               case *packet.Signature:
+                       err = key.PublicKey.VerifySignature(h, sig)
+               case *packet.SignatureV3:
+                       err = key.PublicKey.VerifySignatureV3(h, sig)
+               default:
+                       panic("unreachable")
+               }
+
+               if err == nil {
+                       return key.Entity, nil
+               }
+       }
+
+       return nil, err
+}
+
+// CheckArmoredDetachedSignature performs the same actions as
+// CheckDetachedSignature but expects the signature to be armored.
+func CheckArmoredDetachedSignature(keyring KeyRing, signed, signature io.Reader) (signer *Entity, err error) {
+       body, err := readArmored(signature, SignatureType)
+       if err != nil {
+               return
+       }
+
+       return CheckDetachedSignature(keyring, signed, body)
+}
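+
+// Illustrative sketch (editorial note, not part of the upstream source):
+// verifying an armored detached signature, with hypothetical readers for the
+// public key ring, the signed data and the ".asc" signature:
+//
+//	keyring, err := openpgp.ReadArmoredKeyRing(pubKeyFile)
+//	if err != nil {
+//		// handle error
+//	}
+//	signer, err := openpgp.CheckArmoredDetachedSignature(keyring, signedFile, sigFile)
+//	if err != nil {
+//		// invalid signature, or signer not in keyring (errors.ErrUnknownIssuer)
+//	}
+//	_ = signer // the *Entity that produced the signature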
diff --git a/vendor/golang.org/x/crypto/openpgp/s2k/s2k.go b/vendor/golang.org/x/crypto/openpgp/s2k/s2k.go
new file mode 100644 (file)
index 0000000..4b9a44c
--- /dev/null
@@ -0,0 +1,273 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package s2k implements the various OpenPGP string-to-key transforms as
+// specified in RFC 4880 section 3.7.1.
+package s2k // import "golang.org/x/crypto/openpgp/s2k"
+
+import (
+       "crypto"
+       "hash"
+       "io"
+       "strconv"
+
+       "golang.org/x/crypto/openpgp/errors"
+)
+
+// Config collects configuration parameters for s2k key-stretching
+// transformations. A nil *Config is valid and results in all default
+// values. Currently, Config is used only by the Serialize function in
+// this package.
+type Config struct {
+       // Hash is the default hash function to be used. If
+       // zero, SHA1 is used.
+       Hash crypto.Hash
+       // S2KCount is only used for symmetric encryption. It
+       // determines the strength of the passphrase stretching when
+       // the said passphrase is hashed to produce a key. S2KCount
+       // should be between 1024 and 65011712, inclusive. If Config
+       // is nil or S2KCount is 0, the value 65536 is used. Not all
+       // values in the above range can be represented. S2KCount will
+       // be rounded up to the next representable value if it cannot
+       // be encoded exactly. When set, it is strongly encouraged to
+       // use a value that is at least 65536. See RFC 4880 Section
+       // 3.7.1.3.
+       S2KCount int
+}
+
+func (c *Config) hash() crypto.Hash {
+       if c == nil || uint(c.Hash) == 0 {
+               // SHA1 is the historical default in this package.
+               return crypto.SHA1
+       }
+
+       return c.Hash
+}
+
+func (c *Config) encodedCount() uint8 {
+       if c == nil || c.S2KCount == 0 {
+               return 96 // The common case. Corresponds to 65536.
+       }
+
+       i := c.S2KCount
+       switch {
+       // Behave like GPG. Should we make 65536 the lowest value used?
+       case i < 1024:
+               i = 1024
+       case i > 65011712:
+               i = 65011712
+       }
+
+       return encodeCount(i)
+}
+
+// encodeCount converts an iterative "count" in the range 1024 to
+// 65011712, inclusive, to an encoded count. The return value is the
+// octet that is actually stored in the GPG file. encodeCount panics
+// if i is not in the above range (encodedCount above takes care to
+// pass i in the correct range). See RFC 4880 Section 3.7.1.3.
+func encodeCount(i int) uint8 {
+       if i < 1024 || i > 65011712 {
+               panic("count arg i outside the required range")
+       }
+
+       for encoded := 0; encoded < 256; encoded++ {
+               count := decodeCount(uint8(encoded))
+               if count >= i {
+                       return uint8(encoded)
+               }
+       }
+
+       return 255
+}
+
+// decodeCount returns the s2k mode 3 iterative "count" corresponding to
+// the encoded octet c.
+func decodeCount(c uint8) int {
+       return (16 + int(c&15)) << (uint32(c>>4) + 6)
+}
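+
+// As a worked example (editorial note): the default encoded octet 96 (0x60)
+// has a low nibble of 0 and a high nibble of 6, so
+// decodeCount(96) = (16+0) << (6+6) = 16 * 4096 = 65536,
+// which is why encodedCount returns 96 for the default S2KCount above.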
+
+// Simple writes to out the result of computing the Simple S2K function (RFC
+// 4880, section 3.7.1.1) using the given hash and input passphrase.
+func Simple(out []byte, h hash.Hash, in []byte) {
+       Salted(out, h, in, nil)
+}
+
+var zero [1]byte
+
+// Salted writes to out the result of computing the Salted S2K function (RFC
+// 4880, section 3.7.1.2) using the given hash, input passphrase and salt.
+func Salted(out []byte, h hash.Hash, in []byte, salt []byte) {
+       done := 0
+       var digest []byte
+
+       for i := 0; done < len(out); i++ {
+               h.Reset()
+               for j := 0; j < i; j++ {
+                       h.Write(zero[:])
+               }
+               h.Write(salt)
+               h.Write(in)
+               digest = h.Sum(digest[:0])
+               n := copy(out[done:], digest)
+               done += n
+       }
+}
+
+// Iterated writes to out the result of computing the Iterated and Salted S2K
+// function (RFC 4880, section 3.7.1.3) using the given hash, input passphrase,
+// salt and iteration count.
+func Iterated(out []byte, h hash.Hash, in []byte, salt []byte, count int) {
+       combined := make([]byte, len(in)+len(salt))
+       copy(combined, salt)
+       copy(combined[len(salt):], in)
+
+       if count < len(combined) {
+               count = len(combined)
+       }
+
+       done := 0
+       var digest []byte
+       for i := 0; done < len(out); i++ {
+               h.Reset()
+               for j := 0; j < i; j++ {
+                       h.Write(zero[:])
+               }
+               written := 0
+               for written < count {
+                       if written+len(combined) > count {
+                               todo := count - written
+                               h.Write(combined[:todo])
+                               written = count
+                       } else {
+                               h.Write(combined)
+                               written += len(combined)
+                       }
+               }
+               digest = h.Sum(digest[:0])
+               n := copy(out[done:], digest)
+               done += n
+       }
+}
+
+// Parse reads a binary specification for a string-to-key transformation from r
+// and returns a function which performs that transform.
+func Parse(r io.Reader) (f func(out, in []byte), err error) {
+       var buf [9]byte
+
+       _, err = io.ReadFull(r, buf[:2])
+       if err != nil {
+               return
+       }
+
+       hash, ok := HashIdToHash(buf[1])
+       if !ok {
+               return nil, errors.UnsupportedError("hash for S2K function: " + strconv.Itoa(int(buf[1])))
+       }
+       if !hash.Available() {
+               return nil, errors.UnsupportedError("hash not available: " + strconv.Itoa(int(hash)))
+       }
+       h := hash.New()
+
+       switch buf[0] {
+       case 0:
+               f := func(out, in []byte) {
+                       Simple(out, h, in)
+               }
+               return f, nil
+       case 1:
+               _, err = io.ReadFull(r, buf[:8])
+               if err != nil {
+                       return
+               }
+               f := func(out, in []byte) {
+                       Salted(out, h, in, buf[:8])
+               }
+               return f, nil
+       case 3:
+               _, err = io.ReadFull(r, buf[:9])
+               if err != nil {
+                       return
+               }
+               count := decodeCount(buf[8])
+               f := func(out, in []byte) {
+                       Iterated(out, h, in, buf[:8], count)
+               }
+               return f, nil
+       }
+
+       return nil, errors.UnsupportedError("S2K function")
+}
+
+// Serialize salts and stretches the given passphrase and writes the
+// resulting key into key. It also serializes an S2K descriptor to
+// w. The key stretching can be configured with c, which may be
+// nil. In that case, sensible defaults will be used.
+func Serialize(w io.Writer, key []byte, rand io.Reader, passphrase []byte, c *Config) error {
+       var buf [11]byte
+       buf[0] = 3 /* iterated and salted */
+       buf[1], _ = HashToHashId(c.hash())
+       salt := buf[2:10]
+       if _, err := io.ReadFull(rand, salt); err != nil {
+               return err
+       }
+       encodedCount := c.encodedCount()
+       count := decodeCount(encodedCount)
+       buf[10] = encodedCount
+       if _, err := w.Write(buf[:]); err != nil {
+               return err
+       }
+
+       Iterated(key, c.hash().New(), passphrase, salt, count)
+       return nil
+}
+
+// hashToHashIdMapping contains pairs relating OpenPGP hash identifiers to
+// Go's crypto.Hash values. See RFC 4880, section 9.4.
+var hashToHashIdMapping = []struct {
+       id   byte
+       hash crypto.Hash
+       name string
+}{
+       {1, crypto.MD5, "MD5"},
+       {2, crypto.SHA1, "SHA1"},
+       {3, crypto.RIPEMD160, "RIPEMD160"},
+       {8, crypto.SHA256, "SHA256"},
+       {9, crypto.SHA384, "SHA384"},
+       {10, crypto.SHA512, "SHA512"},
+       {11, crypto.SHA224, "SHA224"},
+}
+
+// HashIdToHash returns a crypto.Hash which corresponds to the given OpenPGP
+// hash id.
+func HashIdToHash(id byte) (h crypto.Hash, ok bool) {
+       for _, m := range hashToHashIdMapping {
+               if m.id == id {
+                       return m.hash, true
+               }
+       }
+       return 0, false
+}
+
+// HashIdToString returns the name of the hash function corresponding to the
+// given OpenPGP hash id.
+func HashIdToString(id byte) (name string, ok bool) {
+       for _, m := range hashToHashIdMapping {
+               if m.id == id {
+                       return m.name, true
+               }
+       }
+
+       return "", false
+}
+
+// HashToHashId returns an OpenPGP hash id which corresponds to the given Hash.
+func HashToHashId(h crypto.Hash) (id byte, ok bool) {
+       for _, m := range hashToHashIdMapping {
+               if m.hash == h {
+                       return m.id, true
+               }
+       }
+       return 0, false
+}
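+
+// Illustrative sketch (editorial note, not part of the upstream source):
+// Serialize and Parse are intended to round-trip, so, with hypothetical
+// variable names (and the default SHA-1 hash linked in via crypto/sha1),
+// both sides derive the same stretched key:
+//
+//	var desc bytes.Buffer
+//	key := make([]byte, 16)
+//	if err := s2k.Serialize(&desc, key, rand.Reader, []byte("passphrase"), nil); err != nil {
+//		// handle error
+//	}
+//	f, err := s2k.Parse(&desc) // reads back the descriptor written above
+//	if err != nil {
+//		// handle error
+//	}
+//	key2 := make([]byte, 16)
+//	f(key2, []byte("passphrase")) // key2 now equals key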
diff --git a/vendor/golang.org/x/crypto/openpgp/write.go b/vendor/golang.org/x/crypto/openpgp/write.go
new file mode 100644 (file)
index 0000000..65a304c
--- /dev/null
@@ -0,0 +1,378 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package openpgp
+
+import (
+       "crypto"
+       "hash"
+       "io"
+       "strconv"
+       "time"
+
+       "golang.org/x/crypto/openpgp/armor"
+       "golang.org/x/crypto/openpgp/errors"
+       "golang.org/x/crypto/openpgp/packet"
+       "golang.org/x/crypto/openpgp/s2k"
+)
+
+// DetachSign signs message with the private key from signer (which must
+// already have been decrypted) and writes the signature to w.
+// If config is nil, sensible defaults will be used.
+func DetachSign(w io.Writer, signer *Entity, message io.Reader, config *packet.Config) error {
+       return detachSign(w, signer, message, packet.SigTypeBinary, config)
+}
+
+// ArmoredDetachSign signs message with the private key from signer (which
+// must already have been decrypted) and writes an armored signature to w.
+// If config is nil, sensible defaults will be used.
+func ArmoredDetachSign(w io.Writer, signer *Entity, message io.Reader, config *packet.Config) (err error) {
+       return armoredDetachSign(w, signer, message, packet.SigTypeBinary, config)
+}
+
+// DetachSignText signs message (after canonicalising the line endings) with
+// the private key from signer (which must already have been decrypted) and
+// writes the signature to w.
+// If config is nil, sensible defaults will be used.
+func DetachSignText(w io.Writer, signer *Entity, message io.Reader, config *packet.Config) error {
+       return detachSign(w, signer, message, packet.SigTypeText, config)
+}
+
+// ArmoredDetachSignText signs message (after canonicalising the line endings)
+// with the private key from signer (which must already have been decrypted)
+// and writes an armored signature to w.
+// If config is nil, sensible defaults will be used.
+func ArmoredDetachSignText(w io.Writer, signer *Entity, message io.Reader, config *packet.Config) error {
+       return armoredDetachSign(w, signer, message, packet.SigTypeText, config)
+}
+
+func armoredDetachSign(w io.Writer, signer *Entity, message io.Reader, sigType packet.SignatureType, config *packet.Config) (err error) {
+       out, err := armor.Encode(w, SignatureType, nil)
+       if err != nil {
+               return
+       }
+       err = detachSign(out, signer, message, sigType, config)
+       if err != nil {
+               return
+       }
+       return out.Close()
+}
+
+func detachSign(w io.Writer, signer *Entity, message io.Reader, sigType packet.SignatureType, config *packet.Config) (err error) {
+       if signer.PrivateKey == nil {
+               return errors.InvalidArgumentError("signing key doesn't have a private key")
+       }
+       if signer.PrivateKey.Encrypted {
+               return errors.InvalidArgumentError("signing key is encrypted")
+       }
+
+       sig := new(packet.Signature)
+       sig.SigType = sigType
+       sig.PubKeyAlgo = signer.PrivateKey.PubKeyAlgo
+       sig.Hash = config.Hash()
+       sig.CreationTime = config.Now()
+       sig.IssuerKeyId = &signer.PrivateKey.KeyId
+
+       h, wrappedHash, err := hashForSignature(sig.Hash, sig.SigType)
+       if err != nil {
+               return
+       }
+       io.Copy(wrappedHash, message)
+
+       err = sig.Sign(h, signer.PrivateKey, config)
+       if err != nil {
+               return
+       }
+
+       return sig.Serialize(w)
+}
+
+// FileHints contains metadata about encrypted files. This metadata is, itself,
+// encrypted.
+type FileHints struct {
+       // IsBinary can be set to hint that the contents are binary data.
+       IsBinary bool
+       // FileName hints at the name of the file that should be written. It's
+       // truncated to 255 bytes if longer. It may be empty to suggest that the
+       // file should not be written to disk. It may be equal to "_CONSOLE" to
+       // suggest the data should not be written to disk.
+       FileName string
+       // ModTime contains the modification time of the file, or the zero time if not applicable.
+       ModTime time.Time
+}
+
+// SymmetricallyEncrypt acts like gpg -c: it encrypts a file with a passphrase.
+// The resulting WriteCloser must be closed after the contents of the file have
+// been written.
+// If config is nil, sensible defaults will be used.
+func SymmetricallyEncrypt(ciphertext io.Writer, passphrase []byte, hints *FileHints, config *packet.Config) (plaintext io.WriteCloser, err error) {
+       if hints == nil {
+               hints = &FileHints{}
+       }
+
+       key, err := packet.SerializeSymmetricKeyEncrypted(ciphertext, passphrase, config)
+       if err != nil {
+               return
+       }
+       w, err := packet.SerializeSymmetricallyEncrypted(ciphertext, config.Cipher(), key, config)
+       if err != nil {
+               return
+       }
+
+       literaldata := w
+       if algo := config.Compression(); algo != packet.CompressionNone {
+               var compConfig *packet.CompressionConfig
+               if config != nil {
+                       compConfig = config.CompressionConfig
+               }
+               literaldata, err = packet.SerializeCompressed(w, algo, compConfig)
+               if err != nil {
+                       return
+               }
+       }
+
+       var epochSeconds uint32
+       if !hints.ModTime.IsZero() {
+               epochSeconds = uint32(hints.ModTime.Unix())
+       }
+       return packet.SerializeLiteral(literaldata, hints.IsBinary, hints.FileName, epochSeconds)
+}
+
+// intersectPreferences mutates and returns a prefix of a that contains only
+// the values in the intersection of a and b. The order of a is preserved.
+func intersectPreferences(a []uint8, b []uint8) (intersection []uint8) {
+       var j int
+       for _, v := range a {
+               for _, v2 := range b {
+                       if v == v2 {
+                               a[j] = v
+                               j++
+                               break
+                       }
+               }
+       }
+
+       return a[:j]
+}
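+
+// As a worked example (editorial note):
+// intersectPreferences([]uint8{8, 9, 2}, []uint8{2, 8}) keeps 8 and 2, in
+// that order (the order of a is preserved), and returns []uint8{8, 2}.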
+
+func hashToHashId(h crypto.Hash) uint8 {
+       v, ok := s2k.HashToHashId(h)
+       if !ok {
+               panic("tried to convert unknown hash")
+       }
+       return v
+}
+
+// Encrypt encrypts a message to a number of recipients and, optionally, signs
+// it. hints contains optional information, itself also encrypted, that aids
+// the recipients in processing the message. The resulting WriteCloser must
+// be closed after the contents of the file have been written.
+// If config is nil, sensible defaults will be used.
+func Encrypt(ciphertext io.Writer, to []*Entity, signed *Entity, hints *FileHints, config *packet.Config) (plaintext io.WriteCloser, err error) {
+       var signer *packet.PrivateKey
+       if signed != nil {
+               signKey, ok := signed.signingKey(config.Now())
+               if !ok {
+                       return nil, errors.InvalidArgumentError("no valid signing keys")
+               }
+               signer = signKey.PrivateKey
+               if signer == nil {
+                       return nil, errors.InvalidArgumentError("no private key in signing key")
+               }
+               if signer.Encrypted {
+                       return nil, errors.InvalidArgumentError("signing key must be decrypted")
+               }
+       }
+
+       // These are the possible ciphers that we'll use for the message.
+       candidateCiphers := []uint8{
+               uint8(packet.CipherAES128),
+               uint8(packet.CipherAES256),
+               uint8(packet.CipherCAST5),
+       }
+       // These are the possible hash functions that we'll use for the signature.
+       candidateHashes := []uint8{
+               hashToHashId(crypto.SHA256),
+               hashToHashId(crypto.SHA512),
+               hashToHashId(crypto.SHA1),
+               hashToHashId(crypto.RIPEMD160),
+       }
+       // In the event that a recipient doesn't specify any supported ciphers
+       // or hash functions, these are the ones that we assume that every
+       // implementation supports.
+       defaultCiphers := candidateCiphers[len(candidateCiphers)-1:]
+       defaultHashes := candidateHashes[len(candidateHashes)-1:]
+
+       encryptKeys := make([]Key, len(to))
+       for i := range to {
+               var ok bool
+               encryptKeys[i], ok = to[i].encryptionKey(config.Now())
+               if !ok {
+                       return nil, errors.InvalidArgumentError("cannot encrypt a message to key id " + strconv.FormatUint(to[i].PrimaryKey.KeyId, 16) + " because it has no encryption keys")
+               }
+
+               sig := to[i].primaryIdentity().SelfSignature
+
+               preferredSymmetric := sig.PreferredSymmetric
+               if len(preferredSymmetric) == 0 {
+                       preferredSymmetric = defaultCiphers
+               }
+               preferredHashes := sig.PreferredHash
+               if len(preferredHashes) == 0 {
+                       preferredHashes = defaultHashes
+               }
+               candidateCiphers = intersectPreferences(candidateCiphers, preferredSymmetric)
+               candidateHashes = intersectPreferences(candidateHashes, preferredHashes)
+       }
+
+       if len(candidateCiphers) == 0 || len(candidateHashes) == 0 {
+               return nil, errors.InvalidArgumentError("cannot encrypt because recipient set shares no common algorithms")
+       }
+
+       cipher := packet.CipherFunction(candidateCiphers[0])
+       // If the cipher specified by config is a candidate, we'll use that.
+       configuredCipher := config.Cipher()
+       for _, c := range candidateCiphers {
+               cipherFunc := packet.CipherFunction(c)
+               if cipherFunc == configuredCipher {
+                       cipher = cipherFunc
+                       break
+               }
+       }
+
+       var hash crypto.Hash
+       for _, hashId := range candidateHashes {
+               if h, ok := s2k.HashIdToHash(hashId); ok && h.Available() {
+                       hash = h
+                       break
+               }
+       }
+
+       // If the hash specified by config is a candidate, we'll use that.
+       if configuredHash := config.Hash(); configuredHash.Available() {
+               for _, hashId := range candidateHashes {
+                       if h, ok := s2k.HashIdToHash(hashId); ok && h == configuredHash {
+                               hash = h
+                               break
+                       }
+               }
+       }
+
+       if hash == 0 {
+               hashId := candidateHashes[0]
+               name, ok := s2k.HashIdToString(hashId)
+               if !ok {
+                       name = "#" + strconv.Itoa(int(hashId))
+               }
+               return nil, errors.InvalidArgumentError("cannot encrypt because no candidate hash functions are compiled in. (Wanted " + name + " in this case.)")
+       }
+
+       symKey := make([]byte, cipher.KeySize())
+       if _, err := io.ReadFull(config.Random(), symKey); err != nil {
+               return nil, err
+       }
+
+       for _, key := range encryptKeys {
+               if err := packet.SerializeEncryptedKey(ciphertext, key.PublicKey, cipher, symKey, config); err != nil {
+                       return nil, err
+               }
+       }
+
+       encryptedData, err := packet.SerializeSymmetricallyEncrypted(ciphertext, cipher, symKey, config)
+       if err != nil {
+               return
+       }
+
+       if signer != nil {
+               ops := &packet.OnePassSignature{
+                       SigType:    packet.SigTypeBinary,
+                       Hash:       hash,
+                       PubKeyAlgo: signer.PubKeyAlgo,
+                       KeyId:      signer.KeyId,
+                       IsLast:     true,
+               }
+               if err := ops.Serialize(encryptedData); err != nil {
+                       return nil, err
+               }
+       }
+
+       if hints == nil {
+               hints = &FileHints{}
+       }
+
+       w := encryptedData
+       if signer != nil {
+               // If we need to write a signature packet after the literal
+               // data then we need to stop literalData from closing
+               // encryptedData.
+               w = noOpCloser{encryptedData}
+
+       }
+       var epochSeconds uint32
+       if !hints.ModTime.IsZero() {
+               epochSeconds = uint32(hints.ModTime.Unix())
+       }
+       literalData, err := packet.SerializeLiteral(w, hints.IsBinary, hints.FileName, epochSeconds)
+       if err != nil {
+               return nil, err
+       }
+
+       if signer != nil {
+               return signatureWriter{encryptedData, literalData, hash, hash.New(), signer, config}, nil
+       }
+       return literalData, nil
+}
+
+// signatureWriter hashes the contents of a message while passing it along to
+// literalData. When closed, it closes literalData, writes a signature packet
+// to encryptedData and then also closes encryptedData.
+type signatureWriter struct {
+       encryptedData io.WriteCloser
+       literalData   io.WriteCloser
+       hashType      crypto.Hash
+       h             hash.Hash
+       signer        *packet.PrivateKey
+       config        *packet.Config
+}
+
+func (s signatureWriter) Write(data []byte) (int, error) {
+       s.h.Write(data)
+       return s.literalData.Write(data)
+}
+
+func (s signatureWriter) Close() error {
+       sig := &packet.Signature{
+               SigType:      packet.SigTypeBinary,
+               PubKeyAlgo:   s.signer.PubKeyAlgo,
+               Hash:         s.hashType,
+               CreationTime: s.config.Now(),
+               IssuerKeyId:  &s.signer.KeyId,
+       }
+
+       if err := sig.Sign(s.h, s.signer, s.config); err != nil {
+               return err
+       }
+       if err := s.literalData.Close(); err != nil {
+               return err
+       }
+       if err := sig.Serialize(s.encryptedData); err != nil {
+               return err
+       }
+       return s.encryptedData.Close()
+}
+
+// noOpCloser is like an ioutil.NopCloser, but for an io.Writer.
+// TODO: we have two of these in OpenPGP packages alone. This probably needs
+// to be promoted somewhere more common.
+type noOpCloser struct {
+       w io.Writer
+}
+
+func (c noOpCloser) Write(data []byte) (n int, err error) {
+       return c.w.Write(data)
+}
+
+func (c noOpCloser) Close() error {
+       return nil
+}
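+
+// Illustrative sketch (editorial note, not part of the upstream source): a
+// typical caller of Encrypt streams the plaintext through the returned
+// WriteCloser and must close it so the final packets are written; the
+// recipients, reader and writer below are hypothetical.
+//
+//	w, err := openpgp.Encrypt(ciphertextOut, recipients, nil /* unsigned */, nil, nil)
+//	if err != nil {
+//		// handle error
+//	}
+//	if _, err := io.Copy(w, plaintextIn); err != nil {
+//		// handle error
+//	}
+//	w.Close() // required: flushes the remaining packets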
diff --git a/vendor/golang.org/x/net/LICENSE b/vendor/golang.org/x/net/LICENSE
new file mode 100644 (file)
index 0000000..6a66aea
--- /dev/null
@@ -0,0 +1,27 @@
+Copyright (c) 2009 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+   * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+   * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+   * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/golang.org/x/net/PATENTS b/vendor/golang.org/x/net/PATENTS
new file mode 100644 (file)
index 0000000..7330990
--- /dev/null
@@ -0,0 +1,22 @@
+Additional IP Rights Grant (Patents)
+
+"This implementation" means the copyrightable works distributed by
+Google as part of the Go project.
+
+Google hereby grants to You a perpetual, worldwide, non-exclusive,
+no-charge, royalty-free, irrevocable (except as stated in this section)
+patent license to make, have made, use, offer to sell, sell, import,
+transfer and otherwise run, modify and propagate the contents of this
+implementation of Go, where such license applies only to those patent
+claims, both currently owned or controlled by Google and acquired in
+the future, licensable by Google that are necessarily infringed by this
+implementation of Go.  This grant does not include claims that would be
+infringed only as a consequence of further modification of this
+implementation.  If you or your agent or exclusive licensee institute or
+order or agree to the institution of patent litigation against any
+entity (including a cross-claim or counterclaim in a lawsuit) alleging
+that this implementation of Go or any code incorporated within this
+implementation of Go constitutes direct or contributory patent
+infringement, or inducement of patent infringement, then any patent
+rights granted to you under this License for this implementation of Go
+shall terminate as of the date such litigation is filed.
diff --git a/vendor/golang.org/x/net/html/atom/atom.go b/vendor/golang.org/x/net/html/atom/atom.go
new file mode 100644 (file)
index 0000000..cd0a8ac
--- /dev/null
@@ -0,0 +1,78 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package atom provides integer codes (also known as atoms) for a fixed set of
+// frequently occurring HTML strings: tag names and attribute keys such as "p"
+// and "id".
+//
+// Sharing an atom's name between all elements with the same tag can result in
+// fewer string allocations when tokenizing and parsing HTML. Integer
+// comparisons are also generally faster than string comparisons.
+//
+// The value of an atom's particular code is not guaranteed to stay the same
+// between versions of this package. Neither is any ordering guaranteed:
+// whether atom.H1 < atom.H2 may also change. The codes are not guaranteed to
+// be dense. The only guarantees are that e.g. looking up "div" will yield
+// atom.Div, calling atom.Div.String will return "div", and atom.Div != 0.
+package atom // import "golang.org/x/net/html/atom"
+
+// Atom is an integer code for a string. The zero value maps to "".
+type Atom uint32
+
+// String returns the atom's name.
+func (a Atom) String() string {
+       start := uint32(a >> 8)
+       n := uint32(a & 0xff)
+       if start+n > uint32(len(atomText)) {
+               return ""
+       }
+       return atomText[start : start+n]
+}
+
+func (a Atom) string() string {
+       return atomText[a>>8 : a>>8+a&0xff]
+}
+
+// fnv computes the FNV hash with an arbitrary starting value h.
+func fnv(h uint32, s []byte) uint32 {
+       for i := range s {
+               h ^= uint32(s[i])
+               h *= 16777619
+       }
+       return h
+}
+
+func match(s string, t []byte) bool {
+       for i, c := range t {
+               if s[i] != c {
+                       return false
+               }
+       }
+       return true
+}
+
+// Lookup returns the atom whose name is s. It returns zero if there is no
+// such atom. The lookup is case sensitive.
+func Lookup(s []byte) Atom {
+       if len(s) == 0 || len(s) > maxAtomLen {
+               return 0
+       }
+       h := fnv(hash0, s)
+       if a := table[h&uint32(len(table)-1)]; int(a&0xff) == len(s) && match(a.string(), s) {
+               return a
+       }
+       if a := table[(h>>16)&uint32(len(table)-1)]; int(a&0xff) == len(s) && match(a.string(), s) {
+               return a
+       }
+       return 0
+}
+
+// String returns a string whose contents are equal to s. In that sense, it is
+// equivalent to string(s) but may be more efficient.
+func String(s []byte) string {
+       if a := Lookup(s); a != 0 {
+               return a.String()
+       }
+       return string(s)
+}
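+
+// Illustrative sketch (editorial note, not part of the upstream source): a
+// tokenizer can intern a tag name once and then compare integers instead of
+// strings:
+//
+//	if a := atom.Lookup([]byte("div")); a == atom.Div {
+//		// recognised; a.String() returns "div" without allocating
+//	}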
diff --git a/vendor/golang.org/x/net/html/atom/table.go b/vendor/golang.org/x/net/html/atom/table.go
new file mode 100644 (file)
index 0000000..2605ba3
--- /dev/null
@@ -0,0 +1,713 @@
+// generated by go run gen.go; DO NOT EDIT
+
+package atom
+
+const (
+       A                   Atom = 0x1
+       Abbr                Atom = 0x4
+       Accept              Atom = 0x2106
+       AcceptCharset       Atom = 0x210e
+       Accesskey           Atom = 0x3309
+       Action              Atom = 0x1f606
+       Address             Atom = 0x4f307
+       Align               Atom = 0x1105
+       Alt                 Atom = 0x4503
+       Annotation          Atom = 0x1670a
+       AnnotationXml       Atom = 0x1670e
+       Applet              Atom = 0x2b306
+       Area                Atom = 0x2fa04
+       Article             Atom = 0x38807
+       Aside               Atom = 0x8305
+       Async               Atom = 0x7b05
+       Audio               Atom = 0xa605
+       Autocomplete        Atom = 0x1fc0c
+       Autofocus           Atom = 0xb309
+       Autoplay            Atom = 0xce08
+       B                   Atom = 0x101
+       Base                Atom = 0xd604
+       Basefont            Atom = 0xd608
+       Bdi                 Atom = 0x1a03
+       Bdo                 Atom = 0xe703
+       Bgsound             Atom = 0x11807
+       Big                 Atom = 0x12403
+       Blink               Atom = 0x12705
+       Blockquote          Atom = 0x12c0a
+       Body                Atom = 0x2f04
+       Br                  Atom = 0x202
+       Button              Atom = 0x13606
+       Canvas              Atom = 0x7f06
+       Caption             Atom = 0x1bb07
+       Center              Atom = 0x5b506
+       Challenge           Atom = 0x21f09
+       Charset             Atom = 0x2807
+       Checked             Atom = 0x32807
+       Cite                Atom = 0x3c804
+       Class               Atom = 0x4de05
+       Code                Atom = 0x14904
+       Col                 Atom = 0x15003
+       Colgroup            Atom = 0x15008
+       Color               Atom = 0x15d05
+       Cols                Atom = 0x16204
+       Colspan             Atom = 0x16207
+       Command             Atom = 0x17507
+       Content             Atom = 0x42307
+       Contenteditable     Atom = 0x4230f
+       Contextmenu         Atom = 0x3310b
+       Controls            Atom = 0x18808
+       Coords              Atom = 0x19406
+       Crossorigin         Atom = 0x19f0b
+       Data                Atom = 0x44a04
+       Datalist            Atom = 0x44a08
+       Datetime            Atom = 0x23c08
+       Dd                  Atom = 0x26702
+       Default             Atom = 0x8607
+       Defer               Atom = 0x14b05
+       Del                 Atom = 0x3ef03
+       Desc                Atom = 0x4db04
+       Details             Atom = 0x4807
+       Dfn                 Atom = 0x6103
+       Dialog              Atom = 0x1b06
+       Dir                 Atom = 0x6903
+       Dirname             Atom = 0x6907
+       Disabled            Atom = 0x10c08
+       Div                 Atom = 0x11303
+       Dl                  Atom = 0x11e02
+       Download            Atom = 0x40008
+       Draggable           Atom = 0x17b09
+       Dropzone            Atom = 0x39108
+       Dt                  Atom = 0x50902
+       Em                  Atom = 0x6502
+       Embed               Atom = 0x6505
+       Enctype             Atom = 0x21107
+       Face                Atom = 0x5b304
+       Fieldset            Atom = 0x1b008
+       Figcaption          Atom = 0x1b80a
+       Figure              Atom = 0x1cc06
+       Font                Atom = 0xda04
+       Footer              Atom = 0x8d06
+       For                 Atom = 0x1d803
+       ForeignObject       Atom = 0x1d80d
+       Foreignobject       Atom = 0x1e50d
+       Form                Atom = 0x1f204
+       Formaction          Atom = 0x1f20a
+       Formenctype         Atom = 0x20d0b
+       Formmethod          Atom = 0x2280a
+       Formnovalidate      Atom = 0x2320e
+       Formtarget          Atom = 0x2470a
+       Frame               Atom = 0x9a05
+       Frameset            Atom = 0x9a08
+       H1                  Atom = 0x26e02
+       H2                  Atom = 0x29402
+       H3                  Atom = 0x2a702
+       H4                  Atom = 0x2e902
+       H5                  Atom = 0x2f302
+       H6                  Atom = 0x50b02
+       Head                Atom = 0x2d504
+       Header              Atom = 0x2d506
+       Headers             Atom = 0x2d507
+       Height              Atom = 0x25106
+       Hgroup              Atom = 0x25906
+       Hidden              Atom = 0x26506
+       High                Atom = 0x26b04
+       Hr                  Atom = 0x27002
+       Href                Atom = 0x27004
+       Hreflang            Atom = 0x27008
+       Html                Atom = 0x25504
+       HttpEquiv           Atom = 0x2780a
+       I                   Atom = 0x601
+       Icon                Atom = 0x42204
+       Id                  Atom = 0x8502
+       Iframe              Atom = 0x29606
+       Image               Atom = 0x29c05
+       Img                 Atom = 0x2a103
+       Input               Atom = 0x3e805
+       Inputmode           Atom = 0x3e809
+       Ins                 Atom = 0x1a803
+       Isindex             Atom = 0x2a907
+       Ismap               Atom = 0x2b005
+       Itemid              Atom = 0x33c06
+       Itemprop            Atom = 0x3c908
+       Itemref             Atom = 0x5ad07
+       Itemscope           Atom = 0x2b909
+       Itemtype            Atom = 0x2c308
+       Kbd                 Atom = 0x1903
+       Keygen              Atom = 0x3906
+       Keytype             Atom = 0x53707
+       Kind                Atom = 0x10904
+       Label               Atom = 0xf005
+       Lang                Atom = 0x27404
+       Legend              Atom = 0x18206
+       Li                  Atom = 0x1202
+       Link                Atom = 0x12804
+       List                Atom = 0x44e04
+       Listing             Atom = 0x44e07
+       Loop                Atom = 0xf404
+       Low                 Atom = 0x11f03
+       Malignmark          Atom = 0x100a
+       Manifest            Atom = 0x5f108
+       Map                 Atom = 0x2b203
+       Mark                Atom = 0x1604
+       Marquee             Atom = 0x2cb07
+       Math                Atom = 0x2d204
+       Max                 Atom = 0x2e103
+       Maxlength           Atom = 0x2e109
+       Media               Atom = 0x6e05
+       Mediagroup          Atom = 0x6e0a
+       Menu                Atom = 0x33804
+       Menuitem            Atom = 0x33808
+       Meta                Atom = 0x45d04
+       Meter               Atom = 0x24205
+       Method              Atom = 0x22c06
+       Mglyph              Atom = 0x2a206
+       Mi                  Atom = 0x2eb02
+       Min                 Atom = 0x2eb03
+       Minlength           Atom = 0x2eb09
+       Mn                  Atom = 0x23502
+       Mo                  Atom = 0x3ed02
+       Ms                  Atom = 0x2bc02
+       Mtext               Atom = 0x2f505
+       Multiple            Atom = 0x30308
+       Muted               Atom = 0x30b05
+       Name                Atom = 0x6c04
+       Nav                 Atom = 0x3e03
+       Nobr                Atom = 0x5704
+       Noembed             Atom = 0x6307
+       Noframes            Atom = 0x9808
+       Noscript            Atom = 0x3d208
+       Novalidate          Atom = 0x2360a
+       Object              Atom = 0x1ec06
+       Ol                  Atom = 0xc902
+       Onabort             Atom = 0x13a07
+       Onafterprint        Atom = 0x1c00c
+       Onautocomplete      Atom = 0x1fa0e
+       Onautocompleteerror Atom = 0x1fa13
+       Onbeforeprint       Atom = 0x6040d
+       Onbeforeunload      Atom = 0x4e70e
+       Onblur              Atom = 0xaa06
+       Oncancel            Atom = 0xe908
+       Oncanplay           Atom = 0x28509
+       Oncanplaythrough    Atom = 0x28510
+       Onchange            Atom = 0x3a708
+       Onclick             Atom = 0x31007
+       Onclose             Atom = 0x31707
+       Oncontextmenu       Atom = 0x32f0d
+       Oncuechange         Atom = 0x3420b
+       Ondblclick          Atom = 0x34d0a
+       Ondrag              Atom = 0x35706
+       Ondragend           Atom = 0x35709
+       Ondragenter         Atom = 0x3600b
+       Ondragleave         Atom = 0x36b0b
+       Ondragover          Atom = 0x3760a
+       Ondragstart         Atom = 0x3800b
+       Ondrop              Atom = 0x38f06
+       Ondurationchange    Atom = 0x39f10
+       Onemptied           Atom = 0x39609
+       Onended             Atom = 0x3af07
+       Onerror             Atom = 0x3b607
+       Onfocus             Atom = 0x3bd07
+       Onhashchange        Atom = 0x3da0c
+       Oninput             Atom = 0x3e607
+       Oninvalid           Atom = 0x3f209
+       Onkeydown           Atom = 0x3fb09
+       Onkeypress          Atom = 0x4080a
+       Onkeyup             Atom = 0x41807
+       Onlanguagechange    Atom = 0x43210
+       Onload              Atom = 0x44206
+       Onloadeddata        Atom = 0x4420c
+       Onloadedmetadata    Atom = 0x45510
+       Onloadstart         Atom = 0x46b0b
+       Onmessage           Atom = 0x47609
+       Onmousedown         Atom = 0x47f0b
+       Onmousemove         Atom = 0x48a0b
+       Onmouseout          Atom = 0x4950a
+       Onmouseover         Atom = 0x4a20b
+       Onmouseup           Atom = 0x4ad09
+       Onmousewheel        Atom = 0x4b60c
+       Onoffline           Atom = 0x4c209
+       Ononline            Atom = 0x4cb08
+       Onpagehide          Atom = 0x4d30a
+       Onpageshow          Atom = 0x4fe0a
+       Onpause             Atom = 0x50d07
+       Onplay              Atom = 0x51706
+       Onplaying           Atom = 0x51709
+       Onpopstate          Atom = 0x5200a
+       Onprogress          Atom = 0x52a0a
+       Onratechange        Atom = 0x53e0c
+       Onreset             Atom = 0x54a07
+       Onresize            Atom = 0x55108
+       Onscroll            Atom = 0x55f08
+       Onseeked            Atom = 0x56708
+       Onseeking           Atom = 0x56f09
+       Onselect            Atom = 0x57808
+       Onshow              Atom = 0x58206
+       Onsort              Atom = 0x58b06
+       Onstalled           Atom = 0x59509
+       Onstorage           Atom = 0x59e09
+       Onsubmit            Atom = 0x5a708
+       Onsuspend           Atom = 0x5bb09
+       Ontimeupdate        Atom = 0xdb0c
+       Ontoggle            Atom = 0x5c408
+       Onunload            Atom = 0x5cc08
+       Onvolumechange      Atom = 0x5d40e
+       Onwaiting           Atom = 0x5e209
+       Open                Atom = 0x3cf04
+       Optgroup            Atom = 0xf608
+       Optimum             Atom = 0x5eb07
+       Option              Atom = 0x60006
+       Output              Atom = 0x49c06
+       P                   Atom = 0xc01
+       Param               Atom = 0xc05
+       Pattern             Atom = 0x5107
+       Ping                Atom = 0x7704
+       Placeholder         Atom = 0xc30b
+       Plaintext           Atom = 0xfd09
+       Poster              Atom = 0x15706
+       Pre                 Atom = 0x25e03
+       Preload             Atom = 0x25e07
+       Progress            Atom = 0x52c08
+       Prompt              Atom = 0x5fa06
+       Public              Atom = 0x41e06
+       Q                   Atom = 0x13101
+       Radiogroup          Atom = 0x30a
+       Readonly            Atom = 0x2fb08
+       Rel                 Atom = 0x25f03
+       Required            Atom = 0x1d008
+       Reversed            Atom = 0x5a08
+       Rows                Atom = 0x9204
+       Rowspan             Atom = 0x9207
+       Rp                  Atom = 0x1c602
+       Rt                  Atom = 0x13f02
+       Ruby                Atom = 0xaf04
+       S                   Atom = 0x2c01
+       Samp                Atom = 0x4e04
+       Sandbox             Atom = 0xbb07
+       Scope               Atom = 0x2bd05
+       Scoped              Atom = 0x2bd06
+       Script              Atom = 0x3d406
+       Seamless            Atom = 0x31c08
+       Section             Atom = 0x4e207
+       Select              Atom = 0x57a06
+       Selected            Atom = 0x57a08
+       Shape               Atom = 0x4f905
+       Size                Atom = 0x55504
+       Sizes               Atom = 0x55505
+       Small               Atom = 0x18f05
+       Sortable            Atom = 0x58d08
+       Sorted              Atom = 0x19906
+       Source              Atom = 0x1aa06
+       Spacer              Atom = 0x2db06
+       Span                Atom = 0x9504
+       Spellcheck          Atom = 0x3230a
+       Src                 Atom = 0x3c303
+       Srcdoc              Atom = 0x3c306
+       Srclang             Atom = 0x41107
+       Start               Atom = 0x38605
+       Step                Atom = 0x5f704
+       Strike              Atom = 0x53306
+       Strong              Atom = 0x55906
+       Style               Atom = 0x61105
+       Sub                 Atom = 0x5a903
+       Summary             Atom = 0x61607
+       Sup                 Atom = 0x61d03
+       Svg                 Atom = 0x62003
+       System              Atom = 0x62306
+       Tabindex            Atom = 0x46308
+       Table               Atom = 0x42d05
+       Target              Atom = 0x24b06
+       Tbody               Atom = 0x2e05
+       Td                  Atom = 0x4702
+       Template            Atom = 0x62608
+       Textarea            Atom = 0x2f608
+       Tfoot               Atom = 0x8c05
+       Th                  Atom = 0x22e02
+       Thead               Atom = 0x2d405
+       Time                Atom = 0xdd04
+       Title               Atom = 0xa105
+       Tr                  Atom = 0x10502
+       Track               Atom = 0x10505
+       Translate           Atom = 0x14009
+       Tt                  Atom = 0x5302
+       Type                Atom = 0x21404
+       Typemustmatch       Atom = 0x2140d
+       U                   Atom = 0xb01
+       Ul                  Atom = 0x8a02
+       Usemap              Atom = 0x51106
+       Value               Atom = 0x4005
+       Var                 Atom = 0x11503
+       Video               Atom = 0x28105
+       Wbr                 Atom = 0x12103
+       Width               Atom = 0x50705
+       Wrap                Atom = 0x58704
+       Xmp                 Atom = 0xc103
+)
+
+const hash0 = 0xc17da63e
+
+const maxAtomLen = 19
+
+var table = [1 << 9]Atom{
+       0x1:   0x48a0b, // onmousemove
+       0x2:   0x5e209, // onwaiting
+       0x3:   0x1fa13, // onautocompleteerror
+       0x4:   0x5fa06, // prompt
+       0x7:   0x5eb07, // optimum
+       0x8:   0x1604,  // mark
+       0xa:   0x5ad07, // itemref
+       0xb:   0x4fe0a, // onpageshow
+       0xc:   0x57a06, // select
+       0xd:   0x17b09, // draggable
+       0xe:   0x3e03,  // nav
+       0xf:   0x17507, // command
+       0x11:  0xb01,   // u
+       0x14:  0x2d507, // headers
+       0x15:  0x44a08, // datalist
+       0x17:  0x4e04,  // samp
+       0x1a:  0x3fb09, // onkeydown
+       0x1b:  0x55f08, // onscroll
+       0x1c:  0x15003, // col
+       0x20:  0x3c908, // itemprop
+       0x21:  0x2780a, // http-equiv
+       0x22:  0x61d03, // sup
+       0x24:  0x1d008, // required
+       0x2b:  0x25e07, // preload
+       0x2c:  0x6040d, // onbeforeprint
+       0x2d:  0x3600b, // ondragenter
+       0x2e:  0x50902, // dt
+       0x2f:  0x5a708, // onsubmit
+       0x30:  0x27002, // hr
+       0x31:  0x32f0d, // oncontextmenu
+       0x33:  0x29c05, // image
+       0x34:  0x50d07, // onpause
+       0x35:  0x25906, // hgroup
+       0x36:  0x7704,  // ping
+       0x37:  0x57808, // onselect
+       0x3a:  0x11303, // div
+       0x3b:  0x1fa0e, // onautocomplete
+       0x40:  0x2eb02, // mi
+       0x41:  0x31c08, // seamless
+       0x42:  0x2807,  // charset
+       0x43:  0x8502,  // id
+       0x44:  0x5200a, // onpopstate
+       0x45:  0x3ef03, // del
+       0x46:  0x2cb07, // marquee
+       0x47:  0x3309,  // accesskey
+       0x49:  0x8d06,  // footer
+       0x4a:  0x44e04, // list
+       0x4b:  0x2b005, // ismap
+       0x51:  0x33804, // menu
+       0x52:  0x2f04,  // body
+       0x55:  0x9a08,  // frameset
+       0x56:  0x54a07, // onreset
+       0x57:  0x12705, // blink
+       0x58:  0xa105,  // title
+       0x59:  0x38807, // article
+       0x5b:  0x22e02, // th
+       0x5d:  0x13101, // q
+       0x5e:  0x3cf04, // open
+       0x5f:  0x2fa04, // area
+       0x61:  0x44206, // onload
+       0x62:  0xda04,  // font
+       0x63:  0xd604,  // base
+       0x64:  0x16207, // colspan
+       0x65:  0x53707, // keytype
+       0x66:  0x11e02, // dl
+       0x68:  0x1b008, // fieldset
+       0x6a:  0x2eb03, // min
+       0x6b:  0x11503, // var
+       0x6f:  0x2d506, // header
+       0x70:  0x13f02, // rt
+       0x71:  0x15008, // colgroup
+       0x72:  0x23502, // mn
+       0x74:  0x13a07, // onabort
+       0x75:  0x3906,  // keygen
+       0x76:  0x4c209, // onoffline
+       0x77:  0x21f09, // challenge
+       0x78:  0x2b203, // map
+       0x7a:  0x2e902, // h4
+       0x7b:  0x3b607, // onerror
+       0x7c:  0x2e109, // maxlength
+       0x7d:  0x2f505, // mtext
+       0x7e:  0xbb07,  // sandbox
+       0x7f:  0x58b06, // onsort
+       0x80:  0x100a,  // malignmark
+       0x81:  0x45d04, // meta
+       0x82:  0x7b05,  // async
+       0x83:  0x2a702, // h3
+       0x84:  0x26702, // dd
+       0x85:  0x27004, // href
+       0x86:  0x6e0a,  // mediagroup
+       0x87:  0x19406, // coords
+       0x88:  0x41107, // srclang
+       0x89:  0x34d0a, // ondblclick
+       0x8a:  0x4005,  // value
+       0x8c:  0xe908,  // oncancel
+       0x8e:  0x3230a, // spellcheck
+       0x8f:  0x9a05,  // frame
+       0x91:  0x12403, // big
+       0x94:  0x1f606, // action
+       0x95:  0x6903,  // dir
+       0x97:  0x2fb08, // readonly
+       0x99:  0x42d05, // table
+       0x9a:  0x61607, // summary
+       0x9b:  0x12103, // wbr
+       0x9c:  0x30a,   // radiogroup
+       0x9d:  0x6c04,  // name
+       0x9f:  0x62306, // system
+       0xa1:  0x15d05, // color
+       0xa2:  0x7f06,  // canvas
+       0xa3:  0x25504, // html
+       0xa5:  0x56f09, // onseeking
+       0xac:  0x4f905, // shape
+       0xad:  0x25f03, // rel
+       0xae:  0x28510, // oncanplaythrough
+       0xaf:  0x3760a, // ondragover
+       0xb0:  0x62608, // template
+       0xb1:  0x1d80d, // foreignObject
+       0xb3:  0x9204,  // rows
+       0xb6:  0x44e07, // listing
+       0xb7:  0x49c06, // output
+       0xb9:  0x3310b, // contextmenu
+       0xbb:  0x11f03, // low
+       0xbc:  0x1c602, // rp
+       0xbd:  0x5bb09, // onsuspend
+       0xbe:  0x13606, // button
+       0xbf:  0x4db04, // desc
+       0xc1:  0x4e207, // section
+       0xc2:  0x52a0a, // onprogress
+       0xc3:  0x59e09, // onstorage
+       0xc4:  0x2d204, // math
+       0xc5:  0x4503,  // alt
+       0xc7:  0x8a02,  // ul
+       0xc8:  0x5107,  // pattern
+       0xc9:  0x4b60c, // onmousewheel
+       0xca:  0x35709, // ondragend
+       0xcb:  0xaf04,  // ruby
+       0xcc:  0xc01,   // p
+       0xcd:  0x31707, // onclose
+       0xce:  0x24205, // meter
+       0xcf:  0x11807, // bgsound
+       0xd2:  0x25106, // height
+       0xd4:  0x101,   // b
+       0xd5:  0x2c308, // itemtype
+       0xd8:  0x1bb07, // caption
+       0xd9:  0x10c08, // disabled
+       0xdb:  0x33808, // menuitem
+       0xdc:  0x62003, // svg
+       0xdd:  0x18f05, // small
+       0xde:  0x44a04, // data
+       0xe0:  0x4cb08, // ononline
+       0xe1:  0x2a206, // mglyph
+       0xe3:  0x6505,  // embed
+       0xe4:  0x10502, // tr
+       0xe5:  0x46b0b, // onloadstart
+       0xe7:  0x3c306, // srcdoc
+       0xeb:  0x5c408, // ontoggle
+       0xed:  0xe703,  // bdo
+       0xee:  0x4702,  // td
+       0xef:  0x8305,  // aside
+       0xf0:  0x29402, // h2
+       0xf1:  0x52c08, // progress
+       0xf2:  0x12c0a, // blockquote
+       0xf4:  0xf005,  // label
+       0xf5:  0x601,   // i
+       0xf7:  0x9207,  // rowspan
+       0xfb:  0x51709, // onplaying
+       0xfd:  0x2a103, // img
+       0xfe:  0xf608,  // optgroup
+       0xff:  0x42307, // content
+       0x101: 0x53e0c, // onratechange
+       0x103: 0x3da0c, // onhashchange
+       0x104: 0x4807,  // details
+       0x106: 0x40008, // download
+       0x109: 0x14009, // translate
+       0x10b: 0x4230f, // contenteditable
+       0x10d: 0x36b0b, // ondragleave
+       0x10e: 0x2106,  // accept
+       0x10f: 0x57a08, // selected
+       0x112: 0x1f20a, // formaction
+       0x113: 0x5b506, // center
+       0x115: 0x45510, // onloadedmetadata
+       0x116: 0x12804, // link
+       0x117: 0xdd04,  // time
+       0x118: 0x19f0b, // crossorigin
+       0x119: 0x3bd07, // onfocus
+       0x11a: 0x58704, // wrap
+       0x11b: 0x42204, // icon
+       0x11d: 0x28105, // video
+       0x11e: 0x4de05, // class
+       0x121: 0x5d40e, // onvolumechange
+       0x122: 0xaa06,  // onblur
+       0x123: 0x2b909, // itemscope
+       0x124: 0x61105, // style
+       0x127: 0x41e06, // public
+       0x129: 0x2320e, // formnovalidate
+       0x12a: 0x58206, // onshow
+       0x12c: 0x51706, // onplay
+       0x12d: 0x3c804, // cite
+       0x12e: 0x2bc02, // ms
+       0x12f: 0xdb0c,  // ontimeupdate
+       0x130: 0x10904, // kind
+       0x131: 0x2470a, // formtarget
+       0x135: 0x3af07, // onended
+       0x136: 0x26506, // hidden
+       0x137: 0x2c01,  // s
+       0x139: 0x2280a, // formmethod
+       0x13a: 0x3e805, // input
+       0x13c: 0x50b02, // h6
+       0x13d: 0xc902,  // ol
+       0x13e: 0x3420b, // oncuechange
+       0x13f: 0x1e50d, // foreignobject
+       0x143: 0x4e70e, // onbeforeunload
+       0x144: 0x2bd05, // scope
+       0x145: 0x39609, // onemptied
+       0x146: 0x14b05, // defer
+       0x147: 0xc103,  // xmp
+       0x148: 0x39f10, // ondurationchange
+       0x149: 0x1903,  // kbd
+       0x14c: 0x47609, // onmessage
+       0x14d: 0x60006, // option
+       0x14e: 0x2eb09, // minlength
+       0x14f: 0x32807, // checked
+       0x150: 0xce08,  // autoplay
+       0x152: 0x202,   // br
+       0x153: 0x2360a, // novalidate
+       0x156: 0x6307,  // noembed
+       0x159: 0x31007, // onclick
+       0x15a: 0x47f0b, // onmousedown
+       0x15b: 0x3a708, // onchange
+       0x15e: 0x3f209, // oninvalid
+       0x15f: 0x2bd06, // scoped
+       0x160: 0x18808, // controls
+       0x161: 0x30b05, // muted
+       0x162: 0x58d08, // sortable
+       0x163: 0x51106, // usemap
+       0x164: 0x1b80a, // figcaption
+       0x165: 0x35706, // ondrag
+       0x166: 0x26b04, // high
+       0x168: 0x3c303, // src
+       0x169: 0x15706, // poster
+       0x16b: 0x1670e, // annotation-xml
+       0x16c: 0x5f704, // step
+       0x16d: 0x4,     // abbr
+       0x16e: 0x1b06,  // dialog
+       0x170: 0x1202,  // li
+       0x172: 0x3ed02, // mo
+       0x175: 0x1d803, // for
+       0x176: 0x1a803, // ins
+       0x178: 0x55504, // size
+       0x179: 0x43210, // onlanguagechange
+       0x17a: 0x8607,  // default
+       0x17b: 0x1a03,  // bdi
+       0x17c: 0x4d30a, // onpagehide
+       0x17d: 0x6907,  // dirname
+       0x17e: 0x21404, // type
+       0x17f: 0x1f204, // form
+       0x181: 0x28509, // oncanplay
+       0x182: 0x6103,  // dfn
+       0x183: 0x46308, // tabindex
+       0x186: 0x6502,  // em
+       0x187: 0x27404, // lang
+       0x189: 0x39108, // dropzone
+       0x18a: 0x4080a, // onkeypress
+       0x18b: 0x23c08, // datetime
+       0x18c: 0x16204, // cols
+       0x18d: 0x1,     // a
+       0x18e: 0x4420c, // onloadeddata
+       0x190: 0xa605,  // audio
+       0x192: 0x2e05,  // tbody
+       0x193: 0x22c06, // method
+       0x195: 0xf404,  // loop
+       0x196: 0x29606, // iframe
+       0x198: 0x2d504, // head
+       0x19e: 0x5f108, // manifest
+       0x19f: 0xb309,  // autofocus
+       0x1a0: 0x14904, // code
+       0x1a1: 0x55906, // strong
+       0x1a2: 0x30308, // multiple
+       0x1a3: 0xc05,   // param
+       0x1a6: 0x21107, // enctype
+       0x1a7: 0x5b304, // face
+       0x1a8: 0xfd09,  // plaintext
+       0x1a9: 0x26e02, // h1
+       0x1aa: 0x59509, // onstalled
+       0x1ad: 0x3d406, // script
+       0x1ae: 0x2db06, // spacer
+       0x1af: 0x55108, // onresize
+       0x1b0: 0x4a20b, // onmouseover
+       0x1b1: 0x5cc08, // onunload
+       0x1b2: 0x56708, // onseeked
+       0x1b4: 0x2140d, // typemustmatch
+       0x1b5: 0x1cc06, // figure
+       0x1b6: 0x4950a, // onmouseout
+       0x1b7: 0x25e03, // pre
+       0x1b8: 0x50705, // width
+       0x1b9: 0x19906, // sorted
+       0x1bb: 0x5704,  // nobr
+       0x1be: 0x5302,  // tt
+       0x1bf: 0x1105,  // align
+       0x1c0: 0x3e607, // oninput
+       0x1c3: 0x41807, // onkeyup
+       0x1c6: 0x1c00c, // onafterprint
+       0x1c7: 0x210e,  // accept-charset
+       0x1c8: 0x33c06, // itemid
+       0x1c9: 0x3e809, // inputmode
+       0x1cb: 0x53306, // strike
+       0x1cc: 0x5a903, // sub
+       0x1cd: 0x10505, // track
+       0x1ce: 0x38605, // start
+       0x1d0: 0xd608,  // basefont
+       0x1d6: 0x1aa06, // source
+       0x1d7: 0x18206, // legend
+       0x1d8: 0x2d405, // thead
+       0x1da: 0x8c05,  // tfoot
+       0x1dd: 0x1ec06, // object
+       0x1de: 0x6e05,  // media
+       0x1df: 0x1670a, // annotation
+       0x1e0: 0x20d0b, // formenctype
+       0x1e2: 0x3d208, // noscript
+       0x1e4: 0x55505, // sizes
+       0x1e5: 0x1fc0c, // autocomplete
+       0x1e6: 0x9504,  // span
+       0x1e7: 0x9808,  // noframes
+       0x1e8: 0x24b06, // target
+       0x1e9: 0x38f06, // ondrop
+       0x1ea: 0x2b306, // applet
+       0x1ec: 0x5a08,  // reversed
+       0x1f0: 0x2a907, // isindex
+       0x1f3: 0x27008, // hreflang
+       0x1f5: 0x2f302, // h5
+       0x1f6: 0x4f307, // address
+       0x1fa: 0x2e103, // max
+       0x1fb: 0xc30b,  // placeholder
+       0x1fc: 0x2f608, // textarea
+       0x1fe: 0x4ad09, // onmouseup
+       0x1ff: 0x3800b, // ondragstart
+}
+
+const atomText = "abbradiogrouparamalignmarkbdialogaccept-charsetbodyaccesskey" +
+       "genavaluealtdetailsampatternobreversedfnoembedirnamediagroup" +
+       "ingasyncanvasidefaultfooterowspanoframesetitleaudionblurubya" +
+       "utofocusandboxmplaceholderautoplaybasefontimeupdatebdoncance" +
+       "labelooptgrouplaintextrackindisabledivarbgsoundlowbrbigblink" +
+       "blockquotebuttonabortranslatecodefercolgroupostercolorcolspa" +
+       "nnotation-xmlcommandraggablegendcontrolsmallcoordsortedcross" +
+       "originsourcefieldsetfigcaptionafterprintfigurequiredforeignO" +
+       "bjectforeignobjectformactionautocompleteerrorformenctypemust" +
+       "matchallengeformmethodformnovalidatetimeterformtargetheightm" +
+       "lhgroupreloadhiddenhigh1hreflanghttp-equivideoncanplaythroug" +
+       "h2iframeimageimglyph3isindexismappletitemscopeditemtypemarqu" +
+       "eematheaderspacermaxlength4minlength5mtextareadonlymultiplem" +
+       "utedonclickoncloseamlesspellcheckedoncontextmenuitemidoncuec" +
+       "hangeondblclickondragendondragenterondragleaveondragoverondr" +
+       "agstarticleondropzonemptiedondurationchangeonendedonerroronf" +
+       "ocusrcdocitempropenoscriptonhashchangeoninputmodeloninvalido" +
+       "nkeydownloadonkeypressrclangonkeyupublicontenteditableonlang" +
+       "uagechangeonloadeddatalistingonloadedmetadatabindexonloadsta" +
+       "rtonmessageonmousedownonmousemoveonmouseoutputonmouseoveronm" +
+       "ouseuponmousewheelonofflineononlineonpagehidesclassectionbef" +
+       "oreunloaddresshapeonpageshowidth6onpausemaponplayingonpopsta" +
+       "teonprogresstrikeytypeonratechangeonresetonresizestrongonscr" +
+       "ollonseekedonseekingonselectedonshowraponsortableonstalledon" +
+       "storageonsubmitemrefacenteronsuspendontoggleonunloadonvolume" +
+       "changeonwaitingoptimumanifestepromptoptionbeforeprintstylesu" +
+       "mmarysupsvgsystemplate"
diff --git a/vendor/golang.org/x/net/html/const.go b/vendor/golang.org/x/net/html/const.go
new file mode 100644 (file)
index 0000000..52f651f
--- /dev/null
@@ -0,0 +1,102 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package html
+
+// Section 12.2.3.2 of the HTML5 specification says "The following elements
+// have varying levels of special parsing rules".
+// https://html.spec.whatwg.org/multipage/syntax.html#the-stack-of-open-elements
+var isSpecialElementMap = map[string]bool{
+       "address":    true,
+       "applet":     true,
+       "area":       true,
+       "article":    true,
+       "aside":      true,
+       "base":       true,
+       "basefont":   true,
+       "bgsound":    true,
+       "blockquote": true,
+       "body":       true,
+       "br":         true,
+       "button":     true,
+       "caption":    true,
+       "center":     true,
+       "col":        true,
+       "colgroup":   true,
+       "dd":         true,
+       "details":    true,
+       "dir":        true,
+       "div":        true,
+       "dl":         true,
+       "dt":         true,
+       "embed":      true,
+       "fieldset":   true,
+       "figcaption": true,
+       "figure":     true,
+       "footer":     true,
+       "form":       true,
+       "frame":      true,
+       "frameset":   true,
+       "h1":         true,
+       "h2":         true,
+       "h3":         true,
+       "h4":         true,
+       "h5":         true,
+       "h6":         true,
+       "head":       true,
+       "header":     true,
+       "hgroup":     true,
+       "hr":         true,
+       "html":       true,
+       "iframe":     true,
+       "img":        true,
+       "input":      true,
+       "isindex":    true,
+       "li":         true,
+       "link":       true,
+       "listing":    true,
+       "marquee":    true,
+       "menu":       true,
+       "meta":       true,
+       "nav":        true,
+       "noembed":    true,
+       "noframes":   true,
+       "noscript":   true,
+       "object":     true,
+       "ol":         true,
+       "p":          true,
+       "param":      true,
+       "plaintext":  true,
+       "pre":        true,
+       "script":     true,
+       "section":    true,
+       "select":     true,
+       "source":     true,
+       "style":      true,
+       "summary":    true,
+       "table":      true,
+       "tbody":      true,
+       "td":         true,
+       "template":   true,
+       "textarea":   true,
+       "tfoot":      true,
+       "th":         true,
+       "thead":      true,
+       "title":      true,
+       "tr":         true,
+       "track":      true,
+       "ul":         true,
+       "wbr":        true,
+       "xmp":        true,
+}
+
+func isSpecialElement(element *Node) bool {
+       switch element.Namespace {
+       case "", "html":
+               return isSpecialElementMap[element.Data]
+       case "svg":
+               return element.Data == "foreignObject"
+       }
+       return false
+}
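isSpecialElement above is unexported and namespace-sensitive: the empty or "html" namespace consults the map, while in SVG only foreignObject qualifies. A minimal standalone sketch of the same check against exported html.Node fields; isSpecial and its trimmed-down map are illustrative only:

package main

import (
	"fmt"

	"golang.org/x/net/html"
)

// isSpecial is a hypothetical helper that repeats the namespace-sensitive
// test with a small subset of the map above.
func isSpecial(n *html.Node) bool {
	special := map[string]bool{"p": true, "table": true, "template": true}
	switch n.Namespace {
	case "", "html":
		return special[n.Data]
	case "svg":
		return n.Data == "foreignObject"
	}
	return false
}

func main() {
	fmt.Println(isSpecial(&html.Node{Data: "table"}))                    // true
	fmt.Println(isSpecial(&html.Node{Namespace: "svg", Data: "circle"})) // false
}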
diff --git a/vendor/golang.org/x/net/html/doc.go b/vendor/golang.org/x/net/html/doc.go
new file mode 100644 (file)
index 0000000..94f4968
--- /dev/null
@@ -0,0 +1,106 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+Package html implements an HTML5-compliant tokenizer and parser.
+
+Tokenization is done by creating a Tokenizer for an io.Reader r. It is the
+caller's responsibility to ensure that r provides UTF-8 encoded HTML.
+
+       z := html.NewTokenizer(r)
+
+Given a Tokenizer z, the HTML is tokenized by repeatedly calling z.Next(),
+which parses the next token and returns its type, or an error:
+
+       for {
+               tt := z.Next()
+               if tt == html.ErrorToken {
+                       // ...
+                       return ...
+               }
+               // Process the current token.
+       }
+
+There are two APIs for retrieving the current token. The high-level API is to
+call Token; the low-level API is to call Text or TagName / TagAttr. Both APIs
+allow optionally calling Raw after Next but before Token, Text, TagName, or
+TagAttr. In EBNF notation, the valid call sequence per token is:
+
+       Next {Raw} [ Token | Text | TagName {TagAttr} ]
+
+Token returns an independent data structure that completely describes a token.
+Entities (such as "&lt;") are unescaped, tag names and attribute keys are
+lower-cased, and attributes are collected into a []Attribute. For example:
+
+       for {
+               if z.Next() == html.ErrorToken {
+                       // Returning io.EOF indicates success.
+                       return z.Err()
+               }
+               emitToken(z.Token())
+       }
+
+The low-level API performs fewer allocations and copies, but the contents of
+the []byte values returned by Text, TagName and TagAttr may change on the next
+call to Next. For example, to extract an HTML page's anchor text:
+
+       depth := 0
+       for {
+               tt := z.Next()
+               switch tt {
+               case ErrorToken:
+                       return z.Err()
+               case TextToken:
+                       if depth > 0 {
+                               // emitBytes should copy the []byte it receives,
+                               // if it doesn't process it immediately.
+                               emitBytes(z.Text())
+                       }
+               case StartTagToken, EndTagToken:
+                       tn, _ := z.TagName()
+                       if len(tn) == 1 && tn[0] == 'a' {
+                               if tt == StartTagToken {
+                                       depth++
+                               } else {
+                                       depth--
+                               }
+                       }
+               }
+       }
+
+Parsing is done by calling Parse with an io.Reader, which returns the root of
+the parse tree (the document element) as a *Node. It is the caller's
+responsibility to ensure that the Reader provides UTF-8 encoded HTML. For
+example, to process each anchor node in depth-first order:
+
+       doc, err := html.Parse(r)
+       if err != nil {
+               // ...
+       }
+       var f func(*html.Node)
+       f = func(n *html.Node) {
+               if n.Type == html.ElementNode && n.Data == "a" {
+                       // Do something with n...
+               }
+               for c := n.FirstChild; c != nil; c = c.NextSibling {
+                       f(c)
+               }
+       }
+       f(doc)
+
+The relevant specifications include:
+https://html.spec.whatwg.org/multipage/syntax.html and
+https://html.spec.whatwg.org/multipage/syntax.html#tokenization
+*/
+package html // import "golang.org/x/net/html"
+
+// The tokenization algorithm implemented by this package is not a line-by-line
+// transliteration of the relatively verbose state-machine in the WHATWG
+// specification. A more direct approach is used instead, where the program
+// counter implies the state, such as whether it is tokenizing a tag or a text
+// node. Specification compliance is verified by checking expected and actual
+// outputs over a test suite rather than aiming for algorithmic fidelity.
+
+// TODO(nigeltao): Does a DOM API belong in this package or a separate one?
+// TODO(nigeltao): How does parsing interact with a JavaScript engine?
diff --git a/vendor/golang.org/x/net/html/doctype.go b/vendor/golang.org/x/net/html/doctype.go
new file mode 100644 (file)
index 0000000..c484e5a
--- /dev/null
@@ -0,0 +1,156 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package html
+
+import (
+       "strings"
+)
+
+// parseDoctype parses the data from a DoctypeToken into a name,
+// public identifier, and system identifier. It returns a Node whose Type
+// is DoctypeNode, whose Data is the name, and which has attributes
+// named "system" and "public" for the two identifiers if they were present.
+// quirks is whether the document should be parsed in "quirks mode".
+func parseDoctype(s string) (n *Node, quirks bool) {
+       n = &Node{Type: DoctypeNode}
+
+       // Find the name.
+       space := strings.IndexAny(s, whitespace)
+       if space == -1 {
+               space = len(s)
+       }
+       n.Data = s[:space]
+       // The comparison to "html" is case-sensitive.
+       if n.Data != "html" {
+               quirks = true
+       }
+       n.Data = strings.ToLower(n.Data)
+       s = strings.TrimLeft(s[space:], whitespace)
+
+       if len(s) < 6 {
+               // It can't start with "PUBLIC" or "SYSTEM".
+               // Ignore the rest of the string.
+               return n, quirks || s != ""
+       }
+
+       key := strings.ToLower(s[:6])
+       s = s[6:]
+       for key == "public" || key == "system" {
+               s = strings.TrimLeft(s, whitespace)
+               if s == "" {
+                       break
+               }
+               quote := s[0]
+               if quote != '"' && quote != '\'' {
+                       break
+               }
+               s = s[1:]
+               q := strings.IndexRune(s, rune(quote))
+               var id string
+               if q == -1 {
+                       id = s
+                       s = ""
+               } else {
+                       id = s[:q]
+                       s = s[q+1:]
+               }
+               n.Attr = append(n.Attr, Attribute{Key: key, Val: id})
+               if key == "public" {
+                       key = "system"
+               } else {
+                       key = ""
+               }
+       }
+
+       if key != "" || s != "" {
+               quirks = true
+       } else if len(n.Attr) > 0 {
+               if n.Attr[0].Key == "public" {
+                       public := strings.ToLower(n.Attr[0].Val)
+                       switch public {
+                       case "-//w3o//dtd w3 html strict 3.0//en//", "-/w3d/dtd html 4.0 transitional/en", "html":
+                               quirks = true
+                       default:
+                               for _, q := range quirkyIDs {
+                                       if strings.HasPrefix(public, q) {
+                                               quirks = true
+                                               break
+                                       }
+                               }
+                       }
+                       // The following two public IDs only cause quirks mode if there is no system ID.
+                       if len(n.Attr) == 1 && (strings.HasPrefix(public, "-//w3c//dtd html 4.01 frameset//") ||
+                               strings.HasPrefix(public, "-//w3c//dtd html 4.01 transitional//")) {
+                               quirks = true
+                       }
+               }
+               if lastAttr := n.Attr[len(n.Attr)-1]; lastAttr.Key == "system" &&
+                       strings.ToLower(lastAttr.Val) == "http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd" {
+                       quirks = true
+               }
+       }
+
+       return n, quirks
+}
+
+// quirkyIDs is a list of public doctype identifiers that cause a document
+// to be interpreted in quirks mode. The identifiers should be in lower case.
+var quirkyIDs = []string{
+       "+//silmaril//dtd html pro v0r11 19970101//",
+       "-//advasoft ltd//dtd html 3.0 aswedit + extensions//",
+       "-//as//dtd html 3.0 aswedit + extensions//",
+       "-//ietf//dtd html 2.0 level 1//",
+       "-//ietf//dtd html 2.0 level 2//",
+       "-//ietf//dtd html 2.0 strict level 1//",
+       "-//ietf//dtd html 2.0 strict level 2//",
+       "-//ietf//dtd html 2.0 strict//",
+       "-//ietf//dtd html 2.0//",
+       "-//ietf//dtd html 2.1e//",
+       "-//ietf//dtd html 3.0//",
+       "-//ietf//dtd html 3.2 final//",
+       "-//ietf//dtd html 3.2//",
+       "-//ietf//dtd html 3//",
+       "-//ietf//dtd html level 0//",
+       "-//ietf//dtd html level 1//",
+       "-//ietf//dtd html level 2//",
+       "-//ietf//dtd html level 3//",
+       "-//ietf//dtd html strict level 0//",
+       "-//ietf//dtd html strict level 1//",
+       "-//ietf//dtd html strict level 2//",
+       "-//ietf//dtd html strict level 3//",
+       "-//ietf//dtd html strict//",
+       "-//ietf//dtd html//",
+       "-//metrius//dtd metrius presentational//",
+       "-//microsoft//dtd internet explorer 2.0 html strict//",
+       "-//microsoft//dtd internet explorer 2.0 html//",
+       "-//microsoft//dtd internet explorer 2.0 tables//",
+       "-//microsoft//dtd internet explorer 3.0 html strict//",
+       "-//microsoft//dtd internet explorer 3.0 html//",
+       "-//microsoft//dtd internet explorer 3.0 tables//",
+       "-//netscape comm. corp.//dtd html//",
+       "-//netscape comm. corp.//dtd strict html//",
+       "-//o'reilly and associates//dtd html 2.0//",
+       "-//o'reilly and associates//dtd html extended 1.0//",
+       "-//o'reilly and associates//dtd html extended relaxed 1.0//",
+       "-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//",
+       "-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//",
+       "-//spyglass//dtd html 2.0 extended//",
+       "-//sq//dtd html 2.0 hotmetal + extensions//",
+       "-//sun microsystems corp.//dtd hotjava html//",
+       "-//sun microsystems corp.//dtd hotjava strict html//",
+       "-//w3c//dtd html 3 1995-03-24//",
+       "-//w3c//dtd html 3.2 draft//",
+       "-//w3c//dtd html 3.2 final//",
+       "-//w3c//dtd html 3.2//",
+       "-//w3c//dtd html 3.2s draft//",
+       "-//w3c//dtd html 4.0 frameset//",
+       "-//w3c//dtd html 4.0 transitional//",
+       "-//w3c//dtd html experimental 19960712//",
+       "-//w3c//dtd html experimental 970421//",
+       "-//w3c//dtd w3 html//",
+       "-//w3o//dtd w3 html 3.0//",
+       "-//webtechs//dtd mozilla html 2.0//",
+       "-//webtechs//dtd mozilla html//",
+}
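parseDoctype is internal, but its result is visible to callers of html.Parse: the doctype becomes a DoctypeNode child of the document, carrying the "public" and "system" attributes described above when those identifiers are present. A short sketch inspecting that node; the sample input is illustrative only:

package main

import (
	"fmt"
	"strings"

	"golang.org/x/net/html"
)

func main() {
	const page = `<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd"><p>hi`
	doc, err := html.Parse(strings.NewReader(page))
	if err != nil {
		fmt.Println("parse error:", err)
		return
	}
	// The doctype, if any, is a direct child of the document node.
	for c := doc.FirstChild; c != nil; c = c.NextSibling {
		if c.Type == html.DoctypeNode {
			fmt.Println("name:", c.Data) // "html"
			for _, a := range c.Attr {
				fmt.Printf("%s: %s\n", a.Key, a.Val) // public/system identifiers
			}
		}
	}
}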
diff --git a/vendor/golang.org/x/net/html/entity.go b/vendor/golang.org/x/net/html/entity.go
new file mode 100644 (file)
index 0000000..a50c04c
--- /dev/null
@@ -0,0 +1,2253 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package html
+
+// All entities that do not end with ';' are 6 or fewer bytes long.
+const longestEntityWithoutSemicolon = 6
+
+// entity is a map from HTML entity names to their values. The semicolon matters:
+// https://html.spec.whatwg.org/multipage/syntax.html#named-character-references
+// lists both "amp" and "amp;" as two separate entries.
+//
+// Note that the HTML5 list is larger than the HTML4 list at
+// http://www.w3.org/TR/html4/sgml/entities.html
+var entity = map[string]rune{
+       "AElig;":                           '\U000000C6',
+       "AMP;":                             '\U00000026',
+       "Aacute;":                          '\U000000C1',
+       "Abreve;":                          '\U00000102',
+       "Acirc;":                           '\U000000C2',
+       "Acy;":                             '\U00000410',
+       "Afr;":                             '\U0001D504',
+       "Agrave;":                          '\U000000C0',
+       "Alpha;":                           '\U00000391',
+       "Amacr;":                           '\U00000100',
+       "And;":                             '\U00002A53',
+       "Aogon;":                           '\U00000104',
+       "Aopf;":                            '\U0001D538',
+       "ApplyFunction;":                   '\U00002061',
+       "Aring;":                           '\U000000C5',
+       "Ascr;":                            '\U0001D49C',
+       "Assign;":                          '\U00002254',
+       "Atilde;":                          '\U000000C3',
+       "Auml;":                            '\U000000C4',
+       "Backslash;":                       '\U00002216',
+       "Barv;":                            '\U00002AE7',
+       "Barwed;":                          '\U00002306',
+       "Bcy;":                             '\U00000411',
+       "Because;":                         '\U00002235',
+       "Bernoullis;":                      '\U0000212C',
+       "Beta;":                            '\U00000392',
+       "Bfr;":                             '\U0001D505',
+       "Bopf;":                            '\U0001D539',
+       "Breve;":                           '\U000002D8',
+       "Bscr;":                            '\U0000212C',
+       "Bumpeq;":                          '\U0000224E',
+       "CHcy;":                            '\U00000427',
+       "COPY;":                            '\U000000A9',
+       "Cacute;":                          '\U00000106',
+       "Cap;":                             '\U000022D2',
+       "CapitalDifferentialD;":            '\U00002145',
+       "Cayleys;":                         '\U0000212D',
+       "Ccaron;":                          '\U0000010C',
+       "Ccedil;":                          '\U000000C7',
+       "Ccirc;":                           '\U00000108',
+       "Cconint;":                         '\U00002230',
+       "Cdot;":                            '\U0000010A',
+       "Cedilla;":                         '\U000000B8',
+       "CenterDot;":                       '\U000000B7',
+       "Cfr;":                             '\U0000212D',
+       "Chi;":                             '\U000003A7',
+       "CircleDot;":                       '\U00002299',
+       "CircleMinus;":                     '\U00002296',
+       "CirclePlus;":                      '\U00002295',
+       "CircleTimes;":                     '\U00002297',
+       "ClockwiseContourIntegral;":        '\U00002232',
+       "CloseCurlyDoubleQuote;":           '\U0000201D',
+       "CloseCurlyQuote;":                 '\U00002019',
+       "Colon;":                           '\U00002237',
+       "Colone;":                          '\U00002A74',
+       "Congruent;":                       '\U00002261',
+       "Conint;":                          '\U0000222F',
+       "ContourIntegral;":                 '\U0000222E',
+       "Copf;":                            '\U00002102',
+       "Coproduct;":                       '\U00002210',
+       "CounterClockwiseContourIntegral;": '\U00002233',
+       "Cross;":                    '\U00002A2F',
+       "Cscr;":                     '\U0001D49E',
+       "Cup;":                      '\U000022D3',
+       "CupCap;":                   '\U0000224D',
+       "DD;":                       '\U00002145',
+       "DDotrahd;":                 '\U00002911',
+       "DJcy;":                     '\U00000402',
+       "DScy;":                     '\U00000405',
+       "DZcy;":                     '\U0000040F',
+       "Dagger;":                   '\U00002021',
+       "Darr;":                     '\U000021A1',
+       "Dashv;":                    '\U00002AE4',
+       "Dcaron;":                   '\U0000010E',
+       "Dcy;":                      '\U00000414',
+       "Del;":                      '\U00002207',
+       "Delta;":                    '\U00000394',
+       "Dfr;":                      '\U0001D507',
+       "DiacriticalAcute;":         '\U000000B4',
+       "DiacriticalDot;":           '\U000002D9',
+       "DiacriticalDoubleAcute;":   '\U000002DD',
+       "DiacriticalGrave;":         '\U00000060',
+       "DiacriticalTilde;":         '\U000002DC',
+       "Diamond;":                  '\U000022C4',
+       "DifferentialD;":            '\U00002146',
+       "Dopf;":                     '\U0001D53B',
+       "Dot;":                      '\U000000A8',
+       "DotDot;":                   '\U000020DC',
+       "DotEqual;":                 '\U00002250',
+       "DoubleContourIntegral;":    '\U0000222F',
+       "DoubleDot;":                '\U000000A8',
+       "DoubleDownArrow;":          '\U000021D3',
+       "DoubleLeftArrow;":          '\U000021D0',
+       "DoubleLeftRightArrow;":     '\U000021D4',
+       "DoubleLeftTee;":            '\U00002AE4',
+       "DoubleLongLeftArrow;":      '\U000027F8',
+       "DoubleLongLeftRightArrow;": '\U000027FA',
+       "DoubleLongRightArrow;":     '\U000027F9',
+       "DoubleRightArrow;":         '\U000021D2',
+       "DoubleRightTee;":           '\U000022A8',
+       "DoubleUpArrow;":            '\U000021D1',
+       "DoubleUpDownArrow;":        '\U000021D5',
+       "DoubleVerticalBar;":        '\U00002225',
+       "DownArrow;":                '\U00002193',
+       "DownArrowBar;":             '\U00002913',
+       "DownArrowUpArrow;":         '\U000021F5',
+       "DownBreve;":                '\U00000311',
+       "DownLeftRightVector;":      '\U00002950',
+       "DownLeftTeeVector;":        '\U0000295E',
+       "DownLeftVector;":           '\U000021BD',
+       "DownLeftVectorBar;":        '\U00002956',
+       "DownRightTeeVector;":       '\U0000295F',
+       "DownRightVector;":          '\U000021C1',
+       "DownRightVectorBar;":       '\U00002957',
+       "DownTee;":                  '\U000022A4',
+       "DownTeeArrow;":             '\U000021A7',
+       "Downarrow;":                '\U000021D3',
+       "Dscr;":                     '\U0001D49F',
+       "Dstrok;":                   '\U00000110',
+       "ENG;":                      '\U0000014A',
+       "ETH;":                      '\U000000D0',
+       "Eacute;":                   '\U000000C9',
+       "Ecaron;":                   '\U0000011A',
+       "Ecirc;":                    '\U000000CA',
+       "Ecy;":                      '\U0000042D',
+       "Edot;":                     '\U00000116',
+       "Efr;":                      '\U0001D508',
+       "Egrave;":                   '\U000000C8',
+       "Element;":                  '\U00002208',
+       "Emacr;":                    '\U00000112',
+       "EmptySmallSquare;":         '\U000025FB',
+       "EmptyVerySmallSquare;":     '\U000025AB',
+       "Eogon;":                    '\U00000118',
+       "Eopf;":                     '\U0001D53C',
+       "Epsilon;":                  '\U00000395',
+       "Equal;":                    '\U00002A75',
+       "EqualTilde;":               '\U00002242',
+       "Equilibrium;":              '\U000021CC',
+       "Escr;":                     '\U00002130',
+       "Esim;":                     '\U00002A73',
+       "Eta;":                      '\U00000397',
+       "Euml;":                     '\U000000CB',
+       "Exists;":                   '\U00002203',
+       "ExponentialE;":             '\U00002147',
+       "Fcy;":                      '\U00000424',
+       "Ffr;":                      '\U0001D509',
+       "FilledSmallSquare;":        '\U000025FC',
+       "FilledVerySmallSquare;":    '\U000025AA',
+       "Fopf;":                     '\U0001D53D',
+       "ForAll;":                   '\U00002200',
+       "Fouriertrf;":               '\U00002131',
+       "Fscr;":                     '\U00002131',
+       "GJcy;":                     '\U00000403',
+       "GT;":                       '\U0000003E',
+       "Gamma;":                    '\U00000393',
+       "Gammad;":                   '\U000003DC',
+       "Gbreve;":                   '\U0000011E',
+       "Gcedil;":                   '\U00000122',
+       "Gcirc;":                    '\U0000011C',
+       "Gcy;":                      '\U00000413',
+       "Gdot;":                     '\U00000120',
+       "Gfr;":                      '\U0001D50A',
+       "Gg;":                       '\U000022D9',
+       "Gopf;":                     '\U0001D53E',
+       "GreaterEqual;":             '\U00002265',
+       "GreaterEqualLess;":         '\U000022DB',
+       "GreaterFullEqual;":         '\U00002267',
+       "GreaterGreater;":           '\U00002AA2',
+       "GreaterLess;":              '\U00002277',
+       "GreaterSlantEqual;":        '\U00002A7E',
+       "GreaterTilde;":             '\U00002273',
+       "Gscr;":                     '\U0001D4A2',
+       "Gt;":                       '\U0000226B',
+       "HARDcy;":                   '\U0000042A',
+       "Hacek;":                    '\U000002C7',
+       "Hat;":                      '\U0000005E',
+       "Hcirc;":                    '\U00000124',
+       "Hfr;":                      '\U0000210C',
+       "HilbertSpace;":             '\U0000210B',
+       "Hopf;":                     '\U0000210D',
+       "HorizontalLine;":           '\U00002500',
+       "Hscr;":                     '\U0000210B',
+       "Hstrok;":                   '\U00000126',
+       "HumpDownHump;":             '\U0000224E',
+       "HumpEqual;":                '\U0000224F',
+       "IEcy;":                     '\U00000415',
+       "IJlig;":                    '\U00000132',
+       "IOcy;":                     '\U00000401',
+       "Iacute;":                   '\U000000CD',
+       "Icirc;":                    '\U000000CE',
+       "Icy;":                      '\U00000418',
+       "Idot;":                     '\U00000130',
+       "Ifr;":                      '\U00002111',
+       "Igrave;":                   '\U000000CC',
+       "Im;":                       '\U00002111',
+       "Imacr;":                    '\U0000012A',
+       "ImaginaryI;":               '\U00002148',
+       "Implies;":                  '\U000021D2',
+       "Int;":                      '\U0000222C',
+       "Integral;":                 '\U0000222B',
+       "Intersection;":             '\U000022C2',
+       "InvisibleComma;":           '\U00002063',
+       "InvisibleTimes;":           '\U00002062',
+       "Iogon;":                    '\U0000012E',
+       "Iopf;":                     '\U0001D540',
+       "Iota;":                     '\U00000399',
+       "Iscr;":                     '\U00002110',
+       "Itilde;":                   '\U00000128',
+       "Iukcy;":                    '\U00000406',
+       "Iuml;":                     '\U000000CF',
+       "Jcirc;":                    '\U00000134',
+       "Jcy;":                      '\U00000419',
+       "Jfr;":                      '\U0001D50D',
+       "Jopf;":                     '\U0001D541',
+       "Jscr;":                     '\U0001D4A5',
+       "Jsercy;":                   '\U00000408',
+       "Jukcy;":                    '\U00000404',
+       "KHcy;":                     '\U00000425',
+       "KJcy;":                     '\U0000040C',
+       "Kappa;":                    '\U0000039A',
+       "Kcedil;":                   '\U00000136',
+       "Kcy;":                      '\U0000041A',
+       "Kfr;":                      '\U0001D50E',
+       "Kopf;":                     '\U0001D542',
+       "Kscr;":                     '\U0001D4A6',
+       "LJcy;":                     '\U00000409',
+       "LT;":                       '\U0000003C',
+       "Lacute;":                   '\U00000139',
+       "Lambda;":                   '\U0000039B',
+       "Lang;":                     '\U000027EA',
+       "Laplacetrf;":               '\U00002112',
+       "Larr;":                     '\U0000219E',
+       "Lcaron;":                   '\U0000013D',
+       "Lcedil;":                   '\U0000013B',
+       "Lcy;":                      '\U0000041B',
+       "LeftAngleBracket;":         '\U000027E8',
+       "LeftArrow;":                '\U00002190',
+       "LeftArrowBar;":             '\U000021E4',
+       "LeftArrowRightArrow;":      '\U000021C6',
+       "LeftCeiling;":              '\U00002308',
+       "LeftDoubleBracket;":        '\U000027E6',
+       "LeftDownTeeVector;":        '\U00002961',
+       "LeftDownVector;":           '\U000021C3',
+       "LeftDownVectorBar;":        '\U00002959',
+       "LeftFloor;":                '\U0000230A',
+       "LeftRightArrow;":           '\U00002194',
+       "LeftRightVector;":          '\U0000294E',
+       "LeftTee;":                  '\U000022A3',
+       "LeftTeeArrow;":             '\U000021A4',
+       "LeftTeeVector;":            '\U0000295A',
+       "LeftTriangle;":             '\U000022B2',
+       "LeftTriangleBar;":          '\U000029CF',
+       "LeftTriangleEqual;":        '\U000022B4',
+       "LeftUpDownVector;":         '\U00002951',
+       "LeftUpTeeVector;":          '\U00002960',
+       "LeftUpVector;":             '\U000021BF',
+       "LeftUpVectorBar;":          '\U00002958',
+       "LeftVector;":               '\U000021BC',
+       "LeftVectorBar;":            '\U00002952',
+       "Leftarrow;":                '\U000021D0',
+       "Leftrightarrow;":           '\U000021D4',
+       "LessEqualGreater;":         '\U000022DA',
+       "LessFullEqual;":            '\U00002266',
+       "LessGreater;":              '\U00002276',
+       "LessLess;":                 '\U00002AA1',
+       "LessSlantEqual;":           '\U00002A7D',
+       "LessTilde;":                '\U00002272',
+       "Lfr;":                      '\U0001D50F',
+       "Ll;":                       '\U000022D8',
+       "Lleftarrow;":               '\U000021DA',
+       "Lmidot;":                   '\U0000013F',
+       "LongLeftArrow;":            '\U000027F5',
+       "LongLeftRightArrow;":       '\U000027F7',
+       "LongRightArrow;":           '\U000027F6',
+       "Longleftarrow;":            '\U000027F8',
+       "Longleftrightarrow;":       '\U000027FA',
+       "Longrightarrow;":           '\U000027F9',
+       "Lopf;":                     '\U0001D543',
+       "LowerLeftArrow;":           '\U00002199',
+       "LowerRightArrow;":          '\U00002198',
+       "Lscr;":                     '\U00002112',
+       "Lsh;":                      '\U000021B0',
+       "Lstrok;":                   '\U00000141',
+       "Lt;":                       '\U0000226A',
+       "Map;":                      '\U00002905',
+       "Mcy;":                      '\U0000041C',
+       "MediumSpace;":              '\U0000205F',
+       "Mellintrf;":                '\U00002133',
+       "Mfr;":                      '\U0001D510',
+       "MinusPlus;":                '\U00002213',
+       "Mopf;":                     '\U0001D544',
+       "Mscr;":                     '\U00002133',
+       "Mu;":                       '\U0000039C',
+       "NJcy;":                     '\U0000040A',
+       "Nacute;":                   '\U00000143',
+       "Ncaron;":                   '\U00000147',
+       "Ncedil;":                   '\U00000145',
+       "Ncy;":                      '\U0000041D',
+       "NegativeMediumSpace;":      '\U0000200B',
+       "NegativeThickSpace;":       '\U0000200B',
+       "NegativeThinSpace;":        '\U0000200B',
+       "NegativeVeryThinSpace;":    '\U0000200B',
+       "NestedGreaterGreater;":     '\U0000226B',
+       "NestedLessLess;":           '\U0000226A',
+       "NewLine;":                  '\U0000000A',
+       "Nfr;":                      '\U0001D511',
+       "NoBreak;":                  '\U00002060',
+       "NonBreakingSpace;":         '\U000000A0',
+       "Nopf;":                     '\U00002115',
+       "Not;":                      '\U00002AEC',
+       "NotCongruent;":             '\U00002262',
+       "NotCupCap;":                '\U0000226D',
+       "NotDoubleVerticalBar;":     '\U00002226',
+       "NotElement;":               '\U00002209',
+       "NotEqual;":                 '\U00002260',
+       "NotExists;":                '\U00002204',
+       "NotGreater;":               '\U0000226F',
+       "NotGreaterEqual;":          '\U00002271',
+       "NotGreaterLess;":           '\U00002279',
+       "NotGreaterTilde;":          '\U00002275',
+       "NotLeftTriangle;":          '\U000022EA',
+       "NotLeftTriangleEqual;":     '\U000022EC',
+       "NotLess;":                  '\U0000226E',
+       "NotLessEqual;":             '\U00002270',
+       "NotLessGreater;":           '\U00002278',
+       "NotLessTilde;":             '\U00002274',
+       "NotPrecedes;":              '\U00002280',
+       "NotPrecedesSlantEqual;":    '\U000022E0',
+       "NotReverseElement;":        '\U0000220C',
+       "NotRightTriangle;":         '\U000022EB',
+       "NotRightTriangleEqual;":    '\U000022ED',
+       "NotSquareSubsetEqual;":     '\U000022E2',
+       "NotSquareSupersetEqual;":   '\U000022E3',
+       "NotSubsetEqual;":           '\U00002288',
+       "NotSucceeds;":              '\U00002281',
+       "NotSucceedsSlantEqual;":    '\U000022E1',
+       "NotSupersetEqual;":         '\U00002289',
+       "NotTilde;":                 '\U00002241',
+       "NotTildeEqual;":            '\U00002244',
+       "NotTildeFullEqual;":        '\U00002247',
+       "NotTildeTilde;":            '\U00002249',
+       "NotVerticalBar;":           '\U00002224',
+       "Nscr;":                     '\U0001D4A9',
+       "Ntilde;":                   '\U000000D1',
+       "Nu;":                       '\U0000039D',
+       "OElig;":                    '\U00000152',
+       "Oacute;":                   '\U000000D3',
+       "Ocirc;":                    '\U000000D4',
+       "Ocy;":                      '\U0000041E',
+       "Odblac;":                   '\U00000150',
+       "Ofr;":                      '\U0001D512',
+       "Ograve;":                   '\U000000D2',
+       "Omacr;":                    '\U0000014C',
+       "Omega;":                    '\U000003A9',
+       "Omicron;":                  '\U0000039F',
+       "Oopf;":                     '\U0001D546',
+       "OpenCurlyDoubleQuote;":     '\U0000201C',
+       "OpenCurlyQuote;":           '\U00002018',
+       "Or;":                       '\U00002A54',
+       "Oscr;":                     '\U0001D4AA',
+       "Oslash;":                   '\U000000D8',
+       "Otilde;":                   '\U000000D5',
+       "Otimes;":                   '\U00002A37',
+       "Ouml;":                     '\U000000D6',
+       "OverBar;":                  '\U0000203E',
+       "OverBrace;":                '\U000023DE',
+       "OverBracket;":              '\U000023B4',
+       "OverParenthesis;":          '\U000023DC',
+       "PartialD;":                 '\U00002202',
+       "Pcy;":                      '\U0000041F',
+       "Pfr;":                      '\U0001D513',
+       "Phi;":                      '\U000003A6',
+       "Pi;":                       '\U000003A0',
+       "PlusMinus;":                '\U000000B1',
+       "Poincareplane;":            '\U0000210C',
+       "Popf;":                     '\U00002119',
+       "Pr;":                       '\U00002ABB',
+       "Precedes;":                 '\U0000227A',
+       "PrecedesEqual;":            '\U00002AAF',
+       "PrecedesSlantEqual;":       '\U0000227C',
+       "PrecedesTilde;":            '\U0000227E',
+       "Prime;":                    '\U00002033',
+       "Product;":                  '\U0000220F',
+       "Proportion;":               '\U00002237',
+       "Proportional;":             '\U0000221D',
+       "Pscr;":                     '\U0001D4AB',
+       "Psi;":                      '\U000003A8',
+       "QUOT;":                     '\U00000022',
+       "Qfr;":                      '\U0001D514',
+       "Qopf;":                     '\U0000211A',
+       "Qscr;":                     '\U0001D4AC',
+       "RBarr;":                    '\U00002910',
+       "REG;":                      '\U000000AE',
+       "Racute;":                   '\U00000154',
+       "Rang;":                     '\U000027EB',
+       "Rarr;":                     '\U000021A0',
+       "Rarrtl;":                   '\U00002916',
+       "Rcaron;":                   '\U00000158',
+       "Rcedil;":                   '\U00000156',
+       "Rcy;":                      '\U00000420',
+       "Re;":                       '\U0000211C',
+       "ReverseElement;":           '\U0000220B',
+       "ReverseEquilibrium;":       '\U000021CB',
+       "ReverseUpEquilibrium;":     '\U0000296F',
+       "Rfr;":                      '\U0000211C',
+       "Rho;":                      '\U000003A1',
+       "RightAngleBracket;":        '\U000027E9',
+       "RightArrow;":               '\U00002192',
+       "RightArrowBar;":            '\U000021E5',
+       "RightArrowLeftArrow;":      '\U000021C4',
+       "RightCeiling;":             '\U00002309',
+       "RightDoubleBracket;":       '\U000027E7',
+       "RightDownTeeVector;":       '\U0000295D',
+       "RightDownVector;":          '\U000021C2',
+       "RightDownVectorBar;":       '\U00002955',
+       "RightFloor;":               '\U0000230B',
+       "RightTee;":                 '\U000022A2',
+       "RightTeeArrow;":            '\U000021A6',
+       "RightTeeVector;":           '\U0000295B',
+       "RightTriangle;":            '\U000022B3',
+       "RightTriangleBar;":         '\U000029D0',
+       "RightTriangleEqual;":       '\U000022B5',
+       "RightUpDownVector;":        '\U0000294F',
+       "RightUpTeeVector;":         '\U0000295C',
+       "RightUpVector;":            '\U000021BE',
+       "RightUpVectorBar;":         '\U00002954',
+       "RightVector;":              '\U000021C0',
+       "RightVectorBar;":           '\U00002953',
+       "Rightarrow;":               '\U000021D2',
+       "Ropf;":                     '\U0000211D',
+       "RoundImplies;":             '\U00002970',
+       "Rrightarrow;":              '\U000021DB',
+       "Rscr;":                     '\U0000211B',
+       "Rsh;":                      '\U000021B1',
+       "RuleDelayed;":              '\U000029F4',
+       "SHCHcy;":                   '\U00000429',
+       "SHcy;":                     '\U00000428',
+       "SOFTcy;":                   '\U0000042C',
+       "Sacute;":                   '\U0000015A',
+       "Sc;":                       '\U00002ABC',
+       "Scaron;":                   '\U00000160',
+       "Scedil;":                   '\U0000015E',
+       "Scirc;":                    '\U0000015C',
+       "Scy;":                      '\U00000421',
+       "Sfr;":                      '\U0001D516',
+       "ShortDownArrow;":           '\U00002193',
+       "ShortLeftArrow;":           '\U00002190',
+       "ShortRightArrow;":          '\U00002192',
+       "ShortUpArrow;":             '\U00002191',
+       "Sigma;":                    '\U000003A3',
+       "SmallCircle;":              '\U00002218',
+       "Sopf;":                     '\U0001D54A',
+       "Sqrt;":                     '\U0000221A',
+       "Square;":                   '\U000025A1',
+       "SquareIntersection;":       '\U00002293',
+       "SquareSubset;":             '\U0000228F',
+       "SquareSubsetEqual;":        '\U00002291',
+       "SquareSuperset;":           '\U00002290',
+       "SquareSupersetEqual;":      '\U00002292',
+       "SquareUnion;":              '\U00002294',
+       "Sscr;":                     '\U0001D4AE',
+       "Star;":                     '\U000022C6',
+       "Sub;":                      '\U000022D0',
+       "Subset;":                   '\U000022D0',
+       "SubsetEqual;":              '\U00002286',
+       "Succeeds;":                 '\U0000227B',
+       "SucceedsEqual;":            '\U00002AB0',
+       "SucceedsSlantEqual;":       '\U0000227D',
+       "SucceedsTilde;":            '\U0000227F',
+       "SuchThat;":                 '\U0000220B',
+       "Sum;":                      '\U00002211',
+       "Sup;":                      '\U000022D1',
+       "Superset;":                 '\U00002283',
+       "SupersetEqual;":            '\U00002287',
+       "Supset;":                   '\U000022D1',
+       "THORN;":                    '\U000000DE',
+       "TRADE;":                    '\U00002122',
+       "TSHcy;":                    '\U0000040B',
+       "TScy;":                     '\U00000426',
+       "Tab;":                      '\U00000009',
+       "Tau;":                      '\U000003A4',
+       "Tcaron;":                   '\U00000164',
+       "Tcedil;":                   '\U00000162',
+       "Tcy;":                      '\U00000422',
+       "Tfr;":                      '\U0001D517',
+       "Therefore;":                '\U00002234',
+       "Theta;":                    '\U00000398',
+       "ThinSpace;":                '\U00002009',
+       "Tilde;":                    '\U0000223C',
+       "TildeEqual;":               '\U00002243',
+       "TildeFullEqual;":           '\U00002245',
+       "TildeTilde;":               '\U00002248',
+       "Topf;":                     '\U0001D54B',
+       "TripleDot;":                '\U000020DB',
+       "Tscr;":                     '\U0001D4AF',
+       "Tstrok;":                   '\U00000166',
+       "Uacute;":                   '\U000000DA',
+       "Uarr;":                     '\U0000219F',
+       "Uarrocir;":                 '\U00002949',
+       "Ubrcy;":                    '\U0000040E',
+       "Ubreve;":                   '\U0000016C',
+       "Ucirc;":                    '\U000000DB',
+       "Ucy;":                      '\U00000423',
+       "Udblac;":                   '\U00000170',
+       "Ufr;":                      '\U0001D518',
+       "Ugrave;":                   '\U000000D9',
+       "Umacr;":                    '\U0000016A',
+       "UnderBar;":                 '\U0000005F',
+       "UnderBrace;":               '\U000023DF',
+       "UnderBracket;":             '\U000023B5',
+       "UnderParenthesis;":         '\U000023DD',
+       "Union;":                    '\U000022C3',
+       "UnionPlus;":                '\U0000228E',
+       "Uogon;":                    '\U00000172',
+       "Uopf;":                     '\U0001D54C',
+       "UpArrow;":                  '\U00002191',
+       "UpArrowBar;":               '\U00002912',
+       "UpArrowDownArrow;":         '\U000021C5',
+       "UpDownArrow;":              '\U00002195',
+       "UpEquilibrium;":            '\U0000296E',
+       "UpTee;":                    '\U000022A5',
+       "UpTeeArrow;":               '\U000021A5',
+       "Uparrow;":                  '\U000021D1',
+       "Updownarrow;":              '\U000021D5',
+       "UpperLeftArrow;":           '\U00002196',
+       "UpperRightArrow;":          '\U00002197',
+       "Upsi;":                     '\U000003D2',
+       "Upsilon;":                  '\U000003A5',
+       "Uring;":                    '\U0000016E',
+       "Uscr;":                     '\U0001D4B0',
+       "Utilde;":                   '\U00000168',
+       "Uuml;":                     '\U000000DC',
+       "VDash;":                    '\U000022AB',
+       "Vbar;":                     '\U00002AEB',
+       "Vcy;":                      '\U00000412',
+       "Vdash;":                    '\U000022A9',
+       "Vdashl;":                   '\U00002AE6',
+       "Vee;":                      '\U000022C1',
+       "Verbar;":                   '\U00002016',
+       "Vert;":                     '\U00002016',
+       "VerticalBar;":              '\U00002223',
+       "VerticalLine;":             '\U0000007C',
+       "VerticalSeparator;":        '\U00002758',
+       "VerticalTilde;":            '\U00002240',
+       "VeryThinSpace;":            '\U0000200A',
+       "Vfr;":                      '\U0001D519',
+       "Vopf;":                     '\U0001D54D',
+       "Vscr;":                     '\U0001D4B1',
+       "Vvdash;":                   '\U000022AA',
+       "Wcirc;":                    '\U00000174',
+       "Wedge;":                    '\U000022C0',
+       "Wfr;":                      '\U0001D51A',
+       "Wopf;":                     '\U0001D54E',
+       "Wscr;":                     '\U0001D4B2',
+       "Xfr;":                      '\U0001D51B',
+       "Xi;":                       '\U0000039E',
+       "Xopf;":                     '\U0001D54F',
+       "Xscr;":                     '\U0001D4B3',
+       "YAcy;":                     '\U0000042F',
+       "YIcy;":                     '\U00000407',
+       "YUcy;":                     '\U0000042E',
+       "Yacute;":                   '\U000000DD',
+       "Ycirc;":                    '\U00000176',
+       "Ycy;":                      '\U0000042B',
+       "Yfr;":                      '\U0001D51C',
+       "Yopf;":                     '\U0001D550',
+       "Yscr;":                     '\U0001D4B4',
+       "Yuml;":                     '\U00000178',
+       "ZHcy;":                     '\U00000416',
+       "Zacute;":                   '\U00000179',
+       "Zcaron;":                   '\U0000017D',
+       "Zcy;":                      '\U00000417',
+       "Zdot;":                     '\U0000017B',
+       "ZeroWidthSpace;":           '\U0000200B',
+       "Zeta;":                     '\U00000396',
+       "Zfr;":                      '\U00002128',
+       "Zopf;":                     '\U00002124',
+       "Zscr;":                     '\U0001D4B5',
+       "aacute;":                   '\U000000E1',
+       "abreve;":                   '\U00000103',
+       "ac;":                       '\U0000223E',
+       "acd;":                      '\U0000223F',
+       "acirc;":                    '\U000000E2',
+       "acute;":                    '\U000000B4',
+       "acy;":                      '\U00000430',
+       "aelig;":                    '\U000000E6',
+       "af;":                       '\U00002061',
+       "afr;":                      '\U0001D51E',
+       "agrave;":                   '\U000000E0',
+       "alefsym;":                  '\U00002135',
+       "aleph;":                    '\U00002135',
+       "alpha;":                    '\U000003B1',
+       "amacr;":                    '\U00000101',
+       "amalg;":                    '\U00002A3F',
+       "amp;":                      '\U00000026',
+       "and;":                      '\U00002227',
+       "andand;":                   '\U00002A55',
+       "andd;":                     '\U00002A5C',
+       "andslope;":                 '\U00002A58',
+       "andv;":                     '\U00002A5A',
+       "ang;":                      '\U00002220',
+       "ange;":                     '\U000029A4',
+       "angle;":                    '\U00002220',
+       "angmsd;":                   '\U00002221',
+       "angmsdaa;":                 '\U000029A8',
+       "angmsdab;":                 '\U000029A9',
+       "angmsdac;":                 '\U000029AA',
+       "angmsdad;":                 '\U000029AB',
+       "angmsdae;":                 '\U000029AC',
+       "angmsdaf;":                 '\U000029AD',
+       "angmsdag;":                 '\U000029AE',
+       "angmsdah;":                 '\U000029AF',
+       "angrt;":                    '\U0000221F',
+       "angrtvb;":                  '\U000022BE',
+       "angrtvbd;":                 '\U0000299D',
+       "angsph;":                   '\U00002222',
+       "angst;":                    '\U000000C5',
+       "angzarr;":                  '\U0000237C',
+       "aogon;":                    '\U00000105',
+       "aopf;":                     '\U0001D552',
+       "ap;":                       '\U00002248',
+       "apE;":                      '\U00002A70',
+       "apacir;":                   '\U00002A6F',
+       "ape;":                      '\U0000224A',
+       "apid;":                     '\U0000224B',
+       "apos;":                     '\U00000027',
+       "approx;":                   '\U00002248',
+       "approxeq;":                 '\U0000224A',
+       "aring;":                    '\U000000E5',
+       "ascr;":                     '\U0001D4B6',
+       "ast;":                      '\U0000002A',
+       "asymp;":                    '\U00002248',
+       "asympeq;":                  '\U0000224D',
+       "atilde;":                   '\U000000E3',
+       "auml;":                     '\U000000E4',
+       "awconint;":                 '\U00002233',
+       "awint;":                    '\U00002A11',
+       "bNot;":                     '\U00002AED',
+       "backcong;":                 '\U0000224C',
+       "backepsilon;":              '\U000003F6',
+       "backprime;":                '\U00002035',
+       "backsim;":                  '\U0000223D',
+       "backsimeq;":                '\U000022CD',
+       "barvee;":                   '\U000022BD',
+       "barwed;":                   '\U00002305',
+       "barwedge;":                 '\U00002305',
+       "bbrk;":                     '\U000023B5',
+       "bbrktbrk;":                 '\U000023B6',
+       "bcong;":                    '\U0000224C',
+       "bcy;":                      '\U00000431',
+       "bdquo;":                    '\U0000201E',
+       "becaus;":                   '\U00002235',
+       "because;":                  '\U00002235',
+       "bemptyv;":                  '\U000029B0',
+       "bepsi;":                    '\U000003F6',
+       "bernou;":                   '\U0000212C',
+       "beta;":                     '\U000003B2',
+       "beth;":                     '\U00002136',
+       "between;":                  '\U0000226C',
+       "bfr;":                      '\U0001D51F',
+       "bigcap;":                   '\U000022C2',
+       "bigcirc;":                  '\U000025EF',
+       "bigcup;":                   '\U000022C3',
+       "bigodot;":                  '\U00002A00',
+       "bigoplus;":                 '\U00002A01',
+       "bigotimes;":                '\U00002A02',
+       "bigsqcup;":                 '\U00002A06',
+       "bigstar;":                  '\U00002605',
+       "bigtriangledown;":          '\U000025BD',
+       "bigtriangleup;":            '\U000025B3',
+       "biguplus;":                 '\U00002A04',
+       "bigvee;":                   '\U000022C1',
+       "bigwedge;":                 '\U000022C0',
+       "bkarow;":                   '\U0000290D',
+       "blacklozenge;":             '\U000029EB',
+       "blacksquare;":              '\U000025AA',
+       "blacktriangle;":            '\U000025B4',
+       "blacktriangledown;":        '\U000025BE',
+       "blacktriangleleft;":        '\U000025C2',
+       "blacktriangleright;":       '\U000025B8',
+       "blank;":                    '\U00002423',
+       "blk12;":                    '\U00002592',
+       "blk14;":                    '\U00002591',
+       "blk34;":                    '\U00002593',
+       "block;":                    '\U00002588',
+       "bnot;":                     '\U00002310',
+       "bopf;":                     '\U0001D553',
+       "bot;":                      '\U000022A5',
+       "bottom;":                   '\U000022A5',
+       "bowtie;":                   '\U000022C8',
+       "boxDL;":                    '\U00002557',
+       "boxDR;":                    '\U00002554',
+       "boxDl;":                    '\U00002556',
+       "boxDr;":                    '\U00002553',
+       "boxH;":                     '\U00002550',
+       "boxHD;":                    '\U00002566',
+       "boxHU;":                    '\U00002569',
+       "boxHd;":                    '\U00002564',
+       "boxHu;":                    '\U00002567',
+       "boxUL;":                    '\U0000255D',
+       "boxUR;":                    '\U0000255A',
+       "boxUl;":                    '\U0000255C',
+       "boxUr;":                    '\U00002559',
+       "boxV;":                     '\U00002551',
+       "boxVH;":                    '\U0000256C',
+       "boxVL;":                    '\U00002563',
+       "boxVR;":                    '\U00002560',
+       "boxVh;":                    '\U0000256B',
+       "boxVl;":                    '\U00002562',
+       "boxVr;":                    '\U0000255F',
+       "boxbox;":                   '\U000029C9',
+       "boxdL;":                    '\U00002555',
+       "boxdR;":                    '\U00002552',
+       "boxdl;":                    '\U00002510',
+       "boxdr;":                    '\U0000250C',
+       "boxh;":                     '\U00002500',
+       "boxhD;":                    '\U00002565',
+       "boxhU;":                    '\U00002568',
+       "boxhd;":                    '\U0000252C',
+       "boxhu;":                    '\U00002534',
+       "boxminus;":                 '\U0000229F',
+       "boxplus;":                  '\U0000229E',
+       "boxtimes;":                 '\U000022A0',
+       "boxuL;":                    '\U0000255B',
+       "boxuR;":                    '\U00002558',
+       "boxul;":                    '\U00002518',
+       "boxur;":                    '\U00002514',
+       "boxv;":                     '\U00002502',
+       "boxvH;":                    '\U0000256A',
+       "boxvL;":                    '\U00002561',
+       "boxvR;":                    '\U0000255E',
+       "boxvh;":                    '\U0000253C',
+       "boxvl;":                    '\U00002524',
+       "boxvr;":                    '\U0000251C',
+       "bprime;":                   '\U00002035',
+       "breve;":                    '\U000002D8',
+       "brvbar;":                   '\U000000A6',
+       "bscr;":                     '\U0001D4B7',
+       "bsemi;":                    '\U0000204F',
+       "bsim;":                     '\U0000223D',
+       "bsime;":                    '\U000022CD',
+       "bsol;":                     '\U0000005C',
+       "bsolb;":                    '\U000029C5',
+       "bsolhsub;":                 '\U000027C8',
+       "bull;":                     '\U00002022',
+       "bullet;":                   '\U00002022',
+       "bump;":                     '\U0000224E',
+       "bumpE;":                    '\U00002AAE',
+       "bumpe;":                    '\U0000224F',
+       "bumpeq;":                   '\U0000224F',
+       "cacute;":                   '\U00000107',
+       "cap;":                      '\U00002229',
+       "capand;":                   '\U00002A44',
+       "capbrcup;":                 '\U00002A49',
+       "capcap;":                   '\U00002A4B',
+       "capcup;":                   '\U00002A47',
+       "capdot;":                   '\U00002A40',
+       "caret;":                    '\U00002041',
+       "caron;":                    '\U000002C7',
+       "ccaps;":                    '\U00002A4D',
+       "ccaron;":                   '\U0000010D',
+       "ccedil;":                   '\U000000E7',
+       "ccirc;":                    '\U00000109',
+       "ccups;":                    '\U00002A4C',
+       "ccupssm;":                  '\U00002A50',
+       "cdot;":                     '\U0000010B',
+       "cedil;":                    '\U000000B8',
+       "cemptyv;":                  '\U000029B2',
+       "cent;":                     '\U000000A2',
+       "centerdot;":                '\U000000B7',
+       "cfr;":                      '\U0001D520',
+       "chcy;":                     '\U00000447',
+       "check;":                    '\U00002713',
+       "checkmark;":                '\U00002713',
+       "chi;":                      '\U000003C7',
+       "cir;":                      '\U000025CB',
+       "cirE;":                     '\U000029C3',
+       "circ;":                     '\U000002C6',
+       "circeq;":                   '\U00002257',
+       "circlearrowleft;":          '\U000021BA',
+       "circlearrowright;":         '\U000021BB',
+       "circledR;":                 '\U000000AE',
+       "circledS;":                 '\U000024C8',
+       "circledast;":               '\U0000229B',
+       "circledcirc;":              '\U0000229A',
+       "circleddash;":              '\U0000229D',
+       "cire;":                     '\U00002257',
+       "cirfnint;":                 '\U00002A10',
+       "cirmid;":                   '\U00002AEF',
+       "cirscir;":                  '\U000029C2',
+       "clubs;":                    '\U00002663',
+       "clubsuit;":                 '\U00002663',
+       "colon;":                    '\U0000003A',
+       "colone;":                   '\U00002254',
+       "coloneq;":                  '\U00002254',
+       "comma;":                    '\U0000002C',
+       "commat;":                   '\U00000040',
+       "comp;":                     '\U00002201',
+       "compfn;":                   '\U00002218',
+       "complement;":               '\U00002201',
+       "complexes;":                '\U00002102',
+       "cong;":                     '\U00002245',
+       "congdot;":                  '\U00002A6D',
+       "conint;":                   '\U0000222E',
+       "copf;":                     '\U0001D554',
+       "coprod;":                   '\U00002210',
+       "copy;":                     '\U000000A9',
+       "copysr;":                   '\U00002117',
+       "crarr;":                    '\U000021B5',
+       "cross;":                    '\U00002717',
+       "cscr;":                     '\U0001D4B8',
+       "csub;":                     '\U00002ACF',
+       "csube;":                    '\U00002AD1',
+       "csup;":                     '\U00002AD0',
+       "csupe;":                    '\U00002AD2',
+       "ctdot;":                    '\U000022EF',
+       "cudarrl;":                  '\U00002938',
+       "cudarrr;":                  '\U00002935',
+       "cuepr;":                    '\U000022DE',
+       "cuesc;":                    '\U000022DF',
+       "cularr;":                   '\U000021B6',
+       "cularrp;":                  '\U0000293D',
+       "cup;":                      '\U0000222A',
+       "cupbrcap;":                 '\U00002A48',
+       "cupcap;":                   '\U00002A46',
+       "cupcup;":                   '\U00002A4A',
+       "cupdot;":                   '\U0000228D',
+       "cupor;":                    '\U00002A45',
+       "curarr;":                   '\U000021B7',
+       "curarrm;":                  '\U0000293C',
+       "curlyeqprec;":              '\U000022DE',
+       "curlyeqsucc;":              '\U000022DF',
+       "curlyvee;":                 '\U000022CE',
+       "curlywedge;":               '\U000022CF',
+       "curren;":                   '\U000000A4',
+       "curvearrowleft;":           '\U000021B6',
+       "curvearrowright;":          '\U000021B7',
+       "cuvee;":                    '\U000022CE',
+       "cuwed;":                    '\U000022CF',
+       "cwconint;":                 '\U00002232',
+       "cwint;":                    '\U00002231',
+       "cylcty;":                   '\U0000232D',
+       "dArr;":                     '\U000021D3',
+       "dHar;":                     '\U00002965',
+       "dagger;":                   '\U00002020',
+       "daleth;":                   '\U00002138',
+       "darr;":                     '\U00002193',
+       "dash;":                     '\U00002010',
+       "dashv;":                    '\U000022A3',
+       "dbkarow;":                  '\U0000290F',
+       "dblac;":                    '\U000002DD',
+       "dcaron;":                   '\U0000010F',
+       "dcy;":                      '\U00000434',
+       "dd;":                       '\U00002146',
+       "ddagger;":                  '\U00002021',
+       "ddarr;":                    '\U000021CA',
+       "ddotseq;":                  '\U00002A77',
+       "deg;":                      '\U000000B0',
+       "delta;":                    '\U000003B4',
+       "demptyv;":                  '\U000029B1',
+       "dfisht;":                   '\U0000297F',
+       "dfr;":                      '\U0001D521',
+       "dharl;":                    '\U000021C3',
+       "dharr;":                    '\U000021C2',
+       "diam;":                     '\U000022C4',
+       "diamond;":                  '\U000022C4',
+       "diamondsuit;":              '\U00002666',
+       "diams;":                    '\U00002666',
+       "die;":                      '\U000000A8',
+       "digamma;":                  '\U000003DD',
+       "disin;":                    '\U000022F2',
+       "div;":                      '\U000000F7',
+       "divide;":                   '\U000000F7',
+       "divideontimes;":            '\U000022C7',
+       "divonx;":                   '\U000022C7',
+       "djcy;":                     '\U00000452',
+       "dlcorn;":                   '\U0000231E',
+       "dlcrop;":                   '\U0000230D',
+       "dollar;":                   '\U00000024',
+       "dopf;":                     '\U0001D555',
+       "dot;":                      '\U000002D9',
+       "doteq;":                    '\U00002250',
+       "doteqdot;":                 '\U00002251',
+       "dotminus;":                 '\U00002238',
+       "dotplus;":                  '\U00002214',
+       "dotsquare;":                '\U000022A1',
+       "doublebarwedge;":           '\U00002306',
+       "downarrow;":                '\U00002193',
+       "downdownarrows;":           '\U000021CA',
+       "downharpoonleft;":          '\U000021C3',
+       "downharpoonright;":         '\U000021C2',
+       "drbkarow;":                 '\U00002910',
+       "drcorn;":                   '\U0000231F',
+       "drcrop;":                   '\U0000230C',
+       "dscr;":                     '\U0001D4B9',
+       "dscy;":                     '\U00000455',
+       "dsol;":                     '\U000029F6',
+       "dstrok;":                   '\U00000111',
+       "dtdot;":                    '\U000022F1',
+       "dtri;":                     '\U000025BF',
+       "dtrif;":                    '\U000025BE',
+       "duarr;":                    '\U000021F5',
+       "duhar;":                    '\U0000296F',
+       "dwangle;":                  '\U000029A6',
+       "dzcy;":                     '\U0000045F',
+       "dzigrarr;":                 '\U000027FF',
+       "eDDot;":                    '\U00002A77',
+       "eDot;":                     '\U00002251',
+       "eacute;":                   '\U000000E9',
+       "easter;":                   '\U00002A6E',
+       "ecaron;":                   '\U0000011B',
+       "ecir;":                     '\U00002256',
+       "ecirc;":                    '\U000000EA',
+       "ecolon;":                   '\U00002255',
+       "ecy;":                      '\U0000044D',
+       "edot;":                     '\U00000117',
+       "ee;":                       '\U00002147',
+       "efDot;":                    '\U00002252',
+       "efr;":                      '\U0001D522',
+       "eg;":                       '\U00002A9A',
+       "egrave;":                   '\U000000E8',
+       "egs;":                      '\U00002A96',
+       "egsdot;":                   '\U00002A98',
+       "el;":                       '\U00002A99',
+       "elinters;":                 '\U000023E7',
+       "ell;":                      '\U00002113',
+       "els;":                      '\U00002A95',
+       "elsdot;":                   '\U00002A97',
+       "emacr;":                    '\U00000113',
+       "empty;":                    '\U00002205',
+       "emptyset;":                 '\U00002205',
+       "emptyv;":                   '\U00002205',
+       "emsp;":                     '\U00002003',
+       "emsp13;":                   '\U00002004',
+       "emsp14;":                   '\U00002005',
+       "eng;":                      '\U0000014B',
+       "ensp;":                     '\U00002002',
+       "eogon;":                    '\U00000119',
+       "eopf;":                     '\U0001D556',
+       "epar;":                     '\U000022D5',
+       "eparsl;":                   '\U000029E3',
+       "eplus;":                    '\U00002A71',
+       "epsi;":                     '\U000003B5',
+       "epsilon;":                  '\U000003B5',
+       "epsiv;":                    '\U000003F5',
+       "eqcirc;":                   '\U00002256',
+       "eqcolon;":                  '\U00002255',
+       "eqsim;":                    '\U00002242',
+       "eqslantgtr;":               '\U00002A96',
+       "eqslantless;":              '\U00002A95',
+       "equals;":                   '\U0000003D',
+       "equest;":                   '\U0000225F',
+       "equiv;":                    '\U00002261',
+       "equivDD;":                  '\U00002A78',
+       "eqvparsl;":                 '\U000029E5',
+       "erDot;":                    '\U00002253',
+       "erarr;":                    '\U00002971',
+       "escr;":                     '\U0000212F',
+       "esdot;":                    '\U00002250',
+       "esim;":                     '\U00002242',
+       "eta;":                      '\U000003B7',
+       "eth;":                      '\U000000F0',
+       "euml;":                     '\U000000EB',
+       "euro;":                     '\U000020AC',
+       "excl;":                     '\U00000021',
+       "exist;":                    '\U00002203',
+       "expectation;":              '\U00002130',
+       "exponentiale;":             '\U00002147',
+       "fallingdotseq;":            '\U00002252',
+       "fcy;":                      '\U00000444',
+       "female;":                   '\U00002640',
+       "ffilig;":                   '\U0000FB03',
+       "fflig;":                    '\U0000FB00',
+       "ffllig;":                   '\U0000FB04',
+       "ffr;":                      '\U0001D523',
+       "filig;":                    '\U0000FB01',
+       "flat;":                     '\U0000266D',
+       "fllig;":                    '\U0000FB02',
+       "fltns;":                    '\U000025B1',
+       "fnof;":                     '\U00000192',
+       "fopf;":                     '\U0001D557',
+       "forall;":                   '\U00002200',
+       "fork;":                     '\U000022D4',
+       "forkv;":                    '\U00002AD9',
+       "fpartint;":                 '\U00002A0D',
+       "frac12;":                   '\U000000BD',
+       "frac13;":                   '\U00002153',
+       "frac14;":                   '\U000000BC',
+       "frac15;":                   '\U00002155',
+       "frac16;":                   '\U00002159',
+       "frac18;":                   '\U0000215B',
+       "frac23;":                   '\U00002154',
+       "frac25;":                   '\U00002156',
+       "frac34;":                   '\U000000BE',
+       "frac35;":                   '\U00002157',
+       "frac38;":                   '\U0000215C',
+       "frac45;":                   '\U00002158',
+       "frac56;":                   '\U0000215A',
+       "frac58;":                   '\U0000215D',
+       "frac78;":                   '\U0000215E',
+       "frasl;":                    '\U00002044',
+       "frown;":                    '\U00002322',
+       "fscr;":                     '\U0001D4BB',
+       "gE;":                       '\U00002267',
+       "gEl;":                      '\U00002A8C',
+       "gacute;":                   '\U000001F5',
+       "gamma;":                    '\U000003B3',
+       "gammad;":                   '\U000003DD',
+       "gap;":                      '\U00002A86',
+       "gbreve;":                   '\U0000011F',
+       "gcirc;":                    '\U0000011D',
+       "gcy;":                      '\U00000433',
+       "gdot;":                     '\U00000121',
+       "ge;":                       '\U00002265',
+       "gel;":                      '\U000022DB',
+       "geq;":                      '\U00002265',
+       "geqq;":                     '\U00002267',
+       "geqslant;":                 '\U00002A7E',
+       "ges;":                      '\U00002A7E',
+       "gescc;":                    '\U00002AA9',
+       "gesdot;":                   '\U00002A80',
+       "gesdoto;":                  '\U00002A82',
+       "gesdotol;":                 '\U00002A84',
+       "gesles;":                   '\U00002A94',
+       "gfr;":                      '\U0001D524',
+       "gg;":                       '\U0000226B',
+       "ggg;":                      '\U000022D9',
+       "gimel;":                    '\U00002137',
+       "gjcy;":                     '\U00000453',
+       "gl;":                       '\U00002277',
+       "glE;":                      '\U00002A92',
+       "gla;":                      '\U00002AA5',
+       "glj;":                      '\U00002AA4',
+       "gnE;":                      '\U00002269',
+       "gnap;":                     '\U00002A8A',
+       "gnapprox;":                 '\U00002A8A',
+       "gne;":                      '\U00002A88',
+       "gneq;":                     '\U00002A88',
+       "gneqq;":                    '\U00002269',
+       "gnsim;":                    '\U000022E7',
+       "gopf;":                     '\U0001D558',
+       "grave;":                    '\U00000060',
+       "gscr;":                     '\U0000210A',
+       "gsim;":                     '\U00002273',
+       "gsime;":                    '\U00002A8E',
+       "gsiml;":                    '\U00002A90',
+       "gt;":                       '\U0000003E',
+       "gtcc;":                     '\U00002AA7',
+       "gtcir;":                    '\U00002A7A',
+       "gtdot;":                    '\U000022D7',
+       "gtlPar;":                   '\U00002995',
+       "gtquest;":                  '\U00002A7C',
+       "gtrapprox;":                '\U00002A86',
+       "gtrarr;":                   '\U00002978',
+       "gtrdot;":                   '\U000022D7',
+       "gtreqless;":                '\U000022DB',
+       "gtreqqless;":               '\U00002A8C',
+       "gtrless;":                  '\U00002277',
+       "gtrsim;":                   '\U00002273',
+       "hArr;":                     '\U000021D4',
+       "hairsp;":                   '\U0000200A',
+       "half;":                     '\U000000BD',
+       "hamilt;":                   '\U0000210B',
+       "hardcy;":                   '\U0000044A',
+       "harr;":                     '\U00002194',
+       "harrcir;":                  '\U00002948',
+       "harrw;":                    '\U000021AD',
+       "hbar;":                     '\U0000210F',
+       "hcirc;":                    '\U00000125',
+       "hearts;":                   '\U00002665',
+       "heartsuit;":                '\U00002665',
+       "hellip;":                   '\U00002026',
+       "hercon;":                   '\U000022B9',
+       "hfr;":                      '\U0001D525',
+       "hksearow;":                 '\U00002925',
+       "hkswarow;":                 '\U00002926',
+       "hoarr;":                    '\U000021FF',
+       "homtht;":                   '\U0000223B',
+       "hookleftarrow;":            '\U000021A9',
+       "hookrightarrow;":           '\U000021AA',
+       "hopf;":                     '\U0001D559',
+       "horbar;":                   '\U00002015',
+       "hscr;":                     '\U0001D4BD',
+       "hslash;":                   '\U0000210F',
+       "hstrok;":                   '\U00000127',
+       "hybull;":                   '\U00002043',
+       "hyphen;":                   '\U00002010',
+       "iacute;":                   '\U000000ED',
+       "ic;":                       '\U00002063',
+       "icirc;":                    '\U000000EE',
+       "icy;":                      '\U00000438',
+       "iecy;":                     '\U00000435',
+       "iexcl;":                    '\U000000A1',
+       "iff;":                      '\U000021D4',
+       "ifr;":                      '\U0001D526',
+       "igrave;":                   '\U000000EC',
+       "ii;":                       '\U00002148',
+       "iiiint;":                   '\U00002A0C',
+       "iiint;":                    '\U0000222D',
+       "iinfin;":                   '\U000029DC',
+       "iiota;":                    '\U00002129',
+       "ijlig;":                    '\U00000133',
+       "imacr;":                    '\U0000012B',
+       "image;":                    '\U00002111',
+       "imagline;":                 '\U00002110',
+       "imagpart;":                 '\U00002111',
+       "imath;":                    '\U00000131',
+       "imof;":                     '\U000022B7',
+       "imped;":                    '\U000001B5',
+       "in;":                       '\U00002208',
+       "incare;":                   '\U00002105',
+       "infin;":                    '\U0000221E',
+       "infintie;":                 '\U000029DD',
+       "inodot;":                   '\U00000131',
+       "int;":                      '\U0000222B',
+       "intcal;":                   '\U000022BA',
+       "integers;":                 '\U00002124',
+       "intercal;":                 '\U000022BA',
+       "intlarhk;":                 '\U00002A17',
+       "intprod;":                  '\U00002A3C',
+       "iocy;":                     '\U00000451',
+       "iogon;":                    '\U0000012F',
+       "iopf;":                     '\U0001D55A',
+       "iota;":                     '\U000003B9',
+       "iprod;":                    '\U00002A3C',
+       "iquest;":                   '\U000000BF',
+       "iscr;":                     '\U0001D4BE',
+       "isin;":                     '\U00002208',
+       "isinE;":                    '\U000022F9',
+       "isindot;":                  '\U000022F5',
+       "isins;":                    '\U000022F4',
+       "isinsv;":                   '\U000022F3',
+       "isinv;":                    '\U00002208',
+       "it;":                       '\U00002062',
+       "itilde;":                   '\U00000129',
+       "iukcy;":                    '\U00000456',
+       "iuml;":                     '\U000000EF',
+       "jcirc;":                    '\U00000135',
+       "jcy;":                      '\U00000439',
+       "jfr;":                      '\U0001D527',
+       "jmath;":                    '\U00000237',
+       "jopf;":                     '\U0001D55B',
+       "jscr;":                     '\U0001D4BF',
+       "jsercy;":                   '\U00000458',
+       "jukcy;":                    '\U00000454',
+       "kappa;":                    '\U000003BA',
+       "kappav;":                   '\U000003F0',
+       "kcedil;":                   '\U00000137',
+       "kcy;":                      '\U0000043A',
+       "kfr;":                      '\U0001D528',
+       "kgreen;":                   '\U00000138',
+       "khcy;":                     '\U00000445',
+       "kjcy;":                     '\U0000045C',
+       "kopf;":                     '\U0001D55C',
+       "kscr;":                     '\U0001D4C0',
+       "lAarr;":                    '\U000021DA',
+       "lArr;":                     '\U000021D0',
+       "lAtail;":                   '\U0000291B',
+       "lBarr;":                    '\U0000290E',
+       "lE;":                       '\U00002266',
+       "lEg;":                      '\U00002A8B',
+       "lHar;":                     '\U00002962',
+       "lacute;":                   '\U0000013A',
+       "laemptyv;":                 '\U000029B4',
+       "lagran;":                   '\U00002112',
+       "lambda;":                   '\U000003BB',
+       "lang;":                     '\U000027E8',
+       "langd;":                    '\U00002991',
+       "langle;":                   '\U000027E8',
+       "lap;":                      '\U00002A85',
+       "laquo;":                    '\U000000AB',
+       "larr;":                     '\U00002190',
+       "larrb;":                    '\U000021E4',
+       "larrbfs;":                  '\U0000291F',
+       "larrfs;":                   '\U0000291D',
+       "larrhk;":                   '\U000021A9',
+       "larrlp;":                   '\U000021AB',
+       "larrpl;":                   '\U00002939',
+       "larrsim;":                  '\U00002973',
+       "larrtl;":                   '\U000021A2',
+       "lat;":                      '\U00002AAB',
+       "latail;":                   '\U00002919',
+       "late;":                     '\U00002AAD',
+       "lbarr;":                    '\U0000290C',
+       "lbbrk;":                    '\U00002772',
+       "lbrace;":                   '\U0000007B',
+       "lbrack;":                   '\U0000005B',
+       "lbrke;":                    '\U0000298B',
+       "lbrksld;":                  '\U0000298F',
+       "lbrkslu;":                  '\U0000298D',
+       "lcaron;":                   '\U0000013E',
+       "lcedil;":                   '\U0000013C',
+       "lceil;":                    '\U00002308',
+       "lcub;":                     '\U0000007B',
+       "lcy;":                      '\U0000043B',
+       "ldca;":                     '\U00002936',
+       "ldquo;":                    '\U0000201C',
+       "ldquor;":                   '\U0000201E',
+       "ldrdhar;":                  '\U00002967',
+       "ldrushar;":                 '\U0000294B',
+       "ldsh;":                     '\U000021B2',
+       "le;":                       '\U00002264',
+       "leftarrow;":                '\U00002190',
+       "leftarrowtail;":            '\U000021A2',
+       "leftharpoondown;":          '\U000021BD',
+       "leftharpoonup;":            '\U000021BC',
+       "leftleftarrows;":           '\U000021C7',
+       "leftrightarrow;":           '\U00002194',
+       "leftrightarrows;":          '\U000021C6',
+       "leftrightharpoons;":        '\U000021CB',
+       "leftrightsquigarrow;":      '\U000021AD',
+       "leftthreetimes;":           '\U000022CB',
+       "leg;":                      '\U000022DA',
+       "leq;":                      '\U00002264',
+       "leqq;":                     '\U00002266',
+       "leqslant;":                 '\U00002A7D',
+       "les;":                      '\U00002A7D',
+       "lescc;":                    '\U00002AA8',
+       "lesdot;":                   '\U00002A7F',
+       "lesdoto;":                  '\U00002A81',
+       "lesdotor;":                 '\U00002A83',
+       "lesges;":                   '\U00002A93',
+       "lessapprox;":               '\U00002A85',
+       "lessdot;":                  '\U000022D6',
+       "lesseqgtr;":                '\U000022DA',
+       "lesseqqgtr;":               '\U00002A8B',
+       "lessgtr;":                  '\U00002276',
+       "lesssim;":                  '\U00002272',
+       "lfisht;":                   '\U0000297C',
+       "lfloor;":                   '\U0000230A',
+       "lfr;":                      '\U0001D529',
+       "lg;":                       '\U00002276',
+       "lgE;":                      '\U00002A91',
+       "lhard;":                    '\U000021BD',
+       "lharu;":                    '\U000021BC',
+       "lharul;":                   '\U0000296A',
+       "lhblk;":                    '\U00002584',
+       "ljcy;":                     '\U00000459',
+       "ll;":                       '\U0000226A',
+       "llarr;":                    '\U000021C7',
+       "llcorner;":                 '\U0000231E',
+       "llhard;":                   '\U0000296B',
+       "lltri;":                    '\U000025FA',
+       "lmidot;":                   '\U00000140',
+       "lmoust;":                   '\U000023B0',
+       "lmoustache;":               '\U000023B0',
+       "lnE;":                      '\U00002268',
+       "lnap;":                     '\U00002A89',
+       "lnapprox;":                 '\U00002A89',
+       "lne;":                      '\U00002A87',
+       "lneq;":                     '\U00002A87',
+       "lneqq;":                    '\U00002268',
+       "lnsim;":                    '\U000022E6',
+       "loang;":                    '\U000027EC',
+       "loarr;":                    '\U000021FD',
+       "lobrk;":                    '\U000027E6',
+       "longleftarrow;":            '\U000027F5',
+       "longleftrightarrow;":       '\U000027F7',
+       "longmapsto;":               '\U000027FC',
+       "longrightarrow;":           '\U000027F6',
+       "looparrowleft;":            '\U000021AB',
+       "looparrowright;":           '\U000021AC',
+       "lopar;":                    '\U00002985',
+       "lopf;":                     '\U0001D55D',
+       "loplus;":                   '\U00002A2D',
+       "lotimes;":                  '\U00002A34',
+       "lowast;":                   '\U00002217',
+       "lowbar;":                   '\U0000005F',
+       "loz;":                      '\U000025CA',
+       "lozenge;":                  '\U000025CA',
+       "lozf;":                     '\U000029EB',
+       "lpar;":                     '\U00000028',
+       "lparlt;":                   '\U00002993',
+       "lrarr;":                    '\U000021C6',
+       "lrcorner;":                 '\U0000231F',
+       "lrhar;":                    '\U000021CB',
+       "lrhard;":                   '\U0000296D',
+       "lrm;":                      '\U0000200E',
+       "lrtri;":                    '\U000022BF',
+       "lsaquo;":                   '\U00002039',
+       "lscr;":                     '\U0001D4C1',
+       "lsh;":                      '\U000021B0',
+       "lsim;":                     '\U00002272',
+       "lsime;":                    '\U00002A8D',
+       "lsimg;":                    '\U00002A8F',
+       "lsqb;":                     '\U0000005B',
+       "lsquo;":                    '\U00002018',
+       "lsquor;":                   '\U0000201A',
+       "lstrok;":                   '\U00000142',
+       "lt;":                       '\U0000003C',
+       "ltcc;":                     '\U00002AA6',
+       "ltcir;":                    '\U00002A79',
+       "ltdot;":                    '\U000022D6',
+       "lthree;":                   '\U000022CB',
+       "ltimes;":                   '\U000022C9',
+       "ltlarr;":                   '\U00002976',
+       "ltquest;":                  '\U00002A7B',
+       "ltrPar;":                   '\U00002996',
+       "ltri;":                     '\U000025C3',
+       "ltrie;":                    '\U000022B4',
+       "ltrif;":                    '\U000025C2',
+       "lurdshar;":                 '\U0000294A',
+       "luruhar;":                  '\U00002966',
+       "mDDot;":                    '\U0000223A',
+       "macr;":                     '\U000000AF',
+       "male;":                     '\U00002642',
+       "malt;":                     '\U00002720',
+       "maltese;":                  '\U00002720',
+       "map;":                      '\U000021A6',
+       "mapsto;":                   '\U000021A6',
+       "mapstodown;":               '\U000021A7',
+       "mapstoleft;":               '\U000021A4',
+       "mapstoup;":                 '\U000021A5',
+       "marker;":                   '\U000025AE',
+       "mcomma;":                   '\U00002A29',
+       "mcy;":                      '\U0000043C',
+       "mdash;":                    '\U00002014',
+       "measuredangle;":            '\U00002221',
+       "mfr;":                      '\U0001D52A',
+       "mho;":                      '\U00002127',
+       "micro;":                    '\U000000B5',
+       "mid;":                      '\U00002223',
+       "midast;":                   '\U0000002A',
+       "midcir;":                   '\U00002AF0',
+       "middot;":                   '\U000000B7',
+       "minus;":                    '\U00002212',
+       "minusb;":                   '\U0000229F',
+       "minusd;":                   '\U00002238',
+       "minusdu;":                  '\U00002A2A',
+       "mlcp;":                     '\U00002ADB',
+       "mldr;":                     '\U00002026',
+       "mnplus;":                   '\U00002213',
+       "models;":                   '\U000022A7',
+       "mopf;":                     '\U0001D55E',
+       "mp;":                       '\U00002213',
+       "mscr;":                     '\U0001D4C2',
+       "mstpos;":                   '\U0000223E',
+       "mu;":                       '\U000003BC',
+       "multimap;":                 '\U000022B8',
+       "mumap;":                    '\U000022B8',
+       "nLeftarrow;":               '\U000021CD',
+       "nLeftrightarrow;":          '\U000021CE',
+       "nRightarrow;":              '\U000021CF',
+       "nVDash;":                   '\U000022AF',
+       "nVdash;":                   '\U000022AE',
+       "nabla;":                    '\U00002207',
+       "nacute;":                   '\U00000144',
+       "nap;":                      '\U00002249',
+       "napos;":                    '\U00000149',
+       "napprox;":                  '\U00002249',
+       "natur;":                    '\U0000266E',
+       "natural;":                  '\U0000266E',
+       "naturals;":                 '\U00002115',
+       "nbsp;":                     '\U000000A0',
+       "ncap;":                     '\U00002A43',
+       "ncaron;":                   '\U00000148',
+       "ncedil;":                   '\U00000146',
+       "ncong;":                    '\U00002247',
+       "ncup;":                     '\U00002A42',
+       "ncy;":                      '\U0000043D',
+       "ndash;":                    '\U00002013',
+       "ne;":                       '\U00002260',
+       "neArr;":                    '\U000021D7',
+       "nearhk;":                   '\U00002924',
+       "nearr;":                    '\U00002197',
+       "nearrow;":                  '\U00002197',
+       "nequiv;":                   '\U00002262',
+       "nesear;":                   '\U00002928',
+       "nexist;":                   '\U00002204',
+       "nexists;":                  '\U00002204',
+       "nfr;":                      '\U0001D52B',
+       "nge;":                      '\U00002271',
+       "ngeq;":                     '\U00002271',
+       "ngsim;":                    '\U00002275',
+       "ngt;":                      '\U0000226F',
+       "ngtr;":                     '\U0000226F',
+       "nhArr;":                    '\U000021CE',
+       "nharr;":                    '\U000021AE',
+       "nhpar;":                    '\U00002AF2',
+       "ni;":                       '\U0000220B',
+       "nis;":                      '\U000022FC',
+       "nisd;":                     '\U000022FA',
+       "niv;":                      '\U0000220B',
+       "njcy;":                     '\U0000045A',
+       "nlArr;":                    '\U000021CD',
+       "nlarr;":                    '\U0000219A',
+       "nldr;":                     '\U00002025',
+       "nle;":                      '\U00002270',
+       "nleftarrow;":               '\U0000219A',
+       "nleftrightarrow;":          '\U000021AE',
+       "nleq;":                     '\U00002270',
+       "nless;":                    '\U0000226E',
+       "nlsim;":                    '\U00002274',
+       "nlt;":                      '\U0000226E',
+       "nltri;":                    '\U000022EA',
+       "nltrie;":                   '\U000022EC',
+       "nmid;":                     '\U00002224',
+       "nopf;":                     '\U0001D55F',
+       "not;":                      '\U000000AC',
+       "notin;":                    '\U00002209',
+       "notinva;":                  '\U00002209',
+       "notinvb;":                  '\U000022F7',
+       "notinvc;":                  '\U000022F6',
+       "notni;":                    '\U0000220C',
+       "notniva;":                  '\U0000220C',
+       "notnivb;":                  '\U000022FE',
+       "notnivc;":                  '\U000022FD',
+       "npar;":                     '\U00002226',
+       "nparallel;":                '\U00002226',
+       "npolint;":                  '\U00002A14',
+       "npr;":                      '\U00002280',
+       "nprcue;":                   '\U000022E0',
+       "nprec;":                    '\U00002280',
+       "nrArr;":                    '\U000021CF',
+       "nrarr;":                    '\U0000219B',
+       "nrightarrow;":              '\U0000219B',
+       "nrtri;":                    '\U000022EB',
+       "nrtrie;":                   '\U000022ED',
+       "nsc;":                      '\U00002281',
+       "nsccue;":                   '\U000022E1',
+       "nscr;":                     '\U0001D4C3',
+       "nshortmid;":                '\U00002224',
+       "nshortparallel;":           '\U00002226',
+       "nsim;":                     '\U00002241',
+       "nsime;":                    '\U00002244',
+       "nsimeq;":                   '\U00002244',
+       "nsmid;":                    '\U00002224',
+       "nspar;":                    '\U00002226',
+       "nsqsube;":                  '\U000022E2',
+       "nsqsupe;":                  '\U000022E3',
+       "nsub;":                     '\U00002284',
+       "nsube;":                    '\U00002288',
+       "nsubseteq;":                '\U00002288',
+       "nsucc;":                    '\U00002281',
+       "nsup;":                     '\U00002285',
+       "nsupe;":                    '\U00002289',
+       "nsupseteq;":                '\U00002289',
+       "ntgl;":                     '\U00002279',
+       "ntilde;":                   '\U000000F1',
+       "ntlg;":                     '\U00002278',
+       "ntriangleleft;":            '\U000022EA',
+       "ntrianglelefteq;":          '\U000022EC',
+       "ntriangleright;":           '\U000022EB',
+       "ntrianglerighteq;":         '\U000022ED',
+       "nu;":                       '\U000003BD',
+       "num;":                      '\U00000023',
+       "numero;":                   '\U00002116',
+       "numsp;":                    '\U00002007',
+       "nvDash;":                   '\U000022AD',
+       "nvHarr;":                   '\U00002904',
+       "nvdash;":                   '\U000022AC',
+       "nvinfin;":                  '\U000029DE',
+       "nvlArr;":                   '\U00002902',
+       "nvrArr;":                   '\U00002903',
+       "nwArr;":                    '\U000021D6',
+       "nwarhk;":                   '\U00002923',
+       "nwarr;":                    '\U00002196',
+       "nwarrow;":                  '\U00002196',
+       "nwnear;":                   '\U00002927',
+       "oS;":                       '\U000024C8',
+       "oacute;":                   '\U000000F3',
+       "oast;":                     '\U0000229B',
+       "ocir;":                     '\U0000229A',
+       "ocirc;":                    '\U000000F4',
+       "ocy;":                      '\U0000043E',
+       "odash;":                    '\U0000229D',
+       "odblac;":                   '\U00000151',
+       "odiv;":                     '\U00002A38',
+       "odot;":                     '\U00002299',
+       "odsold;":                   '\U000029BC',
+       "oelig;":                    '\U00000153',
+       "ofcir;":                    '\U000029BF',
+       "ofr;":                      '\U0001D52C',
+       "ogon;":                     '\U000002DB',
+       "ograve;":                   '\U000000F2',
+       "ogt;":                      '\U000029C1',
+       "ohbar;":                    '\U000029B5',
+       "ohm;":                      '\U000003A9',
+       "oint;":                     '\U0000222E',
+       "olarr;":                    '\U000021BA',
+       "olcir;":                    '\U000029BE',
+       "olcross;":                  '\U000029BB',
+       "oline;":                    '\U0000203E',
+       "olt;":                      '\U000029C0',
+       "omacr;":                    '\U0000014D',
+       "omega;":                    '\U000003C9',
+       "omicron;":                  '\U000003BF',
+       "omid;":                     '\U000029B6',
+       "ominus;":                   '\U00002296',
+       "oopf;":                     '\U0001D560',
+       "opar;":                     '\U000029B7',
+       "operp;":                    '\U000029B9',
+       "oplus;":                    '\U00002295',
+       "or;":                       '\U00002228',
+       "orarr;":                    '\U000021BB',
+       "ord;":                      '\U00002A5D',
+       "order;":                    '\U00002134',
+       "orderof;":                  '\U00002134',
+       "ordf;":                     '\U000000AA',
+       "ordm;":                     '\U000000BA',
+       "origof;":                   '\U000022B6',
+       "oror;":                     '\U00002A56',
+       "orslope;":                  '\U00002A57',
+       "orv;":                      '\U00002A5B',
+       "oscr;":                     '\U00002134',
+       "oslash;":                   '\U000000F8',
+       "osol;":                     '\U00002298',
+       "otilde;":                   '\U000000F5',
+       "otimes;":                   '\U00002297',
+       "otimesas;":                 '\U00002A36',
+       "ouml;":                     '\U000000F6',
+       "ovbar;":                    '\U0000233D',
+       "par;":                      '\U00002225',
+       "para;":                     '\U000000B6',
+       "parallel;":                 '\U00002225',
+       "parsim;":                   '\U00002AF3',
+       "parsl;":                    '\U00002AFD',
+       "part;":                     '\U00002202',
+       "pcy;":                      '\U0000043F',
+       "percnt;":                   '\U00000025',
+       "period;":                   '\U0000002E',
+       "permil;":                   '\U00002030',
+       "perp;":                     '\U000022A5',
+       "pertenk;":                  '\U00002031',
+       "pfr;":                      '\U0001D52D',
+       "phi;":                      '\U000003C6',
+       "phiv;":                     '\U000003D5',
+       "phmmat;":                   '\U00002133',
+       "phone;":                    '\U0000260E',
+       "pi;":                       '\U000003C0',
+       "pitchfork;":                '\U000022D4',
+       "piv;":                      '\U000003D6',
+       "planck;":                   '\U0000210F',
+       "planckh;":                  '\U0000210E',
+       "plankv;":                   '\U0000210F',
+       "plus;":                     '\U0000002B',
+       "plusacir;":                 '\U00002A23',
+       "plusb;":                    '\U0000229E',
+       "pluscir;":                  '\U00002A22',
+       "plusdo;":                   '\U00002214',
+       "plusdu;":                   '\U00002A25',
+       "pluse;":                    '\U00002A72',
+       "plusmn;":                   '\U000000B1',
+       "plussim;":                  '\U00002A26',
+       "plustwo;":                  '\U00002A27',
+       "pm;":                       '\U000000B1',
+       "pointint;":                 '\U00002A15',
+       "popf;":                     '\U0001D561',
+       "pound;":                    '\U000000A3',
+       "pr;":                       '\U0000227A',
+       "prE;":                      '\U00002AB3',
+       "prap;":                     '\U00002AB7',
+       "prcue;":                    '\U0000227C',
+       "pre;":                      '\U00002AAF',
+       "prec;":                     '\U0000227A',
+       "precapprox;":               '\U00002AB7',
+       "preccurlyeq;":              '\U0000227C',
+       "preceq;":                   '\U00002AAF',
+       "precnapprox;":              '\U00002AB9',
+       "precneqq;":                 '\U00002AB5',
+       "precnsim;":                 '\U000022E8',
+       "precsim;":                  '\U0000227E',
+       "prime;":                    '\U00002032',
+       "primes;":                   '\U00002119',
+       "prnE;":                     '\U00002AB5',
+       "prnap;":                    '\U00002AB9',
+       "prnsim;":                   '\U000022E8',
+       "prod;":                     '\U0000220F',
+       "profalar;":                 '\U0000232E',
+       "profline;":                 '\U00002312',
+       "profsurf;":                 '\U00002313',
+       "prop;":                     '\U0000221D',
+       "propto;":                   '\U0000221D',
+       "prsim;":                    '\U0000227E',
+       "prurel;":                   '\U000022B0',
+       "pscr;":                     '\U0001D4C5',
+       "psi;":                      '\U000003C8',
+       "puncsp;":                   '\U00002008',
+       "qfr;":                      '\U0001D52E',
+       "qint;":                     '\U00002A0C',
+       "qopf;":                     '\U0001D562',
+       "qprime;":                   '\U00002057',
+       "qscr;":                     '\U0001D4C6',
+       "quaternions;":              '\U0000210D',
+       "quatint;":                  '\U00002A16',
+       "quest;":                    '\U0000003F',
+       "questeq;":                  '\U0000225F',
+       "quot;":                     '\U00000022',
+       "rAarr;":                    '\U000021DB',
+       "rArr;":                     '\U000021D2',
+       "rAtail;":                   '\U0000291C',
+       "rBarr;":                    '\U0000290F',
+       "rHar;":                     '\U00002964',
+       "racute;":                   '\U00000155',
+       "radic;":                    '\U0000221A',
+       "raemptyv;":                 '\U000029B3',
+       "rang;":                     '\U000027E9',
+       "rangd;":                    '\U00002992',
+       "range;":                    '\U000029A5',
+       "rangle;":                   '\U000027E9',
+       "raquo;":                    '\U000000BB',
+       "rarr;":                     '\U00002192',
+       "rarrap;":                   '\U00002975',
+       "rarrb;":                    '\U000021E5',
+       "rarrbfs;":                  '\U00002920',
+       "rarrc;":                    '\U00002933',
+       "rarrfs;":                   '\U0000291E',
+       "rarrhk;":                   '\U000021AA',
+       "rarrlp;":                   '\U000021AC',
+       "rarrpl;":                   '\U00002945',
+       "rarrsim;":                  '\U00002974',
+       "rarrtl;":                   '\U000021A3',
+       "rarrw;":                    '\U0000219D',
+       "ratail;":                   '\U0000291A',
+       "ratio;":                    '\U00002236',
+       "rationals;":                '\U0000211A',
+       "rbarr;":                    '\U0000290D',
+       "rbbrk;":                    '\U00002773',
+       "rbrace;":                   '\U0000007D',
+       "rbrack;":                   '\U0000005D',
+       "rbrke;":                    '\U0000298C',
+       "rbrksld;":                  '\U0000298E',
+       "rbrkslu;":                  '\U00002990',
+       "rcaron;":                   '\U00000159',
+       "rcedil;":                   '\U00000157',
+       "rceil;":                    '\U00002309',
+       "rcub;":                     '\U0000007D',
+       "rcy;":                      '\U00000440',
+       "rdca;":                     '\U00002937',
+       "rdldhar;":                  '\U00002969',
+       "rdquo;":                    '\U0000201D',
+       "rdquor;":                   '\U0000201D',
+       "rdsh;":                     '\U000021B3',
+       "real;":                     '\U0000211C',
+       "realine;":                  '\U0000211B',
+       "realpart;":                 '\U0000211C',
+       "reals;":                    '\U0000211D',
+       "rect;":                     '\U000025AD',
+       "reg;":                      '\U000000AE',
+       "rfisht;":                   '\U0000297D',
+       "rfloor;":                   '\U0000230B',
+       "rfr;":                      '\U0001D52F',
+       "rhard;":                    '\U000021C1',
+       "rharu;":                    '\U000021C0',
+       "rharul;":                   '\U0000296C',
+       "rho;":                      '\U000003C1',
+       "rhov;":                     '\U000003F1',
+       "rightarrow;":               '\U00002192',
+       "rightarrowtail;":           '\U000021A3',
+       "rightharpoondown;":         '\U000021C1',
+       "rightharpoonup;":           '\U000021C0',
+       "rightleftarrows;":          '\U000021C4',
+       "rightleftharpoons;":        '\U000021CC',
+       "rightrightarrows;":         '\U000021C9',
+       "rightsquigarrow;":          '\U0000219D',
+       "rightthreetimes;":          '\U000022CC',
+       "ring;":                     '\U000002DA',
+       "risingdotseq;":             '\U00002253',
+       "rlarr;":                    '\U000021C4',
+       "rlhar;":                    '\U000021CC',
+       "rlm;":                      '\U0000200F',
+       "rmoust;":                   '\U000023B1',
+       "rmoustache;":               '\U000023B1',
+       "rnmid;":                    '\U00002AEE',
+       "roang;":                    '\U000027ED',
+       "roarr;":                    '\U000021FE',
+       "robrk;":                    '\U000027E7',
+       "ropar;":                    '\U00002986',
+       "ropf;":                     '\U0001D563',
+       "roplus;":                   '\U00002A2E',
+       "rotimes;":                  '\U00002A35',
+       "rpar;":                     '\U00000029',
+       "rpargt;":                   '\U00002994',
+       "rppolint;":                 '\U00002A12',
+       "rrarr;":                    '\U000021C9',
+       "rsaquo;":                   '\U0000203A',
+       "rscr;":                     '\U0001D4C7',
+       "rsh;":                      '\U000021B1',
+       "rsqb;":                     '\U0000005D',
+       "rsquo;":                    '\U00002019',
+       "rsquor;":                   '\U00002019',
+       "rthree;":                   '\U000022CC',
+       "rtimes;":                   '\U000022CA',
+       "rtri;":                     '\U000025B9',
+       "rtrie;":                    '\U000022B5',
+       "rtrif;":                    '\U000025B8',
+       "rtriltri;":                 '\U000029CE',
+       "ruluhar;":                  '\U00002968',
+       "rx;":                       '\U0000211E',
+       "sacute;":                   '\U0000015B',
+       "sbquo;":                    '\U0000201A',
+       "sc;":                       '\U0000227B',
+       "scE;":                      '\U00002AB4',
+       "scap;":                     '\U00002AB8',
+       "scaron;":                   '\U00000161',
+       "sccue;":                    '\U0000227D',
+       "sce;":                      '\U00002AB0',
+       "scedil;":                   '\U0000015F',
+       "scirc;":                    '\U0000015D',
+       "scnE;":                     '\U00002AB6',
+       "scnap;":                    '\U00002ABA',
+       "scnsim;":                   '\U000022E9',
+       "scpolint;":                 '\U00002A13',
+       "scsim;":                    '\U0000227F',
+       "scy;":                      '\U00000441',
+       "sdot;":                     '\U000022C5',
+       "sdotb;":                    '\U000022A1',
+       "sdote;":                    '\U00002A66',
+       "seArr;":                    '\U000021D8',
+       "searhk;":                   '\U00002925',
+       "searr;":                    '\U00002198',
+       "searrow;":                  '\U00002198',
+       "sect;":                     '\U000000A7',
+       "semi;":                     '\U0000003B',
+       "seswar;":                   '\U00002929',
+       "setminus;":                 '\U00002216',
+       "setmn;":                    '\U00002216',
+       "sext;":                     '\U00002736',
+       "sfr;":                      '\U0001D530',
+       "sfrown;":                   '\U00002322',
+       "sharp;":                    '\U0000266F',
+       "shchcy;":                   '\U00000449',
+       "shcy;":                     '\U00000448',
+       "shortmid;":                 '\U00002223',
+       "shortparallel;":            '\U00002225',
+       "shy;":                      '\U000000AD',
+       "sigma;":                    '\U000003C3',
+       "sigmaf;":                   '\U000003C2',
+       "sigmav;":                   '\U000003C2',
+       "sim;":                      '\U0000223C',
+       "simdot;":                   '\U00002A6A',
+       "sime;":                     '\U00002243',
+       "simeq;":                    '\U00002243',
+       "simg;":                     '\U00002A9E',
+       "simgE;":                    '\U00002AA0',
+       "siml;":                     '\U00002A9D',
+       "simlE;":                    '\U00002A9F',
+       "simne;":                    '\U00002246',
+       "simplus;":                  '\U00002A24',
+       "simrarr;":                  '\U00002972',
+       "slarr;":                    '\U00002190',
+       "smallsetminus;":            '\U00002216',
+       "smashp;":                   '\U00002A33',
+       "smeparsl;":                 '\U000029E4',
+       "smid;":                     '\U00002223',
+       "smile;":                    '\U00002323',
+       "smt;":                      '\U00002AAA',
+       "smte;":                     '\U00002AAC',
+       "softcy;":                   '\U0000044C',
+       "sol;":                      '\U0000002F',
+       "solb;":                     '\U000029C4',
+       "solbar;":                   '\U0000233F',
+       "sopf;":                     '\U0001D564',
+       "spades;":                   '\U00002660',
+       "spadesuit;":                '\U00002660',
+       "spar;":                     '\U00002225',
+       "sqcap;":                    '\U00002293',
+       "sqcup;":                    '\U00002294',
+       "sqsub;":                    '\U0000228F',
+       "sqsube;":                   '\U00002291',
+       "sqsubset;":                 '\U0000228F',
+       "sqsubseteq;":               '\U00002291',
+       "sqsup;":                    '\U00002290',
+       "sqsupe;":                   '\U00002292',
+       "sqsupset;":                 '\U00002290',
+       "sqsupseteq;":               '\U00002292',
+       "squ;":                      '\U000025A1',
+       "square;":                   '\U000025A1',
+       "squarf;":                   '\U000025AA',
+       "squf;":                     '\U000025AA',
+       "srarr;":                    '\U00002192',
+       "sscr;":                     '\U0001D4C8',
+       "ssetmn;":                   '\U00002216',
+       "ssmile;":                   '\U00002323',
+       "sstarf;":                   '\U000022C6',
+       "star;":                     '\U00002606',
+       "starf;":                    '\U00002605',
+       "straightepsilon;":          '\U000003F5',
+       "straightphi;":              '\U000003D5',
+       "strns;":                    '\U000000AF',
+       "sub;":                      '\U00002282',
+       "subE;":                     '\U00002AC5',
+       "subdot;":                   '\U00002ABD',
+       "sube;":                     '\U00002286',
+       "subedot;":                  '\U00002AC3',
+       "submult;":                  '\U00002AC1',
+       "subnE;":                    '\U00002ACB',
+       "subne;":                    '\U0000228A',
+       "subplus;":                  '\U00002ABF',
+       "subrarr;":                  '\U00002979',
+       "subset;":                   '\U00002282',
+       "subseteq;":                 '\U00002286',
+       "subseteqq;":                '\U00002AC5',
+       "subsetneq;":                '\U0000228A',
+       "subsetneqq;":               '\U00002ACB',
+       "subsim;":                   '\U00002AC7',
+       "subsub;":                   '\U00002AD5',
+       "subsup;":                   '\U00002AD3',
+       "succ;":                     '\U0000227B',
+       "succapprox;":               '\U00002AB8',
+       "succcurlyeq;":              '\U0000227D',
+       "succeq;":                   '\U00002AB0',
+       "succnapprox;":              '\U00002ABA',
+       "succneqq;":                 '\U00002AB6',
+       "succnsim;":                 '\U000022E9',
+       "succsim;":                  '\U0000227F',
+       "sum;":                      '\U00002211',
+       "sung;":                     '\U0000266A',
+       "sup;":                      '\U00002283',
+       "sup1;":                     '\U000000B9',
+       "sup2;":                     '\U000000B2',
+       "sup3;":                     '\U000000B3',
+       "supE;":                     '\U00002AC6',
+       "supdot;":                   '\U00002ABE',
+       "supdsub;":                  '\U00002AD8',
+       "supe;":                     '\U00002287',
+       "supedot;":                  '\U00002AC4',
+       "suphsol;":                  '\U000027C9',
+       "suphsub;":                  '\U00002AD7',
+       "suplarr;":                  '\U0000297B',
+       "supmult;":                  '\U00002AC2',
+       "supnE;":                    '\U00002ACC',
+       "supne;":                    '\U0000228B',
+       "supplus;":                  '\U00002AC0',
+       "supset;":                   '\U00002283',
+       "supseteq;":                 '\U00002287',
+       "supseteqq;":                '\U00002AC6',
+       "supsetneq;":                '\U0000228B',
+       "supsetneqq;":               '\U00002ACC',
+       "supsim;":                   '\U00002AC8',
+       "supsub;":                   '\U00002AD4',
+       "supsup;":                   '\U00002AD6',
+       "swArr;":                    '\U000021D9',
+       "swarhk;":                   '\U00002926',
+       "swarr;":                    '\U00002199',
+       "swarrow;":                  '\U00002199',
+       "swnwar;":                   '\U0000292A',
+       "szlig;":                    '\U000000DF',
+       "target;":                   '\U00002316',
+       "tau;":                      '\U000003C4',
+       "tbrk;":                     '\U000023B4',
+       "tcaron;":                   '\U00000165',
+       "tcedil;":                   '\U00000163',
+       "tcy;":                      '\U00000442',
+       "tdot;":                     '\U000020DB',
+       "telrec;":                   '\U00002315',
+       "tfr;":                      '\U0001D531',
+       "there4;":                   '\U00002234',
+       "therefore;":                '\U00002234',
+       "theta;":                    '\U000003B8',
+       "thetasym;":                 '\U000003D1',
+       "thetav;":                   '\U000003D1',
+       "thickapprox;":              '\U00002248',
+       "thicksim;":                 '\U0000223C',
+       "thinsp;":                   '\U00002009',
+       "thkap;":                    '\U00002248',
+       "thksim;":                   '\U0000223C',
+       "thorn;":                    '\U000000FE',
+       "tilde;":                    '\U000002DC',
+       "times;":                    '\U000000D7',
+       "timesb;":                   '\U000022A0',
+       "timesbar;":                 '\U00002A31',
+       "timesd;":                   '\U00002A30',
+       "tint;":                     '\U0000222D',
+       "toea;":                     '\U00002928',
+       "top;":                      '\U000022A4',
+       "topbot;":                   '\U00002336',
+       "topcir;":                   '\U00002AF1',
+       "topf;":                     '\U0001D565',
+       "topfork;":                  '\U00002ADA',
+       "tosa;":                     '\U00002929',
+       "tprime;":                   '\U00002034',
+       "trade;":                    '\U00002122',
+       "triangle;":                 '\U000025B5',
+       "triangledown;":             '\U000025BF',
+       "triangleleft;":             '\U000025C3',
+       "trianglelefteq;":           '\U000022B4',
+       "triangleq;":                '\U0000225C',
+       "triangleright;":            '\U000025B9',
+       "trianglerighteq;":          '\U000022B5',
+       "tridot;":                   '\U000025EC',
+       "trie;":                     '\U0000225C',
+       "triminus;":                 '\U00002A3A',
+       "triplus;":                  '\U00002A39',
+       "trisb;":                    '\U000029CD',
+       "tritime;":                  '\U00002A3B',
+       "trpezium;":                 '\U000023E2',
+       "tscr;":                     '\U0001D4C9',
+       "tscy;":                     '\U00000446',
+       "tshcy;":                    '\U0000045B',
+       "tstrok;":                   '\U00000167',
+       "twixt;":                    '\U0000226C',
+       "twoheadleftarrow;":         '\U0000219E',
+       "twoheadrightarrow;":        '\U000021A0',
+       "uArr;":                     '\U000021D1',
+       "uHar;":                     '\U00002963',
+       "uacute;":                   '\U000000FA',
+       "uarr;":                     '\U00002191',
+       "ubrcy;":                    '\U0000045E',
+       "ubreve;":                   '\U0000016D',
+       "ucirc;":                    '\U000000FB',
+       "ucy;":                      '\U00000443',
+       "udarr;":                    '\U000021C5',
+       "udblac;":                   '\U00000171',
+       "udhar;":                    '\U0000296E',
+       "ufisht;":                   '\U0000297E',
+       "ufr;":                      '\U0001D532',
+       "ugrave;":                   '\U000000F9',
+       "uharl;":                    '\U000021BF',
+       "uharr;":                    '\U000021BE',
+       "uhblk;":                    '\U00002580',
+       "ulcorn;":                   '\U0000231C',
+       "ulcorner;":                 '\U0000231C',
+       "ulcrop;":                   '\U0000230F',
+       "ultri;":                    '\U000025F8',
+       "umacr;":                    '\U0000016B',
+       "uml;":                      '\U000000A8',
+       "uogon;":                    '\U00000173',
+       "uopf;":                     '\U0001D566',
+       "uparrow;":                  '\U00002191',
+       "updownarrow;":              '\U00002195',
+       "upharpoonleft;":            '\U000021BF',
+       "upharpoonright;":           '\U000021BE',
+       "uplus;":                    '\U0000228E',
+       "upsi;":                     '\U000003C5',
+       "upsih;":                    '\U000003D2',
+       "upsilon;":                  '\U000003C5',
+       "upuparrows;":               '\U000021C8',
+       "urcorn;":                   '\U0000231D',
+       "urcorner;":                 '\U0000231D',
+       "urcrop;":                   '\U0000230E',
+       "uring;":                    '\U0000016F',
+       "urtri;":                    '\U000025F9',
+       "uscr;":                     '\U0001D4CA',
+       "utdot;":                    '\U000022F0',
+       "utilde;":                   '\U00000169',
+       "utri;":                     '\U000025B5',
+       "utrif;":                    '\U000025B4',
+       "uuarr;":                    '\U000021C8',
+       "uuml;":                     '\U000000FC',
+       "uwangle;":                  '\U000029A7',
+       "vArr;":                     '\U000021D5',
+       "vBar;":                     '\U00002AE8',
+       "vBarv;":                    '\U00002AE9',
+       "vDash;":                    '\U000022A8',
+       "vangrt;":                   '\U0000299C',
+       "varepsilon;":               '\U000003F5',
+       "varkappa;":                 '\U000003F0',
+       "varnothing;":               '\U00002205',
+       "varphi;":                   '\U000003D5',
+       "varpi;":                    '\U000003D6',
+       "varpropto;":                '\U0000221D',
+       "varr;":                     '\U00002195',
+       "varrho;":                   '\U000003F1',
+       "varsigma;":                 '\U000003C2',
+       "vartheta;":                 '\U000003D1',
+       "vartriangleleft;":          '\U000022B2',
+       "vartriangleright;":         '\U000022B3',
+       "vcy;":                      '\U00000432',
+       "vdash;":                    '\U000022A2',
+       "vee;":                      '\U00002228',
+       "veebar;":                   '\U000022BB',
+       "veeeq;":                    '\U0000225A',
+       "vellip;":                   '\U000022EE',
+       "verbar;":                   '\U0000007C',
+       "vert;":                     '\U0000007C',
+       "vfr;":                      '\U0001D533',
+       "vltri;":                    '\U000022B2',
+       "vopf;":                     '\U0001D567',
+       "vprop;":                    '\U0000221D',
+       "vrtri;":                    '\U000022B3',
+       "vscr;":                     '\U0001D4CB',
+       "vzigzag;":                  '\U0000299A',
+       "wcirc;":                    '\U00000175',
+       "wedbar;":                   '\U00002A5F',
+       "wedge;":                    '\U00002227',
+       "wedgeq;":                   '\U00002259',
+       "weierp;":                   '\U00002118',
+       "wfr;":                      '\U0001D534',
+       "wopf;":                     '\U0001D568',
+       "wp;":                       '\U00002118',
+       "wr;":                       '\U00002240',
+       "wreath;":                   '\U00002240',
+       "wscr;":                     '\U0001D4CC',
+       "xcap;":                     '\U000022C2',
+       "xcirc;":                    '\U000025EF',
+       "xcup;":                     '\U000022C3',
+       "xdtri;":                    '\U000025BD',
+       "xfr;":                      '\U0001D535',
+       "xhArr;":                    '\U000027FA',
+       "xharr;":                    '\U000027F7',
+       "xi;":                       '\U000003BE',
+       "xlArr;":                    '\U000027F8',
+       "xlarr;":                    '\U000027F5',
+       "xmap;":                     '\U000027FC',
+       "xnis;":                     '\U000022FB',
+       "xodot;":                    '\U00002A00',
+       "xopf;":                     '\U0001D569',
+       "xoplus;":                   '\U00002A01',
+       "xotime;":                   '\U00002A02',
+       "xrArr;":                    '\U000027F9',
+       "xrarr;":                    '\U000027F6',
+       "xscr;":                     '\U0001D4CD',
+       "xsqcup;":                   '\U00002A06',
+       "xuplus;":                   '\U00002A04',
+       "xutri;":                    '\U000025B3',
+       "xvee;":                     '\U000022C1',
+       "xwedge;":                   '\U000022C0',
+       "yacute;":                   '\U000000FD',
+       "yacy;":                     '\U0000044F',
+       "ycirc;":                    '\U00000177',
+       "ycy;":                      '\U0000044B',
+       "yen;":                      '\U000000A5',
+       "yfr;":                      '\U0001D536',
+       "yicy;":                     '\U00000457',
+       "yopf;":                     '\U0001D56A',
+       "yscr;":                     '\U0001D4CE',
+       "yucy;":                     '\U0000044E',
+       "yuml;":                     '\U000000FF',
+       "zacute;":                   '\U0000017A',
+       "zcaron;":                   '\U0000017E',
+       "zcy;":                      '\U00000437',
+       "zdot;":                     '\U0000017C',
+       "zeetrf;":                   '\U00002128',
+       "zeta;":                     '\U000003B6',
+       "zfr;":                      '\U0001D537',
+       "zhcy;":                     '\U00000436',
+       "zigrarr;":                  '\U000021DD',
+       "zopf;":                     '\U0001D56B',
+       "zscr;":                     '\U0001D4CF',
+       "zwj;":                      '\U0000200D',
+       "zwnj;":                     '\U0000200C',
+       "AElig":                     '\U000000C6',
+       "AMP":                       '\U00000026',
+       "Aacute":                    '\U000000C1',
+       "Acirc":                     '\U000000C2',
+       "Agrave":                    '\U000000C0',
+       "Aring":                     '\U000000C5',
+       "Atilde":                    '\U000000C3',
+       "Auml":                      '\U000000C4',
+       "COPY":                      '\U000000A9',
+       "Ccedil":                    '\U000000C7',
+       "ETH":                       '\U000000D0',
+       "Eacute":                    '\U000000C9',
+       "Ecirc":                     '\U000000CA',
+       "Egrave":                    '\U000000C8',
+       "Euml":                      '\U000000CB',
+       "GT":                        '\U0000003E',
+       "Iacute":                    '\U000000CD',
+       "Icirc":                     '\U000000CE',
+       "Igrave":                    '\U000000CC',
+       "Iuml":                      '\U000000CF',
+       "LT":                        '\U0000003C',
+       "Ntilde":                    '\U000000D1',
+       "Oacute":                    '\U000000D3',
+       "Ocirc":                     '\U000000D4',
+       "Ograve":                    '\U000000D2',
+       "Oslash":                    '\U000000D8',
+       "Otilde":                    '\U000000D5',
+       "Ouml":                      '\U000000D6',
+       "QUOT":                      '\U00000022',
+       "REG":                       '\U000000AE',
+       "THORN":                     '\U000000DE',
+       "Uacute":                    '\U000000DA',
+       "Ucirc":                     '\U000000DB',
+       "Ugrave":                    '\U000000D9',
+       "Uuml":                      '\U000000DC',
+       "Yacute":                    '\U000000DD',
+       "aacute":                    '\U000000E1',
+       "acirc":                     '\U000000E2',
+       "acute":                     '\U000000B4',
+       "aelig":                     '\U000000E6',
+       "agrave":                    '\U000000E0',
+       "amp":                       '\U00000026',
+       "aring":                     '\U000000E5',
+       "atilde":                    '\U000000E3',
+       "auml":                      '\U000000E4',
+       "brvbar":                    '\U000000A6',
+       "ccedil":                    '\U000000E7',
+       "cedil":                     '\U000000B8',
+       "cent":                      '\U000000A2',
+       "copy":                      '\U000000A9',
+       "curren":                    '\U000000A4',
+       "deg":                       '\U000000B0',
+       "divide":                    '\U000000F7',
+       "eacute":                    '\U000000E9',
+       "ecirc":                     '\U000000EA',
+       "egrave":                    '\U000000E8',
+       "eth":                       '\U000000F0',
+       "euml":                      '\U000000EB',
+       "frac12":                    '\U000000BD',
+       "frac14":                    '\U000000BC',
+       "frac34":                    '\U000000BE',
+       "gt":                        '\U0000003E',
+       "iacute":                    '\U000000ED',
+       "icirc":                     '\U000000EE',
+       "iexcl":                     '\U000000A1',
+       "igrave":                    '\U000000EC',
+       "iquest":                    '\U000000BF',
+       "iuml":                      '\U000000EF',
+       "laquo":                     '\U000000AB',
+       "lt":                        '\U0000003C',
+       "macr":                      '\U000000AF',
+       "micro":                     '\U000000B5',
+       "middot":                    '\U000000B7',
+       "nbsp":                      '\U000000A0',
+       "not":                       '\U000000AC',
+       "ntilde":                    '\U000000F1',
+       "oacute":                    '\U000000F3',
+       "ocirc":                     '\U000000F4',
+       "ograve":                    '\U000000F2',
+       "ordf":                      '\U000000AA',
+       "ordm":                      '\U000000BA',
+       "oslash":                    '\U000000F8',
+       "otilde":                    '\U000000F5',
+       "ouml":                      '\U000000F6',
+       "para":                      '\U000000B6',
+       "plusmn":                    '\U000000B1',
+       "pound":                     '\U000000A3',
+       "quot":                      '\U00000022',
+       "raquo":                     '\U000000BB',
+       "reg":                       '\U000000AE',
+       "sect":                      '\U000000A7',
+       "shy":                       '\U000000AD',
+       "sup1":                      '\U000000B9',
+       "sup2":                      '\U000000B2',
+       "sup3":                      '\U000000B3',
+       "szlig":                     '\U000000DF',
+       "thorn":                     '\U000000FE',
+       "times":                     '\U000000D7',
+       "uacute":                    '\U000000FA',
+       "ucirc":                     '\U000000FB',
+       "ugrave":                    '\U000000F9',
+       "uml":                       '\U000000A8',
+       "uuml":                      '\U000000FC',
+       "yacute":                    '\U000000FD',
+       "yen":                       '\U000000A5',
+       "yuml":                      '\U000000FF',
+}
+
+// HTML entities that are two unicode codepoints.
+var entity2 = map[string][2]rune{
+       // TODO(nigeltao): Handle replacements that are wider than their names.
+       // "nLt;":                     {'\u226A', '\u20D2'},
+       // "nGt;":                     {'\u226B', '\u20D2'},
+       "NotEqualTilde;":           {'\u2242', '\u0338'},
+       "NotGreaterFullEqual;":     {'\u2267', '\u0338'},
+       "NotGreaterGreater;":       {'\u226B', '\u0338'},
+       "NotGreaterSlantEqual;":    {'\u2A7E', '\u0338'},
+       "NotHumpDownHump;":         {'\u224E', '\u0338'},
+       "NotHumpEqual;":            {'\u224F', '\u0338'},
+       "NotLeftTriangleBar;":      {'\u29CF', '\u0338'},
+       "NotLessLess;":             {'\u226A', '\u0338'},
+       "NotLessSlantEqual;":       {'\u2A7D', '\u0338'},
+       "NotNestedGreaterGreater;": {'\u2AA2', '\u0338'},
+       "NotNestedLessLess;":       {'\u2AA1', '\u0338'},
+       "NotPrecedesEqual;":        {'\u2AAF', '\u0338'},
+       "NotRightTriangleBar;":     {'\u29D0', '\u0338'},
+       "NotSquareSubset;":         {'\u228F', '\u0338'},
+       "NotSquareSuperset;":       {'\u2290', '\u0338'},
+       "NotSubset;":               {'\u2282', '\u20D2'},
+       "NotSucceedsEqual;":        {'\u2AB0', '\u0338'},
+       "NotSucceedsTilde;":        {'\u227F', '\u0338'},
+       "NotSuperset;":             {'\u2283', '\u20D2'},
+       "ThickSpace;":              {'\u205F', '\u200A'},
+       "acE;":                     {'\u223E', '\u0333'},
+       "bne;":                     {'\u003D', '\u20E5'},
+       "bnequiv;":                 {'\u2261', '\u20E5'},
+       "caps;":                    {'\u2229', '\uFE00'},
+       "cups;":                    {'\u222A', '\uFE00'},
+       "fjlig;":                   {'\u0066', '\u006A'},
+       "gesl;":                    {'\u22DB', '\uFE00'},
+       "gvertneqq;":               {'\u2269', '\uFE00'},
+       "gvnE;":                    {'\u2269', '\uFE00'},
+       "lates;":                   {'\u2AAD', '\uFE00'},
+       "lesg;":                    {'\u22DA', '\uFE00'},
+       "lvertneqq;":               {'\u2268', '\uFE00'},
+       "lvnE;":                    {'\u2268', '\uFE00'},
+       "nGg;":                     {'\u22D9', '\u0338'},
+       "nGtv;":                    {'\u226B', '\u0338'},
+       "nLl;":                     {'\u22D8', '\u0338'},
+       "nLtv;":                    {'\u226A', '\u0338'},
+       "nang;":                    {'\u2220', '\u20D2'},
+       "napE;":                    {'\u2A70', '\u0338'},
+       "napid;":                   {'\u224B', '\u0338'},
+       "nbump;":                   {'\u224E', '\u0338'},
+       "nbumpe;":                  {'\u224F', '\u0338'},
+       "ncongdot;":                {'\u2A6D', '\u0338'},
+       "nedot;":                   {'\u2250', '\u0338'},
+       "nesim;":                   {'\u2242', '\u0338'},
+       "ngE;":                     {'\u2267', '\u0338'},
+       "ngeqq;":                   {'\u2267', '\u0338'},
+       "ngeqslant;":               {'\u2A7E', '\u0338'},
+       "nges;":                    {'\u2A7E', '\u0338'},
+       "nlE;":                     {'\u2266', '\u0338'},
+       "nleqq;":                   {'\u2266', '\u0338'},
+       "nleqslant;":               {'\u2A7D', '\u0338'},
+       "nles;":                    {'\u2A7D', '\u0338'},
+       "notinE;":                  {'\u22F9', '\u0338'},
+       "notindot;":                {'\u22F5', '\u0338'},
+       "nparsl;":                  {'\u2AFD', '\u20E5'},
+       "npart;":                   {'\u2202', '\u0338'},
+       "npre;":                    {'\u2AAF', '\u0338'},
+       "npreceq;":                 {'\u2AAF', '\u0338'},
+       "nrarrc;":                  {'\u2933', '\u0338'},
+       "nrarrw;":                  {'\u219D', '\u0338'},
+       "nsce;":                    {'\u2AB0', '\u0338'},
+       "nsubE;":                   {'\u2AC5', '\u0338'},
+       "nsubset;":                 {'\u2282', '\u20D2'},
+       "nsubseteqq;":              {'\u2AC5', '\u0338'},
+       "nsucceq;":                 {'\u2AB0', '\u0338'},
+       "nsupE;":                   {'\u2AC6', '\u0338'},
+       "nsupset;":                 {'\u2283', '\u20D2'},
+       "nsupseteqq;":              {'\u2AC6', '\u0338'},
+       "nvap;":                    {'\u224D', '\u20D2'},
+       "nvge;":                    {'\u2265', '\u20D2'},
+       "nvgt;":                    {'\u003E', '\u20D2'},
+       "nvle;":                    {'\u2264', '\u20D2'},
+       "nvlt;":                    {'\u003C', '\u20D2'},
+       "nvltrie;":                 {'\u22B4', '\u20D2'},
+       "nvrtrie;":                 {'\u22B5', '\u20D2'},
+       "nvsim;":                   {'\u223C', '\u20D2'},
+       "race;":                    {'\u223D', '\u0331'},
+       "smtes;":                   {'\u2AAC', '\uFE00'},
+       "sqcaps;":                  {'\u2293', '\uFE00'},
+       "sqcups;":                  {'\u2294', '\uFE00'},
+       "varsubsetneq;":            {'\u228A', '\uFE00'},
+       "varsubsetneqq;":           {'\u2ACB', '\uFE00'},
+       "varsupsetneq;":            {'\u228B', '\uFE00'},
+       "varsupsetneqq;":           {'\u2ACC', '\uFE00'},
+       "vnsub;":                   {'\u2282', '\u20D2'},
+       "vnsup;":                   {'\u2283', '\u20D2'},
+       "vsubnE;":                  {'\u2ACB', '\uFE00'},
+       "vsubne;":                  {'\u228A', '\uFE00'},
+       "vsupnE;":                  {'\u2ACC', '\uFE00'},
+       "vsupne;":                  {'\u228B', '\uFE00'},
+}
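
Note (not part of the diff): the entity and entity2 tables above are unexported, so their effect is only visible through the package's exported UnescapeString helper (added in escape.go below). A minimal sketch, assuming the vendored golang.org/x/net/html package is importable:

package main

import (
	"fmt"

	"golang.org/x/net/html"
)

func main() {
	// "NotEqualTilde;" lives in entity2 and decodes to two runes:
	// U+2242 MINUS TILDE followed by U+0338 COMBINING LONG SOLIDUS OVERLAY.
	fmt.Printf("%q\n", html.UnescapeString("a &NotEqualTilde; b")) // "a ≂̸ b"

	// Single-codepoint names come from the entity table above.
	fmt.Println(html.UnescapeString("&rsquo;")) // ’
}
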
diff --git a/vendor/golang.org/x/net/html/escape.go b/vendor/golang.org/x/net/html/escape.go
new file mode 100644 (file)
index 0000000..d856139
--- /dev/null
@@ -0,0 +1,258 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package html
+
+import (
+       "bytes"
+       "strings"
+       "unicode/utf8"
+)
+
+// These replacements permit compatibility with old numeric entities that
+// assumed Windows-1252 encoding.
+// https://html.spec.whatwg.org/multipage/syntax.html#consume-a-character-reference
+var replacementTable = [...]rune{
+       '\u20AC', // First entry is what 0x80 should be replaced with.
+       '\u0081',
+       '\u201A',
+       '\u0192',
+       '\u201E',
+       '\u2026',
+       '\u2020',
+       '\u2021',
+       '\u02C6',
+       '\u2030',
+       '\u0160',
+       '\u2039',
+       '\u0152',
+       '\u008D',
+       '\u017D',
+       '\u008F',
+       '\u0090',
+       '\u2018',
+       '\u2019',
+       '\u201C',
+       '\u201D',
+       '\u2022',
+       '\u2013',
+       '\u2014',
+       '\u02DC',
+       '\u2122',
+       '\u0161',
+       '\u203A',
+       '\u0153',
+       '\u009D',
+       '\u017E',
+       '\u0178', // Last entry is 0x9F.
+       // 0x00->'\uFFFD' is handled programmatically.
+       // 0x0D->'\u000D' is a no-op.
+}
+
+// unescapeEntity reads an entity like "&lt;" from b[src:] and writes the
+// corresponding "<" to b[dst:], returning the incremented dst and src cursors.
+// Precondition: b[src] == '&' && dst <= src.
+// attribute should be true if parsing an attribute value.
+func unescapeEntity(b []byte, dst, src int, attribute bool) (dst1, src1 int) {
+       // https://html.spec.whatwg.org/multipage/syntax.html#consume-a-character-reference
+
+       // i starts at 1 because we already know that s[0] == '&'.
+       i, s := 1, b[src:]
+
+       if len(s) <= 1 {
+               b[dst] = b[src]
+               return dst + 1, src + 1
+       }
+
+       if s[i] == '#' {
+               if len(s) <= 3 { // We need to have at least "&#.".
+                       b[dst] = b[src]
+                       return dst + 1, src + 1
+               }
+               i++
+               c := s[i]
+               hex := false
+               if c == 'x' || c == 'X' {
+                       hex = true
+                       i++
+               }
+
+               x := '\x00'
+               for i < len(s) {
+                       c = s[i]
+                       i++
+                       if hex {
+                               if '0' <= c && c <= '9' {
+                                       x = 16*x + rune(c) - '0'
+                                       continue
+                               } else if 'a' <= c && c <= 'f' {
+                                       x = 16*x + rune(c) - 'a' + 10
+                                       continue
+                               } else if 'A' <= c && c <= 'F' {
+                                       x = 16*x + rune(c) - 'A' + 10
+                                       continue
+                               }
+                       } else if '0' <= c && c <= '9' {
+                               x = 10*x + rune(c) - '0'
+                               continue
+                       }
+                       if c != ';' {
+                               i--
+                       }
+                       break
+               }
+
+               if i <= 3 { // No characters matched.
+                       b[dst] = b[src]
+                       return dst + 1, src + 1
+               }
+
+               if 0x80 <= x && x <= 0x9F {
+                       // Replace characters from Windows-1252 with UTF-8 equivalents.
+                       x = replacementTable[x-0x80]
+               } else if x == 0 || (0xD800 <= x && x <= 0xDFFF) || x > 0x10FFFF {
+                       // Replace invalid characters with the replacement character.
+                       x = '\uFFFD'
+               }
+
+               return dst + utf8.EncodeRune(b[dst:], x), src + i
+       }
+
+       // Consume the maximum number of characters possible, with the
+       // consumed characters matching one of the named references.
+
+       for i < len(s) {
+               c := s[i]
+               i++
+               // Lower-cased characters are more common in entities, so we check for them first.
+               if 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || '0' <= c && c <= '9' {
+                       continue
+               }
+               if c != ';' {
+                       i--
+               }
+               break
+       }
+
+       entityName := string(s[1:i])
+       if entityName == "" {
+               // No-op.
+       } else if attribute && entityName[len(entityName)-1] != ';' && len(s) > i && s[i] == '=' {
+               // No-op.
+       } else if x := entity[entityName]; x != 0 {
+               return dst + utf8.EncodeRune(b[dst:], x), src + i
+       } else if x := entity2[entityName]; x[0] != 0 {
+               dst1 := dst + utf8.EncodeRune(b[dst:], x[0])
+               return dst1 + utf8.EncodeRune(b[dst1:], x[1]), src + i
+       } else if !attribute {
+               maxLen := len(entityName) - 1
+               if maxLen > longestEntityWithoutSemicolon {
+                       maxLen = longestEntityWithoutSemicolon
+               }
+               for j := maxLen; j > 1; j-- {
+                       if x := entity[entityName[:j]]; x != 0 {
+                               return dst + utf8.EncodeRune(b[dst:], x), src + j + 1
+                       }
+               }
+       }
+
+       dst1, src1 = dst+i, src+i
+       copy(b[dst:dst1], b[src:src1])
+       return dst1, src1
+}
+
+// unescape unescapes b's entities in-place, so that "a&lt;b" becomes "a<b".
+// attribute should be true if parsing an attribute value.
+func unescape(b []byte, attribute bool) []byte {
+       for i, c := range b {
+               if c == '&' {
+                       dst, src := unescapeEntity(b, i, i, attribute)
+                       for src < len(b) {
+                               c := b[src]
+                               if c == '&' {
+                                       dst, src = unescapeEntity(b, dst, src, attribute)
+                               } else {
+                                       b[dst] = c
+                                       dst, src = dst+1, src+1
+                               }
+                       }
+                       return b[0:dst]
+               }
+       }
+       return b
+}
+
+// lower lower-cases the A-Z bytes in b in-place, so that "aBc" becomes "abc".
+func lower(b []byte) []byte {
+       for i, c := range b {
+               if 'A' <= c && c <= 'Z' {
+                       b[i] = c + 'a' - 'A'
+               }
+       }
+       return b
+}
+
+const escapedChars = "&'<>\"\r"
+
+func escape(w writer, s string) error {
+       i := strings.IndexAny(s, escapedChars)
+       for i != -1 {
+               if _, err := w.WriteString(s[:i]); err != nil {
+                       return err
+               }
+               var esc string
+               switch s[i] {
+               case '&':
+                       esc = "&amp;"
+               case '\'':
+                       // "&#39;" is shorter than "&apos;" and apos was not in HTML until HTML5.
+                       esc = "&#39;"
+               case '<':
+                       esc = "&lt;"
+               case '>':
+                       esc = "&gt;"
+               case '"':
+                       // "&#34;" is shorter than "&quot;".
+                       esc = "&#34;"
+               case '\r':
+                       esc = "&#13;"
+               default:
+                       panic("unrecognized escape character")
+               }
+               s = s[i+1:]
+               if _, err := w.WriteString(esc); err != nil {
+                       return err
+               }
+               i = strings.IndexAny(s, escapedChars)
+       }
+       _, err := w.WriteString(s)
+       return err
+}
+
+// EscapeString escapes special characters like "<" to become "&lt;". It
+// escapes only six such characters: <, >, &, ', " and \r.
+// UnescapeString(EscapeString(s)) == s always holds, but the converse isn't
+// always true.
+func EscapeString(s string) string {
+       if strings.IndexAny(s, escapedChars) == -1 {
+               return s
+       }
+       var buf bytes.Buffer
+       escape(&buf, s)
+       return buf.String()
+}
+
+// UnescapeString unescapes entities like "&lt;" to become "<". It unescapes a
+// larger range of entities than EscapeString escapes. For example, "&aacute;"
+// unescapes to "á", as do "&#225;" and "&#xE1;".
+// UnescapeString(EscapeString(s)) == s always holds, but the converse isn't
+// always true.
+func UnescapeString(s string) string {
+       for _, c := range s {
+               if c == '&' {
+                       return string(unescape([]byte(s), false))
+               }
+       }
+       return s
+}
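
Note (not part of the diff): as the doc comments above say, the round trip only holds in one direction. A short sketch of that asymmetry, again assuming the vendored package is importable:

package main

import (
	"fmt"

	"golang.org/x/net/html"
)

func main() {
	// EscapeString touches only the characters listed in escapedChars.
	fmt.Println(html.EscapeString(`a<b & "c"`)) // a&lt;b &amp; &#34;c&#34;

	// UnescapeString accepts named, decimal and hex references alike.
	fmt.Println(html.UnescapeString("&aacute; &#225; &#xE1;")) // á á á

	// Numeric references in the 0x80-0x9F range go through replacementTable.
	fmt.Println(html.UnescapeString("&#128;")) // €

	// The converse of UnescapeString(EscapeString(s)) == s does not hold:
	// escaping does not reproduce the original entity spelling.
	fmt.Println(html.EscapeString(html.UnescapeString("&aacute;"))) // á
}
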
diff --git a/vendor/golang.org/x/net/html/foreign.go b/vendor/golang.org/x/net/html/foreign.go
new file mode 100644 (file)
index 0000000..d3b3844
--- /dev/null
@@ -0,0 +1,226 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package html
+
+import (
+       "strings"
+)
+
+func adjustAttributeNames(aa []Attribute, nameMap map[string]string) {
+       for i := range aa {
+               if newName, ok := nameMap[aa[i].Key]; ok {
+                       aa[i].Key = newName
+               }
+       }
+}
+
+func adjustForeignAttributes(aa []Attribute) {
+       for i, a := range aa {
+               if a.Key == "" || a.Key[0] != 'x' {
+                       continue
+               }
+               switch a.Key {
+               case "xlink:actuate", "xlink:arcrole", "xlink:href", "xlink:role", "xlink:show",
+                       "xlink:title", "xlink:type", "xml:base", "xml:lang", "xml:space", "xmlns:xlink":
+                       j := strings.Index(a.Key, ":")
+                       aa[i].Namespace = a.Key[:j]
+                       aa[i].Key = a.Key[j+1:]
+               }
+       }
+}
+
+func htmlIntegrationPoint(n *Node) bool {
+       if n.Type != ElementNode {
+               return false
+       }
+       switch n.Namespace {
+       case "math":
+               if n.Data == "annotation-xml" {
+                       for _, a := range n.Attr {
+                               if a.Key == "encoding" {
+                                       val := strings.ToLower(a.Val)
+                                       if val == "text/html" || val == "application/xhtml+xml" {
+                                               return true
+                                       }
+                               }
+                       }
+               }
+       case "svg":
+               switch n.Data {
+               case "desc", "foreignObject", "title":
+                       return true
+               }
+       }
+       return false
+}
+
+func mathMLTextIntegrationPoint(n *Node) bool {
+       if n.Namespace != "math" {
+               return false
+       }
+       switch n.Data {
+       case "mi", "mo", "mn", "ms", "mtext":
+               return true
+       }
+       return false
+}
+
+// Section 12.2.5.5.
+var breakout = map[string]bool{
+       "b":          true,
+       "big":        true,
+       "blockquote": true,
+       "body":       true,
+       "br":         true,
+       "center":     true,
+       "code":       true,
+       "dd":         true,
+       "div":        true,
+       "dl":         true,
+       "dt":         true,
+       "em":         true,
+       "embed":      true,
+       "h1":         true,
+       "h2":         true,
+       "h3":         true,
+       "h4":         true,
+       "h5":         true,
+       "h6":         true,
+       "head":       true,
+       "hr":         true,
+       "i":          true,
+       "img":        true,
+       "li":         true,
+       "listing":    true,
+       "menu":       true,
+       "meta":       true,
+       "nobr":       true,
+       "ol":         true,
+       "p":          true,
+       "pre":        true,
+       "ruby":       true,
+       "s":          true,
+       "small":      true,
+       "span":       true,
+       "strong":     true,
+       "strike":     true,
+       "sub":        true,
+       "sup":        true,
+       "table":      true,
+       "tt":         true,
+       "u":          true,
+       "ul":         true,
+       "var":        true,
+}
+
+// Section 12.2.5.5.
+var svgTagNameAdjustments = map[string]string{
+       "altglyph":            "altGlyph",
+       "altglyphdef":         "altGlyphDef",
+       "altglyphitem":        "altGlyphItem",
+       "animatecolor":        "animateColor",
+       "animatemotion":       "animateMotion",
+       "animatetransform":    "animateTransform",
+       "clippath":            "clipPath",
+       "feblend":             "feBlend",
+       "fecolormatrix":       "feColorMatrix",
+       "fecomponenttransfer": "feComponentTransfer",
+       "fecomposite":         "feComposite",
+       "feconvolvematrix":    "feConvolveMatrix",
+       "fediffuselighting":   "feDiffuseLighting",
+       "fedisplacementmap":   "feDisplacementMap",
+       "fedistantlight":      "feDistantLight",
+       "feflood":             "feFlood",
+       "fefunca":             "feFuncA",
+       "fefuncb":             "feFuncB",
+       "fefuncg":             "feFuncG",
+       "fefuncr":             "feFuncR",
+       "fegaussianblur":      "feGaussianBlur",
+       "feimage":             "feImage",
+       "femerge":             "feMerge",
+       "femergenode":         "feMergeNode",
+       "femorphology":        "feMorphology",
+       "feoffset":            "feOffset",
+       "fepointlight":        "fePointLight",
+       "fespecularlighting":  "feSpecularLighting",
+       "fespotlight":         "feSpotLight",
+       "fetile":              "feTile",
+       "feturbulence":        "feTurbulence",
+       "foreignobject":       "foreignObject",
+       "glyphref":            "glyphRef",
+       "lineargradient":      "linearGradient",
+       "radialgradient":      "radialGradient",
+       "textpath":            "textPath",
+}
+
+// Section 12.2.5.1
+var mathMLAttributeAdjustments = map[string]string{
+       "definitionurl": "definitionURL",
+}
+
+var svgAttributeAdjustments = map[string]string{
+       "attributename":             "attributeName",
+       "attributetype":             "attributeType",
+       "basefrequency":             "baseFrequency",
+       "baseprofile":               "baseProfile",
+       "calcmode":                  "calcMode",
+       "clippathunits":             "clipPathUnits",
+       "contentscripttype":         "contentScriptType",
+       "contentstyletype":          "contentStyleType",
+       "diffuseconstant":           "diffuseConstant",
+       "edgemode":                  "edgeMode",
+       "externalresourcesrequired": "externalResourcesRequired",
+       "filterres":                 "filterRes",
+       "filterunits":               "filterUnits",
+       "glyphref":                  "glyphRef",
+       "gradienttransform":         "gradientTransform",
+       "gradientunits":             "gradientUnits",
+       "kernelmatrix":              "kernelMatrix",
+       "kernelunitlength":          "kernelUnitLength",
+       "keypoints":                 "keyPoints",
+       "keysplines":                "keySplines",
+       "keytimes":                  "keyTimes",
+       "lengthadjust":              "lengthAdjust",
+       "limitingconeangle":         "limitingConeAngle",
+       "markerheight":              "markerHeight",
+       "markerunits":               "markerUnits",
+       "markerwidth":               "markerWidth",
+       "maskcontentunits":          "maskContentUnits",
+       "maskunits":                 "maskUnits",
+       "numoctaves":                "numOctaves",
+       "pathlength":                "pathLength",
+       "patterncontentunits":       "patternContentUnits",
+       "patterntransform":          "patternTransform",
+       "patternunits":              "patternUnits",
+       "pointsatx":                 "pointsAtX",
+       "pointsaty":                 "pointsAtY",
+       "pointsatz":                 "pointsAtZ",
+       "preservealpha":             "preserveAlpha",
+       "preserveaspectratio":       "preserveAspectRatio",
+       "primitiveunits":            "primitiveUnits",
+       "refx":                      "refX",
+       "refy":                      "refY",
+       "repeatcount":               "repeatCount",
+       "repeatdur":                 "repeatDur",
+       "requiredextensions":        "requiredExtensions",
+       "requiredfeatures":          "requiredFeatures",
+       "specularconstant":          "specularConstant",
+       "specularexponent":          "specularExponent",
+       "spreadmethod":              "spreadMethod",
+       "startoffset":               "startOffset",
+       "stddeviation":              "stdDeviation",
+       "stitchtiles":               "stitchTiles",
+       "surfacescale":              "surfaceScale",
+       "systemlanguage":            "systemLanguage",
+       "tablevalues":               "tableValues",
+       "targetx":                   "targetX",
+       "targety":                   "targetY",
+       "textlength":                "textLength",
+       "viewbox":                   "viewBox",
+       "viewtarget":                "viewTarget",
+       "xchannelselector":          "xChannelSelector",
+       "ychannelselector":          "yChannelSelector",
+       "zoomandpan":                "zoomAndPan",
+}
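
Note (not part of the diff): these adjustment tables are unexported and are consulted by the parser while it is inside SVG or MathML foreign content, so their effect shows up in the element names produced by Parse. A hedged sketch of that behaviour; the printElements walker is ad hoc, not part of the package:

package main

import (
	"fmt"
	"strings"

	"golang.org/x/net/html"
)

// printElements walks the tree and prints element names; a foreign-content
// tag such as <clippath> should come back case-adjusted to "clipPath".
func printElements(n *html.Node) {
	if n.Type == html.ElementNode {
		fmt.Println(n.Data)
	}
	for c := n.FirstChild; c != nil; c = c.NextSibling {
		printElements(c)
	}
}

func main() {
	doc, err := html.Parse(strings.NewReader(`<svg><clippath></clippath></svg>`))
	if err != nil {
		panic(err)
	}
	printElements(doc) // should print: html, head, body, svg, clipPath
}
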
diff --git a/vendor/golang.org/x/net/html/node.go b/vendor/golang.org/x/net/html/node.go
new file mode 100644 (file)
index 0000000..26b657a
--- /dev/null
@@ -0,0 +1,193 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package html
+
+import (
+       "golang.org/x/net/html/atom"
+)
+
+// A NodeType is the type of a Node.
+type NodeType uint32
+
+const (
+       ErrorNode NodeType = iota
+       TextNode
+       DocumentNode
+       ElementNode
+       CommentNode
+       DoctypeNode
+       scopeMarkerNode
+)
+
+// Section 12.2.3.3 says "scope markers are inserted when entering applet
+// elements, buttons, object elements, marquees, table cells, and table
+// captions, and are used to prevent formatting from 'leaking'".
+var scopeMarker = Node{Type: scopeMarkerNode}
+
+// A Node consists of a NodeType and some Data (tag name for element nodes,
+// content for text) and is part of a tree of Nodes. Element nodes may also
+// have a Namespace and contain a slice of Attributes. Data is unescaped, so
+// that it looks like "a<b" rather than "a&lt;b". For element nodes, DataAtom
+// is the atom for Data, or zero if Data is not a known tag name.
+//
+// An empty Namespace implies a "http://www.w3.org/1999/xhtml" namespace.
+// Similarly, "math" is short for "http://www.w3.org/1998/Math/MathML", and
+// "svg" is short for "http://www.w3.org/2000/svg".
+type Node struct {
+       Parent, FirstChild, LastChild, PrevSibling, NextSibling *Node
+
+       Type      NodeType
+       DataAtom  atom.Atom
+       Data      string
+       Namespace string
+       Attr      []Attribute
+}
+
+// InsertBefore inserts newChild as a child of n, immediately before oldChild
+// in the sequence of n's children. oldChild may be nil, in which case newChild
+// is appended to the end of n's children.
+//
+// It will panic if newChild already has a parent or siblings.
+func (n *Node) InsertBefore(newChild, oldChild *Node) {
+       if newChild.Parent != nil || newChild.PrevSibling != nil || newChild.NextSibling != nil {
+               panic("html: InsertBefore called for an attached child Node")
+       }
+       var prev, next *Node
+       if oldChild != nil {
+               prev, next = oldChild.PrevSibling, oldChild
+       } else {
+               prev = n.LastChild
+       }
+       if prev != nil {
+               prev.NextSibling = newChild
+       } else {
+               n.FirstChild = newChild
+       }
+       if next != nil {
+               next.PrevSibling = newChild
+       } else {
+               n.LastChild = newChild
+       }
+       newChild.Parent = n
+       newChild.PrevSibling = prev
+       newChild.NextSibling = next
+}
+
+// AppendChild adds a node c as a child of n.
+//
+// It will panic if c already has a parent or siblings.
+func (n *Node) AppendChild(c *Node) {
+       if c.Parent != nil || c.PrevSibling != nil || c.NextSibling != nil {
+               panic("html: AppendChild called for an attached child Node")
+       }
+       last := n.LastChild
+       if last != nil {
+               last.NextSibling = c
+       } else {
+               n.FirstChild = c
+       }
+       n.LastChild = c
+       c.Parent = n
+       c.PrevSibling = last
+}
+
+// RemoveChild removes a node c that is a child of n. Afterwards, c will have
+// no parent and no siblings.
+//
+// It will panic if c's parent is not n.
+func (n *Node) RemoveChild(c *Node) {
+       if c.Parent != n {
+               panic("html: RemoveChild called for a non-child Node")
+       }
+       if n.FirstChild == c {
+               n.FirstChild = c.NextSibling
+       }
+       if c.NextSibling != nil {
+               c.NextSibling.PrevSibling = c.PrevSibling
+       }
+       if n.LastChild == c {
+               n.LastChild = c.PrevSibling
+       }
+       if c.PrevSibling != nil {
+               c.PrevSibling.NextSibling = c.NextSibling
+       }
+       c.Parent = nil
+       c.PrevSibling = nil
+       c.NextSibling = nil
+}
+
+// reparentChildren reparents all of src's child nodes to dst.
+func reparentChildren(dst, src *Node) {
+       for {
+               child := src.FirstChild
+               if child == nil {
+                       break
+               }
+               src.RemoveChild(child)
+               dst.AppendChild(child)
+       }
+}
+
+// clone returns a new node with the same type, data and attributes.
+// The clone has no parent, no siblings and no children.
+func (n *Node) clone() *Node {
+       m := &Node{
+               Type:     n.Type,
+               DataAtom: n.DataAtom,
+               Data:     n.Data,
+               Attr:     make([]Attribute, len(n.Attr)),
+       }
+       copy(m.Attr, n.Attr)
+       return m
+}
+
+// nodeStack is a stack of nodes.
+type nodeStack []*Node
+
+// pop pops the stack. It will panic if s is empty.
+func (s *nodeStack) pop() *Node {
+       i := len(*s)
+       n := (*s)[i-1]
+       *s = (*s)[:i-1]
+       return n
+}
+
+// top returns the most recently pushed node, or nil if s is empty.
+func (s *nodeStack) top() *Node {
+       if i := len(*s); i > 0 {
+               return (*s)[i-1]
+       }
+       return nil
+}
+
+// index returns the index of the top-most occurrence of n in the stack, or -1
+// if n is not present.
+func (s *nodeStack) index(n *Node) int {
+       for i := len(*s) - 1; i >= 0; i-- {
+               if (*s)[i] == n {
+                       return i
+               }
+       }
+       return -1
+}
+
+// insert inserts a node at the given index.
+func (s *nodeStack) insert(i int, n *Node) {
+       (*s) = append(*s, nil)
+       copy((*s)[i+1:], (*s)[i:])
+       (*s)[i] = n
+}
+
+// remove removes a node from the stack. It is a no-op if n is not present.
+func (s *nodeStack) remove(n *Node) {
+       i := s.index(n)
+       if i == -1 {
+               return
+       }
+       copy((*s)[i:], (*s)[i+1:])
+       j := len(*s) - 1
+       (*s)[j] = nil
+       *s = (*s)[:j]
+}
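
Note (not part of the diff): the exported tree-manipulation methods above are enough to build and edit a document by hand. A minimal sketch using only what node.go defines:

package main

import (
	"fmt"

	"golang.org/x/net/html"
)

func main() {
	ul := &html.Node{Type: html.ElementNode, Data: "ul"}
	first := &html.Node{Type: html.ElementNode, Data: "li"}
	third := &html.Node{Type: html.ElementNode, Data: "li"}
	ul.AppendChild(first)
	ul.AppendChild(third)

	// InsertBefore places a new child between the two existing ones.
	second := &html.Node{Type: html.ElementNode, Data: "li"}
	ul.InsertBefore(second, third)

	// Walk FirstChild/NextSibling to confirm the order (three "li" children).
	for c := ul.FirstChild; c != nil; c = c.NextSibling {
		fmt.Println(c.Data)
	}

	// RemoveChild detaches a node so it can be re-parented elsewhere.
	ul.RemoveChild(second)
	fmt.Println(second.Parent == nil) // true
}
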
diff --git a/vendor/golang.org/x/net/html/parse.go b/vendor/golang.org/x/net/html/parse.go
new file mode 100644 (file)
index 0000000..be4b2bf
--- /dev/null
@@ -0,0 +1,2094 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package html
+
+import (
+       "errors"
+       "fmt"
+       "io"
+       "strings"
+
+       a "golang.org/x/net/html/atom"
+)
+
+// A parser implements the HTML5 parsing algorithm:
+// https://html.spec.whatwg.org/multipage/syntax.html#tree-construction
+type parser struct {
+       // tokenizer provides the tokens for the parser.
+       tokenizer *Tokenizer
+       // tok is the most recently read token.
+       tok Token
+       // Self-closing tags like <hr/> are treated as start tags, except that
+       // hasSelfClosingToken is set while they are being processed.
+       hasSelfClosingToken bool
+       // doc is the document root element.
+       doc *Node
+       // The stack of open elements (section 12.2.3.2) and active formatting
+       // elements (section 12.2.3.3).
+       oe, afe nodeStack
+       // Element pointers (section 12.2.3.4).
+       head, form *Node
+       // Other parsing state flags (section 12.2.3.5).
+       scripting, framesetOK bool
+       // im is the current insertion mode.
+       im insertionMode
+       // originalIM is the insertion mode to go back to after completing a text
+       // or inTableText insertion mode.
+       originalIM insertionMode
+       // fosterParenting is whether new elements should be inserted according to
+       // the foster parenting rules (section 12.2.5.3).
+       fosterParenting bool
+       // quirks is whether the parser is operating in "quirks mode."
+       quirks bool
+       // fragment is whether the parser is parsing an HTML fragment.
+       fragment bool
+       // context is the context element when parsing an HTML fragment
+       // (section 12.4).
+       context *Node
+}
+
+func (p *parser) top() *Node {
+       if n := p.oe.top(); n != nil {
+               return n
+       }
+       return p.doc
+}
+
+// Stop tags for use in popUntil. These come from section 12.2.3.2.
+var (
+       defaultScopeStopTags = map[string][]a.Atom{
+               "":     {a.Applet, a.Caption, a.Html, a.Table, a.Td, a.Th, a.Marquee, a.Object, a.Template},
+               "math": {a.AnnotationXml, a.Mi, a.Mn, a.Mo, a.Ms, a.Mtext},
+               "svg":  {a.Desc, a.ForeignObject, a.Title},
+       }
+)
+
+type scope int
+
+const (
+       defaultScope scope = iota
+       listItemScope
+       buttonScope
+       tableScope
+       tableRowScope
+       tableBodyScope
+       selectScope
+)
+
+// popUntil pops the stack of open elements at the highest element whose tag
+// is in matchTags, provided there is no higher element in the scope's stop
+// tags (as defined in section 12.2.3.2). It returns whether or not there was
+// such an element. If there was not, popUntil leaves the stack unchanged.
+//
+// For example, the set of stop tags for table scope is: "html", "table". If
+// the stack was:
+// ["html", "body", "font", "table", "b", "i", "u"]
+// then popUntil(tableScope, "font") would return false, but
+// popUntil(tableScope, "i") would return true and the stack would become:
+// ["html", "body", "font", "table", "b"]
+//
+// If an element's tag is in both the stop tags and matchTags, then the stack
+// will be popped and the function returns true (provided, of course, there was
+// no higher element in the stack that was also in the stop tags). For example,
+// popUntil(tableScope, "table") returns true and leaves:
+// ["html", "body", "font"]
+func (p *parser) popUntil(s scope, matchTags ...a.Atom) bool {
+       if i := p.indexOfElementInScope(s, matchTags...); i != -1 {
+               p.oe = p.oe[:i]
+               return true
+       }
+       return false
+}
+
+// indexOfElementInScope returns the index in p.oe of the highest element whose
+// tag is in matchTags that is in scope. If no matching element is in scope, it
+// returns -1.
+func (p *parser) indexOfElementInScope(s scope, matchTags ...a.Atom) int {
+       for i := len(p.oe) - 1; i >= 0; i-- {
+               tagAtom := p.oe[i].DataAtom
+               if p.oe[i].Namespace == "" {
+                       for _, t := range matchTags {
+                               if t == tagAtom {
+                                       return i
+                               }
+                       }
+                       switch s {
+                       case defaultScope:
+                               // No-op.
+                       case listItemScope:
+                               if tagAtom == a.Ol || tagAtom == a.Ul {
+                                       return -1
+                               }
+                       case buttonScope:
+                               if tagAtom == a.Button {
+                                       return -1
+                               }
+                       case tableScope:
+                               if tagAtom == a.Html || tagAtom == a.Table {
+                                       return -1
+                               }
+                       case selectScope:
+                               if tagAtom != a.Optgroup && tagAtom != a.Option {
+                                       return -1
+                               }
+                       default:
+                               panic("unreachable")
+                       }
+               }
+               switch s {
+               case defaultScope, listItemScope, buttonScope:
+                       for _, t := range defaultScopeStopTags[p.oe[i].Namespace] {
+                               if t == tagAtom {
+                                       return -1
+                               }
+                       }
+               }
+       }
+       return -1
+}
+
+// elementInScope is like popUntil, except that it doesn't modify the stack of
+// open elements.
+func (p *parser) elementInScope(s scope, matchTags ...a.Atom) bool {
+       return p.indexOfElementInScope(s, matchTags...) != -1
+}
+
+// clearStackToContext pops elements off the stack of open elements until a
+// scope-defined element is found.
+func (p *parser) clearStackToContext(s scope) {
+       for i := len(p.oe) - 1; i >= 0; i-- {
+               tagAtom := p.oe[i].DataAtom
+               switch s {
+               case tableScope:
+                       if tagAtom == a.Html || tagAtom == a.Table {
+                               p.oe = p.oe[:i+1]
+                               return
+                       }
+               case tableRowScope:
+                       if tagAtom == a.Html || tagAtom == a.Tr {
+                               p.oe = p.oe[:i+1]
+                               return
+                       }
+               case tableBodyScope:
+                       if tagAtom == a.Html || tagAtom == a.Tbody || tagAtom == a.Tfoot || tagAtom == a.Thead {
+                               p.oe = p.oe[:i+1]
+                               return
+                       }
+               default:
+                       panic("unreachable")
+               }
+       }
+}
+
+// generateImpliedEndTags pops nodes off the stack of open elements as long as
+// the top node has a tag name of dd, dt, li, option, optgroup, p, rp, or rt.
+// If exceptions are specified, nodes with those tag names will not be popped
+// off.
+func (p *parser) generateImpliedEndTags(exceptions ...string) {
+       var i int
+loop:
+       for i = len(p.oe) - 1; i >= 0; i-- {
+               n := p.oe[i]
+               if n.Type == ElementNode {
+                       switch n.DataAtom {
+                       case a.Dd, a.Dt, a.Li, a.Option, a.Optgroup, a.P, a.Rp, a.Rt:
+                               for _, except := range exceptions {
+                                       if n.Data == except {
+                                               break loop
+                                       }
+                               }
+                               continue
+                       }
+               }
+               break
+       }
+
+       p.oe = p.oe[:i+1]
+}
+
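+// As a rough illustration of the effect (a sketch, not an extra code path):
+// when parsing "<form><p>hi</form>", the </form> handler in inBodyIM calls
+// generateImpliedEndTags before removing the form element, so the still-open
+// <p> is popped off the stack here even though the input never contained an
+// explicit </p>.
+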
+// addChild adds a child node n to the top element, and pushes n onto the stack
+// of open elements if it is an element node.
+func (p *parser) addChild(n *Node) {
+       if p.shouldFosterParent() {
+               p.fosterParent(n)
+       } else {
+               p.top().AppendChild(n)
+       }
+
+       if n.Type == ElementNode {
+               p.oe = append(p.oe, n)
+       }
+}
+
+// shouldFosterParent returns whether the next node to be added should be
+// foster parented.
+func (p *parser) shouldFosterParent() bool {
+       if p.fosterParenting {
+               switch p.top().DataAtom {
+               case a.Table, a.Tbody, a.Tfoot, a.Thead, a.Tr:
+                       return true
+               }
+       }
+       return false
+}
+
+// fosterParent adds a child node according to the foster parenting rules.
+// Section 12.2.5.3, "foster parenting".
+func (p *parser) fosterParent(n *Node) {
+       var table, parent, prev *Node
+       var i int
+       for i = len(p.oe) - 1; i >= 0; i-- {
+               if p.oe[i].DataAtom == a.Table {
+                       table = p.oe[i]
+                       break
+               }
+       }
+
+       if table == nil {
+               // The foster parent is the html element.
+               parent = p.oe[0]
+       } else {
+               parent = table.Parent
+       }
+       if parent == nil {
+               parent = p.oe[i-1]
+       }
+
+       if table != nil {
+               prev = table.PrevSibling
+       } else {
+               prev = parent.LastChild
+       }
+       if prev != nil && prev.Type == TextNode && n.Type == TextNode {
+               prev.Data += n.Data
+               return
+       }
+
+       parent.InsertBefore(n, table)
+}
+
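+// A rough example of the effect, assuming the usual in-table handling further
+// down in this file: when parsing "<table>foo<tr><td>bar", the text "foo" is
+// not a valid child of the table, so it is foster parented and ends up as a
+// text node placed immediately before the <table> element in the final tree.
+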
+// addText adds text to the preceding node if it is a text node, or else it
+// calls addChild with a new text node.
+func (p *parser) addText(text string) {
+       if text == "" {
+               return
+       }
+
+       if p.shouldFosterParent() {
+               p.fosterParent(&Node{
+                       Type: TextNode,
+                       Data: text,
+               })
+               return
+       }
+
+       t := p.top()
+       if n := t.LastChild; n != nil && n.Type == TextNode {
+               n.Data += text
+               return
+       }
+       p.addChild(&Node{
+               Type: TextNode,
+               Data: text,
+       })
+}
+
+// addElement adds a child element based on the current token.
+func (p *parser) addElement() {
+       p.addChild(&Node{
+               Type:     ElementNode,
+               DataAtom: p.tok.DataAtom,
+               Data:     p.tok.Data,
+               Attr:     p.tok.Attr,
+       })
+}
+
+// Section 12.2.3.3.
+func (p *parser) addFormattingElement() {
+       tagAtom, attr := p.tok.DataAtom, p.tok.Attr
+       p.addElement()
+
+       // Implement the Noah's Ark clause, but with three per family instead of two.
+       identicalElements := 0
+findIdenticalElements:
+       for i := len(p.afe) - 1; i >= 0; i-- {
+               n := p.afe[i]
+               if n.Type == scopeMarkerNode {
+                       break
+               }
+               if n.Type != ElementNode {
+                       continue
+               }
+               if n.Namespace != "" {
+                       continue
+               }
+               if n.DataAtom != tagAtom {
+                       continue
+               }
+               if len(n.Attr) != len(attr) {
+                       continue
+               }
+       compareAttributes:
+               for _, t0 := range n.Attr {
+                       for _, t1 := range attr {
+                               if t0.Key == t1.Key && t0.Namespace == t1.Namespace && t0.Val == t1.Val {
+                                       // Found a match for this attribute, continue with the next attribute.
+                                       continue compareAttributes
+                               }
+                       }
+                       // If we get here, no attribute in attr matches t0.
+                       // Therefore the element is not identical to the new one.
+                       continue findIdenticalElements
+               }
+
+               identicalElements++
+               if identicalElements >= 3 {
+                       p.afe.remove(n)
+               }
+       }
+
+       p.afe = append(p.afe, p.top())
+}
+
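+// Sketch of the Noah's Ark behaviour above: "identical" means same tag atom,
+// namespace and attribute set. Given input like "<b><b><b><b>x", once a fourth
+// identical <b> is added, the oldest identical entry is removed from p.afe, so
+// at most three such entries remain on the list of active formatting elements.
+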
+// Section 12.2.3.3.
+func (p *parser) clearActiveFormattingElements() {
+       for {
+               n := p.afe.pop()
+               if len(p.afe) == 0 || n.Type == scopeMarkerNode {
+                       return
+               }
+       }
+}
+
+// Section 12.2.3.3.
+func (p *parser) reconstructActiveFormattingElements() {
+       n := p.afe.top()
+       if n == nil {
+               return
+       }
+       if n.Type == scopeMarkerNode || p.oe.index(n) != -1 {
+               return
+       }
+       i := len(p.afe) - 1
+       for n.Type != scopeMarkerNode && p.oe.index(n) == -1 {
+               if i == 0 {
+                       i = -1
+                       break
+               }
+               i--
+               n = p.afe[i]
+       }
+       for {
+               i++
+               clone := p.afe[i].clone()
+               p.addChild(clone)
+               p.afe[i] = clone
+               if i == len(p.afe)-1 {
+                       break
+               }
+       }
+}
+
+// Section 12.2.4.
+func (p *parser) acknowledgeSelfClosingTag() {
+       p.hasSelfClosingToken = false
+}
+
+// An insertion mode (section 12.2.3.1) is the state transition function from
+// a particular state in the HTML5 parser's state machine. It updates the
+// parser's fields depending on parser.tok (where ErrorToken means EOF).
+// It returns whether the token was consumed.
+type insertionMode func(*parser) bool
+
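+// Roughly, the token dispatch loop (which lives elsewhere in this file, not in
+// this hunk) keeps re-invoking the current mode until the token is consumed,
+// along the lines of:
+//
+//      for consumed := false; !consumed; {
+//              consumed = p.im(p)
+//      }
+//
+// so returning false means "reprocess this token", typically after the handler
+// has switched p.im or rewritten p.tok.
+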
+// setOriginalIM sets the insertion mode to return to after completing a text or
+// inTableText insertion mode.
+// Section 12.2.3.1, "using the rules for".
+func (p *parser) setOriginalIM() {
+       if p.originalIM != nil {
+               panic("html: bad parser state: originalIM was set twice")
+       }
+       p.originalIM = p.im
+}
+
+// Section 12.2.3.1, "reset the insertion mode".
+func (p *parser) resetInsertionMode() {
+       for i := len(p.oe) - 1; i >= 0; i-- {
+               n := p.oe[i]
+               if i == 0 && p.context != nil {
+                       n = p.context
+               }
+
+               switch n.DataAtom {
+               case a.Select:
+                       p.im = inSelectIM
+               case a.Td, a.Th:
+                       p.im = inCellIM
+               case a.Tr:
+                       p.im = inRowIM
+               case a.Tbody, a.Thead, a.Tfoot:
+                       p.im = inTableBodyIM
+               case a.Caption:
+                       p.im = inCaptionIM
+               case a.Colgroup:
+                       p.im = inColumnGroupIM
+               case a.Table:
+                       p.im = inTableIM
+               case a.Head:
+                       p.im = inBodyIM
+               case a.Body:
+                       p.im = inBodyIM
+               case a.Frameset:
+                       p.im = inFramesetIM
+               case a.Html:
+                       p.im = beforeHeadIM
+               default:
+                       continue
+               }
+               return
+       }
+       p.im = inBodyIM
+}
+
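+// For example, when parsing a fragment with a context element (p.context) of
+// <td>, the loop above lands on the a.Td case and resets to inCellIM; with a
+// <select> context it picks inSelectIM; and if no recognised ancestor is found
+// it falls back to inBodyIM. This is only a sketch of the common cases.
+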
+const whitespace = " \t\r\n\f"
+
+// Section 12.2.5.4.1.
+func initialIM(p *parser) bool {
+       switch p.tok.Type {
+       case TextToken:
+               p.tok.Data = strings.TrimLeft(p.tok.Data, whitespace)
+               if len(p.tok.Data) == 0 {
+                       // It was all whitespace, so ignore it.
+                       return true
+               }
+       case CommentToken:
+               p.doc.AppendChild(&Node{
+                       Type: CommentNode,
+                       Data: p.tok.Data,
+               })
+               return true
+       case DoctypeToken:
+               n, quirks := parseDoctype(p.tok.Data)
+               p.doc.AppendChild(n)
+               p.quirks = quirks
+               p.im = beforeHTMLIM
+               return true
+       }
+       p.quirks = true
+       p.im = beforeHTMLIM
+       return false
+}
+
+// Section 12.2.5.4.2.
+func beforeHTMLIM(p *parser) bool {
+       switch p.tok.Type {
+       case DoctypeToken:
+               // Ignore the token.
+               return true
+       case TextToken:
+               p.tok.Data = strings.TrimLeft(p.tok.Data, whitespace)
+               if len(p.tok.Data) == 0 {
+                       // It was all whitespace, so ignore it.
+                       return true
+               }
+       case StartTagToken:
+               if p.tok.DataAtom == a.Html {
+                       p.addElement()
+                       p.im = beforeHeadIM
+                       return true
+               }
+       case EndTagToken:
+               switch p.tok.DataAtom {
+               case a.Head, a.Body, a.Html, a.Br:
+                       p.parseImpliedToken(StartTagToken, a.Html, a.Html.String())
+                       return false
+               default:
+                       // Ignore the token.
+                       return true
+               }
+       case CommentToken:
+               p.doc.AppendChild(&Node{
+                       Type: CommentNode,
+                       Data: p.tok.Data,
+               })
+               return true
+       }
+       p.parseImpliedToken(StartTagToken, a.Html, a.Html.String())
+       return false
+}
+
+// Section 12.2.5.4.3.
+func beforeHeadIM(p *parser) bool {
+       switch p.tok.Type {
+       case TextToken:
+               p.tok.Data = strings.TrimLeft(p.tok.Data, whitespace)
+               if len(p.tok.Data) == 0 {
+                       // It was all whitespace, so ignore it.
+                       return true
+               }
+       case StartTagToken:
+               switch p.tok.DataAtom {
+               case a.Head:
+                       p.addElement()
+                       p.head = p.top()
+                       p.im = inHeadIM
+                       return true
+               case a.Html:
+                       return inBodyIM(p)
+               }
+       case EndTagToken:
+               switch p.tok.DataAtom {
+               case a.Head, a.Body, a.Html, a.Br:
+                       p.parseImpliedToken(StartTagToken, a.Head, a.Head.String())
+                       return false
+               default:
+                       // Ignore the token.
+                       return true
+               }
+       case CommentToken:
+               p.addChild(&Node{
+                       Type: CommentNode,
+                       Data: p.tok.Data,
+               })
+               return true
+       case DoctypeToken:
+               // Ignore the token.
+               return true
+       }
+
+       p.parseImpliedToken(StartTagToken, a.Head, a.Head.String())
+       return false
+}
+
+// Section 12.2.5.4.4.
+func inHeadIM(p *parser) bool {
+       switch p.tok.Type {
+       case TextToken:
+               s := strings.TrimLeft(p.tok.Data, whitespace)
+               if len(s) < len(p.tok.Data) {
+                       // Add the initial whitespace to the current node.
+                       p.addText(p.tok.Data[:len(p.tok.Data)-len(s)])
+                       if s == "" {
+                               return true
+                       }
+                       p.tok.Data = s
+               }
+       case StartTagToken:
+               switch p.tok.DataAtom {
+               case a.Html:
+                       return inBodyIM(p)
+               case a.Base, a.Basefont, a.Bgsound, a.Command, a.Link, a.Meta:
+                       p.addElement()
+                       p.oe.pop()
+                       p.acknowledgeSelfClosingTag()
+                       return true
+               case a.Script, a.Title, a.Noscript, a.Noframes, a.Style:
+                       p.addElement()
+                       p.setOriginalIM()
+                       p.im = textIM
+                       return true
+               case a.Head:
+                       // Ignore the token.
+                       return true
+               }
+       case EndTagToken:
+               switch p.tok.DataAtom {
+               case a.Head:
+                       n := p.oe.pop()
+                       if n.DataAtom != a.Head {
+                               panic("html: bad parser state: <head> element not found, in the in-head insertion mode")
+                       }
+                       p.im = afterHeadIM
+                       return true
+               case a.Body, a.Html, a.Br:
+                       p.parseImpliedToken(EndTagToken, a.Head, a.Head.String())
+                       return false
+               default:
+                       // Ignore the token.
+                       return true
+               }
+       case CommentToken:
+               p.addChild(&Node{
+                       Type: CommentNode,
+                       Data: p.tok.Data,
+               })
+               return true
+       case DoctypeToken:
+               // Ignore the token.
+               return true
+       }
+
+       p.parseImpliedToken(EndTagToken, a.Head, a.Head.String())
+       return false
+}
+
+// Section 12.2.5.4.6.
+func afterHeadIM(p *parser) bool {
+       switch p.tok.Type {
+       case TextToken:
+               s := strings.TrimLeft(p.tok.Data, whitespace)
+               if len(s) < len(p.tok.Data) {
+                       // Add the initial whitespace to the current node.
+                       p.addText(p.tok.Data[:len(p.tok.Data)-len(s)])
+                       if s == "" {
+                               return true
+                       }
+                       p.tok.Data = s
+               }
+       case StartTagToken:
+               switch p.tok.DataAtom {
+               case a.Html:
+                       return inBodyIM(p)
+               case a.Body:
+                       p.addElement()
+                       p.framesetOK = false
+                       p.im = inBodyIM
+                       return true
+               case a.Frameset:
+                       p.addElement()
+                       p.im = inFramesetIM
+                       return true
+               case a.Base, a.Basefont, a.Bgsound, a.Link, a.Meta, a.Noframes, a.Script, a.Style, a.Title:
+                       p.oe = append(p.oe, p.head)
+                       defer p.oe.remove(p.head)
+                       return inHeadIM(p)
+               case a.Head:
+                       // Ignore the token.
+                       return true
+               }
+       case EndTagToken:
+               switch p.tok.DataAtom {
+               case a.Body, a.Html, a.Br:
+                       // Drop down to creating an implied <body> tag.
+               default:
+                       // Ignore the token.
+                       return true
+               }
+       case CommentToken:
+               p.addChild(&Node{
+                       Type: CommentNode,
+                       Data: p.tok.Data,
+               })
+               return true
+       case DoctypeToken:
+               // Ignore the token.
+               return true
+       }
+
+       p.parseImpliedToken(StartTagToken, a.Body, a.Body.String())
+       p.framesetOK = true
+       return false
+}
+
+// copyAttributes copies attributes of src not found on dst to dst.
+func copyAttributes(dst *Node, src Token) {
+       if len(src.Attr) == 0 {
+               return
+       }
+       attr := map[string]string{}
+       for _, t := range dst.Attr {
+               attr[t.Key] = t.Val
+       }
+       for _, t := range src.Attr {
+               if _, ok := attr[t.Key]; !ok {
+                       dst.Attr = append(dst.Attr, t)
+                       attr[t.Key] = t.Val
+               }
+       }
+}
+
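+// For example, when inBodyIM below sees a second <html> or <body> start tag,
+// it calls copyAttributes on the element that is already open: given an open
+// <html lang="en">, a later <html lang="fr" class="x"> start tag leaves
+// lang="en" untouched and only adds class="x".
+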
+// Section 12.2.5.4.7.
+func inBodyIM(p *parser) bool {
+       switch p.tok.Type {
+       case TextToken:
+               d := p.tok.Data
+               switch n := p.oe.top(); n.DataAtom {
+               case a.Pre, a.Listing:
+                       if n.FirstChild == nil {
+                               // Ignore a newline at the start of a <pre> block.
+                               if d != "" && d[0] == '\r' {
+                                       d = d[1:]
+                               }
+                               if d != "" && d[0] == '\n' {
+                                       d = d[1:]
+                               }
+                       }
+               }
+               d = strings.Replace(d, "\x00", "", -1)
+               if d == "" {
+                       return true
+               }
+               p.reconstructActiveFormattingElements()
+               p.addText(d)
+               if p.framesetOK && strings.TrimLeft(d, whitespace) != "" {
+                       // There were non-whitespace characters inserted.
+                       p.framesetOK = false
+               }
+       case StartTagToken:
+               switch p.tok.DataAtom {
+               case a.Html:
+                       copyAttributes(p.oe[0], p.tok)
+               case a.Base, a.Basefont, a.Bgsound, a.Command, a.Link, a.Meta, a.Noframes, a.Script, a.Style, a.Title:
+                       return inHeadIM(p)
+               case a.Body:
+                       if len(p.oe) >= 2 {
+                               body := p.oe[1]
+                               if body.Type == ElementNode && body.DataAtom == a.Body {
+                                       p.framesetOK = false
+                                       copyAttributes(body, p.tok)
+                               }
+                       }
+               case a.Frameset:
+                       if !p.framesetOK || len(p.oe) < 2 || p.oe[1].DataAtom != a.Body {
+                               // Ignore the token.
+                               return true
+                       }
+                       body := p.oe[1]
+                       if body.Parent != nil {
+                               body.Parent.RemoveChild(body)
+                       }
+                       p.oe = p.oe[:1]
+                       p.addElement()
+                       p.im = inFramesetIM
+                       return true
+               case a.Address, a.Article, a.Aside, a.Blockquote, a.Center, a.Details, a.Dir, a.Div, a.Dl, a.Fieldset, a.Figcaption, a.Figure, a.Footer, a.Header, a.Hgroup, a.Menu, a.Nav, a.Ol, a.P, a.Section, a.Summary, a.Ul:
+                       p.popUntil(buttonScope, a.P)
+                       p.addElement()
+               case a.H1, a.H2, a.H3, a.H4, a.H5, a.H6:
+                       p.popUntil(buttonScope, a.P)
+                       switch n := p.top(); n.DataAtom {
+                       case a.H1, a.H2, a.H3, a.H4, a.H5, a.H6:
+                               p.oe.pop()
+                       }
+                       p.addElement()
+               case a.Pre, a.Listing:
+                       p.popUntil(buttonScope, a.P)
+                       p.addElement()
+                       // The newline, if any, will be dealt with by the TextToken case.
+                       p.framesetOK = false
+               case a.Form:
+                       if p.form == nil {
+                               p.popUntil(buttonScope, a.P)
+                               p.addElement()
+                               p.form = p.top()
+                       }
+               case a.Li:
+                       p.framesetOK = false
+                       for i := len(p.oe) - 1; i >= 0; i-- {
+                               node := p.oe[i]
+                               switch node.DataAtom {
+                               case a.Li:
+                                       p.oe = p.oe[:i]
+                               case a.Address, a.Div, a.P:
+                                       continue
+                               default:
+                                       if !isSpecialElement(node) {
+                                               continue
+                                       }
+                               }
+                               break
+                       }
+                       p.popUntil(buttonScope, a.P)
+                       p.addElement()
+               case a.Dd, a.Dt:
+                       p.framesetOK = false
+                       for i := len(p.oe) - 1; i >= 0; i-- {
+                               node := p.oe[i]
+                               switch node.DataAtom {
+                               case a.Dd, a.Dt:
+                                       p.oe = p.oe[:i]
+                               case a.Address, a.Div, a.P:
+                                       continue
+                               default:
+                                       if !isSpecialElement(node) {
+                                               continue
+                                       }
+                               }
+                               break
+                       }
+                       p.popUntil(buttonScope, a.P)
+                       p.addElement()
+               case a.Plaintext:
+                       p.popUntil(buttonScope, a.P)
+                       p.addElement()
+               case a.Button:
+                       p.popUntil(defaultScope, a.Button)
+                       p.reconstructActiveFormattingElements()
+                       p.addElement()
+                       p.framesetOK = false
+               case a.A:
+                       for i := len(p.afe) - 1; i >= 0 && p.afe[i].Type != scopeMarkerNode; i-- {
+                               if n := p.afe[i]; n.Type == ElementNode && n.DataAtom == a.A {
+                                       p.inBodyEndTagFormatting(a.A)
+                                       p.oe.remove(n)
+                                       p.afe.remove(n)
+                                       break
+                               }
+                       }
+                       p.reconstructActiveFormattingElements()
+                       p.addFormattingElement()
+               case a.B, a.Big, a.Code, a.Em, a.Font, a.I, a.S, a.Small, a.Strike, a.Strong, a.Tt, a.U:
+                       p.reconstructActiveFormattingElements()
+                       p.addFormattingElement()
+               case a.Nobr:
+                       p.reconstructActiveFormattingElements()
+                       if p.elementInScope(defaultScope, a.Nobr) {
+                               p.inBodyEndTagFormatting(a.Nobr)
+                               p.reconstructActiveFormattingElements()
+                       }
+                       p.addFormattingElement()
+               case a.Applet, a.Marquee, a.Object:
+                       p.reconstructActiveFormattingElements()
+                       p.addElement()
+                       p.afe = append(p.afe, &scopeMarker)
+                       p.framesetOK = false
+               case a.Table:
+                       if !p.quirks {
+                               p.popUntil(buttonScope, a.P)
+                       }
+                       p.addElement()
+                       p.framesetOK = false
+                       p.im = inTableIM
+                       return true
+               case a.Area, a.Br, a.Embed, a.Img, a.Input, a.Keygen, a.Wbr:
+                       p.reconstructActiveFormattingElements()
+                       p.addElement()
+                       p.oe.pop()
+                       p.acknowledgeSelfClosingTag()
+                       if p.tok.DataAtom == a.Input {
+                               for _, t := range p.tok.Attr {
+                                       if t.Key == "type" {
+                                               if strings.ToLower(t.Val) == "hidden" {
+                                                       // Skip setting framesetOK = false
+                                                       return true
+                                               }
+                                       }
+                               }
+                       }
+                       p.framesetOK = false
+               case a.Param, a.Source, a.Track:
+                       p.addElement()
+                       p.oe.pop()
+                       p.acknowledgeSelfClosingTag()
+               case a.Hr:
+                       p.popUntil(buttonScope, a.P)
+                       p.addElement()
+                       p.oe.pop()
+                       p.acknowledgeSelfClosingTag()
+                       p.framesetOK = false
+               case a.Image:
+                       p.tok.DataAtom = a.Img
+                       p.tok.Data = a.Img.String()
+                       return false
+               case a.Isindex:
+                       if p.form != nil {
+                               // Ignore the token.
+                               return true
+                       }
+                       action := ""
+                       prompt := "This is a searchable index. Enter search keywords: "
+                       attr := []Attribute{{Key: "name", Val: "isindex"}}
+                       for _, t := range p.tok.Attr {
+                               switch t.Key {
+                               case "action":
+                                       action = t.Val
+                               case "name":
+                                       // Ignore the attribute.
+                               case "prompt":
+                                       prompt = t.Val
+                               default:
+                                       attr = append(attr, t)
+                               }
+                       }
+                       p.acknowledgeSelfClosingTag()
+                       p.popUntil(buttonScope, a.P)
+                       p.parseImpliedToken(StartTagToken, a.Form, a.Form.String())
+                       if action != "" {
+                               p.form.Attr = []Attribute{{Key: "action", Val: action}}
+                       }
+                       p.parseImpliedToken(StartTagToken, a.Hr, a.Hr.String())
+                       p.parseImpliedToken(StartTagToken, a.Label, a.Label.String())
+                       p.addText(prompt)
+                       p.addChild(&Node{
+                               Type:     ElementNode,
+                               DataAtom: a.Input,
+                               Data:     a.Input.String(),
+                               Attr:     attr,
+                       })
+                       p.oe.pop()
+                       p.parseImpliedToken(EndTagToken, a.Label, a.Label.String())
+                       p.parseImpliedToken(StartTagToken, a.Hr, a.Hr.String())
+                       p.parseImpliedToken(EndTagToken, a.Form, a.Form.String())
+               case a.Textarea:
+                       p.addElement()
+                       p.setOriginalIM()
+                       p.framesetOK = false
+                       p.im = textIM
+               case a.Xmp:
+                       p.popUntil(buttonScope, a.P)
+                       p.reconstructActiveFormattingElements()
+                       p.framesetOK = false
+                       p.addElement()
+                       p.setOriginalIM()
+                       p.im = textIM
+               case a.Iframe:
+                       p.framesetOK = false
+                       p.addElement()
+                       p.setOriginalIM()
+                       p.im = textIM
+               case a.Noembed, a.Noscript:
+                       p.addElement()
+                       p.setOriginalIM()
+                       p.im = textIM
+               case a.Select:
+                       p.reconstructActiveFormattingElements()
+                       p.addElement()
+                       p.framesetOK = false
+                       p.im = inSelectIM
+                       return true
+               case a.Optgroup, a.Option:
+                       if p.top().DataAtom == a.Option {
+                               p.oe.pop()
+                       }
+                       p.reconstructActiveFormattingElements()
+                       p.addElement()
+               case a.Rp, a.Rt:
+                       if p.elementInScope(defaultScope, a.Ruby) {
+                               p.generateImpliedEndTags()
+                       }
+                       p.addElement()
+               case a.Math, a.Svg:
+                       p.reconstructActiveFormattingElements()
+                       if p.tok.DataAtom == a.Math {
+                               adjustAttributeNames(p.tok.Attr, mathMLAttributeAdjustments)
+                       } else {
+                               adjustAttributeNames(p.tok.Attr, svgAttributeAdjustments)
+                       }
+                       adjustForeignAttributes(p.tok.Attr)
+                       p.addElement()
+                       p.top().Namespace = p.tok.Data
+                       if p.hasSelfClosingToken {
+                               p.oe.pop()
+                               p.acknowledgeSelfClosingTag()
+                       }
+                       return true
+               case a.Caption, a.Col, a.Colgroup, a.Frame, a.Head, a.Tbody, a.Td, a.Tfoot, a.Th, a.Thead, a.Tr:
+                       // Ignore the token.
+               default:
+                       p.reconstructActiveFormattingElements()
+                       p.addElement()
+               }
+       case EndTagToken:
+               switch p.tok.DataAtom {
+               case a.Body:
+                       if p.elementInScope(defaultScope, a.Body) {
+                               p.im = afterBodyIM
+                       }
+               case a.Html:
+                       if p.elementInScope(defaultScope, a.Body) {
+                               p.parseImpliedToken(EndTagToken, a.Body, a.Body.String())
+                               return false
+                       }
+                       return true
+               case a.Address, a.Article, a.Aside, a.Blockquote, a.Button, a.Center, a.Details, a.Dir, a.Div, a.Dl, a.Fieldset, a.Figcaption, a.Figure, a.Footer, a.Header, a.Hgroup, a.Listing, a.Menu, a.Nav, a.Ol, a.Pre, a.Section, a.Summary, a.Ul:
+                       p.popUntil(defaultScope, p.tok.DataAtom)
+               case a.Form:
+                       node := p.form
+                       p.form = nil
+                       i := p.indexOfElementInScope(defaultScope, a.Form)
+                       if node == nil || i == -1 || p.oe[i] != node {
+                               // Ignore the token.
+                               return true
+                       }
+                       p.generateImpliedEndTags()
+                       p.oe.remove(node)
+               case a.P:
+                       if !p.elementInScope(buttonScope, a.P) {
+                               p.parseImpliedToken(StartTagToken, a.P, a.P.String())
+                       }
+                       p.popUntil(buttonScope, a.P)
+               case a.Li:
+                       p.popUntil(listItemScope, a.Li)
+               case a.Dd, a.Dt:
+                       p.popUntil(defaultScope, p.tok.DataAtom)
+               case a.H1, a.H2, a.H3, a.H4, a.H5, a.H6:
+                       p.popUntil(defaultScope, a.H1, a.H2, a.H3, a.H4, a.H5, a.H6)
+               case a.A, a.B, a.Big, a.Code, a.Em, a.Font, a.I, a.Nobr, a.S, a.Small, a.Strike, a.Strong, a.Tt, a.U:
+                       p.inBodyEndTagFormatting(p.tok.DataAtom)
+               case a.Applet, a.Marquee, a.Object:
+                       if p.popUntil(defaultScope, p.tok.DataAtom) {
+                               p.clearActiveFormattingElements()
+                       }
+               case a.Br:
+                       p.tok.Type = StartTagToken
+                       return false
+               default:
+                       p.inBodyEndTagOther(p.tok.DataAtom)
+               }
+       case CommentToken:
+               p.addChild(&Node{
+                       Type: CommentNode,
+                       Data: p.tok.Data,
+               })
+       }
+
+       return true
+}
+
+func (p *parser) inBodyEndTagFormatting(tagAtom a.Atom) {
+       // This is the "adoption agency" algorithm, described at
+       // https://html.spec.whatwg.org/multipage/syntax.html#adoptionAgency
+
+       // TODO: this is a fairly literal line-by-line translation of that algorithm.
+       // Once the code successfully parses the comprehensive test suite, we should
+       // refactor this code to be more idiomatic.
+
+       // Steps 1-4. The outer loop.
+       for i := 0; i < 8; i++ {
+               // Step 5. Find the formatting element.
+               var formattingElement *Node
+               for j := len(p.afe) - 1; j >= 0; j-- {
+                       if p.afe[j].Type == scopeMarkerNode {
+                               break
+                       }
+                       if p.afe[j].DataAtom == tagAtom {
+                               formattingElement = p.afe[j]
+                               break
+                       }
+               }
+               if formattingElement == nil {
+                       p.inBodyEndTagOther(tagAtom)
+                       return
+               }
+               feIndex := p.oe.index(formattingElement)
+               if feIndex == -1 {
+                       p.afe.remove(formattingElement)
+                       return
+               }
+               if !p.elementInScope(defaultScope, tagAtom) {
+                       // Ignore the tag.
+                       return
+               }
+
+               // Steps 9-10. Find the furthest block.
+               var furthestBlock *Node
+               for _, e := range p.oe[feIndex:] {
+                       if isSpecialElement(e) {
+                               furthestBlock = e
+                               break
+                       }
+               }
+               if furthestBlock == nil {
+                       e := p.oe.pop()
+                       for e != formattingElement {
+                               e = p.oe.pop()
+                       }
+                       p.afe.remove(e)
+                       return
+               }
+
+               // Steps 11-12. Find the common ancestor and bookmark node.
+               commonAncestor := p.oe[feIndex-1]
+               bookmark := p.afe.index(formattingElement)
+
+               // Step 13. The inner loop. Find the lastNode to reparent.
+               lastNode := furthestBlock
+               node := furthestBlock
+               x := p.oe.index(node)
+               // Steps 13.1-13.2
+               for j := 0; j < 3; j++ {
+                       // Step 13.3.
+                       x--
+                       node = p.oe[x]
+                       // Step 13.4 - 13.5.
+                       if p.afe.index(node) == -1 {
+                               p.oe.remove(node)
+                               continue
+                       }
+                       // Step 13.6.
+                       if node == formattingElement {
+                               break
+                       }
+                       // Step 13.7.
+                       clone := node.clone()
+                       p.afe[p.afe.index(node)] = clone
+                       p.oe[p.oe.index(node)] = clone
+                       node = clone
+                       // Step 13.8.
+                       if lastNode == furthestBlock {
+                               bookmark = p.afe.index(node) + 1
+                       }
+                       // Step 13.9.
+                       if lastNode.Parent != nil {
+                               lastNode.Parent.RemoveChild(lastNode)
+                       }
+                       node.AppendChild(lastNode)
+                       // Step 13.10.
+                       lastNode = node
+               }
+
+               // Step 14. Reparent lastNode to the common ancestor,
+               // or for misnested table nodes, to the foster parent.
+               if lastNode.Parent != nil {
+                       lastNode.Parent.RemoveChild(lastNode)
+               }
+               switch commonAncestor.DataAtom {
+               case a.Table, a.Tbody, a.Tfoot, a.Thead, a.Tr:
+                       p.fosterParent(lastNode)
+               default:
+                       commonAncestor.AppendChild(lastNode)
+               }
+
+               // Steps 15-17. Reparent nodes from the furthest block's children
+               // to a clone of the formatting element.
+               clone := formattingElement.clone()
+               reparentChildren(clone, furthestBlock)
+               furthestBlock.AppendChild(clone)
+
+               // Step 18. Fix up the list of active formatting elements.
+               if oldLoc := p.afe.index(formattingElement); oldLoc != -1 && oldLoc < bookmark {
+                       // Move the bookmark with the rest of the list.
+                       bookmark--
+               }
+               p.afe.remove(formattingElement)
+               p.afe.insert(bookmark, clone)
+
+               // Step 19. Fix up the stack of open elements.
+               p.oe.remove(formattingElement)
+               p.oe.insert(p.oe.index(furthestBlock)+1, clone)
+       }
+}
+
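+// A rough worked example of the adoption agency algorithm above: the misnested
+// input "<b>1<p>2</b>3</p>" is rebuilt so that the formatting element is cloned
+// into the furthest block, giving a tree equivalent to
+// "<b>1</b><p><b>2</b>3</p>".
+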
+// inBodyEndTagOther performs the "any other end tag" algorithm for inBodyIM.
+// "Any other end tag" handling from 12.2.5.5 The rules for parsing tokens in foreign content
+// https://html.spec.whatwg.org/multipage/syntax.html#parsing-main-inforeign
+func (p *parser) inBodyEndTagOther(tagAtom a.Atom) {
+       for i := len(p.oe) - 1; i >= 0; i-- {
+               if p.oe[i].DataAtom == tagAtom {
+                       p.oe = p.oe[:i]
+                       break
+               }
+               if isSpecialElement(p.oe[i]) {
+                       break
+               }
+       }
+}
+
+// Section 12.2.5.4.8.
+func textIM(p *parser) bool {
+       switch p.tok.Type {
+       case ErrorToken:
+               p.oe.pop()
+       case TextToken:
+               d := p.tok.Data
+               if n := p.oe.top(); n.DataAtom == a.Textarea && n.FirstChild == nil {
+                       // Ignore a newline at the start of a <textarea> block.
+                       if d != "" && d[0] == '\r' {
+                               d = d[1:]
+                       }
+                       if d != "" && d[0] == '\n' {
+                               d = d[1:]
+                       }
+               }
+               if d == "" {
+                       return true
+               }
+               p.addText(d)
+               return true
+       case EndTagToken:
+               p.oe.pop()
+       }
+       p.im = p.originalIM
+       p.originalIM = nil
+       return p.tok.Type == EndTagToken
+}
+
+// Section 12.2.5.4.9.
+func inTableIM(p *parser) bool {
+       switch p.tok.Type {
+       case ErrorToken:
+               // Stop parsing.
+               return true
+       case TextToken:
+               p.tok.Data = strings.Replace(p.tok.Data, "\x00", "", -1)
+               switch p.oe.top().DataAtom {
+               case a.Table, a.Tbody, a.Tfoot, a.Thead, a.Tr:
+                       if strings.Trim(p.tok.Data, whitespace) == "" {
+                               p.addText(p.tok.Data)
+                               return true
+                       }
+               }
+       case StartTagToken:
+               switch p.tok.DataAtom {
+               case a.Caption:
+                       p.clearStackToContext(tableScope)
+                       p.afe = append(p.afe, &scopeMarker)
+                       p.addElement()
+                       p.im = inCaptionIM
+                       return true
+               case a.Colgroup:
+                       p.clearStackToContext(tableScope)
+                       p.addElement()
+                       p.im = inColumnGroupIM
+                       return true
+               case a.Col:
+                       p.parseImpliedToken(StartTagToken, a.Colgroup, a.Colgroup.String())
+                       return false
+               case a.Tbody, a.Tfoot, a.Thead:
+                       p.clearStackToContext(tableScope)
+                       p.addElement()
+                       p.im = inTableBodyIM
+                       return true
+               case a.Td, a.Th, a.Tr:
+                       p.parseImpliedToken(StartTagToken, a.Tbody, a.Tbody.String())
+                       return false
+               case a.Table:
+                       if p.popUntil(tableScope, a.Table) {
+                               p.resetInsertionMode()
+                               return false
+                       }
+                       // Ignore the token.
+                       return true
+               case a.Style, a.Script:
+                       return inHeadIM(p)
+               case a.Input:
+                       for _, t := range p.tok.Attr {
+                               if t.Key == "type" && strings.ToLower(t.Val) == "hidden" {
+                                       p.addElement()
+                                       p.oe.pop()
+                                       return true
+                               }
+                       }
+                       // Otherwise drop down to the default action.
+               case a.Form:
+                       if p.form != nil {
+                               // Ignore the token.
+                               return true
+                       }
+                       p.addElement()
+                       p.form = p.oe.pop()
+               case a.Select:
+                       p.reconstructActiveFormattingElements()
+                       switch p.top().DataAtom {
+                       case a.Table, a.Tbody, a.Tfoot, a.Thead, a.Tr:
+                               p.fosterParenting = true
+                       }
+                       p.addElement()
+                       p.fosterParenting = false
+                       p.framesetOK = false
+                       p.im = inSelectInTableIM
+                       return true
+               }
+       case EndTagToken:
+               switch p.tok.DataAtom {
+               case a.Table:
+                       if p.popUntil(tableScope, a.Table) {
+                               p.resetInsertionMode()
+                               return true
+                       }
+                       // Ignore the token.
+                       return true
+               case a.Body, a.Caption, a.Col, a.Colgroup, a.Html, a.Tbody, a.Td, a.Tfoot, a.Th, a.Thead, a.Tr:
+                       // Ignore the token.
+                       return true
+               }
+       case CommentToken:
+               p.addChild(&Node{
+                       Type: CommentNode,
+                       Data: p.tok.Data,
+               })
+               return true
+       case DoctypeToken:
+               // Ignore the token.
+               return true
+       }
+
+       p.fosterParenting = true
+       defer func() { p.fosterParenting = false }()
+
+       return inBodyIM(p)
+}
+
+// Section 12.2.5.4.11.
+func inCaptionIM(p *parser) bool {
+       switch p.tok.Type {
+       case StartTagToken:
+               switch p.tok.DataAtom {
+               case a.Caption, a.Col, a.Colgroup, a.Tbody, a.Td, a.Tfoot, a.Thead, a.Tr:
+                       if p.popUntil(tableScope, a.Caption) {
+                               p.clearActiveFormattingElements()
+                               p.im = inTableIM
+                               return false
+                       } else {
+                               // Ignore the token.
+                               return true
+                       }
+               case a.Select:
+                       p.reconstructActiveFormattingElements()
+                       p.addElement()
+                       p.framesetOK = false
+                       p.im = inSelectInTableIM
+                       return true
+               }
+       case EndTagToken:
+               switch p.tok.DataAtom {
+               case a.Caption:
+                       if p.popUntil(tableScope, a.Caption) {
+                               p.clearActiveFormattingElements()
+                               p.im = inTableIM
+                       }
+                       return true
+               case a.Table:
+                       if p.popUntil(tableScope, a.Caption) {
+                               p.clearActiveFormattingElements()
+                               p.im = inTableIM
+                               return false
+                       } else {
+                               // Ignore the token.
+                               return true
+                       }
+               case a.Body, a.Col, a.Colgroup, a.Html, a.Tbody, a.Td, a.Tfoot, a.Th, a.Thead, a.Tr:
+                       // Ignore the token.
+                       return true
+               }
+       }
+       return inBodyIM(p)
+}
+
+// Section 12.2.5.4.12.
+func inColumnGroupIM(p *parser) bool {
+       switch p.tok.Type {
+       case TextToken:
+               s := strings.TrimLeft(p.tok.Data, whitespace)
+               if len(s) < len(p.tok.Data) {
+                       // Add the initial whitespace to the current node.
+                       p.addText(p.tok.Data[:len(p.tok.Data)-len(s)])
+                       if s == "" {
+                               return true
+                       }
+                       p.tok.Data = s
+               }
+       case CommentToken:
+               p.addChild(&Node{
+                       Type: CommentNode,
+                       Data: p.tok.Data,
+               })
+               return true
+       case DoctypeToken:
+               // Ignore the token.
+               return true
+       case StartTagToken:
+               switch p.tok.DataAtom {
+               case a.Html:
+                       return inBodyIM(p)
+               case a.Col:
+                       p.addElement()
+                       p.oe.pop()
+                       p.acknowledgeSelfClosingTag()
+                       return true
+               }
+       case EndTagToken:
+               switch p.tok.DataAtom {
+               case a.Colgroup:
+                       if p.oe.top().DataAtom != a.Html {
+                               p.oe.pop()
+                               p.im = inTableIM
+                       }
+                       return true
+               case a.Col:
+                       // Ignore the token.
+                       return true
+               }
+       }
+       if p.oe.top().DataAtom != a.Html {
+               p.oe.pop()
+               p.im = inTableIM
+               return false
+       }
+       return true
+}
+
+// Section 12.2.5.4.13.
+func inTableBodyIM(p *parser) bool {
+       switch p.tok.Type {
+       case StartTagToken:
+               switch p.tok.DataAtom {
+               case a.Tr:
+                       p.clearStackToContext(tableBodyScope)
+                       p.addElement()
+                       p.im = inRowIM
+                       return true
+               case a.Td, a.Th:
+                       p.parseImpliedToken(StartTagToken, a.Tr, a.Tr.String())
+                       return false
+               case a.Caption, a.Col, a.Colgroup, a.Tbody, a.Tfoot, a.Thead:
+                       if p.popUntil(tableScope, a.Tbody, a.Thead, a.Tfoot) {
+                               p.im = inTableIM
+                               return false
+                       }
+                       // Ignore the token.
+                       return true
+               }
+       case EndTagToken:
+               switch p.tok.DataAtom {
+               case a.Tbody, a.Tfoot, a.Thead:
+                       if p.elementInScope(tableScope, p.tok.DataAtom) {
+                               p.clearStackToContext(tableBodyScope)
+                               p.oe.pop()
+                               p.im = inTableIM
+                       }
+                       return true
+               case a.Table:
+                       if p.popUntil(tableScope, a.Tbody, a.Thead, a.Tfoot) {
+                               p.im = inTableIM
+                               return false
+                       }
+                       // Ignore the token.
+                       return true
+               case a.Body, a.Caption, a.Col, a.Colgroup, a.Html, a.Td, a.Th, a.Tr:
+                       // Ignore the token.
+                       return true
+               }
+       case CommentToken:
+               p.addChild(&Node{
+                       Type: CommentNode,
+                       Data: p.tok.Data,
+               })
+               return true
+       }
+
+       return inTableIM(p)
+}
+
+// Section 12.2.5.4.14.
+func inRowIM(p *parser) bool {
+       switch p.tok.Type {
+       case StartTagToken:
+               switch p.tok.DataAtom {
+               case a.Td, a.Th:
+                       p.clearStackToContext(tableRowScope)
+                       p.addElement()
+                       p.afe = append(p.afe, &scopeMarker)
+                       p.im = inCellIM
+                       return true
+               case a.Caption, a.Col, a.Colgroup, a.Tbody, a.Tfoot, a.Thead, a.Tr:
+                       if p.popUntil(tableScope, a.Tr) {
+                               p.im = inTableBodyIM
+                               return false
+                       }
+                       // Ignore the token.
+                       return true
+               }
+       case EndTagToken:
+               switch p.tok.DataAtom {
+               case a.Tr:
+                       if p.popUntil(tableScope, a.Tr) {
+                               p.im = inTableBodyIM
+                               return true
+                       }
+                       // Ignore the token.
+                       return true
+               case a.Table:
+                       if p.popUntil(tableScope, a.Tr) {
+                               p.im = inTableBodyIM
+                               return false
+                       }
+                       // Ignore the token.
+                       return true
+               case a.Tbody, a.Tfoot, a.Thead:
+                       if p.elementInScope(tableScope, p.tok.DataAtom) {
+                               p.parseImpliedToken(EndTagToken, a.Tr, a.Tr.String())
+                               return false
+                       }
+                       // Ignore the token.
+                       return true
+               case a.Body, a.Caption, a.Col, a.Colgroup, a.Html, a.Td, a.Th:
+                       // Ignore the token.
+                       return true
+               }
+       }
+
+       return inTableIM(p)
+}
+
+// Section 12.2.5.4.15.
+func inCellIM(p *parser) bool {
+       switch p.tok.Type {
+       case StartTagToken:
+               switch p.tok.DataAtom {
+               case a.Caption, a.Col, a.Colgroup, a.Tbody, a.Td, a.Tfoot, a.Th, a.Thead, a.Tr:
+                       if p.popUntil(tableScope, a.Td, a.Th) {
+                               // Close the cell and reprocess.
+                               p.clearActiveFormattingElements()
+                               p.im = inRowIM
+                               return false
+                       }
+                       // Ignore the token.
+                       return true
+               case a.Select:
+                       p.reconstructActiveFormattingElements()
+                       p.addElement()
+                       p.framesetOK = false
+                       p.im = inSelectInTableIM
+                       return true
+               }
+       case EndTagToken:
+               switch p.tok.DataAtom {
+               case a.Td, a.Th:
+                       if !p.popUntil(tableScope, p.tok.DataAtom) {
+                               // Ignore the token.
+                               return true
+                       }
+                       p.clearActiveFormattingElements()
+                       p.im = inRowIM
+                       return true
+               case a.Body, a.Caption, a.Col, a.Colgroup, a.Html:
+                       // Ignore the token.
+                       return true
+               case a.Table, a.Tbody, a.Tfoot, a.Thead, a.Tr:
+                       if !p.elementInScope(tableScope, p.tok.DataAtom) {
+                               // Ignore the token.
+                               return true
+                       }
+                       // Close the cell and reprocess.
+                       p.popUntil(tableScope, a.Td, a.Th)
+                       p.clearActiveFormattingElements()
+                       p.im = inRowIM
+                       return false
+               }
+       }
+       return inBodyIM(p)
+}
+
+// Section 12.2.5.4.16.
+func inSelectIM(p *parser) bool {
+       switch p.tok.Type {
+       case ErrorToken:
+               // Stop parsing.
+               return true
+       case TextToken:
+               p.addText(strings.Replace(p.tok.Data, "\x00", "", -1))
+       case StartTagToken:
+               switch p.tok.DataAtom {
+               case a.Html:
+                       return inBodyIM(p)
+               case a.Option:
+                       if p.top().DataAtom == a.Option {
+                               p.oe.pop()
+                       }
+                       p.addElement()
+               case a.Optgroup:
+                       if p.top().DataAtom == a.Option {
+                               p.oe.pop()
+                       }
+                       if p.top().DataAtom == a.Optgroup {
+                               p.oe.pop()
+                       }
+                       p.addElement()
+               case a.Select:
+                       p.tok.Type = EndTagToken
+                       return false
+               case a.Input, a.Keygen, a.Textarea:
+                       if p.elementInScope(selectScope, a.Select) {
+                               p.parseImpliedToken(EndTagToken, a.Select, a.Select.String())
+                               return false
+                       }
+                       // In order to properly ignore <textarea>, we need to change the tokenizer mode.
+                       p.tokenizer.NextIsNotRawText()
+                       // Ignore the token.
+                       return true
+               case a.Script:
+                       return inHeadIM(p)
+               }
+       case EndTagToken:
+               switch p.tok.DataAtom {
+               case a.Option:
+                       if p.top().DataAtom == a.Option {
+                               p.oe.pop()
+                       }
+               case a.Optgroup:
+                       i := len(p.oe) - 1
+                       if p.oe[i].DataAtom == a.Option {
+                               i--
+                       }
+                       if p.oe[i].DataAtom == a.Optgroup {
+                               p.oe = p.oe[:i]
+                       }
+               case a.Select:
+                       if p.popUntil(selectScope, a.Select) {
+                               p.resetInsertionMode()
+                       }
+               }
+       case CommentToken:
+               p.addChild(&Node{
+                       Type: CommentNode,
+                       Data: p.tok.Data,
+               })
+       case DoctypeToken:
+               // Ignore the token.
+               return true
+       }
+
+       return true
+}
+
+// Section 12.2.5.4.17.
+func inSelectInTableIM(p *parser) bool {
+       switch p.tok.Type {
+       case StartTagToken, EndTagToken:
+               switch p.tok.DataAtom {
+               case a.Caption, a.Table, a.Tbody, a.Tfoot, a.Thead, a.Tr, a.Td, a.Th:
+                       if p.tok.Type == StartTagToken || p.elementInScope(tableScope, p.tok.DataAtom) {
+                               p.parseImpliedToken(EndTagToken, a.Select, a.Select.String())
+                               return false
+                       } else {
+                               // Ignore the token.
+                               return true
+                       }
+               }
+       }
+       return inSelectIM(p)
+}
+
+// Section 12.2.5.4.18.
+func afterBodyIM(p *parser) bool {
+       switch p.tok.Type {
+       case ErrorToken:
+               // Stop parsing.
+               return true
+       case TextToken:
+               s := strings.TrimLeft(p.tok.Data, whitespace)
+               if len(s) == 0 {
+                       // It was all whitespace.
+                       return inBodyIM(p)
+               }
+       case StartTagToken:
+               if p.tok.DataAtom == a.Html {
+                       return inBodyIM(p)
+               }
+       case EndTagToken:
+               if p.tok.DataAtom == a.Html {
+                       if !p.fragment {
+                               p.im = afterAfterBodyIM
+                       }
+                       return true
+               }
+       case CommentToken:
+               // The comment is attached to the <html> element.
+               if len(p.oe) < 1 || p.oe[0].DataAtom != a.Html {
+                       panic("html: bad parser state: <html> element not found, in the after-body insertion mode")
+               }
+               p.oe[0].AppendChild(&Node{
+                       Type: CommentNode,
+                       Data: p.tok.Data,
+               })
+               return true
+       }
+       p.im = inBodyIM
+       return false
+}
+
+// Section 12.2.5.4.19.
+func inFramesetIM(p *parser) bool {
+       switch p.tok.Type {
+       case CommentToken:
+               p.addChild(&Node{
+                       Type: CommentNode,
+                       Data: p.tok.Data,
+               })
+       case TextToken:
+               // Ignore all text but whitespace.
+               s := strings.Map(func(c rune) rune {
+                       switch c {
+                       case ' ', '\t', '\n', '\f', '\r':
+                               return c
+                       }
+                       return -1
+               }, p.tok.Data)
+               if s != "" {
+                       p.addText(s)
+               }
+       case StartTagToken:
+               switch p.tok.DataAtom {
+               case a.Html:
+                       return inBodyIM(p)
+               case a.Frameset:
+                       p.addElement()
+               case a.Frame:
+                       p.addElement()
+                       p.oe.pop()
+                       p.acknowledgeSelfClosingTag()
+               case a.Noframes:
+                       return inHeadIM(p)
+               }
+       case EndTagToken:
+               switch p.tok.DataAtom {
+               case a.Frameset:
+                       if p.oe.top().DataAtom != a.Html {
+                               p.oe.pop()
+                               if p.oe.top().DataAtom != a.Frameset {
+                                       p.im = afterFramesetIM
+                                       return true
+                               }
+                       }
+               }
+       default:
+               // Ignore the token.
+       }
+       return true
+}
+
+// Section 12.2.5.4.20.
+func afterFramesetIM(p *parser) bool {
+       switch p.tok.Type {
+       case CommentToken:
+               p.addChild(&Node{
+                       Type: CommentNode,
+                       Data: p.tok.Data,
+               })
+       case TextToken:
+               // Ignore all text but whitespace.
+               s := strings.Map(func(c rune) rune {
+                       switch c {
+                       case ' ', '\t', '\n', '\f', '\r':
+                               return c
+                       }
+                       return -1
+               }, p.tok.Data)
+               if s != "" {
+                       p.addText(s)
+               }
+       case StartTagToken:
+               switch p.tok.DataAtom {
+               case a.Html:
+                       return inBodyIM(p)
+               case a.Noframes:
+                       return inHeadIM(p)
+               }
+       case EndTagToken:
+               switch p.tok.DataAtom {
+               case a.Html:
+                       p.im = afterAfterFramesetIM
+                       return true
+               }
+       default:
+               // Ignore the token.
+       }
+       return true
+}
+
+// Section 12.2.5.4.21.
+func afterAfterBodyIM(p *parser) bool {
+       switch p.tok.Type {
+       case ErrorToken:
+               // Stop parsing.
+               return true
+       case TextToken:
+               s := strings.TrimLeft(p.tok.Data, whitespace)
+               if len(s) == 0 {
+                       // It was all whitespace.
+                       return inBodyIM(p)
+               }
+       case StartTagToken:
+               if p.tok.DataAtom == a.Html {
+                       return inBodyIM(p)
+               }
+       case CommentToken:
+               p.doc.AppendChild(&Node{
+                       Type: CommentNode,
+                       Data: p.tok.Data,
+               })
+               return true
+       case DoctypeToken:
+               return inBodyIM(p)
+       }
+       p.im = inBodyIM
+       return false
+}
+
+// Section 12.2.5.4.22.
+func afterAfterFramesetIM(p *parser) bool {
+       switch p.tok.Type {
+       case CommentToken:
+               p.doc.AppendChild(&Node{
+                       Type: CommentNode,
+                       Data: p.tok.Data,
+               })
+       case TextToken:
+               // Ignore all text but whitespace.
+               s := strings.Map(func(c rune) rune {
+                       switch c {
+                       case ' ', '\t', '\n', '\f', '\r':
+                               return c
+                       }
+                       return -1
+               }, p.tok.Data)
+               if s != "" {
+                       p.tok.Data = s
+                       return inBodyIM(p)
+               }
+       case StartTagToken:
+               switch p.tok.DataAtom {
+               case a.Html:
+                       return inBodyIM(p)
+               case a.Noframes:
+                       return inHeadIM(p)
+               }
+       case DoctypeToken:
+               return inBodyIM(p)
+       default:
+               // Ignore the token.
+       }
+       return true
+}
+
+const whitespaceOrNUL = whitespace + "\x00"
+
+// Section 12.2.5.5.
+func parseForeignContent(p *parser) bool {
+       switch p.tok.Type {
+       case TextToken:
+               if p.framesetOK {
+                       p.framesetOK = strings.TrimLeft(p.tok.Data, whitespaceOrNUL) == ""
+               }
+               p.tok.Data = strings.Replace(p.tok.Data, "\x00", "\ufffd", -1)
+               p.addText(p.tok.Data)
+       case CommentToken:
+               p.addChild(&Node{
+                       Type: CommentNode,
+                       Data: p.tok.Data,
+               })
+       case StartTagToken:
+               b := breakout[p.tok.Data]
+               if p.tok.DataAtom == a.Font {
+               loop:
+                       for _, attr := range p.tok.Attr {
+                               switch attr.Key {
+                               case "color", "face", "size":
+                                       b = true
+                                       break loop
+                               }
+                       }
+               }
+               if b {
+                       for i := len(p.oe) - 1; i >= 0; i-- {
+                               n := p.oe[i]
+                               if n.Namespace == "" || htmlIntegrationPoint(n) || mathMLTextIntegrationPoint(n) {
+                                       p.oe = p.oe[:i+1]
+                                       break
+                               }
+                       }
+                       return false
+               }
+               switch p.top().Namespace {
+               case "math":
+                       adjustAttributeNames(p.tok.Attr, mathMLAttributeAdjustments)
+               case "svg":
+                       // Adjust SVG tag names. The tokenizer lower-cases tag names, but
+                       // SVG wants e.g. "foreignObject" with a capital second "O".
+                       if x := svgTagNameAdjustments[p.tok.Data]; x != "" {
+                               p.tok.DataAtom = a.Lookup([]byte(x))
+                               p.tok.Data = x
+                       }
+                       adjustAttributeNames(p.tok.Attr, svgAttributeAdjustments)
+               default:
+                       panic("html: bad parser state: unexpected namespace")
+               }
+               adjustForeignAttributes(p.tok.Attr)
+               namespace := p.top().Namespace
+               p.addElement()
+               p.top().Namespace = namespace
+               if namespace != "" {
+                       // Don't let the tokenizer go into raw text mode in foreign content
+                       // (e.g. in an SVG <title> tag).
+                       p.tokenizer.NextIsNotRawText()
+               }
+               if p.hasSelfClosingToken {
+                       p.oe.pop()
+                       p.acknowledgeSelfClosingTag()
+               }
+       case EndTagToken:
+               for i := len(p.oe) - 1; i >= 0; i-- {
+                       if p.oe[i].Namespace == "" {
+                               return p.im(p)
+                       }
+                       if strings.EqualFold(p.oe[i].Data, p.tok.Data) {
+                               p.oe = p.oe[:i]
+                               break
+                       }
+               }
+               return true
+       default:
+               // Ignore the token.
+       }
+       return true
+}
+
+// Section 12.2.5.
+func (p *parser) inForeignContent() bool {
+       if len(p.oe) == 0 {
+               return false
+       }
+       n := p.oe[len(p.oe)-1]
+       if n.Namespace == "" {
+               return false
+       }
+       if mathMLTextIntegrationPoint(n) {
+               if p.tok.Type == StartTagToken && p.tok.DataAtom != a.Mglyph && p.tok.DataAtom != a.Malignmark {
+                       return false
+               }
+               if p.tok.Type == TextToken {
+                       return false
+               }
+       }
+       if n.Namespace == "math" && n.DataAtom == a.AnnotationXml && p.tok.Type == StartTagToken && p.tok.DataAtom == a.Svg {
+               return false
+       }
+       if htmlIntegrationPoint(n) && (p.tok.Type == StartTagToken || p.tok.Type == TextToken) {
+               return false
+       }
+       if p.tok.Type == ErrorToken {
+               return false
+       }
+       return true
+}
+
+// parseImpliedToken parses a token as though it had appeared in the parser's
+// input.
+func (p *parser) parseImpliedToken(t TokenType, dataAtom a.Atom, data string) {
+       realToken, selfClosing := p.tok, p.hasSelfClosingToken
+       p.tok = Token{
+               Type:     t,
+               DataAtom: dataAtom,
+               Data:     data,
+       }
+       p.hasSelfClosingToken = false
+       p.parseCurrentToken()
+       p.tok, p.hasSelfClosingToken = realToken, selfClosing
+}
+
+// parseCurrentToken runs the current token through the parsing routines
+// until it is consumed.
+func (p *parser) parseCurrentToken() {
+       if p.tok.Type == SelfClosingTagToken {
+               p.hasSelfClosingToken = true
+               p.tok.Type = StartTagToken
+       }
+
+       consumed := false
+       for !consumed {
+               if p.inForeignContent() {
+                       consumed = parseForeignContent(p)
+               } else {
+                       consumed = p.im(p)
+               }
+       }
+
+       if p.hasSelfClosingToken {
+               // This is a parse error, but ignore it.
+               p.hasSelfClosingToken = false
+       }
+}
+
+func (p *parser) parse() error {
+       // Iterate until EOF. Any other error will cause an early return.
+       var err error
+       for err != io.EOF {
+               // CDATA sections are allowed only in foreign content.
+               n := p.oe.top()
+               p.tokenizer.AllowCDATA(n != nil && n.Namespace != "")
+               // Read and parse the next token.
+               p.tokenizer.Next()
+               p.tok = p.tokenizer.Token()
+               if p.tok.Type == ErrorToken {
+                       err = p.tokenizer.Err()
+                       if err != nil && err != io.EOF {
+                               return err
+                       }
+               }
+               p.parseCurrentToken()
+       }
+       return nil
+}
+
+// Parse returns the parse tree for the HTML from the given Reader.
+// The input is assumed to be UTF-8 encoded.
+func Parse(r io.Reader) (*Node, error) {
+       p := &parser{
+               tokenizer: NewTokenizer(r),
+               doc: &Node{
+                       Type: DocumentNode,
+               },
+               scripting:  true,
+               framesetOK: true,
+               im:         initialIM,
+       }
+       err := p.parse()
+       if err != nil {
+               return nil, err
+       }
+       return p.doc, nil
+}
+
+// ParseFragment parses a fragment of HTML and returns the nodes that were
+// found. If the fragment is the InnerHTML for an existing element, pass that
+// element in context.
+func ParseFragment(r io.Reader, context *Node) ([]*Node, error) {
+       contextTag := ""
+       if context != nil {
+               if context.Type != ElementNode {
+                       return nil, errors.New("html: ParseFragment of non-element Node")
+               }
+               // The next check isn't just context.DataAtom.String() == context.Data because
+               // it is valid to pass an element whose tag isn't a known atom. For example,
+               // DataAtom == 0 and Data = "tagfromthefuture" is perfectly consistent.
+               if context.DataAtom != a.Lookup([]byte(context.Data)) {
+                       return nil, fmt.Errorf("html: inconsistent Node: DataAtom=%q, Data=%q", context.DataAtom, context.Data)
+               }
+               contextTag = context.DataAtom.String()
+       }
+       p := &parser{
+               tokenizer: NewTokenizerFragment(r, contextTag),
+               doc: &Node{
+                       Type: DocumentNode,
+               },
+               scripting: true,
+               fragment:  true,
+               context:   context,
+       }
+
+       root := &Node{
+               Type:     ElementNode,
+               DataAtom: a.Html,
+               Data:     a.Html.String(),
+       }
+       p.doc.AppendChild(root)
+       p.oe = nodeStack{root}
+       p.resetInsertionMode()
+
+       for n := context; n != nil; n = n.Parent {
+               if n.Type == ElementNode && n.DataAtom == a.Form {
+                       p.form = n
+                       break
+               }
+       }
+
+       err := p.parse()
+       if err != nil {
+               return nil, err
+       }
+
+       parent := p.doc
+       if context != nil {
+               parent = root
+       }
+
+       var result []*Node
+       for c := parent.FirstChild; c != nil; {
+               next := c.NextSibling
+               parent.RemoveChild(c)
+               result = append(result, c)
+               c = next
+       }
+       return result, nil
+}
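+
+// collectHrefs is an illustrative sketch, not part of the upstream
+// golang.org/x/net/html parse.go: it shows how a caller would typically use
+// Parse and then walk the resulting tree, here gathering the href attribute
+// of every <a> element. The helper name is hypothetical.
+func collectHrefs(r io.Reader) ([]string, error) {
+       doc, err := Parse(r)
+       if err != nil {
+               return nil, err
+       }
+       var hrefs []string
+       var visit func(*Node)
+       visit = func(n *Node) {
+               if n.Type == ElementNode && n.DataAtom == a.A {
+                       for _, attr := range n.Attr {
+                               if attr.Key == "href" {
+                                       hrefs = append(hrefs, attr.Val)
+                               }
+                       }
+               }
+               for c := n.FirstChild; c != nil; c = c.NextSibling {
+                       visit(c)
+               }
+       }
+       visit(doc)
+       return hrefs, nil
+}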
diff --git a/vendor/golang.org/x/net/html/render.go b/vendor/golang.org/x/net/html/render.go
new file mode 100644 (file)
index 0000000..d34564f
--- /dev/null
@@ -0,0 +1,271 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package html
+
+import (
+       "bufio"
+       "errors"
+       "fmt"
+       "io"
+       "strings"
+)
+
+type writer interface {
+       io.Writer
+       io.ByteWriter
+       WriteString(string) (int, error)
+}
+
+// Render renders the parse tree n to the given writer.
+//
+// Rendering is done on a 'best effort' basis: calling Parse on the output of
+// Render will always result in something similar to the original tree, but it
+// is not necessarily an exact clone unless the original tree was 'well-formed'.
+// 'Well-formed' is not easily specified; the HTML5 specification is
+// complicated.
+//
+// Calling Parse on arbitrary input typically results in a 'well-formed' parse
+// tree. However, it is possible for Parse to yield a 'badly-formed' parse tree.
+// For example, in a 'well-formed' parse tree, no <a> element is a child of
+// another <a> element: parsing "<a><a>" results in two sibling elements.
+// Similarly, in a 'well-formed' parse tree, no <a> element is a child of a
+// <table> element: parsing "<p><table><a>" results in a <p> with two sibling
+// children; the <a> is reparented to the <table>'s parent. However, calling
+// Parse on "<a><table><a>" does not return an error, but the result has an <a>
+// element with an <a> child, and is therefore not 'well-formed'.
+//
+// Programmatically constructed trees are typically also 'well-formed', but it
+// is possible to construct a tree that looks innocuous but, when rendered and
+// re-parsed, results in a different tree. A simple example is that a solitary
+// text node would become a tree containing <html>, <head> and <body> elements.
+// Another example is that the programmatic equivalent of "a<head>b</head>c"
+// becomes "<html><head><head/><body>abc</body></html>".
+func Render(w io.Writer, n *Node) error {
+       if x, ok := w.(writer); ok {
+               return render(x, n)
+       }
+       buf := bufio.NewWriter(w)
+       if err := render(buf, n); err != nil {
+               return err
+       }
+       return buf.Flush()
+}
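+
+// renderLinkSketch is an illustrative sketch, not part of the upstream
+// render.go: it builds a tiny <a href="/">home</a> tree by hand and renders
+// it to w, showing the Render call pattern documented above. The function
+// name is hypothetical.
+func renderLinkSketch(w io.Writer) error {
+       link := &Node{
+               Type: ElementNode,
+               Data: "a",
+               Attr: []Attribute{{Key: "href", Val: "/"}},
+       }
+       link.AppendChild(&Node{Type: TextNode, Data: "home"})
+       // Text and attribute values are escaped on the way out, so this
+       // writes `<a href="/">home</a>`.
+       return Render(w, link)
+}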
+
+// plaintextAbort is returned from render1 when a <plaintext> element
+// has been rendered. No more end tags should be rendered after that.
+var plaintextAbort = errors.New("html: internal error (plaintext abort)")
+
+func render(w writer, n *Node) error {
+       err := render1(w, n)
+       if err == plaintextAbort {
+               err = nil
+       }
+       return err
+}
+
+func render1(w writer, n *Node) error {
+       // Render non-element nodes; these are the easy cases.
+       switch n.Type {
+       case ErrorNode:
+               return errors.New("html: cannot render an ErrorNode node")
+       case TextNode:
+               return escape(w, n.Data)
+       case DocumentNode:
+               for c := n.FirstChild; c != nil; c = c.NextSibling {
+                       if err := render1(w, c); err != nil {
+                               return err
+                       }
+               }
+               return nil
+       case ElementNode:
+               // No-op.
+       case CommentNode:
+               if _, err := w.WriteString("<!--"); err != nil {
+                       return err
+               }
+               if _, err := w.WriteString(n.Data); err != nil {
+                       return err
+               }
+               if _, err := w.WriteString("-->"); err != nil {
+                       return err
+               }
+               return nil
+       case DoctypeNode:
+               if _, err := w.WriteString("<!DOCTYPE "); err != nil {
+                       return err
+               }
+               if _, err := w.WriteString(n.Data); err != nil {
+                       return err
+               }
+               if n.Attr != nil {
+                       var p, s string
+                       for _, a := range n.Attr {
+                               switch a.Key {
+                               case "public":
+                                       p = a.Val
+                               case "system":
+                                       s = a.Val
+                               }
+                       }
+                       if p != "" {
+                               if _, err := w.WriteString(" PUBLIC "); err != nil {
+                                       return err
+                               }
+                               if err := writeQuoted(w, p); err != nil {
+                                       return err
+                               }
+                               if s != "" {
+                                       if err := w.WriteByte(' '); err != nil {
+                                               return err
+                                       }
+                                       if err := writeQuoted(w, s); err != nil {
+                                               return err
+                                       }
+                               }
+                       } else if s != "" {
+                               if _, err := w.WriteString(" SYSTEM "); err != nil {
+                                       return err
+                               }
+                               if err := writeQuoted(w, s); err != nil {
+                                       return err
+                               }
+                       }
+               }
+               return w.WriteByte('>')
+       default:
+               return errors.New("html: unknown node type")
+       }
+
+       // Render the <xxx> opening tag.
+       if err := w.WriteByte('<'); err != nil {
+               return err
+       }
+       if _, err := w.WriteString(n.Data); err != nil {
+               return err
+       }
+       for _, a := range n.Attr {
+               if err := w.WriteByte(' '); err != nil {
+                       return err
+               }
+               if a.Namespace != "" {
+                       if _, err := w.WriteString(a.Namespace); err != nil {
+                               return err
+                       }
+                       if err := w.WriteByte(':'); err != nil {
+                               return err
+                       }
+               }
+               if _, err := w.WriteString(a.Key); err != nil {
+                       return err
+               }
+               if _, err := w.WriteString(`="`); err != nil {
+                       return err
+               }
+               if err := escape(w, a.Val); err != nil {
+                       return err
+               }
+               if err := w.WriteByte('"'); err != nil {
+                       return err
+               }
+       }
+       if voidElements[n.Data] {
+               if n.FirstChild != nil {
+                       return fmt.Errorf("html: void element <%s> has child nodes", n.Data)
+               }
+               _, err := w.WriteString("/>")
+               return err
+       }
+       if err := w.WriteByte('>'); err != nil {
+               return err
+       }
+
+       // Add initial newline where there is danger of a newline being ignored.
+       if c := n.FirstChild; c != nil && c.Type == TextNode && strings.HasPrefix(c.Data, "\n") {
+               switch n.Data {
+               case "pre", "listing", "textarea":
+                       if err := w.WriteByte('\n'); err != nil {
+                               return err
+                       }
+               }
+       }
+
+       // Render any child nodes.
+       switch n.Data {
+       case "iframe", "noembed", "noframes", "noscript", "plaintext", "script", "style", "xmp":
+               for c := n.FirstChild; c != nil; c = c.NextSibling {
+                       if c.Type == TextNode {
+                               if _, err := w.WriteString(c.Data); err != nil {
+                                       return err
+                               }
+                       } else {
+                               if err := render1(w, c); err != nil {
+                                       return err
+                               }
+                       }
+               }
+               if n.Data == "plaintext" {
+                       // Don't render anything else. <plaintext> must be the
+                       // last element in the file, with no closing tag.
+                       return plaintextAbort
+               }
+       default:
+               for c := n.FirstChild; c != nil; c = c.NextSibling {
+                       if err := render1(w, c); err != nil {
+                               return err
+                       }
+               }
+       }
+
+       // Render the </xxx> closing tag.
+       if _, err := w.WriteString("</"); err != nil {
+               return err
+       }
+       if _, err := w.WriteString(n.Data); err != nil {
+               return err
+       }
+       return w.WriteByte('>')
+}
+
+// writeQuoted writes s to w surrounded by quotes. Normally it will use double
+// quotes, but if s contains a double quote, it will use single quotes.
+// It is used for writing the identifiers in a doctype declaration.
+// In valid HTML, they can't contain both types of quotes.
+func writeQuoted(w writer, s string) error {
+       var q byte = '"'
+       if strings.Contains(s, `"`) {
+               q = '\''
+       }
+       if err := w.WriteByte(q); err != nil {
+               return err
+       }
+       if _, err := w.WriteString(s); err != nil {
+               return err
+       }
+       if err := w.WriteByte(q); err != nil {
+               return err
+       }
+       return nil
+}
+
+// Section 12.1.2, "Elements", gives this list of void elements. Void elements
+// are those that can't have any contents.
+var voidElements = map[string]bool{
+       "area":    true,
+       "base":    true,
+       "br":      true,
+       "col":     true,
+       "command": true,
+       "embed":   true,
+       "hr":      true,
+       "img":     true,
+       "input":   true,
+       "keygen":  true,
+       "link":    true,
+       "meta":    true,
+       "param":   true,
+       "source":  true,
+       "track":   true,
+       "wbr":     true,
+}
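+
+// voidBreakSketch is an illustrative sketch, not part of the upstream
+// render.go: void elements such as <br> render in self-closing form, and
+// render1 refuses to render one that has child nodes. The function name is
+// hypothetical.
+func voidBreakSketch(w io.Writer) error {
+       // Writes "<br/>". Appending a child to br first would instead make
+       // Render return an error: "html: void element <br> has child nodes".
+       br := &Node{Type: ElementNode, Data: "br"}
+       return Render(w, br)
+}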
diff --git a/vendor/golang.org/x/net/html/token.go b/vendor/golang.org/x/net/html/token.go
new file mode 100644 (file)
index 0000000..893e272
--- /dev/null
@@ -0,0 +1,1219 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package html
+
+import (
+       "bytes"
+       "errors"
+       "io"
+       "strconv"
+       "strings"
+
+       "golang.org/x/net/html/atom"
+)
+
+// A TokenType is the type of a Token.
+type TokenType uint32
+
+const (
+       // ErrorToken means that an error occurred during tokenization.
+       ErrorToken TokenType = iota
+       // TextToken means a text node.
+       TextToken
+       // A StartTagToken looks like <a>.
+       StartTagToken
+       // An EndTagToken looks like </a>.
+       EndTagToken
+       // A SelfClosingTagToken looks like <br/>.
+       SelfClosingTagToken
+       // A CommentToken looks like <!--x-->.
+       CommentToken
+       // A DoctypeToken looks like <!DOCTYPE x>.
+       DoctypeToken
+)
+
+// ErrBufferExceeded means that the buffering limit was exceeded.
+var ErrBufferExceeded = errors.New("max buffer exceeded")
+
+// String returns a string representation of the TokenType.
+func (t TokenType) String() string {
+       switch t {
+       case ErrorToken:
+               return "Error"
+       case TextToken:
+               return "Text"
+       case StartTagToken:
+               return "StartTag"
+       case EndTagToken:
+               return "EndTag"
+       case SelfClosingTagToken:
+               return "SelfClosingTag"
+       case CommentToken:
+               return "Comment"
+       case DoctypeToken:
+               return "Doctype"
+       }
+       return "Invalid(" + strconv.Itoa(int(t)) + ")"
+}
+
+// An Attribute is an attribute namespace-key-value triple. Namespace is
+// non-empty for foreign attributes like xlink, Key is alphabetic (and hence
+// does not contain escapable characters like '&', '<' or '>'), and Val is
+// unescaped (it looks like "a<b" rather than "a&lt;b").
+//
+// Namespace is only used by the parser, not the tokenizer.
+type Attribute struct {
+       Namespace, Key, Val string
+}
+
+// A Token consists of a TokenType and some Data (tag name for start and end
+// tags, content for text, comments and doctypes). A tag Token may also contain
+// a slice of Attributes. Data is unescaped for all Tokens (it looks like "a<b"
+// rather than "a&lt;b"). For tag Tokens, DataAtom is the atom for Data, or
+// zero if Data is not a known tag name.
+type Token struct {
+       Type     TokenType
+       DataAtom atom.Atom
+       Data     string
+       Attr     []Attribute
+}
+
+// tagString returns a string representation of a tag Token's Data and Attr.
+func (t Token) tagString() string {
+       if len(t.Attr) == 0 {
+               return t.Data
+       }
+       buf := bytes.NewBufferString(t.Data)
+       for _, a := range t.Attr {
+               buf.WriteByte(' ')
+               buf.WriteString(a.Key)
+               buf.WriteString(`="`)
+               escape(buf, a.Val)
+               buf.WriteByte('"')
+       }
+       return buf.String()
+}
+
+// String returns a string representation of the Token.
+func (t Token) String() string {
+       switch t.Type {
+       case ErrorToken:
+               return ""
+       case TextToken:
+               return EscapeString(t.Data)
+       case StartTagToken:
+               return "<" + t.tagString() + ">"
+       case EndTagToken:
+               return "</" + t.tagString() + ">"
+       case SelfClosingTagToken:
+               return "<" + t.tagString() + "/>"
+       case CommentToken:
+               return "<!--" + t.Data + "-->"
+       case DoctypeToken:
+               return "<!DOCTYPE " + t.Data + ">"
+       }
+       return "Invalid(" + strconv.Itoa(int(t.Type)) + ")"
+}
+
+// span is a range of bytes in a Tokenizer's buffer. The start is inclusive,
+// the end is exclusive.
+type span struct {
+       start, end int
+}
+
+// A Tokenizer returns a stream of HTML Tokens.
+type Tokenizer struct {
+       // r is the source of the HTML text.
+       r io.Reader
+       // tt is the TokenType of the current token.
+       tt TokenType
+       // err is the first error encountered during tokenization. It is possible
+       // for tt != ErrorToken && err != nil to hold: this means that Next returned a
+       // valid token but the subsequent Next call will return an error token.
+       // For example, if the HTML text input was just "plain", then the first
+       // Next call would set z.err to io.EOF but return a TextToken, and all
+       // subsequent Next calls would return an ErrorToken.
+       // err is never reset. Once it becomes non-nil, it stays non-nil.
+       err error
+       // readErr is the error returned by the io.Reader r. It is separate from
+       // err because it is valid for an io.Reader to return (n int, err1 error)
+       // such that n > 0 && err1 != nil, and callers should always process the
+       // n > 0 bytes before considering the error err1.
+       readErr error
+       // buf[raw.start:raw.end] holds the raw bytes of the current token.
+       // buf[raw.end:] is buffered input that will yield future tokens.
+       raw span
+       buf []byte
+       // maxBuf limits the data buffered in buf. A value of 0 means unlimited.
+       maxBuf int
+       // buf[data.start:data.end] holds the raw bytes of the current token's data:
+       // a text token's text, a tag token's tag name, etc.
+       data span
+       // pendingAttr is the attribute key and value currently being tokenized.
+       // When complete, pendingAttr is pushed onto attr. nAttrReturned is
+       // incremented on each call to TagAttr.
+       pendingAttr   [2]span
+       attr          [][2]span
+       nAttrReturned int
+       // rawTag is the "script" in "</script>" that closes the next token. If
+       // non-empty, the subsequent call to Next will return a raw or RCDATA text
+       // token: one that treats "<p>" as text instead of an element.
+       // rawTag's contents are lower-cased.
+       rawTag string
+       // textIsRaw is whether the current text token's data is not escaped.
+       textIsRaw bool
+       // convertNUL is whether NUL bytes in the current token's data should
+       // be converted into \ufffd replacement characters.
+       convertNUL bool
+       // allowCDATA is whether CDATA sections are allowed in the current context.
+       allowCDATA bool
+}
+
+// AllowCDATA sets whether or not the tokenizer recognizes <![CDATA[foo]]> as
+// the text "foo". The default value is false, which means to recognize it as
+// a bogus comment "<!-- [CDATA[foo]] -->" instead.
+//
+// Strictly speaking, an HTML5 compliant tokenizer should allow CDATA if and
+// only if tokenizing foreign content, such as MathML and SVG. However,
+// tracking foreign-contentness is difficult to do purely in the tokenizer,
+// as opposed to the parser, due to HTML integration points: an <svg> element
+// can contain a <foreignObject> that is foreign-to-SVG but not foreign-to-
+// HTML. For strict compliance with the HTML5 tokenization algorithm, it is the
+// responsibility of the user of a tokenizer to call AllowCDATA as appropriate.
+// In practice, if using the tokenizer without caring whether MathML or SVG
+// CDATA is text or comments, such as tokenizing HTML to find all the anchor
+// text, it is acceptable to ignore this responsibility.
+func (z *Tokenizer) AllowCDATA(allowCDATA bool) {
+       z.allowCDATA = allowCDATA
+}
+
+// NextIsNotRawText instructs the tokenizer that the next token should not be
+// considered as 'raw text'. Some elements, such as script and title elements,
+// normally require the next token after the opening tag to be 'raw text' that
+// has no child elements. For example, tokenizing "<title>a<b>c</b>d</title>"
+// yields a start tag token for "<title>", a text token for "a<b>c</b>d", and
+// an end tag token for "</title>". There are no distinct start tag or end tag
+// tokens for the "<b>" and "</b>".
+//
+// This tokenizer implementation will generally look for raw text at the right
+// times. Strictly speaking, an HTML5 compliant tokenizer should not look for
+// raw text if in foreign content: <title> generally needs raw text, but a
+// <title> inside an <svg> does not. Another example is that a <textarea>
+// generally needs raw text, but a <textarea> is not allowed as an immediate
+// child of a <select>; in normal parsing, a <textarea> implies </select>, but
+// one cannot close the implicit element when parsing a <select>'s InnerHTML.
+// Similarly to AllowCDATA, tracking the correct moment to override raw-text-
+// ness is difficult to do purely in the tokenizer, as opposed to the parser.
+// For strict compliance with the HTML5 tokenization algorithm, it is the
+// responsibility of the user of a tokenizer to call NextIsNotRawText as
+// appropriate. In practice, like AllowCDATA, it is acceptable to ignore this
+// responsibility for basic usage.
+//
+// Note that this 'raw text' concept is different from the one offered by the
+// Tokenizer.Raw method.
+func (z *Tokenizer) NextIsNotRawText() {
+       z.rawTag = ""
+}
+
+// Err returns the error associated with the most recent ErrorToken token.
+// This is typically io.EOF, meaning the end of tokenization.
+func (z *Tokenizer) Err() error {
+       if z.tt != ErrorToken {
+               return nil
+       }
+       return z.err
+}
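+
+// collectText is an illustrative sketch, not part of the upstream token.go:
+// it shows the usual Next/Token/Err tokenization loop, gathering the data of
+// every text token until the tokenizer reports io.EOF (or fails with some
+// other error). The function name is hypothetical.
+func collectText(r io.Reader) ([]string, error) {
+       z := NewTokenizer(r)
+       var texts []string
+       for {
+               if z.Next() == ErrorToken {
+                       if err := z.Err(); err != io.EOF {
+                               return nil, err
+                       }
+                       return texts, nil
+               }
+               if tok := z.Token(); tok.Type == TextToken {
+                       texts = append(texts, tok.Data)
+               }
+       }
+}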
+
+// readByte returns the next byte from the input stream, doing a buffered read
+// from z.r into z.buf if necessary. z.buf[z.raw.start:z.raw.end] remains a contiguous byte
+// slice that holds all the bytes read so far for the current token.
+// It sets z.err if the underlying reader returns an error.
+// Pre-condition: z.err == nil.
+func (z *Tokenizer) readByte() byte {
+       if z.raw.end >= len(z.buf) {
+               // Our buffer is exhausted and we have to read from z.r. Check if the
+               // previous read resulted in an error.
+               if z.readErr != nil {
+                       z.err = z.readErr
+                       return 0
+               }
+               // We copy z.buf[z.raw.start:z.raw.end] to the beginning of z.buf. If the length
+               // z.raw.end - z.raw.start is more than half the capacity of z.buf, then we
+               // allocate a new buffer before the copy.
+               c := cap(z.buf)
+               d := z.raw.end - z.raw.start
+               var buf1 []byte
+               if 2*d > c {
+                       buf1 = make([]byte, d, 2*c)
+               } else {
+                       buf1 = z.buf[:d]
+               }
+               copy(buf1, z.buf[z.raw.start:z.raw.end])
+               if x := z.raw.start; x != 0 {
+                       // Adjust the data/attr spans to refer to the same contents after the copy.
+                       z.data.start -= x
+                       z.data.end -= x
+                       z.pendingAttr[0].start -= x
+                       z.pendingAttr[0].end -= x
+                       z.pendingAttr[1].start -= x
+                       z.pendingAttr[1].end -= x
+                       for i := range z.attr {
+                               z.attr[i][0].start -= x
+                               z.attr[i][0].end -= x
+                               z.attr[i][1].start -= x
+                               z.attr[i][1].end -= x
+                       }
+               }
+               z.raw.start, z.raw.end, z.buf = 0, d, buf1[:d]
+               // Now that we have copied the live bytes to the start of the buffer,
+               // we read from z.r into the remainder.
+               var n int
+               n, z.readErr = readAtLeastOneByte(z.r, buf1[d:cap(buf1)])
+               if n == 0 {
+                       z.err = z.readErr
+                       return 0
+               }
+               z.buf = buf1[:d+n]
+       }
+       x := z.buf[z.raw.end]
+       z.raw.end++
+       if z.maxBuf > 0 && z.raw.end-z.raw.start >= z.maxBuf {
+               z.err = ErrBufferExceeded
+               return 0
+       }
+       return x
+}
+
+// Buffered returns a slice containing data buffered but not yet tokenized.
+func (z *Tokenizer) Buffered() []byte {
+       return z.buf[z.raw.end:]
+}
+
+// readAtLeastOneByte wraps an io.Reader so that reading cannot return (0, nil).
+// It returns io.ErrNoProgress if the underlying r.Read method returns (0, nil)
+// too many times in succession.
+func readAtLeastOneByte(r io.Reader, b []byte) (int, error) {
+       for i := 0; i < 100; i++ {
+               n, err := r.Read(b)
+               if n != 0 || err != nil {
+                       return n, err
+               }
+       }
+       return 0, io.ErrNoProgress
+}
+
+// skipWhiteSpace skips past any white space.
+func (z *Tokenizer) skipWhiteSpace() {
+       if z.err != nil {
+               return
+       }
+       for {
+               c := z.readByte()
+               if z.err != nil {
+                       return
+               }
+               switch c {
+               case ' ', '\n', '\r', '\t', '\f':
+                       // No-op.
+               default:
+                       z.raw.end--
+                       return
+               }
+       }
+}
+
+// readRawOrRCDATA reads until the next "</foo>", where "foo" is z.rawTag and
+// is typically something like "script" or "textarea".
+func (z *Tokenizer) readRawOrRCDATA() {
+       if z.rawTag == "script" {
+               z.readScript()
+               z.textIsRaw = true
+               z.rawTag = ""
+               return
+       }
+loop:
+       for {
+               c := z.readByte()
+               if z.err != nil {
+                       break loop
+               }
+               if c != '<' {
+                       continue loop
+               }
+               c = z.readByte()
+               if z.err != nil {
+                       break loop
+               }
+               if c != '/' {
+                       continue loop
+               }
+               if z.readRawEndTag() || z.err != nil {
+                       break loop
+               }
+       }
+       z.data.end = z.raw.end
+       // A textarea's or title's RCDATA can contain escaped entities.
+       z.textIsRaw = z.rawTag != "textarea" && z.rawTag != "title"
+       z.rawTag = ""
+}
+
+// readRawEndTag attempts to read a tag like "</foo>", where "foo" is z.rawTag.
+// If it succeeds, it backs up the input position to reconsume the tag and
+// returns true. Otherwise it returns false. The opening "</" has already been
+// consumed.
+func (z *Tokenizer) readRawEndTag() bool {
+       for i := 0; i < len(z.rawTag); i++ {
+               c := z.readByte()
+               if z.err != nil {
+                       return false
+               }
+               if c != z.rawTag[i] && c != z.rawTag[i]-('a'-'A') {
+                       z.raw.end--
+                       return false
+               }
+       }
+       c := z.readByte()
+       if z.err != nil {
+               return false
+       }
+       switch c {
+       case ' ', '\n', '\r', '\t', '\f', '/', '>':
+               // The 3 is 2 for the leading "</" plus 1 for the trailing character c.
+               z.raw.end -= 3 + len(z.rawTag)
+               return true
+       }
+       z.raw.end--
+       return false
+}
+
+// readScript reads until the next </script> tag, following the byzantine
+// rules for escaping/hiding the closing tag.
+func (z *Tokenizer) readScript() {
+       defer func() {
+               z.data.end = z.raw.end
+       }()
+       var c byte
+
+scriptData:
+       c = z.readByte()
+       if z.err != nil {
+               return
+       }
+       if c == '<' {
+               goto scriptDataLessThanSign
+       }
+       goto scriptData
+
+scriptDataLessThanSign:
+       c = z.readByte()
+       if z.err != nil {
+               return
+       }
+       switch c {
+       case '/':
+               goto scriptDataEndTagOpen
+       case '!':
+               goto scriptDataEscapeStart
+       }
+       z.raw.end--
+       goto scriptData
+
+scriptDataEndTagOpen:
+       if z.readRawEndTag() || z.err != nil {
+               return
+       }
+       goto scriptData
+
+scriptDataEscapeStart:
+       c = z.readByte()
+       if z.err != nil {
+               return
+       }
+       if c == '-' {
+               goto scriptDataEscapeStartDash
+       }
+       z.raw.end--
+       goto scriptData
+
+scriptDataEscapeStartDash:
+       c = z.readByte()
+       if z.err != nil {
+               return
+       }
+       if c == '-' {
+               goto scriptDataEscapedDashDash
+       }
+       z.raw.end--
+       goto scriptData
+
+scriptDataEscaped:
+       c = z.readByte()
+       if z.err != nil {
+               return
+       }
+       switch c {
+       case '-':
+               goto scriptDataEscapedDash
+       case '<':
+               goto scriptDataEscapedLessThanSign
+       }
+       goto scriptDataEscaped
+
+scriptDataEscapedDash:
+       c = z.readByte()
+       if z.err != nil {
+               return
+       }
+       switch c {
+       case '-':
+               goto scriptDataEscapedDashDash
+       case '<':
+               goto scriptDataEscapedLessThanSign
+       }
+       goto scriptDataEscaped
+
+scriptDataEscapedDashDash:
+       c = z.readByte()
+       if z.err != nil {
+               return
+       }
+       switch c {
+       case '-':
+               goto scriptDataEscapedDashDash
+       case '<':
+               goto scriptDataEscapedLessThanSign
+       case '>':
+               goto scriptData
+       }
+       goto scriptDataEscaped
+
+scriptDataEscapedLessThanSign:
+       c = z.readByte()
+       if z.err != nil {
+               return
+       }
+       if c == '/' {
+               goto scriptDataEscapedEndTagOpen
+       }
+       if 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' {
+               goto scriptDataDoubleEscapeStart
+       }
+       z.raw.end--
+       goto scriptData
+
+scriptDataEscapedEndTagOpen:
+       if z.readRawEndTag() || z.err != nil {
+               return
+       }
+       goto scriptDataEscaped
+
+scriptDataDoubleEscapeStart:
+       z.raw.end--
+       for i := 0; i < len("script"); i++ {
+               c = z.readByte()
+               if z.err != nil {
+                       return
+               }
+               if c != "script"[i] && c != "SCRIPT"[i] {
+                       z.raw.end--
+                       goto scriptDataEscaped
+               }
+       }
+       c = z.readByte()
+       if z.err != nil {
+               return
+       }
+       switch c {
+       case ' ', '\n', '\r', '\t', '\f', '/', '>':
+               goto scriptDataDoubleEscaped
+       }
+       z.raw.end--
+       goto scriptDataEscaped
+
+scriptDataDoubleEscaped:
+       c = z.readByte()
+       if z.err != nil {
+               return
+       }
+       switch c {
+       case '-':
+               goto scriptDataDoubleEscapedDash
+       case '<':
+               goto scriptDataDoubleEscapedLessThanSign
+       }
+       goto scriptDataDoubleEscaped
+
+scriptDataDoubleEscapedDash:
+       c = z.readByte()
+       if z.err != nil {
+               return
+       }
+       switch c {
+       case '-':
+               goto scriptDataDoubleEscapedDashDash
+       case '<':
+               goto scriptDataDoubleEscapedLessThanSign
+       }
+       goto scriptDataDoubleEscaped
+
+scriptDataDoubleEscapedDashDash:
+       c = z.readByte()
+       if z.err != nil {
+               return
+       }
+       switch c {
+       case '-':
+               goto scriptDataDoubleEscapedDashDash
+       case '<':
+               goto scriptDataDoubleEscapedLessThanSign
+       case '>':
+               goto scriptData
+       }
+       goto scriptDataDoubleEscaped
+
+scriptDataDoubleEscapedLessThanSign:
+       c = z.readByte()
+       if z.err != nil {
+               return
+       }
+       if c == '/' {
+               goto scriptDataDoubleEscapeEnd
+       }
+       z.raw.end--
+       goto scriptDataDoubleEscaped
+
+scriptDataDoubleEscapeEnd:
+       if z.readRawEndTag() {
+               z.raw.end += len("</script>")
+               goto scriptDataEscaped
+       }
+       if z.err != nil {
+               return
+       }
+       goto scriptDataDoubleEscaped
+}
+
+// readComment reads the next comment token starting with "<!--". The opening
+// "<!--" has already been consumed.
+func (z *Tokenizer) readComment() {
+       z.data.start = z.raw.end
+       defer func() {
+               if z.data.end < z.data.start {
+                       // It's a comment with no data, like <!-->.
+                       z.data.end = z.data.start
+               }
+       }()
+       for dashCount := 2; ; {
+               c := z.readByte()
+               if z.err != nil {
+                       // Ignore up to two dashes at EOF.
+                       if dashCount > 2 {
+                               dashCount = 2
+                       }
+                       z.data.end = z.raw.end - dashCount
+                       return
+               }
+               switch c {
+               case '-':
+                       dashCount++
+                       continue
+               case '>':
+                       if dashCount >= 2 {
+                               z.data.end = z.raw.end - len("-->")
+                               return
+                       }
+               case '!':
+                       if dashCount >= 2 {
+                               c = z.readByte()
+                               if z.err != nil {
+                                       z.data.end = z.raw.end
+                                       return
+                               }
+                               if c == '>' {
+                                       z.data.end = z.raw.end - len("--!>")
+                                       return
+                               }
+                       }
+               }
+               dashCount = 0
+       }
+}
+
+// readUntilCloseAngle reads until the next ">".
+func (z *Tokenizer) readUntilCloseAngle() {
+       z.data.start = z.raw.end
+       for {
+               c := z.readByte()
+               if z.err != nil {
+                       z.data.end = z.raw.end
+                       return
+               }
+               if c == '>' {
+                       z.data.end = z.raw.end - len(">")
+                       return
+               }
+       }
+}
+
+// readMarkupDeclaration reads the next token starting with "<!". It might be
+// a "<!--comment-->", a "<!DOCTYPE foo>", a "<![CDATA[section]]>" or
+// "<!a bogus comment". The opening "<!" has already been consumed.
+func (z *Tokenizer) readMarkupDeclaration() TokenType {
+       z.data.start = z.raw.end
+       var c [2]byte
+       for i := 0; i < 2; i++ {
+               c[i] = z.readByte()
+               if z.err != nil {
+                       z.data.end = z.raw.end
+                       return CommentToken
+               }
+       }
+       if c[0] == '-' && c[1] == '-' {
+               z.readComment()
+               return CommentToken
+       }
+       z.raw.end -= 2
+       if z.readDoctype() {
+               return DoctypeToken
+       }
+       if z.allowCDATA && z.readCDATA() {
+               z.convertNUL = true
+               return TextToken
+       }
+       // It's a bogus comment.
+       z.readUntilCloseAngle()
+       return CommentToken
+}
+
+// readDoctype attempts to read a doctype declaration and returns true if
+// successful. The opening "<!" has already been consumed.
+func (z *Tokenizer) readDoctype() bool {
+       const s = "DOCTYPE"
+       for i := 0; i < len(s); i++ {
+               c := z.readByte()
+               if z.err != nil {
+                       z.data.end = z.raw.end
+                       return false
+               }
+               if c != s[i] && c != s[i]+('a'-'A') {
+                       // Back up to read the fragment of "DOCTYPE" again.
+                       z.raw.end = z.data.start
+                       return false
+               }
+       }
+       if z.skipWhiteSpace(); z.err != nil {
+               z.data.start = z.raw.end
+               z.data.end = z.raw.end
+               return true
+       }
+       z.readUntilCloseAngle()
+       return true
+}
+
+// readCDATA attempts to read a CDATA section and returns true if
+// successful. The opening "<!" has already been consumed.
+func (z *Tokenizer) readCDATA() bool {
+       const s = "[CDATA["
+       for i := 0; i < len(s); i++ {
+               c := z.readByte()
+               if z.err != nil {
+                       z.data.end = z.raw.end
+                       return false
+               }
+               if c != s[i] {
+                       // Back up to read the fragment of "[CDATA[" again.
+                       z.raw.end = z.data.start
+                       return false
+               }
+       }
+       z.data.start = z.raw.end
+       brackets := 0
+       for {
+               c := z.readByte()
+               if z.err != nil {
+                       z.data.end = z.raw.end
+                       return true
+               }
+               switch c {
+               case ']':
+                       brackets++
+               case '>':
+                       if brackets >= 2 {
+                               z.data.end = z.raw.end - len("]]>")
+                               return true
+                       }
+                       brackets = 0
+               default:
+                       brackets = 0
+               }
+       }
+}
+
+// startTagIn returns whether the start tag in z.buf[z.data.start:z.data.end]
+// case-insensitively matches any element of ss.
+func (z *Tokenizer) startTagIn(ss ...string) bool {
+loop:
+       for _, s := range ss {
+               if z.data.end-z.data.start != len(s) {
+                       continue loop
+               }
+               for i := 0; i < len(s); i++ {
+                       c := z.buf[z.data.start+i]
+                       if 'A' <= c && c <= 'Z' {
+                               c += 'a' - 'A'
+                       }
+                       if c != s[i] {
+                               continue loop
+                       }
+               }
+               return true
+       }
+       return false
+}
+
+// readStartTag reads the next start tag token. The opening "<a" has already
+// been consumed, where 'a' means anything in [A-Za-z].
+func (z *Tokenizer) readStartTag() TokenType {
+       z.readTag(true)
+       if z.err != nil {
+               return ErrorToken
+       }
+       // Several tags flag the tokenizer's next token as raw.
+       c, raw := z.buf[z.data.start], false
+       if 'A' <= c && c <= 'Z' {
+               c += 'a' - 'A'
+       }
+       switch c {
+       case 'i':
+               raw = z.startTagIn("iframe")
+       case 'n':
+               raw = z.startTagIn("noembed", "noframes", "noscript")
+       case 'p':
+               raw = z.startTagIn("plaintext")
+       case 's':
+               raw = z.startTagIn("script", "style")
+       case 't':
+               raw = z.startTagIn("textarea", "title")
+       case 'x':
+               raw = z.startTagIn("xmp")
+       }
+       if raw {
+               z.rawTag = strings.ToLower(string(z.buf[z.data.start:z.data.end]))
+       }
+       // Look for a self-closing token like "<br/>".
+       if z.err == nil && z.buf[z.raw.end-2] == '/' {
+               return SelfClosingTagToken
+       }
+       return StartTagToken
+}
+
+// readTag reads the next tag token and its attributes. If saveAttr, those
+// attributes are saved in z.attr, otherwise z.attr is set to an empty slice.
+// The opening "<a" or "</a" has already been consumed, where 'a' means anything
+// in [A-Za-z].
+func (z *Tokenizer) readTag(saveAttr bool) {
+       z.attr = z.attr[:0]
+       z.nAttrReturned = 0
+       // Read the tag name and attribute key/value pairs.
+       z.readTagName()
+       if z.skipWhiteSpace(); z.err != nil {
+               return
+       }
+       for {
+               c := z.readByte()
+               if z.err != nil || c == '>' {
+                       break
+               }
+               z.raw.end--
+               z.readTagAttrKey()
+               z.readTagAttrVal()
+               // Save pendingAttr if saveAttr and that attribute has a non-empty key.
+               if saveAttr && z.pendingAttr[0].start != z.pendingAttr[0].end {
+                       z.attr = append(z.attr, z.pendingAttr)
+               }
+               if z.skipWhiteSpace(); z.err != nil {
+                       break
+               }
+       }
+}
+
+// readTagName sets z.data to the "div" in "<div k=v>". The reader (z.raw.end)
+// is positioned such that the first byte of the tag name (the "d" in "<div")
+// has already been consumed.
+func (z *Tokenizer) readTagName() {
+       z.data.start = z.raw.end - 1
+       for {
+               c := z.readByte()
+               if z.err != nil {
+                       z.data.end = z.raw.end
+                       return
+               }
+               switch c {
+               case ' ', '\n', '\r', '\t', '\f':
+                       z.data.end = z.raw.end - 1
+                       return
+               case '/', '>':
+                       z.raw.end--
+                       z.data.end = z.raw.end
+                       return
+               }
+       }
+}
+
+// readTagAttrKey sets z.pendingAttr[0] to the "k" in "<div k=v>".
+// Precondition: z.err == nil.
+func (z *Tokenizer) readTagAttrKey() {
+       z.pendingAttr[0].start = z.raw.end
+       for {
+               c := z.readByte()
+               if z.err != nil {
+                       z.pendingAttr[0].end = z.raw.end
+                       return
+               }
+               switch c {
+               case ' ', '\n', '\r', '\t', '\f', '/':
+                       z.pendingAttr[0].end = z.raw.end - 1
+                       return
+               case '=', '>':
+                       z.raw.end--
+                       z.pendingAttr[0].end = z.raw.end
+                       return
+               }
+       }
+}
+
+// readTagAttrVal sets z.pendingAttr[1] to the "v" in "<div k=v>".
+func (z *Tokenizer) readTagAttrVal() {
+       z.pendingAttr[1].start = z.raw.end
+       z.pendingAttr[1].end = z.raw.end
+       if z.skipWhiteSpace(); z.err != nil {
+               return
+       }
+       c := z.readByte()
+       if z.err != nil {
+               return
+       }
+       if c != '=' {
+               z.raw.end--
+               return
+       }
+       if z.skipWhiteSpace(); z.err != nil {
+               return
+       }
+       quote := z.readByte()
+       if z.err != nil {
+               return
+       }
+       switch quote {
+       case '>':
+               z.raw.end--
+               return
+
+       case '\'', '"':
+               z.pendingAttr[1].start = z.raw.end
+               for {
+                       c := z.readByte()
+                       if z.err != nil {
+                               z.pendingAttr[1].end = z.raw.end
+                               return
+                       }
+                       if c == quote {
+                               z.pendingAttr[1].end = z.raw.end - 1
+                               return
+                       }
+               }
+
+       default:
+               z.pendingAttr[1].start = z.raw.end - 1
+               for {
+                       c := z.readByte()
+                       if z.err != nil {
+                               z.pendingAttr[1].end = z.raw.end
+                               return
+                       }
+                       switch c {
+                       case ' ', '\n', '\r', '\t', '\f':
+                               z.pendingAttr[1].end = z.raw.end - 1
+                               return
+                       case '>':
+                               z.raw.end--
+                               z.pendingAttr[1].end = z.raw.end
+                               return
+                       }
+               }
+       }
+}
+
+// Next scans the next token and returns its type.
+func (z *Tokenizer) Next() TokenType {
+       z.raw.start = z.raw.end
+       z.data.start = z.raw.end
+       z.data.end = z.raw.end
+       if z.err != nil {
+               z.tt = ErrorToken
+               return z.tt
+       }
+       if z.rawTag != "" {
+               if z.rawTag == "plaintext" {
+                       // Read everything up to EOF.
+                       for z.err == nil {
+                               z.readByte()
+                       }
+                       z.data.end = z.raw.end
+                       z.textIsRaw = true
+               } else {
+                       z.readRawOrRCDATA()
+               }
+               if z.data.end > z.data.start {
+                       z.tt = TextToken
+                       z.convertNUL = true
+                       return z.tt
+               }
+       }
+       z.textIsRaw = false
+       z.convertNUL = false
+
+loop:
+       for {
+               c := z.readByte()
+               if z.err != nil {
+                       break loop
+               }
+               if c != '<' {
+                       continue loop
+               }
+
+               // Check if the '<' we have just read is part of a tag, comment
+               // or doctype. If not, it's part of the accumulated text token.
+               c = z.readByte()
+               if z.err != nil {
+                       break loop
+               }
+               var tokenType TokenType
+               switch {
+               case 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z':
+                       tokenType = StartTagToken
+               case c == '/':
+                       tokenType = EndTagToken
+               case c == '!' || c == '?':
+                       // We use CommentToken to mean any of "<!--actual comments-->",
+                       // "<!DOCTYPE declarations>" and "<?xml processing instructions?>".
+                       tokenType = CommentToken
+               default:
+                       // Reconsume the current character.
+                       z.raw.end--
+                       continue
+               }
+
+               // We have a non-text token, but we might have accumulated some text
+               // before that. If so, we return the text first, and return the non-
+               // text token on the subsequent call to Next.
+               if x := z.raw.end - len("<a"); z.raw.start < x {
+                       z.raw.end = x
+                       z.data.end = x
+                       z.tt = TextToken
+                       return z.tt
+               }
+               switch tokenType {
+               case StartTagToken:
+                       z.tt = z.readStartTag()
+                       return z.tt
+               case EndTagToken:
+                       c = z.readByte()
+                       if z.err != nil {
+                               break loop
+                       }
+                       if c == '>' {
+                               // "</>" does not generate a token at all. Generate an empty comment
+                               // to allow passthrough clients to pick up the data using Raw.
+                               // Reset the tokenizer state and start again.
+                               z.tt = CommentToken
+                               return z.tt
+                       }
+                       if 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' {
+                               z.readTag(false)
+                               if z.err != nil {
+                                       z.tt = ErrorToken
+                               } else {
+                                       z.tt = EndTagToken
+                               }
+                               return z.tt
+                       }
+                       z.raw.end--
+                       z.readUntilCloseAngle()
+                       z.tt = CommentToken
+                       return z.tt
+               case CommentToken:
+                       if c == '!' {
+                               z.tt = z.readMarkupDeclaration()
+                               return z.tt
+                       }
+                       z.raw.end--
+                       z.readUntilCloseAngle()
+                       z.tt = CommentToken
+                       return z.tt
+               }
+       }
+       if z.raw.start < z.raw.end {
+               z.data.end = z.raw.end
+               z.tt = TextToken
+               return z.tt
+       }
+       z.tt = ErrorToken
+       return z.tt
+}
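
The Next method above is the heart of the tokenizer's pull API. As a minimal illustrative sketch (not part of the vendored file), here is the usual driving loop in client code, assuming the package's exported golang.org/x/net/html API; NewTokenizer, Err and Raw are defined elsewhere in this package:

    package main

    import (
    	"fmt"
    	"io"
    	"strings"

    	"golang.org/x/net/html"
    )

    func main() {
    	z := html.NewTokenizer(strings.NewReader("<p>Hello<br/></p>"))
    	for {
    		tt := z.Next()
    		if tt == html.ErrorToken {
    			// Err reports io.EOF once the input is exhausted; anything else is a real error.
    			if err := z.Err(); err != io.EOF {
    				fmt.Println("tokenize error:", err)
    			}
    			return
    		}
    		// e.g. StartTag "<p>", Text "Hello", SelfClosingTag "<br/>", EndTag "</p>"
    		fmt.Println(tt, string(z.Raw()))
    	}
    }
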
+
+// Raw returns the unmodified text of the current token. Calling Next, Token,
+// Text, TagName or TagAttr may change the contents of the returned slice.
+func (z *Tokenizer) Raw() []byte {
+       return z.buf[z.raw.start:z.raw.end]
+}
+
+// convertNewlines converts "\r" and "\r\n" in s to "\n".
+// The conversion happens in place, but the resulting slice may be shorter.
+func convertNewlines(s []byte) []byte {
+       for i, c := range s {
+               if c != '\r' {
+                       continue
+               }
+
+               src := i + 1
+               if src >= len(s) || s[src] != '\n' {
+                       s[i] = '\n'
+                       continue
+               }
+
+               dst := i
+               for src < len(s) {
+                       if s[src] == '\r' {
+                               if src+1 < len(s) && s[src+1] == '\n' {
+                                       src++
+                               }
+                               s[dst] = '\n'
+                       } else {
+                               s[dst] = s[src]
+                       }
+                       src++
+                       dst++
+               }
+               return s[:dst]
+       }
+       return s
+}
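
As a worked illustration of the in-place rewrite above (an editorial example, not part of the vendored file): given []byte("a\r\nb\rc"), convertNewlines returns []byte("a\nb\nc"). The "\r\n" pair collapses to a single '\n', the lone '\r' becomes '\n', and the result is one byte shorter than the input.
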
+
+var (
+       nul         = []byte("\x00")
+       replacement = []byte("\ufffd")
+)
+
+// Text returns the unescaped text of a text, comment or doctype token. The
+// contents of the returned slice may change on the next call to Next.
+func (z *Tokenizer) Text() []byte {
+       switch z.tt {
+       case TextToken, CommentToken, DoctypeToken:
+               s := z.buf[z.data.start:z.data.end]
+               z.data.start = z.raw.end
+               z.data.end = z.raw.end
+               s = convertNewlines(s)
+               if (z.convertNUL || z.tt == CommentToken) && bytes.Contains(s, nul) {
+                       s = bytes.Replace(s, nul, replacement, -1)
+               }
+               if !z.textIsRaw {
+                       s = unescape(s, false)
+               }
+               return s
+       }
+       return nil
+}
+
+// TagName returns the lower-cased name of a tag token (the `img` out of
+// `<IMG SRC="foo">`) and whether the tag has attributes.
+// The contents of the returned slice may change on the next call to Next.
+func (z *Tokenizer) TagName() (name []byte, hasAttr bool) {
+       if z.data.start < z.data.end {
+               switch z.tt {
+               case StartTagToken, EndTagToken, SelfClosingTagToken:
+                       s := z.buf[z.data.start:z.data.end]
+                       z.data.start = z.raw.end
+                       z.data.end = z.raw.end
+                       return lower(s), z.nAttrReturned < len(z.attr)
+               }
+       }
+       return nil, false
+}
+
+// TagAttr returns the lower-cased key and unescaped value of the next unparsed
+// attribute for the current tag token and whether there are more attributes.
+// The contents of the returned slices may change on the next call to Next.
+func (z *Tokenizer) TagAttr() (key, val []byte, moreAttr bool) {
+       if z.nAttrReturned < len(z.attr) {
+               switch z.tt {
+               case StartTagToken, SelfClosingTagToken:
+                       x := z.attr[z.nAttrReturned]
+                       z.nAttrReturned++
+                       key = z.buf[x[0].start:x[0].end]
+                       val = z.buf[x[1].start:x[1].end]
+                       return lower(key), unescape(convertNewlines(val), true), z.nAttrReturned < len(z.attr)
+               }
+       }
+       return nil, nil, false
+}
+
+// Token returns the next Token. The result's Data and Attr values remain valid
+// after subsequent Next calls.
+func (z *Tokenizer) Token() Token {
+       t := Token{Type: z.tt}
+       switch z.tt {
+       case TextToken, CommentToken, DoctypeToken:
+               t.Data = string(z.Text())
+       case StartTagToken, SelfClosingTagToken, EndTagToken:
+               name, moreAttr := z.TagName()
+               for moreAttr {
+                       var key, val []byte
+                       key, val, moreAttr = z.TagAttr()
+                       t.Attr = append(t.Attr, Attribute{"", atom.String(key), string(val)})
+               }
+               if a := atom.Lookup(name); a != 0 {
+                       t.DataAtom, t.Data = a, a.String()
+               } else {
+                       t.DataAtom, t.Data = 0, string(name)
+               }
+       }
+       return t
+}
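
Token is the convenient, allocating counterpart to TagName/TagAttr: it copies data out of z.buf, so the result stays valid after later Next calls. A brief sketch of its use, assuming the package's exported API (illustrative only, not part of the vendored file):

    package main

    import (
    	"fmt"
    	"strings"

    	"golang.org/x/net/html"
    )

    func main() {
    	z := html.NewTokenizer(strings.NewReader(`<IMG SRC="a.png" alt=logo>`))
    	for z.Next() != html.ErrorToken {
    		t := z.Token() // safe to keep: Data and Attr are copies
    		if t.Type == html.StartTagToken || t.Type == html.SelfClosingTagToken {
    			fmt.Println(t.DataAtom, t.Data) // tag name is lower-cased: "img img"
    			for _, a := range t.Attr {
    				fmt.Printf("  %s=%q\n", a.Key, a.Val) // src="a.png", then alt="logo"
    			}
    		}
    	}
    }
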
+
+// SetMaxBuf sets a limit on the amount of data buffered during tokenization.
+// A value of 0 means unlimited.
+func (z *Tokenizer) SetMaxBuf(n int) {
+       z.maxBuf = n
+}
+
+// NewTokenizer returns a new HTML Tokenizer for the given Reader.
+// The input is assumed to be UTF-8 encoded.
+func NewTokenizer(r io.Reader) *Tokenizer {
+       return NewTokenizerFragment(r, "")
+}
+
+// NewTokenizerFragment returns a new HTML Tokenizer for the given Reader, for
+// tokenizing an existing element's InnerHTML fragment. contextTag is that
+// element's tag, such as "div" or "iframe".
+//
+// For example, how the InnerHTML "a<b" is tokenized depends on whether it is
+// for a <p> tag or a <script> tag.
+//
+// The input is assumed to be UTF-8 encoded.
+func NewTokenizerFragment(r io.Reader, contextTag string) *Tokenizer {
+       z := &Tokenizer{
+               r:   r,
+               buf: make([]byte, 0, 4096),
+       }
+       if contextTag != "" {
+               switch s := strings.ToLower(contextTag); s {
+               case "iframe", "noembed", "noframes", "noscript", "plaintext", "script", "style", "title", "textarea", "xmp":
+                       z.rawTag = s
+               }
+       }
+       return z
+}
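
To show why the fragment context matters (the "a<b" case mentioned in the NewTokenizerFragment comment above), a short sketch comparing a non-raw context with a raw-text one; this is an illustrative snippet assuming the package's exported API, not part of the vendored file:

    package main

    import (
    	"fmt"
    	"strings"

    	"golang.org/x/net/html"
    )

    func dump(contextTag string) {
    	z := html.NewTokenizerFragment(strings.NewReader("a<b"), contextTag)
    	for {
    		tt := z.Next()
    		if tt == html.ErrorToken {
    			return
    		}
    		fmt.Printf("%s: %s %q\n", contextTag, tt, z.Token().Data)
    	}
    }

    func main() {
    	dump("p")      // only Text "a"; the dangling "<b" ends in an error token
    	dump("script") // a single raw Text token "a<b"
    }

In a raw-text context such as "script", the tokenizer never interprets '<' as the start of a tag, so the whole fragment comes back as one text token.
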
index 4783ba8a995954875a1c9a10cd2b7c5990345610..8b44fbd431958e110eaad30d4b6b0cd41d2fdc44 100644 (file)
                        "revision": "9fd32a8b3d3d3f9d43c341bfe098430e07609480",
                        "revisionTime": "2014-04-22T17:41:19Z"
                },
+               {
+                       "checksumSHA1": "OT4XN9z5k69e2RsMSpwW74B+yk4=",
+                       "path": "github.com/blang/semver",
+                       "revision": "2ee87856327ba09384cabd113bc6b5d174e9ec0f",
+                       "revisionTime": "2017-07-27T06:48:18Z"
+               },
                {
                        "checksumSHA1": "dvabztWVQX8f6oMLRyv4dLH+TGY=",
                        "path": "github.com/davecgh/go-spew/spew",
                        "path": "github.com/hashicorp/errwrap",
                        "revision": "7554cd9344cec97297fa6649b055a8c98c2a1e55"
                },
+               {
+                       "checksumSHA1": "b8F628srIitj5p7Y130xc9k0QWs=",
+                       "path": "github.com/hashicorp/go-cleanhttp",
+                       "revision": "3573b8b52aa7b37b9358d966a898feb387f62437",
+                       "revisionTime": "2017-02-11T01:34:15Z"
+               },
                {
                        "checksumSHA1": "nsL2kI426RMuq1jw15e7igFqdIY=",
                        "path": "github.com/hashicorp/go-getter",
                        "revisionTime": "2015-06-09T07:04:31Z"
                },
                {
-                       "checksumSHA1": "BcxYPk5ME2ZyrHS1yK7gK9mzS1A=",
+                       "checksumSHA1": "KPrCMDPNcLmO7K6xPcJSl86LwPk=",
                        "path": "github.com/hashicorp/terraform/config",
-                       "revision": "8d560482c34e865458fd884cb0790b4f73f09ad1",
-                       "revisionTime": "2017-06-08T00:14:54Z",
-                       "version": "v0.9.8",
-                       "versionExact": "v0.9.8"
+                       "revision": "2041053ee9444fa8175a298093b55a89586a1823",
+                       "revisionTime": "2017-08-02T18:39:14Z",
+                       "version": "v0.10.0",
+                       "versionExact": "v0.10.0"
                },
                {
-                       "checksumSHA1": "YiREjXkb7CDMZuUmkPGK0yySe8A=",
+                       "checksumSHA1": "uPCJ6seQo9kvoNSfwNWKX9KzVMk=",
                        "path": "github.com/hashicorp/terraform/config/module",
-                       "revision": "8d560482c34e865458fd884cb0790b4f73f09ad1",
-                       "revisionTime": "2017-06-08T00:14:54Z",
-                       "version": "v0.9.8",
-                       "versionExact": "v0.9.8"
+                       "revision": "2041053ee9444fa8175a298093b55a89586a1823",
+                       "revisionTime": "2017-08-02T18:39:14Z",
+                       "version": "v0.10.0",
+                       "versionExact": "v0.10.0"
                },
                {
                        "checksumSHA1": "w+l+UGTmwYNJ+L0p2vTd6+yqjok=",
                        "path": "github.com/hashicorp/terraform/dag",
-                       "revision": "8d560482c34e865458fd884cb0790b4f73f09ad1",
-                       "revisionTime": "2017-06-08T00:14:54Z",
-                       "version": "v0.9.8",
-                       "versionExact": "v0.9.8"
+                       "revision": "2041053ee9444fa8175a298093b55a89586a1823",
+                       "revisionTime": "2017-08-02T18:39:14Z",
+                       "version": "v0.10.0",
+                       "versionExact": "v0.10.0"
                },
                {
-                       "checksumSHA1": "p4y7tbu9KD/3cKQKe92I3DyjgRc=",
+                       "checksumSHA1": "P8gNPDuOzmiK4Lz9xG7OBy4Rlm8=",
                        "path": "github.com/hashicorp/terraform/flatmap",
-                       "revision": "8d560482c34e865458fd884cb0790b4f73f09ad1",
-                       "revisionTime": "2017-06-08T00:14:54Z",
-                       "version": "v0.9.8",
-                       "versionExact": "v0.9.8"
+                       "revision": "2041053ee9444fa8175a298093b55a89586a1823",
+                       "revisionTime": "2017-08-02T18:39:14Z",
+                       "version": "v0.10.0",
+                       "versionExact": "v0.10.0"
                },
                {
                        "checksumSHA1": "uT6Q9RdSRAkDjyUgQlJ2XKJRab4=",
                        "path": "github.com/hashicorp/terraform/helper/config",
-                       "revision": "8d560482c34e865458fd884cb0790b4f73f09ad1",
-                       "revisionTime": "2017-06-08T00:14:54Z",
-                       "version": "v0.9.8",
-                       "versionExact": "v0.9.8"
+                       "revision": "2041053ee9444fa8175a298093b55a89586a1823",
+                       "revisionTime": "2017-08-02T18:39:14Z",
+                       "version": "v0.10.0",
+                       "versionExact": "v0.10.0"
                },
                {
                        "checksumSHA1": "Vbo55GDzPgG/L/+W2pcvDhxrPZc=",
                        "path": "github.com/hashicorp/terraform/helper/experiment",
-                       "revision": "8d560482c34e865458fd884cb0790b4f73f09ad1",
-                       "revisionTime": "2017-06-08T00:14:54Z",
-                       "version": "v0.9.8",
-                       "versionExact": "v0.9.8"
+                       "revision": "2041053ee9444fa8175a298093b55a89586a1823",
+                       "revisionTime": "2017-08-02T18:39:14Z",
+                       "version": "v0.10.0",
+                       "versionExact": "v0.10.0"
                },
                {
                        "checksumSHA1": "BmIPKTr0zDutSJdyq7pYXrK1I3E=",
                        "path": "github.com/hashicorp/terraform/helper/hashcode",
-                       "revision": "8d560482c34e865458fd884cb0790b4f73f09ad1",
-                       "revisionTime": "2017-06-08T00:14:54Z",
-                       "version": "v0.9.8",
-                       "versionExact": "v0.9.8"
+                       "revision": "2041053ee9444fa8175a298093b55a89586a1823",
+                       "revisionTime": "2017-08-02T18:39:14Z",
+                       "version": "v0.10.0",
+                       "versionExact": "v0.10.0"
                },
                {
                        "checksumSHA1": "B267stWNQd0/pBTXHfI/tJsxzfc=",
                        "path": "github.com/hashicorp/terraform/helper/hilmapstructure",
-                       "revision": "8d560482c34e865458fd884cb0790b4f73f09ad1",
-                       "revisionTime": "2017-06-08T00:14:54Z",
-                       "version": "v0.9.8",
-                       "versionExact": "v0.9.8"
+                       "revision": "2041053ee9444fa8175a298093b55a89586a1823",
+                       "revisionTime": "2017-08-02T18:39:14Z",
+                       "version": "v0.10.0",
+                       "versionExact": "v0.10.0"
                },
                {
                        "checksumSHA1": "2wJa9F3BGlbe2DNqH5lb5POayRI=",
                        "path": "github.com/hashicorp/terraform/helper/logging",
-                       "revision": "8d560482c34e865458fd884cb0790b4f73f09ad1",
-                       "revisionTime": "2017-06-08T00:14:54Z",
-                       "version": "v0.9.8",
-                       "versionExact": "v0.9.8"
+                       "revision": "2041053ee9444fa8175a298093b55a89586a1823",
+                       "revisionTime": "2017-08-02T18:39:14Z",
+                       "version": "v0.10.0",
+                       "versionExact": "v0.10.0"
                },
                {
-                       "checksumSHA1": "8VL90fHe5YRasHcZwv2q2qms/Jo=",
+                       "checksumSHA1": "dhU2woQaSEI2OnbYLdkHxf7/nu8=",
                        "path": "github.com/hashicorp/terraform/helper/resource",
-                       "revision": "8d560482c34e865458fd884cb0790b4f73f09ad1",
-                       "revisionTime": "2017-06-08T00:14:54Z",
-                       "version": "v0.9.8",
-                       "versionExact": "v0.9.8"
+                       "revision": "2041053ee9444fa8175a298093b55a89586a1823",
+                       "revisionTime": "2017-08-02T18:39:14Z",
+                       "version": "v0.10.0",
+                       "versionExact": "v0.10.0"
                },
                {
-                       "checksumSHA1": "bgaeB6ivKIK5H+7JCsp7w8aAdAg=",
+                       "checksumSHA1": "0smlb90amL15c/6nWtW4DV6Lqh8=",
                        "path": "github.com/hashicorp/terraform/helper/schema",
-                       "revision": "8d560482c34e865458fd884cb0790b4f73f09ad1",
-                       "revisionTime": "2017-06-08T00:14:54Z",
-                       "version": "v0.9.8",
-                       "versionExact": "v0.9.8"
+                       "revision": "2041053ee9444fa8175a298093b55a89586a1823",
+                       "revisionTime": "2017-08-02T18:39:14Z",
+                       "version": "v0.10.0",
+                       "versionExact": "v0.10.0"
                },
                {
-                       "checksumSHA1": "oLui7dYxhzfAczwwdNZDm4tzHtk=",
+                       "checksumSHA1": "1yCGh/Wl4H4ODBBRmIRFcV025b0=",
                        "path": "github.com/hashicorp/terraform/helper/shadow",
-                       "revision": "8d560482c34e865458fd884cb0790b4f73f09ad1",
-                       "revisionTime": "2017-06-08T00:14:54Z",
-                       "version": "v0.9.8",
-                       "versionExact": "v0.9.8"
+                       "revision": "2041053ee9444fa8175a298093b55a89586a1823",
+                       "revisionTime": "2017-08-02T18:39:14Z",
+                       "version": "v0.10.0",
+                       "versionExact": "v0.10.0"
                },
                {
-                       "checksumSHA1": "6AA7ZAzswfl7SOzleP6e6he0lq4=",
+                       "checksumSHA1": "yFWmdS6yEJZpRJzUqd/mULqCYGk=",
+                       "path": "github.com/hashicorp/terraform/moduledeps",
+                       "revision": "5bcc1bae5925f44208a83279b6d4d250da01597b",
+                       "revisionTime": "2017-08-09T21:54:59Z"
+               },
+               {
+                       "checksumSHA1": "4ODNVUds3lyBf7gV02X1EeYR4GA=",
                        "path": "github.com/hashicorp/terraform/plugin",
-                       "revision": "8d560482c34e865458fd884cb0790b4f73f09ad1",
-                       "revisionTime": "2017-06-08T00:14:54Z",
-                       "version": "v0.9.8",
-                       "versionExact": "v0.9.8"
+                       "revision": "2041053ee9444fa8175a298093b55a89586a1823",
+                       "revisionTime": "2017-08-02T18:39:14Z",
+                       "version": "v0.10.0",
+                       "versionExact": "v0.10.0"
                },
                {
-                       "checksumSHA1": "GfGSXndpVIh9sSeNf+b1TjxBEpQ=",
+                       "checksumSHA1": "mujz3BDg1X82ynvJncCFUT6/7XI=",
+                       "path": "github.com/hashicorp/terraform/plugin/discovery",
+                       "revision": "2041053ee9444fa8175a298093b55a89586a1823",
+                       "revisionTime": "2017-08-02T18:39:14Z",
+                       "version": "v0.10.0",
+                       "versionExact": "v0.10.0"
+               },
+               {
+                       "checksumSHA1": "ksfNQjZs/6llziARojABd6iuvdw=",
                        "path": "github.com/hashicorp/terraform/terraform",
-                       "revision": "8d560482c34e865458fd884cb0790b4f73f09ad1",
-                       "revisionTime": "2017-06-08T00:14:54Z",
-                       "version": "v0.9.8",
-                       "versionExact": "v0.9.8"
+                       "revision": "2041053ee9444fa8175a298093b55a89586a1823",
+                       "revisionTime": "2017-08-02T18:39:14Z",
+                       "version": "v0.10.0",
+                       "versionExact": "v0.10.0"
                },
                {
                        "checksumSHA1": "ZhK6IO2XN81Y+3RAjTcVm1Ic7oU=",
                        "revision": "9477e0b78b9ac3d0b03822fd95422e2fe07627cd",
                        "revisionTime": "2016-10-31T15:37:30Z"
                },
+               {
+                       "checksumSHA1": "TT1rac6kpQp2vz24m5yDGUNQ/QQ=",
+                       "path": "golang.org/x/crypto/cast5",
+                       "revision": "b176d7def5d71bdd214203491f89843ed217f420",
+                       "revisionTime": "2017-07-23T04:49:35Z"
+               },
+               {
+                       "checksumSHA1": "IIhFTrLlmlc6lEFSitqi4aw2lw0=",
+                       "path": "golang.org/x/crypto/openpgp",
+                       "revision": "b176d7def5d71bdd214203491f89843ed217f420",
+                       "revisionTime": "2017-07-23T04:49:35Z"
+               },
+               {
+                       "checksumSHA1": "olOKkhrdkYQHZ0lf1orrFQPQrv4=",
+                       "path": "golang.org/x/crypto/openpgp/armor",
+                       "revision": "b176d7def5d71bdd214203491f89843ed217f420",
+                       "revisionTime": "2017-07-23T04:49:35Z"
+               },
+               {
+                       "checksumSHA1": "eo/KtdjieJQXH7Qy+faXFcF70ME=",
+                       "path": "golang.org/x/crypto/openpgp/elgamal",
+                       "revision": "b176d7def5d71bdd214203491f89843ed217f420",
+                       "revisionTime": "2017-07-23T04:49:35Z"
+               },
+               {
+                       "checksumSHA1": "rlxVSaGgqdAgwblsErxTxIfuGfg=",
+                       "path": "golang.org/x/crypto/openpgp/errors",
+                       "revision": "b176d7def5d71bdd214203491f89843ed217f420",
+                       "revisionTime": "2017-07-23T04:49:35Z"
+               },
+               {
+                       "checksumSHA1": "Pq88+Dgh04UdXWZN6P+bLgYnbRc=",
+                       "path": "golang.org/x/crypto/openpgp/packet",
+                       "revision": "b176d7def5d71bdd214203491f89843ed217f420",
+                       "revisionTime": "2017-07-23T04:49:35Z"
+               },
+               {
+                       "checksumSHA1": "s2qT4UwvzBSkzXuiuMkowif1Olw=",
+                       "path": "golang.org/x/crypto/openpgp/s2k",
+                       "revision": "b176d7def5d71bdd214203491f89843ed217f420",
+                       "revisionTime": "2017-07-23T04:49:35Z"
+               },
+               {
+                       "checksumSHA1": "vqc3a+oTUGX8PmD0TS+qQ7gmN8I=",
+                       "path": "golang.org/x/net/html",
+                       "revision": "1c05540f6879653db88113bc4a2b70aec4bd491f",
+                       "revisionTime": "2017-08-04T00:04:37Z"
+               },
+               {
+                       "checksumSHA1": "z79z5msRzgU48FCZxSuxfU8b4rs=",
+                       "path": "golang.org/x/net/html/atom",
+                       "revision": "1c05540f6879653db88113bc4a2b70aec4bd491f",
+                       "revisionTime": "2017-08-04T00:04:37Z"
+               },
                {
                        "checksumSHA1": "wICWAGQfZcHD2y0dHesz9R2YSiw=",
                        "path": "k8s.io/kubernetes/pkg/apimachinery",